text
stringlengths 12
1.05M
| repo_name
stringlengths 5
86
| path
stringlengths 4
191
| language
stringclasses 1
value | license
stringclasses 15
values | size
int32 12
1.05M
| keyword
listlengths 1
23
| text_hash
stringlengths 64
64
|
|---|---|---|---|---|---|---|---|
# This file is part of sydpy.
#
# Copyright (C) 2014-2015 Bogdan Vukobratovic
#
# sydpy is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation, either version 2.1
# of the License, or (at your option) any later version.
#
# sydpy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General
# Public License along with sydpy. If not, see
# <http://www.gnu.org/licenses/>.
""" Module with utilility objects.
"""
import time
import inspect
import ast
def timeit(method):
    """Decorator that reports the wall-clock run time of *method*.

    The wrapped function behaves exactly like the original but prints a
    one-line timing report after every call.
    """
    def timed(*args, **kw):
        start = time.time()
        ret = method(*args, **kw)
        elapsed = time.time() - start
        print('%r (%r, %r) %2.2f sec' % (method.__name__, args, kw, elapsed))
        return ret
    return timed
def class_load(cls):
    """Resolve *cls* to a class object.

    A string argument is interpreted as a dotted path of the form
    ``path.to.the.module.ClassName``; a bare class name (no dots) is
    looked up in the ``sydpy`` package.  Any non-string argument is
    returned unchanged.
    """
    if not isinstance(cls, str):
        return cls
    module_name, _sep, class_name = cls.rpartition('.')
    if not module_name:
        module_name = "sydpy"
    module = __import__(module_name, fromlist=[class_name])
    return getattr(module, class_name)
def factory(cls, *args, **kwargs):
    """Instantiate the class given by *cls* with the supplied arguments.

    *cls* may be a class object or a dotted-path string accepted by
    :func:`class_load`.

    Bug fix: the original created the instance but did not return it,
    which made the function useless to callers.
    """
    return class_load(cls)(*args, **kwargs)
def key_repr(key):
    """Render *key* the way it would appear in subscript notation.

    ``None`` yields an empty string, a slice yields ``[start:stop]`` and
    anything else yields ``[key]``.
    """
    if key is None:
        return ''
    if isinstance(key, slice):
        return '[{0}:{1}]'.format(key.start, key.stop)
    return '[{0}]'.format(key)
def unif_enum(obj):
    """Yield the items of *obj* when it is iterable, else *obj* itself.

    NOTE(review): strings define ``__iter__`` and are therefore expanded
    character by character here — confirm that is the intended behaviour.
    """
    if hasattr(obj, '__iter__'):
        yield from obj
    else:
        yield obj
from tokenize import generate_tokens, untokenize, INDENT
from io import StringIO
def _dedent(s):
"""Dedent python code string."""
result = [t[:2] for t in generate_tokens(StringIO(s).readline)]
# set initial indent to 0 if any
if result[0][0] == INDENT:
result[0] = (INDENT, '')
return untokenize(result)
class _SigNameVisitor(ast.NodeVisitor):
    """AST visitor that collects the interfaces a function reads and writes.

    Attribute chains rooted at ``self`` (e.g. ``self.intf.sub.write``) are
    resolved against *symdict*; a chain is classified as an output when it
    appears in a Store context or ends in a ``write``/``push`` call, and
    as an input otherwise.  Results accumulate in :attr:`inputs` and
    :attr:`outputs`.
    """
    def __init__(self, symdict):
        # Interfaces the visited code reads from.
        self.inputs = set()
        # Interfaces the visited code writes to.
        self.outputs = set()
        # Name -> interface object mapping used to resolve chains.
        self.symdict = symdict
        # Attribute names of the chain being walked, innermost-first
        # (reversed once the chain's root Name is reached).
        self.ref_path = []
        # True while the current chain appears in a Store context.
        self.store = False
    def visit_Store(self, node):
        # Only the context of the outermost attribute (seen before any
        # names were pushed onto ref_path) decides read vs. write.
        if not self.ref_path:
            self.store = True
    def visit_Load(self, node):
        if not self.ref_path:
            self.store = False
    def visit_Name(self, node):
        # A chain terminates at the bare name 'self'; resolve it now.
        if (node.id == 'self') and (self.ref_path):
            self.ref_path = list(reversed(self.ref_path))
            if self.ref_path[0] in self.symdict:
                intf = self.symdict[self.ref_path[0]]
                # Follow the attribute chain as deep as the interface
                # hierarchy allows.
                for p in self.ref_path[1:]:
                    if hasattr(intf, p):
                        intf = getattr(intf, p)
                    else:
                        # A trailing write()/push() call marks the chain
                        # as a write even in a Load context.
                        if p in ['write', 'push']:
                            self.store = True
                        break
                if self.store:
                    self.outputs.add(intf)
                else:
                    self.inputs.add(intf)
        # Reset chain state for the next attribute expression.
        self.ref_path = []
    def visit_Attribute(self, node):
        # Visit the context first (sets self.store for the outermost
        # attribute), record this attribute's name, then recurse toward
        # the chain's root value.
        self.visit(node.ctx)
        self.ref_path.append(node.attr)
        self.visit(node.value)
def get_arch_args(arch_func):
    """Split the argument list of an architecture function.

    Returns ``(arch_args, arch_ports, arch_confs, arch_arg_defs,
    annotations)`` where ports are the leading arguments without default
    values and confs are the trailing defaulted ones.  ``arch_arg_defs``
    is padded with ``None`` so it aligns index-wise with ``arch_args``.
    """
    p = inspect.getfullargspec(arch_func)
    arch_args = [a for a in p.args]
    arch_args.pop(0)  # Exclude self
    if p.defaults:
        arch_arg_defs = [d for d in p.defaults]
    else:
        arch_arg_defs = []
    # Arguments without a default value are ports; the rest are configs.
    arch_port_map_len = len(arch_args) - len(arch_arg_defs)
    arch_ports = arch_args[:arch_port_map_len]
    # NOTE(review): this slice starts at arch_port_map_len + 1 and thus
    # skips the first defaulted argument — confirm this is intentional
    # (arch_args[arch_port_map_len:] would include it).
    arch_confs = arch_args[arch_port_map_len+1:]
    # Prepend None defaults for the port arguments.
    arch_arg_defs[:0] = [None]*(arch_port_map_len)
    return arch_args, arch_ports, arch_confs, arch_arg_defs, p.annotations
def getio_vars(func, intfs):
    """Determine the input and output interfaces referenced by *func*.

    *intfs* maps names to interface objects; the function's closure
    variables are merged into that mapping before the function's source
    is parsed and scanned by :class:`_SigNameVisitor`.

    Returns the ``(inputs, outputs)`` sets of interface objects.
    """
    symdict = intfs
    # Make the function's closure variables resolvable by name as well.
    if func.__code__.co_freevars:
        for n, c in zip(func.__code__.co_freevars, func.__closure__):
            try:
                symdict[n] = c.cell_contents
            except NameError:
                # NOTE(review): an empty cell raises ValueError, not
                # NameError — this handler may never fire; confirm.
                raise NameError(n)
    # Parse the (dedented) source and walk it for interface accesses.
    s = inspect.getsource(func)
    tree = ast.parse(_dedent(s))
    v = _SigNameVisitor(symdict)
    v.visit(tree)
    return v.inputs, v.outputs
# @decorator
def arch(f):
    """Decorator marking *f* as an architecture function.

    Attaches the ``inputs``/``outputs`` interface sets computed by
    :func:`getio_vars` and returns *f* unchanged.
    """
    # Bug fix: getio_vars() requires a symbol dictionary as its second
    # argument; the original call passed only the function, raising
    # TypeError for every decorated architecture.  Closure variables are
    # still picked up by getio_vars itself.
    f.inputs, f.outputs = getio_vars(f, {})
    return f
def arch_def(f):
    """Decorator marking *f* as a default architecture definition.

    Sets the ``arch_def`` flag and delegates to :func:`arch` for the
    I/O interface extraction.
    """
    setattr(f, 'arch_def', True)
    return arch(f)
def fannotate(f, **kwargs):
    """Merge *kwargs* into the ``__annotations__`` of *f* and return *f*."""
    try:
        f.__annotations__.update(kwargs)
    except AttributeError:
        # Object had no annotations mapping yet — create one.
        f.__annotations__ = dict(kwargs)
    return f
|
bogdanvuk/sydpy
|
sydpy/_util/_util.py
|
Python
|
lgpl-2.1
| 5,818
|
[
"VisIt"
] |
fa9ac03cf0c7085fc0bf89e945c80f90992773500e51e79538a0cca41c9c9fcf
|
import global_vars
import os
import exceptions
import smtplib
import email.mime.text
class Task_exception(exceptions.Exception):
    """Exception raised when a remote task operation (scp/ssh) fails."""
    def __init__(self,message):
        self.message=message
    def __str__(self):
        # Bug fix: the original referenced the bare name 'message',
        # which raised NameError whenever the exception was printed.
        return " Task error:"+self.message
class Task:
    """One scheduler task hydrated from a database row (Python 2 code).

    Wraps the scp/ssh shell commands used to move task data to and from
    the remote multiprocessor host and to drive the remote scheduler
    scripts, plus the e-mail notification sent on completion.

    NOTE(review): every command is assembled by %-interpolation of
    database values and executed via os.system with no quoting or
    escaping — the database contents are trusted implicitly.
    """
    def __init__(self,db_row):
        # Positional unpacking of one row of the tasks table.
        self.user_id = db_row[0]
        self.task_id = db_row[1]
        self.algorithm = db_row[2]
        self.num_procs = db_row[3]
        self.duration_in_minutes = db_row[4]
        self.task_status = db_row[5]
        self.host = db_row[6]          # remote multiprocessor host
        self.path = db_row[7]          # working directory on that host
        self.user_on_mult = db_row[8]  # ssh/scp account on that host
        self.email = db_row[9]
        self.priority_run = db_row[10]
        self.priority_max = db_row[11]
        self.running_time = db_row[12]
        self.queue_num = db_row[13]
        self.db_set = db_row[14]
        # BLAST-specific settings; defaults until init_blast_task() runs.
        self.seq_type = ""
        self.blast_outp_detail_lvl = 0
        self.seq_simil_thrshld = 0
    def init_blast_task(self,db_row):
        """Fill in the BLAST-specific fields from a second query row."""
        if len(db_row)>0:
            self.seq_type = db_row[0]
            self.blast_outp_detail_lvl = db_row[1]
            self.seq_simil_thrshld = db_row[2]
    def upload_data(self):
        """Copy the task's input files to the remote host with scp.

        The command set depends on self.algorithm; raises Task_exception
        when any scp command exits with a non-zero status.
        """
        if self.algorithm == "FitProt":
            # FitProt: run the local substitution search first, then scp
            # the produced energy_array to the remote host as <task>.en.
            run_fitprot="cd %s/%d/%d; /home/romanenkov/fitprot/search_substs.py -p %s/%d/%d/structure.pdb -s %s/%d/%d/selection.txt -o %s/%d/%d/tmp > /home/romanenkov/aligner/ssh_run/fit_log.txt; scp %s/%d/%d/energy_array %s@%s:%s/%d.en" %\
            (
                global_vars.data_path,
                self.user_id,
                self.task_id,
                global_vars.data_path,
                self.user_id,
                self.task_id,
                global_vars.data_path,
                self.user_id,
                self.task_id,
                global_vars.data_path,
                self.user_id,
                self.task_id,
                global_vars.data_path,
                self.user_id,
                self.task_id,
                self.user_on_mult,
                self.host,
                self.path,
                self.task_id
            )
            print run_fitprot
            os.system(run_fitprot)
            # Upload the structure and residue selection under the task id.
            string1="scp %s/%d/%d/structure.pdb %s@%s:%s/%d.pdb" %\
            (
                global_vars.data_path,
                self.user_id,
                self.task_id,
                self.user_on_mult,
                self.host,
                self.path,
                self.task_id
            )
            string2="scp %s/%d/%d/selection.txt %s@%s:%s/%d.txt" %\
            (
                global_vars.data_path,
                self.user_id,
                self.task_id,
                self.user_on_mult,
                self.host,
                self.path,
                self.task_id
            )
            print " Task.upload_data(): %s" %string1
            status1=os.system(string1)
            print " Task.upload_data(): %s" %string2
            status2=os.system(string2)
            if status1 or status2:
                raise Task_exception("scp failed!")
        else:
            if self.algorithm == "nhunt":
                # nhunt: only the input sequences are uploaded; the
                # database is assumed to be present on the remote side.
                print "1 done"
                string1="scp %s/%d/%d/sequences.fasta %s@%s:%s/%d.fasta" %\
                (
                    global_vars.data_path,
                    self.user_id,
                    self.task_id,
                    self.user_on_mult,
                    self.host,
                    self.path,
                    self.task_id
                )
                print "ready1"
                print " Task.upload_data(): %s" %string1
                status1=os.system(string1)
                if status1:
                    raise Task_exception("scp failed!")
            else:
                # Default algorithms: upload the sequences as <task>.fasta.
                string="scp %s/%d/%d/sequences.fasta %s@%s:%s/%d.fasta" %\
                (
                    global_vars.data_path,
                    self.user_id,
                    self.task_id,
                    self.user_on_mult,
                    self.host,
                    self.path,
                    self.task_id
                )
                print " Task.upload_data(): %s" %string
                status=os.system(string)
                if status:
                    raise Task_exception("scp failed!")
    def run(self):
        """Start the task on the remote host via scheduler_make_align.sh.

        Returns the remote script's exit code (os.system status >> 8).
        """
        if self.algorithm == "nhunt":
            # nhunt additionally passes the database-set identifier.
            string="ssh %s@%s \"cd %s; ./scheduler_make_align.sh %d %d %d.fasta %d '%s' %d \"" %\
            (
                self.user_on_mult,
                self.host,
                self.path,
                self.task_id,
                self.num_procs,
                self.task_id,
                self.duration_in_minutes,
                self.algorithm,
                self.db_set
            )
        else:
            string="ssh %s@%s \"cd %s; ./scheduler_make_align.sh %d %d %d.fasta %d '%s'\"" %\
            (
                self.user_on_mult,
                self.host,
                self.path,
                self.task_id,
                self.num_procs,
                self.task_id,
                self.duration_in_minutes,
                self.algorithm
            )
        print " Task.run(): %s" %string
        status=os.system(string)
        # os.system encodes the exit code in the high byte (Python 2
        # integer division).
        return status / 256
    def check(self):
        """Poll the remote task state via scheduler_check_align.sh."""
        string="ssh %s@%s \"cd %s; ./scheduler_check_align.sh %d '%s'\"" %\
        (
            self.user_on_mult,
            self.host,
            self.path,
            self.task_id,
            self.algorithm
        )
        print string
        # Extract the script's exit code from the os.system status word.
        status=os.system(string) / 256
        print " Task.check(): status length = %d" % status
        return status
    def download_data(self):
        """Fetch the remote result directory and fix local permissions.

        Raises Task_exception when the scp transfer fails; the chmod and
        chgrp results are intentionally not checked.
        """
        string="scp -r %s@%s:%s/%d/\* %s/%d/%d/" %\
        (
            self.user_on_mult,
            self.host,
            self.path,
            self.task_id,
            global_vars.data_path,
            self.user_id,
            self.task_id
        )
        print " Task.download_data(): %s" %string
        status=os.system(string)
        if status:
            raise Task_exception("scp failed!")
        # Make the downloaded tree group-writable and group-owned so the
        # web application can access it.
        string="chmod -Rf g+wrX %s/%d/%d" %\
        (
            global_vars.data_path,
            self.user_id,
            self.task_id
        )
        status=os.system(string)
        string="chgrp -Rf %s %s/%d/%d" %\
        (
            global_vars.local_group,
            global_vars.data_path,
            self.user_id,
            self.task_id
        )
        status=os.system(string)
    def clear_remote_data(self):
        """Delete the task's data on the remote host; return exit code."""
        string="ssh %s@%s \"cd %s; ./clear_data_align.sh %d\"" %\
        (
            self.user_on_mult,
            self.host,
            self.path,
            self.task_id,
        )
        print " ",string
        status=os.system(string)
        return status / 256
    def remote_task_delete(self):
        """Remove the task from the remote scheduler; return exit code."""
        string="ssh %s@%s \"cd %s; ./scheduler_delete_align.sh %d\"" %\
        (
            self.user_on_mult,
            self.host,
            self.path,
            self.task_id,
        )
        print " ",string
        status=os.system(string)
        return status / 256
    def email_notify_on_finish(self,status):
        """E-mail the task owner that the task finished with *status*.

        Sends through the local SMTP server.
        """
        msg_text=\
"""
Dear user, your task with ID %d was finished on multiprocessor with status '%s'.
Please, visit page
https://%s/%s/pages/edit_task.php?task_id=%d
""" %\
        (
            self.task_id,
            status,
            global_vars.site_address,
            global_vars.path_on_site,
            self.task_id
        )
        msg= email.mime.text.MIMEText(msg_text)
        msg['Subject']= "Information about state of the task with number %d on the Aligner website" % (self.task_id)
        from_str="\"Aligner site administration\" <webmaster@%s>" % (global_vars.site_address)
        msg['From']= from_str
        msg['To']=self.email
        server = smtplib.SMTP('localhost')
        server.sendmail("webmaster@%s" %global_vars.site_address,[self.email],msg.as_string())
        server.quit()
|
Abi1ity/uniclust2.0
|
old_uniclust_model/task.py
|
Python
|
bsd-3-clause
| 7,430
|
[
"VisIt"
] |
14f7e845f9dbf6f838da85899a85dcb9468a3e330cdef917fffe7d5123c2d1ef
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# king_phisher/server/server_rpc.py
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of the project nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
import threading
from king_phisher import version
from king_phisher.server.database import manager as db_manager
from king_phisher.server.database import models as db_models
VIEW_ROW_COUNT = 50
"""The default number of rows to return when one of the /view methods are called."""
DATABASE_TABLES = db_models.DATABASE_TABLES
DATABASE_TABLE_OBJECTS = db_models.DATABASE_TABLE_OBJECTS
class KingPhisherRequestHandlerRPC(object):
    """
    This superclass of :py:class:`.KingPhisherRequestHandler` maintains
    all of the RPC call back functions.

    :RPC API: :ref:`rpc-api-label`
    """
    def install_handlers(self):
        """Register every RPC URI in :py:attr:`.rpc_handler_map`."""
        super(KingPhisherRequestHandlerRPC, self).install_handlers()
        self.rpc_handler_map['/ping'] = self.rpc_ping
        self.rpc_handler_map['/shutdown'] = self.rpc_shutdown
        self.rpc_handler_map['/version'] = self.rpc_version
        self.rpc_handler_map['/client/initialize'] = self.rpc_client_initialize
        self.rpc_handler_map['/config/get'] = self.rpc_config_get
        self.rpc_handler_map['/config/set'] = self.rpc_config_set
        self.rpc_handler_map['/campaign/alerts/is_subscribed'] = self.rpc_campaign_alerts_is_subscribed
        self.rpc_handler_map['/campaign/alerts/subscribe'] = self.rpc_campaign_alerts_subscribe
        self.rpc_handler_map['/campaign/alerts/unsubscribe'] = self.rpc_campaign_alerts_unsubscribe
        self.rpc_handler_map['/campaign/landing_page/new'] = self.rpc_campaign_landing_page_new
        self.rpc_handler_map['/campaign/message/new'] = self.rpc_campaign_message_new
        self.rpc_handler_map['/campaign/new'] = self.rpc_campaign_new
        self.rpc_handler_map['/campaign/delete'] = self.rpc_campaign_delete
        # Generic CRUD endpoints for every database table.
        for table_name in DATABASE_TABLES.keys():
            self.rpc_handler_map['/' + table_name + '/count'] = self.rpc_database_count_rows
            self.rpc_handler_map['/' + table_name + '/delete'] = self.rpc_database_delete_row_by_id
            self.rpc_handler_map['/' + table_name + '/get'] = self.rpc_database_get_row_by_id
            self.rpc_handler_map['/' + table_name + '/insert'] = self.rpc_database_insert_row
            self.rpc_handler_map['/' + table_name + '/set'] = self.rpc_database_set_row_value
            self.rpc_handler_map['/' + table_name + '/view'] = self.rpc_database_get_rows
        # Tables with a campaign_id field
        for table_name in db_models.get_tables_with_column_id('campaign_id'):
            self.rpc_handler_map['/campaign/' + table_name + '/count'] = self.rpc_database_count_rows
            self.rpc_handler_map['/campaign/' + table_name + '/view'] = self.rpc_database_get_rows
        # Tables with a message_id field
        for table_name in db_models.get_tables_with_column_id('message_id'):
            self.rpc_handler_map['/message/' + table_name + '/count'] = self.rpc_database_count_rows
            self.rpc_handler_map['/message/' + table_name + '/view'] = self.rpc_database_get_rows

    def rpc_ping(self):
        """
        An RPC method that can be used by clients to assert the status
        and responsiveness of this server.

        :return: This method always returns True.
        :rtype: bool
        """
        return True

    def rpc_client_initialize(self):
        """
        Initialize any client information necessary, creating the user's
        database record on first contact.

        :return: This method always returns True.
        :rtype: bool
        """
        username = self.basic_auth_user
        if not username:
            return True
        session = db_manager.Session()
        if not db_manager.get_row_by_id(session, db_models.User, username):
            user = db_models.User(id=username)
            session.add(user)
            session.commit()
        session.close()
        return True

    def rpc_shutdown(self):
        """
        This method can be used to shut down the server. This function will
        return, however no subsequent requests will be processed.
        """
        # Shut down from a separate thread so this request can complete.
        shutdown_thread = threading.Thread(target=self.server.shutdown)
        shutdown_thread.start()
        return

    def rpc_version(self):
        """
        Get the version information of the server. This returns a
        dictionary with keys of version, version_info and rpc_api_version.
        These values are provided for the client to determine
        compatibility.

        :return: A dictionary with version information.
        :rtype: dict
        """
        vinfo = {'version': version.version, 'version_info': version.version_info._asdict()}
        vinfo['rpc_api_version'] = version.rpc_api_version
        return vinfo

    def rpc_config_get(self, option_name):
        """
        Retrieve a value from the server's configuration.

        :param option_name: The name of the configuration option, or a
            list/tuple of names to fetch several options at once.
        :return: The option's value, or a dict of name -> value when a
            list of names was supplied.
        """
        if isinstance(option_name, (list, tuple)):
            option_names = option_name
            option_values = {}
            for option_name in option_names:
                if self.config.has_option(option_name):
                    option_values[option_name] = self.config.get(option_name)
            return option_values
        elif self.config.has_option(option_name):
            return self.config.get(option_name)
        return

    def rpc_config_set(self, options):
        """
        Set options in the server's configuration. Any changes to the
        server's configuration are not written to disk.

        :param dict options: A dictionary of option names and values
        """
        for option_name, option_value in options.items():
            self.config.set(option_name, option_value)
        return

    def rpc_campaign_new(self, name):
        """
        Create a new King Phisher campaign and initialize the database
        information.

        :param str name: The new campaign's name.
        :return: The ID of the new campaign.
        :rtype: int
        """
        session = db_manager.Session()
        campaign = db_models.Campaign(name=name, user_id=self.basic_auth_user)
        session.add(campaign)
        session.commit()
        # Bug fix: capture the generated id and close the session;
        # previously the session was leaked (never closed), unlike every
        # other RPC method in this class.
        campaign_id = campaign.id
        session.close()
        return campaign_id

    def rpc_campaign_alerts_is_subscribed(self, campaign_id):
        """
        Check if the user is subscribed to alerts for the specified campaign.

        :param int campaign_id: The ID of the campaign.
        :return: The alert subscription status.
        :rtype: bool
        """
        username = self.basic_auth_user
        session = db_manager.Session()
        query = session.query(db_models.AlertSubscription)
        query = query.filter_by(campaign_id=campaign_id, user_id=username)
        result = query.count()
        session.close()
        return result

    def rpc_campaign_alerts_subscribe(self, campaign_id):
        """
        Subscribe to alerts for the specified campaign.

        :param int campaign_id: The ID of the campaign.
        """
        username = self.basic_auth_user
        session = db_manager.Session()
        query = session.query(db_models.AlertSubscription)
        query = query.filter_by(campaign_id=campaign_id, user_id=username)
        # Only create the subscription if one does not already exist.
        if query.count() == 0:
            subscription = db_models.AlertSubscription(campaign_id=campaign_id, user_id=username)
            session.add(subscription)
            session.commit()
        session.close()
        return

    def rpc_campaign_alerts_unsubscribe(self, campaign_id):
        """
        Unsubscribe to alerts for the specified campaign.

        :param int campaign_id: The ID of the campaign.
        """
        username = self.basic_auth_user
        session = db_manager.Session()
        query = session.query(db_models.AlertSubscription)
        query = query.filter_by(campaign_id=campaign_id, user_id=username)
        subscription = query.first()
        if subscription:
            session.delete(subscription)
            session.commit()
        session.close()
        return

    def rpc_campaign_landing_page_new(self, campaign_id, hostname, page):
        """
        Add a landing page for the specified campaign. Landing pages refer
        to resources that when visited by a user should cause the visit
        counter to be incremented.

        :param int campaign_id: The ID of the campaign.
        :param str hostname: The VHOST for the request.
        :param str page: The request resource.
        """
        page = page.lstrip('/')
        session = db_manager.Session()
        query = session.query(db_models.LandingPage)
        query = query.filter_by(campaign_id=campaign_id, hostname=hostname, page=page)
        # Duplicate landing pages are silently ignored.
        if query.count() == 0:
            landing_page = db_models.LandingPage(campaign_id=campaign_id, hostname=hostname, page=page)
            session.add(landing_page)
            session.commit()
        session.close()
        return

    def rpc_campaign_message_new(self, campaign_id, email_id, target_email, company_name, first_name, last_name):
        """
        Record a message that has been sent as part of a campaign. These
        details can be retrieved later for value substitution in template
        pages.

        :param int campaign_id: The ID of the campaign.
        :param str email_id: The message id of the sent email.
        :param str target_email: The email address that the message was sent to.
        :param str company_name: The company name value for the message.
        :param str first_name: The first name of the message's recipient.
        :param str last_name: The last name of the message's recipient.
        """
        session = db_manager.Session()
        message = db_models.Message()
        message.id = email_id
        message.campaign_id = campaign_id
        message.target_email = target_email
        message.company_name = company_name
        message.first_name = first_name
        message.last_name = last_name
        session.add(message)
        session.commit()
        session.close()
        return

    def rpc_campaign_delete(self, campaign_id):
        """
        Remove a campaign from the database and delete all associated
        information with it.

        .. warning::
            This action can not be reversed and there is no confirmation before it
            takes place.
        """
        session = db_manager.Session()
        session.delete(db_manager.get_row_by_id(session, db_models.Campaign, campaign_id))
        session.commit()
        session.close()
        return

    def rpc_database_count_rows(self, *args):
        """
        Get a count of the rows in the specified table where the search
        criteria matches.

        :return: The number of matching rows.
        :rtype: int
        """
        args = list(args)
        # The table name and filter fields are derived from the request
        # path, e.g. /campaign/<table>/count.
        fields = self.path.split('/')[1:-2]
        assert(len(fields) == len(args))
        table = DATABASE_TABLE_OBJECTS.get(self.path.split('/')[-2])
        assert(table)
        session = db_manager.Session()
        query = session.query(table)
        query = query.filter_by(**dict(zip(map(lambda f: f + '_id', fields), args)))
        result = query.count()
        session.close()
        return result

    def rpc_database_get_rows(self, *args):
        """
        Retrieve the rows from the specified table where the search
        criteria matches.

        :return: A dictionary with columns and rows keys.
        :rtype: dict
        """
        args = list(args)
        offset = 0
        fields = self.path.split('/')[1:-2]
        # A trailing extra argument is the zero-based page number.
        if len(args) == (len(fields) + 1):
            offset = (args.pop() * VIEW_ROW_COUNT)
        assert(len(fields) == len(args))
        table_name = self.path.split('/')[-2]
        table = DATABASE_TABLE_OBJECTS.get(table_name)
        assert(table)
        columns = DATABASE_TABLES[table_name]
        rows = []
        session = db_manager.Session()
        query = session.query(table)
        query = query.filter_by(**dict(zip(map(lambda f: f + '_id', fields), args)))
        for row in query[offset:offset + VIEW_ROW_COUNT]:
            rows.append(list(map(lambda c: getattr(row, c), columns)))
        session.close()
        if not len(rows):
            return None
        return {'columns': columns, 'rows': rows}

    def rpc_database_delete_row_by_id(self, row_id):
        """
        Delete a row from a table with the specified value in the id column.

        :param row_id: The id value.
        """
        table = DATABASE_TABLE_OBJECTS.get(self.path.split('/')[-2])
        assert(table)
        session = db_manager.Session()
        session.delete(db_manager.get_row_by_id(session, table, row_id))
        session.commit()
        session.close()
        return

    def rpc_database_get_row_by_id(self, row_id):
        """
        Retrieve a row from a given table with the specified value in the
        id column.

        :param row_id: The id value.
        :return: The specified row data.
        :rtype: dict
        """
        table_name = self.path.split('/')[-2]
        table = DATABASE_TABLE_OBJECTS.get(table_name)
        assert(table)
        columns = DATABASE_TABLES[table_name]
        session = db_manager.Session()
        row = db_manager.get_row_by_id(session, table, row_id)
        if row:
            row = dict(zip(columns, map(lambda c: getattr(row, c), columns)))
        session.close()
        return row

    def rpc_database_insert_row(self, keys, values):
        """
        Insert a new row into the specified table.

        :param tuple keys: The column names of *values*.
        :param tuple values: The values to be inserted in the row.
        """
        if not isinstance(keys, (list, tuple)):
            keys = (keys,)
        if not isinstance(values, (list, tuple)):
            values = (values,)
        assert(len(keys) == len(values))
        table_name = self.path.split('/')[-2]
        for key, value in zip(keys, values):
            assert(key in DATABASE_TABLES[table_name])
        table = DATABASE_TABLE_OBJECTS.get(table_name)
        assert(table)
        session = db_manager.Session()
        row = table()
        for key, value in zip(keys, values):
            setattr(row, key, value)
        session.add(row)
        # Bug fix: the session was previously closed without committing,
        # so the newly inserted row was silently discarded.
        session.commit()
        session.close()
        return

    def rpc_database_set_row_value(self, row_id, keys, values):
        """
        Set values for a row in the specified table with an id of *row_id*.

        :param tuple keys: The column names of *values*.
        :param tuple values: The values to be updated in the row.
        """
        if not isinstance(keys, (list, tuple)):
            keys = (keys,)
        if not isinstance(values, (list, tuple)):
            values = (values,)
        assert(len(keys) == len(values))
        table_name = self.path.split('/')[-2]
        for key, value in zip(keys, values):
            assert(key in DATABASE_TABLES[table_name])
        table = DATABASE_TABLE_OBJECTS.get(table_name)
        assert(table)
        session = db_manager.Session()
        row = db_manager.get_row_by_id(session, table, row_id)
        if not row:
            session.close()
        assert(row)
        for key, value in zip(keys, values):
            setattr(row, key, value)
        session.commit()
        session.close()
        return
|
0x0mar/king-phisher
|
king_phisher/server/server_rpc.py
|
Python
|
bsd-3-clause
| 14,731
|
[
"VisIt"
] |
59047019088de3091b90df1d38f04343e683d624388830c48a12bd2e46907ce7
|
#
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2008 Brian G. Matherly
# Copyright (C) 2008 Jerome Rapinat
# Copyright (C) 2008 Benny Malengier
# Copyright (C) 2010 Gary Burton - derived from _HasGalleryBase.py
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# gen.filters.rules/Source/_HasRepository.py
#-------------------------------------------------------------------------
#
# Standard Python modules
#
#-------------------------------------------------------------------------
from ....const import GRAMPS_LOCALE as glocale
_ = glocale.translation.gettext
#-------------------------------------------------------------------------
#
# Gramps modules
#
#-------------------------------------------------------------------------
from .. import Rule
#-------------------------------------------------------------------------
# "People who have images"
#-------------------------------------------------------------------------
class HasRepository(Rule):
    """Objects which reference repositories"""
    labels = [ _('Number of instances:'), _('Number must be:')]
    name = _('Sources with <count> Repository references')
    description = _("Matches sources with a certain number of repository references")
    category = _('General filters')

    def prepare(self, db, user):
        """Decode the filter options once, before any handle is tested."""
        selector = self.list[1]
        if selector == 'less than':
            self.count_type = 0
        elif selector == 'greater than':
            self.count_type = 2
        else:
            self.count_type = 1  # "equal to"
        self.userSelectedCount = int(self.list[0])

    def apply(self, db, obj):
        """Return True when *obj*'s repository-reference count satisfies
        the prepared comparison against the user-selected threshold."""
        count = len(obj.get_reporef_list())
        threshold = self.userSelectedCount
        if self.count_type == 0:    # "less than"
            return count < threshold
        if self.count_type == 2:    # "greater than"
            return count > threshold
        return count == threshold   # "equal to"
|
beernarrd/gramps
|
gramps/gen/filters/rules/source/_hasrepository.py
|
Python
|
gpl-2.0
| 2,659
|
[
"Brian"
] |
f1aa777f675651741b2c4f65052e88764582a0e66ac1befa9f5740269ec622e5
|
from __future__ import division, print_function, absolute_import
import warnings
import numpy as np
from dipy.reconst.odf import OdfModel, OdfFit
from dipy.reconst.cache import Cache
from dipy.reconst.multi_voxel import multi_voxel_model
from dipy.reconst.shm import (sph_harm_ind_list,
real_sph_harm,
sph_harm_lookup,
lazy_index)
from dipy.data import get_sphere
from dipy.core.geometry import cart2sphere
from dipy.core.ndindex import ndindex
from dipy.sims.voxel import single_tensor
from scipy.special import lpn
@multi_voxel_model
class ConstrainedSphericalDeconvModel(OdfModel, Cache):

    def __init__(self, gtab, response, reg_sphere=None, sh_order=8, lambda_=1, tau=0.1):
        r""" Constrained Spherical Deconvolution (CSD) [1]_.

        Spherical deconvolution computes a fiber orientation distribution
        (FOD), also called fiber ODF (fODF) [2]_, as opposed to a diffusion
        ODF as the QballModel or the CsaOdfModel. This results in a sharper
        angular profile with better angular resolution that is the best
        object to be used for later deterministic and probabilistic
        tractography [3]_.

        A sharp fODF is obtained because a single fiber *response* function
        is injected as *a priori* knowledge. The response function is often
        data-driven and thus, comes as input to the
        ConstrainedSphericalDeconvModel. It will be used as deconvolution
        kernel, as described in [1]_.

        Parameters
        ----------
        gtab : GradientTable
        response : tuple or callable
            If tuple, then it should have two elements. The first is the
            eigen-values as an (3,) ndarray and the second is the signal
            value for the response function without diffusion weighting.
            This is to be able to generate a single fiber synthetic signal.
            If callable then the function should return an ndarray with the
            all the signal values for the response function. The response
            function will be used as deconvolution kernel ([1]_)
        reg_sphere : Sphere
            sphere used to build the regularization B matrix
        sh_order : int
            maximal spherical harmonics order
        lambda_ : float
            weight given to the constrained-positivity regularization part
            of the deconvolution equation (see [1]_)
        tau : float
            threshold controlling the amplitude below which the
            corresponding fODF is assumed to be zero.  Ideally, tau should
            be set to zero. However, to improve the stability of the
            algorithm, tau is set to tau*100 % of the mean fODF amplitude
            (here, 10% by default) (see [1]_)

        References
        ----------
        .. [1] Tournier, J.D., et al. NeuroImage 2007. Robust determination
               of the fibre orientation distribution in diffusion MRI:
               Non-negativity constrained super-resolved spherical
               deconvolution
        .. [2] Descoteaux, M., et al. IEEE TMI 2009. Deterministic and
               Probabilistic Tractography Based on Complex Fibre
               Orientation Distributions
        .. [3] C\^ot\'e, M-A., et al. Medical Image Analysis 2013.
               Tractometer: Towards validation of tractography pipelines
        .. [4] Tournier, J.D, et al. Imaging Systems and Technology 2012.
               MRtrix: Diffusion Tractography in Crossing Fiber Regions
        """
        m, n = sph_harm_ind_list(sh_order)
        self.m, self.n = m, n
        # Lazy index helpers separating b0 volumes from DWI volumes.
        self._where_b0s = lazy_index(gtab.b0s_mask)
        self._where_dwi = lazy_index(~gtab.b0s_mask)
        # Number of SH coefficients up to sh_order; this is a float due to
        # "from __future__ import division", which is fine for comparison.
        no_params = ((sh_order + 1) * (sh_order + 2)) / 2
        if no_params > np.sum(gtab.b0s_mask == False):
            msg = "Number of parameters required for the fit are more "
            msg += "than the actual data points"
            warnings.warn(msg, UserWarning)
        x, y, z = gtab.gradients[self._where_dwi].T
        r, theta, phi = cart2sphere(x, y, z)
        # for the gradient sphere
        self.B_dwi = real_sph_harm(m, n, theta[:, None], phi[:, None])
        # for the sphere used in the regularization positivity constraint
        if reg_sphere is None:
            self.sphere = get_sphere('symmetric362')
        else:
            self.sphere = reg_sphere
        r, theta, phi = cart2sphere(self.sphere.x, self.sphere.y, self.sphere.z)
        self.B_reg = real_sph_harm(m, n, theta[:, None], phi[:, None])
        # Build the single-fiber response used as the deconvolution kernel.
        if callable(response):
            S_r = response
        else:
            if response is None:
                # Default single-fiber prolate-tensor eigenvalues and S0=1.
                S_r = estimate_response(gtab, np.array([0.0015, 0.0003, 0.0003]), 1)
            else:
                S_r = estimate_response(gtab, response[0], response[1])
        # Project the response onto the SH basis, convert it to rotational
        # harmonics and assemble the forward deconvolution matrix.
        r_sh = np.linalg.lstsq(self.B_dwi, S_r[self._where_dwi])[0]
        r_rh = sh_to_rh(r_sh, sh_order)
        self.R = forward_sdeconv_mat(r_rh, sh_order)
        # scale lambda_ to account for differences in the number of
        # SH coefficients and number of mapped directions
        # This is exactly what is done in [4]_
        self.lambda_ = lambda_ * self.R.shape[0] * r_rh[0] / self.B_reg.shape[0]
        self.sh_order = sh_order
        self.tau = tau

    def fit(self, data):
        """Deconvolve *data* and return a ConstrainedSphericalDeconvFit."""
        # Least-squares SH coefficients of the DWI signal.
        s_sh = np.linalg.lstsq(self.B_dwi, data[self._where_dwi])[0]
        # Constrained spherical deconvolution of the signal by the response.
        shm_coeff, num_it = csdeconv(s_sh, self.sh_order, self.R, self.B_reg, self.lambda_, self.tau)
        return ConstrainedSphericalDeconvFit(self, shm_coeff)
class ConstrainedSphericalDeconvFit(OdfFit):
    """Holds the fiber ODF spherical-harmonic coefficients produced by CSD."""

    def __init__(self, model, fodf_sh):
        # Keep the model around so cached sampling matrices can be reused.
        self.model = model
        self.shm_coeff = fodf_sh

    def odf(self, sphere):
        """Sample the fiber ODF on the vertices of `sphere`."""
        sampling_matrix = self.model.cache_get("sampling_matrix", sphere)
        if sampling_matrix is None:
            # Build the SH sampling matrix for this sphere once and cache it.
            theta = sphere.theta[:, np.newaxis]
            phi = sphere.phi[:, np.newaxis]
            sampling_matrix = real_sph_harm(self.model.m, self.model.n,
                                            theta, phi)
            self.model.cache_set("sampling_matrix", sphere, sampling_matrix)
        return np.dot(self.shm_coeff, sampling_matrix.T)
@multi_voxel_model
class ConstrainedSDTModel(OdfModel, Cache):

    def __init__(self, gtab, ratio, reg_sphere=None, sh_order=8, lambda_=1., tau=0.1):
        r""" Spherical Deconvolution Transform (SDT) [1]_.

        The SDT computes a fiber orientation distribution (FOD) as opposed to a diffusion
        ODF as the QballModel or the CsaOdfModel. This results in a sharper angular
        profile with better angular resolution. The Constrained SDTModel is similar
        to the Constrained CSDModel but mathematically it deconvolves the q-ball ODF
        as opposed to the HARDI signal (see [1]_ for a comparison and a thorough discussion).

        A sharp fODF is obtained because a single fiber *response* function is injected
        as *a priori* knowledge. In the SDTModel, this response is a single fiber q-ball
        ODF as opposed to a single fiber signal function for the CSDModel. The response function
        will be used as deconvolution kernel.

        Parameters
        ----------
        gtab : GradientTable
        ratio : float
            ratio of the smallest vs the largest eigenvalue of the single prolate tensor response function
        reg_sphere : Sphere
            sphere used to build the regularization B matrix
        sh_order : int
            maximal spherical harmonics order
        lambda_ : float
            weight given to the constrained-positivity regularization part of the
            deconvolution equation
        tau : float
            threshold (tau *mean(fODF)) controlling the amplitude below
            which the corresponding fODF is assumed to be zero.

        References
        ----------
        .. [1] Descoteaux, M., et al. IEEE TMI 2009. Deterministic and Probabilistic Tractography Based
               on Complex Fibre Orientation Distributions.
        """
        m, n = sph_harm_ind_list(sh_order)
        self.m, self.n = m, n
        # Lazy boolean indices separating b0 volumes from DWI volumes.
        self._where_b0s = lazy_index(gtab.b0s_mask)
        self._where_dwi = lazy_index(~gtab.b0s_mask)
        # Number of coefficients of an even, symmetric SH basis of this order.
        no_params = ((sh_order + 1) * (sh_order + 2)) / 2
        if no_params > np.sum(gtab.b0s_mask == False):
            msg = "Number of parameters required for the fit are more "
            msg += "than the actual data points"
            warnings.warn(msg, UserWarning)
        x, y, z = gtab.gradients[self._where_dwi].T
        r, theta, phi = cart2sphere(x, y, z)
        # for the gradient sphere
        self.B_dwi = real_sph_harm(m, n, theta[:, None], phi[:, None])
        # for the odf sphere
        if reg_sphere is None:
            self.sphere = get_sphere('symmetric362')
        else:
            self.sphere = reg_sphere
        r, theta, phi = cart2sphere(self.sphere.x, self.sphere.y, self.sphere.z)
        self.B_reg = real_sph_harm(m, n, theta[:, None], phi[:, None])
        # R: SDT (sharpening) deconvolution matrix; P: Funk-Radon transform matrix.
        self.R, self.P = forward_sdt_deconv_mat(ratio, sh_order)
        # scale lambda_ to account for differences in the number of
        # SH coefficients and number of mapped directions
        self.lambda_ = lambda_ * self.R.shape[0] * self.R[0, 0] / self.B_reg.shape[0]
        self.tau = tau
        self.sh_order = sh_order

    def fit(self, data):
        """Estimate the q-ball ODF of `data`, normalize it, deconvolve the
        SDT kernel from it and return a ConstrainedSDTFit."""
        # SH coefficients of the DWI signal (least-squares projection).
        s_sh = np.linalg.lstsq(self.B_dwi, data[self._where_dwi])[0]
        # initial ODF estimation (Funk-Radon transform in SH space)
        odf_sh = np.dot(self.P, s_sh)
        qball_odf = np.dot(self.B_reg, odf_sh)
        Z = np.linalg.norm(qball_odf)
        # normalize ODF
        odf_sh /= Z
        shm_coeff, num_it = odf_deconv(odf_sh, self.sh_order, self.R, self.B_reg, self.lambda_, self.tau)
        # print 'SDT CSD converged after %d iterations' % num_it
        return ConstrainedSDTFit(self, shm_coeff)
class ConstrainedSDTFit(OdfFit):
    """Holds the fiber ODF spherical-harmonic coefficients produced by SDT."""

    def __init__(self, model, fodf_sh):
        # Keep the model around so cached sampling matrices can be reused.
        self.model = model
        self.shm_coeff = fodf_sh

    def odf(self, sphere):
        """Sample the fiber ODF on the vertices of `sphere`."""
        sampling_matrix = self.model.cache_get("sampling_matrix", sphere)
        if sampling_matrix is None:
            # Build the SH sampling matrix for this sphere once and cache it.
            theta = sphere.theta[:, np.newaxis]
            phi = sphere.phi[:, np.newaxis]
            sampling_matrix = real_sph_harm(self.model.m, self.model.n,
                                            theta, phi)
            self.model.cache_set("sampling_matrix", sphere, sampling_matrix)
        return np.dot(self.shm_coeff, sampling_matrix.T)
def estimate_response(gtab, evals, S0):
    """ Estimate single fiber response function

    Parameters
    ----------
    gtab : GradientTable
    evals : ndarray
        eigenvalues of the single prolate tensor
    S0 : float
        non diffusion weighted

    Returns
    -------
    S : estimated signal
    """
    # Principal eigenvector along z (anti-diagonal identity), matching the
    # axially symmetric response convention.
    evecs = np.eye(3)[::-1]
    return single_tensor(gtab, S0, evals, evecs, snr=None)
def sh_to_rh(r_sh, sh_order):
    """ Spherical harmonics (SH) to rotational harmonics (RH)

    Calculate the rotational harmonic decomposition up to
    harmonic sh_order for an axially and antipodally
    symmetric function. Note that all ``m != 0`` coefficients
    will be ignored as axial symmetry is assumed. Hence, there
    will be ``(sh_order/2 + 1)`` non-zero coefficients.

    Parameters
    ----------
    r_sh : ndarray
        ndarray of SH coefficients for the single fiber response function
    sh_order : int
        maximal SH order of the SH representation

    Returns
    -------
    r_rh : ndarray
        Rotational harmonics coefficients representing the input `r_sh`

    References
    ----------
    .. [1] Tournier, J.D., et al. NeuroImage 2007. Robust determination of the fibre orientation
           distribution in diffusion MRI: Non-negativity constrained super-resolved spherical
           deconvolution
    """
    # Divide by a z-aligned Dirac at the non-zero (m == 0) coefficients only.
    dirac_sh = gen_dirac(0, 0, sh_order)
    nz, = np.nonzero(dirac_sh)
    return r_sh[nz] / dirac_sh[nz]
def gen_dirac(pol, azi, sh_order):
    """ Generate Dirac delta function orientated in (theta, phi) = (azi, pol)
    on the sphere. The spherical harmonics (SH) representation of this Dirac is
    returned.

    Parameters
    ----------
    pol : float [0, pi]
        The polar (colatitudinal) coordinate (phi)
    azi : float [0, 2*pi]
        The azimuthal (longitudinal) coordinate (theta)
    sh_order : int
        maximal SH order of the SH representation

    Returns
    -------
    dirac : ndarray
        SH coefficients representing the Dirac function
    """
    m, n = sph_harm_ind_list(sh_order)
    dirac = np.zeros(m.shape)
    # Walk the (degree, order) pairs in the same layout as the index lists;
    # only the order == 0 coefficients of an axial Dirac are non-zero.
    idx = 0
    for degree in np.arange(0, sh_order + 1, 2):
        for order in np.arange(-degree, degree + 1):
            if order == 0:
                dirac[idx] = real_sph_harm(0, degree, azi, pol)
            idx += 1
    return dirac
def forward_sdeconv_mat(r_rh, sh_order):
    """ Build forward spherical deconvolution matrix

    Parameters
    ----------
    r_rh : ndarray (``(sh_order + 1)*(sh_order + 2)/2``,)
        ndarray of rotational harmonics coefficients for the single
        fiber response function
    sh_order : int
        maximal SH order

    Returns
    -------
    R : ndarray (``(sh_order + 1)*(sh_order + 2)/2``, ``(sh_order + 1)*(sh_order + 2)/2``)
        Diagonal matrix repeating the order-l RH coefficient for every
        m of that SH degree.
    """
    m, n = sph_harm_ind_list(sh_order)
    b = np.zeros(m.shape)
    i = 0
    for l in np.arange(0, sh_order + 1, 2):
        # Inner loop variable renamed from `m` to avoid shadowing the
        # SH index array above.
        for order in np.arange(-l, l + 1):
            # Floor division keeps the index an int under Python 3 too;
            # plain '/' would produce a float and raise on indexing.
            b[i] = r_rh[l // 2]
            i = i + 1
    return np.diag(b)
def forward_sdt_deconv_mat(ratio, sh_order):
    r""" Build forward sharpening deconvolution transform (SDT) matrix

    Parameters
    ----------
    ratio : float
        ratio = $\frac{\lambda_2}{\lambda_1}$ of the single fiber response function
    sh_order : int
        spherical harmonic order

    Returns
    -------
    R : ndarray (``(sh_order + 1)*(sh_order + 2)/2``, ``(sh_order + 1)*(sh_order + 2)/2``)
        SDT deconvolution matrix
    P : ndarray (``(sh_order + 1)*(sh_order + 2)/2``, ``(sh_order + 1)*(sh_order + 2)/2``)
        Funk-Radon Transform (FRT) matrix
    """
    # Hoisted out of the loop (the original re-imported on every iteration).
    from scipy.integrate import quad

    m, n = sph_harm_ind_list(sh_order)
    sdt = np.zeros(m.shape)  # SDT matrix
    frt = np.zeros(m.shape)  # FRT (Funk-Radon transform) q-ball matrix
    b = np.zeros(m.shape)
    bb = np.zeros(m.shape)
    # One sharpening coefficient and one FRT coefficient per even SH degree.
    # Floor division (//) keeps indices ints under Python 3 as well.
    for l in np.arange(0, sh_order + 1, 2):
        sharp = quad(lambda z: lpn(l, z)[0][-1] * np.sqrt(1 / (1 - (1 - ratio) * z * z)), -1., 1.)
        sdt[l // 2] = sharp[0]
        frt[l // 2] = 2 * np.pi * lpn(l, 0)[0][-1]
    i = 0
    for l in np.arange(0, sh_order + 1, 2):
        for order in np.arange(-l, l + 1):
            b[i] = sdt[l // 2]
            bb[i] = frt[l // 2]
            i = i + 1
    return np.diag(b), np.diag(bb)
def csdeconv(s_sh, sh_order, R, B_reg, lambda_=1., tau=0.1):
    r""" Constrained-regularized spherical deconvolution (CSD) [1]_

    Deconvolves the axially symmetric single fiber response
    function `r_rh` in rotational harmonics coefficients from the spherical function
    `s_sh` in SH coefficients.

    Parameters
    ----------
    s_sh : ndarray (``(sh_order + 1)*(sh_order + 2)/2``,)
        ndarray of SH coefficients for the spherical function to be deconvolved
    sh_order : int
        maximal SH order of the SH representation
    R : ndarray (``(sh_order + 1)*(sh_order + 2)/2``, ``(sh_order + 1)*(sh_order + 2)/2``)
        forward spherical harmonics matrix
    B_reg : ndarray (``(sh_order + 1)*(sh_order + 2)/2``, ``(sh_order + 1)*(sh_order + 2)/2``)
        SH basis matrix used for deconvolution
    lambda_ : float
        lambda parameter in minimization equation (default 1.0)
    tau : float
        threshold controlling the amplitude below which the corresponding fODF
        is assumed to be zero. Here the threshold is tau * mean(initial fODF),
        i.e. amplitudes below that fraction of the mean amplitude are treated
        as negative and penalized by the regularizer.

    Returns
    -------
    fodf_sh : ndarray (``(sh_order + 1)*(sh_order + 2)/2``,)
        Spherical harmonics coefficients of the constrained-regularized fiber ODF
    num_it : int
        Number of iterations in the constrained-regularization used for convergence

    References
    ----------
    .. [1] Tournier, J.D., et al. NeuroImage 2007. Robust determination of the fibre orientation
           distribution in diffusion MRI: Non-negativity constrained super-resolved spherical
           deconvolution
    """
    # generate initial fODF estimate, truncated at SH order 4
    # (the first 15 symmetric coefficients cover SH orders 0, 2 and 4)
    fodf_sh = np.linalg.lstsq(R, s_sh)[0]
    fodf_sh[15:] = 0

    fodf = np.dot(B_reg, fodf_sh)
    # set threshold on FOD amplitude used to identify 'negative' values
    threshold = tau * np.mean(np.dot(B_reg, fodf_sh))

    k = []
    convergence = 50
    for num_it in range(1, convergence + 1):
        fodf = np.dot(B_reg, fodf_sh)
        k2 = np.nonzero(fodf < threshold)[0]

        if (k2.shape[0] + R.shape[0]) < B_reg.shape[1]:
            # BUG FIX: was `warning.warn` (undefined name -> NameError).
            warnings.warn('too few negative directions identified - failed to converge')
            return fodf_sh, num_it

        # Converged when the set of constrained directions stops changing.
        if num_it > 1 and k.shape[0] == k2.shape[0]:
            if (k == k2).all():
                return fodf_sh, num_it

        k = k2
        # This is the super-resolved trick.
        # Wherever there is a negative amplitude value on the fODF, it concatenates a value
        # to the S vector so that the estimation can focus on trying to eliminate it.
        # In a sense, this "adds" a measurement, which can help to better estimate the fodf_sh,
        # even if you have more SH coefficients to estimate than actual S measurements.
        M = np.concatenate((R, lambda_ * B_reg[k, :]))
        S = np.concatenate((s_sh, np.zeros(k.shape)))
        fodf_sh = np.linalg.lstsq(M, S)[0]

    # BUG FIX: was `warning.warn` (undefined name -> NameError).
    warnings.warn('maximum number of iterations exceeded - failed to converge')
    return fodf_sh, num_it
def odf_deconv(odf_sh, sh_order, R, B_reg, lambda_=1., tau=0.1):
    r""" ODF constrained-regularized spherical deconvolution using
    the Sharpening Deconvolution Transform (SDT) [1]_, [2]_.

    Parameters
    ----------
    odf_sh : ndarray (``(sh_order + 1)*(sh_order + 2)/2``,)
        ndarray of SH coefficients for the ODF spherical function to be deconvolved
    sh_order : int
        maximal SH order of the SH representation
    R : ndarray (``(sh_order + 1)(sh_order + 2)/2``, ``(sh_order + 1)(sh_order + 2)/2``)
        SDT matrix in SH basis
    B_reg : ndarray (``(sh_order + 1)(sh_order + 2)/2``, ``(sh_order + 1)(sh_order + 2)/2``)
        SH basis matrix used for deconvolution
    lambda_ : float
        lambda parameter in minimization equation (default 1.0)
    tau : float
        threshold (tau *max(fODF)) controlling the amplitude below
        which the corresponding fODF is assumed to be zero.

    Returns
    -------
    fodf_sh : ndarray (``(sh_order + 1)(sh_order + 2)/2``,)
        Spherical harmonics coefficients of the constrained-regularized fiber ODF
    num_it : int
        Number of iterations in the constrained-regularization used for convergence

    References
    ----------
    .. [1] Descoteaux, M., et al. IEEE TMI 2009. Deterministic and Probabilistic Tractography Based
           on Complex Fibre Orientation Distributions
    .. [2] Descoteaux, M, PhD thesis, INRIA Sophia-Antipolis, 2008.
    """
    # NOTE: a dead `m, n = sph_harm_ind_list(sh_order)` call was removed here;
    # its result was never used in this function.

    # Generate initial fODF estimate, which is the ODF truncated at SH order 4
    # (the first 15 symmetric coefficients cover SH orders 0, 2 and 4).
    fodf_sh = np.linalg.lstsq(R, odf_sh)[0]
    fodf_sh[15:] = 0

    fodf = np.dot(B_reg, fodf_sh)
    # Normalize so the threshold below is scale-invariant.
    Z = np.linalg.norm(fodf)
    fodf_sh /= Z
    fodf = np.dot(B_reg, fodf_sh)
    threshold = tau * np.max(np.dot(B_reg, fodf_sh))

    k = []
    convergence = 50
    for num_it in range(1, convergence + 1):
        A = np.dot(B_reg, fodf_sh)
        k2 = np.nonzero(A < threshold)[0]

        if (k2.shape[0] + R.shape[0]) < B_reg.shape[1]:
            # BUG FIX: was `warning.warn` (undefined name -> NameError).
            warnings.warn('too few negative directions identified - failed to converge')
            return fodf_sh, num_it

        # Converged when the set of constrained directions stops changing.
        if num_it > 1 and k.shape[0] == k2.shape[0]:
            if (k == k2).all():
                return fodf_sh, num_it

        k = k2
        M = np.concatenate((R, lambda_ * B_reg[k, :]))
        ODF = np.concatenate((odf_sh, np.zeros(k.shape)))
        fodf_sh = np.linalg.lstsq(M, ODF)[0]

    # BUG FIX: was `warning.warn` (undefined name -> NameError).
    warnings.warn('maximum number of iterations exceeded - failed to converge')
    return fodf_sh, num_it
def odf_sh_to_sharp(odfs_sh, sphere, basis=None, ratio=3 / 15., sh_order=8, lambda_=1., tau=0.1):
    r""" Sharpen odfs using the spherical deconvolution transform [1]_

    This function can be used to sharpen any smooth ODF spherical function. In theory, this should
    only be used to sharpen QballModel ODFs, but in practice, one can play with the deconvolution
    ratio and sharpen almost any ODF-like spherical function. The constrained-regularization is stable
    and will not only sharp the ODF peaks but also regularize the noisy peaks.

    Parameters
    ----------
    odfs_sh : ndarray (``(sh_order + 1)*(sh_order + 2)/2``, )
        array of odfs expressed as spherical harmonics coefficients
    sphere : Sphere
        sphere used to build the regularization matrix
    basis : {None, 'mrtrix', 'fibernav'}
        different spherical harmonic basis. None is the fibernav basis as well.
    ratio : float,
        ratio of the smallest vs the largest eigenvalue of the single prolate tensor response function
        (:math:`\frac{\lambda_2}{\lambda_1}`)
    sh_order : int
        maximal SH order of the SH representation
    lambda_ : float
        lambda parameter (see odfdeconv) (default 1.0)
    tau : float
        tau parameter in the L matrix construction (see odfdeconv) (default 0.1)

    Returns
    -------
    fodf_sh : ndarray
        sharpened odf expressed as spherical harmonics coefficients

    References
    ----------
    .. [1] Descoteaux, M., et al. IEEE TMI 2009. Deterministic and Probabilistic Tractography Based
           on Complex Fibre Orientation Distributions
    """
    r, theta, phi = cart2sphere(sphere.x, sphere.y, sphere.z)
    real_sym_sh = sph_harm_lookup[basis]
    # The chosen basis returns its own (m, n) index lists along with B_reg.
    # (A previous `m, n = sph_harm_ind_list(sh_order)` call here was dead
    # code: its result was immediately overwritten by the line below.)
    B_reg, m, n = real_sym_sh(sh_order, theta[:, None], phi[:, None])

    R, P = forward_sdt_deconv_mat(ratio, sh_order)

    # scale lambda to account for differences in the number of
    # SH coefficients and number of mapped directions
    lambda_ = lambda_ * R.shape[0] * R[0, 0] / B_reg.shape[0]

    fodf_sh = np.zeros(odfs_sh.shape)
    # Deconvolve each voxel's ODF independently.
    for index in ndindex(odfs_sh.shape[:-1]):
        fodf_sh[index], num_it = odf_deconv(odfs_sh[index], sh_order, R, B_reg, lambda_=lambda_, tau=tau)

    return fodf_sh
|
maurozucchelli/dipy
|
dipy/reconst/csdeconv.py
|
Python
|
bsd-3-clause
| 23,359
|
[
"DIRAC"
] |
1f935d280851c5d24905fbf0f8d166635ff8bc6ca3aa2f7cc0aa12e2637e210b
|
# Non-spin-polarized calculation for bulk bcc iron: a two-atom cubic cell
# computed with GPAW and written to disk for later reuse.
from ase import Atoms
from gpaw import GPAW

a = 2.87  # cubic lattice constant in Angstrom
bulk = Atoms('Fe2',
             positions=[(0, 0, 0),
                        (a/2, a/2, a/2)],  # body-centered second atom
             cell=(a, a, a),
             pbc=True)
calc = GPAW(kpts=(6, 6, 6),        # k-point sampling of the Brillouin zone
            h=0.20,                # real-space grid spacing (Angstrom)
            nbands=18,             # number of electronic bands
            eigensolver='cg',      # conjugate-gradient eigensolver
            txt='non.txt')         # calculation log file
bulk.set_calculator(calc)
print bulk.get_potential_energy()
calc.write('non.gpw')              # save state so it can be restarted later
|
qsnake/gpaw
|
doc/exercises/iron/non.py
|
Python
|
gpl-3.0
| 415
|
[
"ASE",
"GPAW"
] |
e574f42435c03c9bbe9fe65392d6713647ffface5c56f3e9a598d53e695bcb06
|
'''TB Animation Tools is a toolset for animators
*******************************************************************************
License and Copyright
Copyright 2015-Tom Bailey
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
send issues/ requests to brimblashman@gmail.com
visit tb-animator.blogspot.com for "stuff"
usage
*******************************************************************************
'''
__author__ = 'tom.bailey'
import pymel.core as pm
from tb_objectInfo import mod_panel
class isolator():
    """Toggles Maya's isolate-select mode on the active model panel."""

    def __init__(self):
        pass

    def toggle_isolate(self):
        '''
        import isolate as iso
        reload (iso)
        iso.isolate()
        '''
        panel = mod_panel().getModelPanel()
        if pm.isolateSelect(panel, query=True, state=True):
            # Currently isolated: switch it off and drop the isolated set.
            pm.isolateSelect(panel, state=0)
            pm.isolateSelect(panel, removeSelected=True)
        else:
            # Not isolated: switch it on and isolate the current selection.
            pm.isolateSelect(panel, state=1)
            pm.isolateSelect(panel, addSelected=True)
|
tb-animator/tbtools
|
apps/tb_isolator.py
|
Python
|
mit
| 1,668
|
[
"VisIt"
] |
8c10af6f30543fec7be8adb4df3b08e91c9888f51fb61701d23c2e178412e8ff
|
##
# Copyright (C) 2012 Jasper Snoek, Hugo Larochelle and Ryan P. Adams
#
# This code is written for research and educational purposes only to
# supplement the paper entitled
# "Practical Bayesian Optimization of Machine Learning Algorithms"
# by Snoek, Larochelle and Adams
# Advances in Neural Information Processing Systems, 2012
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
import gp
import sys
import util
import tempfile
import copy
import numpy as np
import numpy.random as npr
import scipy.linalg as spla
import scipy.stats as sps
import scipy.optimize as spo
import cPickle
import multiprocessing
from Locker import *
def optimize_pt(c, b, comp, pend, vals, model):
    """Refine one candidate by L-BFGS-B on the model's EI-over-hypers
    objective (module-level so it can be dispatched to worker processes)."""
    x_opt, _, _ = spo.fmin_l_bfgs_b(model.grad_optimize_ei_over_hypers,
                                    c.flatten(),
                                    args=(comp, pend, vals),
                                    bounds=b,
                                    disp=0)
    return x_opt
def init(expt_dir, arg_string):
    """Factory entry point used by the spearmint driver to build a chooser."""
    return GPEIOptChooser(expt_dir, **util.unpack_args(arg_string))
"""
Chooser module for the Gaussian process expected improvement (EI)
acquisition function where points are sampled densely in the unit
hypercube and then a subset of the points are optimized to maximize EI
over hyperparameter samples. Slice sampling is used to sample
Gaussian process hyperparameters.
"""
class GPEIOptChooser:
    def __init__(self, expt_dir, covar="Matern52", mcmc_iters=10,
                 pending_samples=100, noiseless=False, burnin=100,
                 grid_subset=20):
        """Gaussian-process EI chooser with MCMC-sampled hyperparameters.

        Keyword arguments may arrive as strings parsed from the experiment
        configuration, hence the explicit int()/bool() conversions below.
        """
        # Covariance function looked up by name from the gp module.
        self.cov_func = getattr(gp, covar)
        # File lock serializing access to the pickled hyperparameter state.
        self.locker = Locker()
        self.state_pkl = os.path.join(expt_dir, self.__module__ + ".pkl")

        # Human-readable mirror of the sampled hyperparameters.
        self.stats_file = os.path.join(expt_dir,
                                       self.__module__ + "_hyperparameters.txt")
        self.mcmc_iters = int(mcmc_iters)
        self.burnin = int(burnin)
        self.needs_burnin = True
        # Number of fantasy draws used for pending experiments.
        self.pending_samples = int(pending_samples)
        # Input dimensionality; -1 means "not yet initialized" (see _real_init).
        self.D = -1
        self.hyper_iters = 1
        # Number of points to optimize EI over
        self.grid_subset = int(grid_subset)
        self.noiseless = bool(int(noiseless))
        self.hyper_samples = []

        self.noise_scale = 0.1  # horseshoe prior
        self.amp2_scale = 1  # zero-mean log normal prior
        self.max_ls = 10  # top-hat prior on length scales
    def dump_hypers(self):
        """Persist the current hyperparameters to the state pickle (under
        the file lock) and mirror all MCMC samples to a readable text file."""
        sys.stderr.write("Waiting to lock hyperparameter pickle...")
        self.locker.lock_wait(self.state_pkl)
        sys.stderr.write("...acquired\n")

        # Write the hyperparameters out to a Pickle.
        fh = tempfile.NamedTemporaryFile(mode='w', delete=False)
        cPickle.dump({ 'dims' : self.D,
                       'ls' : self.ls,
                       'amp2' : self.amp2,
                       'noise' : self.noise,
                       'mean' : self.mean },
                     fh)
        fh.close()

        # Use an atomic move for better NFS happiness.
        cmd = 'mv "%s" "%s"' % (fh.name, self.state_pkl)
        os.system(cmd) # TODO: Should check system-dependent return status.

        self.locker.unlock(self.state_pkl)

        # Write the hyperparameters out to a human readable file as well
        fh = open(self.stats_file, 'w')
        fh.write('Mean Noise Amplitude <length scales>\n')
        fh.write('-----------ALL SAMPLES-------------\n')
        # Accumulate the running mean of the samples while writing each row.
        meanhyps = 0*np.hstack(self.hyper_samples[0])
        for i in self.hyper_samples:
            hyps = np.hstack(i)
            meanhyps += (1/float(len(self.hyper_samples)))*hyps
            for j in hyps:
                fh.write(str(j) + ' ')
            fh.write('\n')
        fh.write('-----------MEAN OF SAMPLES-------------\n')
        for j in meanhyps:
            fh.write(str(j) + ' ')
        fh.write('\n')
        fh.close()
    def _real_init(self, dims, values):
        """Deferred initialization once real observations exist: restore
        hyperparameters from the state pickle if present, otherwise seed
        them from the observed values."""
        sys.stderr.write("Waiting to lock hyperparameter pickle...")
        self.locker.lock_wait(self.state_pkl)
        sys.stderr.write("...acquired\n")

        # Remember the RNG state so pending-fantasy draws are reproducible.
        self.randomstate = npr.get_state()
        if os.path.exists(self.state_pkl):
            fh = open(self.state_pkl, 'r')
            state = cPickle.load(fh)
            fh.close()

            self.D = state['dims']
            self.ls = state['ls']
            self.amp2 = state['amp2']
            self.noise = state['noise']
            self.mean = state['mean']
            # Persisted state implies burn-in has already been done.
            self.needs_burnin = False
        else:
            # Input dimensionality.
            self.D = dims

            # Initial length scales.
            self.ls = np.ones(self.D)

            # Initial amplitude.
            self.amp2 = np.std(values)

            # Initial observation noise.
            self.noise = 1e-3

            # Initial mean.
            self.mean = np.mean(values)

            # Save hyperparameter samples
            self.hyper_samples.append((self.mean, self.noise, self.amp2,
                                       self.ls))

        self.locker.unlock(self.state_pkl)
def cov(self, x1, x2=None):
if x2 is None:
return self.amp2 * (self.cov_func(self.ls, x1, None)
+ 1e-6*np.eye(x1.shape[0]))
else:
return self.amp2 * self.cov_func(self.ls, x1, x2)
    # Given a set of completed 'experiments' in the unit hypercube with
    # corresponding objective 'values', pick from the next experiment to
    # run according to the acquisition function.
    def next(self, grid, values, durations,
             candidates, pending, complete):
        """Return the index of the next grid point to evaluate (or a tuple
        (numcand, point) when the chosen point is an off-grid optimum).

        `candidates`, `pending` and `complete` index into `grid`;
        `durations` is unused here but part of the chooser interface.
        """
        # Don't bother using fancy GP stuff at first.
        if complete.shape[0] < 2:
            return int(candidates[0])

        # Perform the real initialization.
        if self.D == -1:
            self._real_init(grid.shape[1], values[complete])

        # Grab out the relevant sets.
        comp = grid[complete, :]
        cand = grid[candidates, :]
        pend = grid[pending, :]
        vals = values[complete]
        numcand = cand.shape[0]

        # Spray a set of candidates around the min so far
        best_comp = np.argmin(vals)
        cand2 = np.vstack((np.random.randn(10, comp.shape[1])*0.001 +
                           comp[best_comp, :], cand))

        if self.mcmc_iters > 0:
            # Possibly burn in.
            if self.needs_burnin:
                for mcmc_iter in xrange(self.burnin):
                    self.sample_hypers(comp, vals)
                    sys.stderr.write("BURN %d/%d] mean: %.2f amp: %.2f "
                                     "noise: %.4f min_ls: %.4f max_ls: %.4f\n"
                                     % (mcmc_iter+1, self.burnin, self.mean,
                                        np.sqrt(self.amp2), self.noise,
                                        np.min(self.ls), np.max(self.ls)))
                self.needs_burnin = False

            # Sample from hyperparameters.
            # Adjust the candidates to hit ei peaks
            self.hyper_samples = []
            for mcmc_iter in xrange(self.mcmc_iters):
                self.sample_hypers(comp, vals)
                sys.stderr.write("%d/%d] mean: %.2f amp: %.2f noise: %.4f "
                                 "min_ls: %.4f max_ls: %.4f\n"
                                 % (mcmc_iter+1, self.mcmc_iters, self.mean,
                                    np.sqrt(self.amp2), self.noise,
                                    np.min(self.ls), np.max(self.ls)))
            self.dump_hypers()

            b = []  # optimization bounds (the grid lives in the unit hypercube)
            for i in xrange(0, cand.shape[1]):
                b.append((0, 1))

            # Keep only the grid_subset candidates with the best mean EI.
            overall_ei = self.ei_over_hypers(comp, pend, cand2, vals)
            inds = np.argsort(np.mean(overall_ei, axis=1))[-self.grid_subset:]
            cand2 = cand2[inds, :]

            # This is old code to optimize each point in parallel. Uncomment
            # and replace if multiprocessing doesn't work
            for i in xrange(0, cand2.shape[0]):
                sys.stderr.write("Optimizing candidate %d/%d\n" % (i+1, cand2.shape[0]))
                self.check_grad_ei(cand2[i, :].flatten(), comp, pend, vals)
                ret = spo.fmin_l_bfgs_b(self.grad_optimize_ei_over_hypers, cand2[i, :].flatten(), args=(comp, pend, vals),
                                        bounds=b, disp=0)
                cand2[i, :] = ret[0]
            cand = np.vstack((cand, cand2))

            # Optimize each point in parallel
            # pool = multiprocessing.Pool(self.grid_subset)
            # results = [pool.apply_async(optimize_pt,args=(
            # c,b,comp,pend,vals,copy.copy(self))) for c in cand2]
            # for res in results:
            # cand = np.vstack((cand, res.get(1e8)))
            # pool.close()

            # Pick the candidate with the best EI averaged over hyper samples.
            overall_ei = self.ei_over_hypers(comp, pend, cand, vals)
            best_cand = np.argmax(np.mean(overall_ei, axis=1))

            # Indices >= numcand are optimizer-refined off-grid points.
            if (best_cand >= numcand):
                return (int(numcand), cand[best_cand, :])

            return int(candidates[best_cand])
        else:
            # Optimize hyperparameters
            self.optimize_hypers(comp, vals)
            sys.stderr.write("mean: %.2f amp: %.2f noise: %.4f "
                             "min_ls: %.4f max_ls: %.4f\n"
                             % (self.mean, np.sqrt(self.amp2), self.noise,
                                np.min(self.ls), np.max(self.ls)))

            # Optimize over EI
            b = []  # optimization bounds
            for i in xrange(0, cand.shape[1]):
                b.append((0, 1))

            for i in xrange(0, cand2.shape[0]):
                ret = spo.fmin_l_bfgs_b(self.grad_optimize_ei,
                                        cand2[i, :].flatten(), args=(comp, vals, True),
                                        bounds=b, disp=0)
                cand2[i, :] = ret[0]
            cand = np.vstack((cand, cand2))

            ei = self.compute_ei(comp, pend, cand, vals)
            best_cand = np.argmax(ei)

            # Indices >= numcand are optimizer-refined off-grid points.
            if (best_cand >= numcand):
                return (int(numcand), cand[best_cand, :])

            return int(candidates[best_cand])
    # Compute EI over hyperparameter samples
    def ei_over_hypers(self, comp, pend, cand, vals):
        """Evaluate EI at each candidate under every stored hyperparameter
        sample; returns an (n_cand, mcmc_iters) array, one column per sample.

        NOTE(review): this overwrites self.mean/noise/amp2/ls with the last
        sample and does not restore the previous values — callers appear to
        re-sample before further use; verify before reusing elsewhere.
        """
        overall_ei = np.zeros((cand.shape[0], self.mcmc_iters))
        for mcmc_iter in xrange(self.mcmc_iters):
            hyper = self.hyper_samples[mcmc_iter]
            self.mean = hyper[0]
            self.noise = hyper[1]
            self.amp2 = hyper[2]
            self.ls = hyper[3]
            overall_ei[:, mcmc_iter] = self.compute_ei(comp, pend, cand,
                                                       vals)
        return overall_ei
def check_grad_ei(self, cand, comp, pend, vals):
(ei,dx1) = self.grad_optimize_ei_over_hypers(cand, comp, pend, vals)
dx2 = dx1*0
idx = np.zeros(cand.shape[0])
for i in xrange(0, cand.shape[0]):
idx[i] = 1e-6
(ei1,tmp) = self.grad_optimize_ei_over_hypers(cand + idx, comp, pend, vals)
(ei2,tmp) = self.grad_optimize_ei_over_hypers(cand - idx, comp, pend, vals)
dx2[i] = (ei - ei2)/(2*1e-6)
idx[i] = 0
print 'computed grads', dx1
print 'finite diffs', dx2
print (dx1/dx2)
print np.sum((dx1 - dx2)**2)
time.sleep(2)
    # Adjust points by optimizing EI over a set of hyperparameter samples
    def grad_optimize_ei_over_hypers(self, cand, comp, pend, vals, compute_grad=True):
        """Negative EI (and, optionally, its gradient) summed over all
        stored hyperparameter samples, in the form fmin_l_bfgs_b expects.

        self's hyperparameters are saved, overwritten per sample, and
        restored before returning, so the chooser state is unchanged.
        """
        summed_ei = 0
        summed_grad_ei = np.zeros(cand.shape).flatten()
        # Save the current hyperparameters so they can be restored below.
        ls = self.ls.copy()
        amp2 = self.amp2
        mean = self.mean
        noise = self.noise

        for hyper in self.hyper_samples:
            self.mean = hyper[0]
            self.noise = hyper[1]
            self.amp2 = hyper[2]
            self.ls = hyper[3]
            if compute_grad:
                (ei, g_ei) = self.grad_optimize_ei(cand, comp, pend, vals, compute_grad)
                summed_grad_ei = summed_grad_ei + g_ei
            else:
                ei = self.grad_optimize_ei(cand, comp, pend, vals, compute_grad)
            summed_ei += ei

        # Restore the saved hyperparameters.
        self.mean = mean
        self.amp2 = amp2
        self.noise = noise
        self.ls = ls.copy()

        if compute_grad:
            return (summed_ei, summed_grad_ei)
        else:
            return summed_ei
    # Adjust points based on optimizing their ei
    def grad_optimize_ei(self, cand, comp, pend, vals, compute_grad=True):
        """Negative EI at `cand` (and, optionally, its gradient) under the
        current hyperparameters, for use as an fmin_l_bfgs_b objective.

        When there are pending experiments their outcomes are fantasized
        from the GP posterior and EI is averaged over the fantasies.
        """
        if pend.shape[0] == 0:
            # Current incumbent (minimization).
            best = np.min(vals)
            cand = np.reshape(cand, (-1, comp.shape[1]))

            # The primary covariances for prediction.
            comp_cov = self.cov(comp)
            cand_cross = self.cov(comp, cand)

            # Compute the required Cholesky.
            obsv_cov = comp_cov + self.noise*np.eye(comp.shape[0])
            obsv_chol = spla.cholesky(obsv_cov, lower=True)

            # Gradient of the covariance, looked up by naming convention
            # from the gp module ('grad_' + covariance name).
            cov_grad_func = getattr(gp, 'grad_' + self.cov_func.__name__)
            cand_cross_grad = cov_grad_func(self.ls, comp, cand)

            # Predictive things.
            # Solve the linear systems.
            alpha = spla.cho_solve((obsv_chol, True), vals - self.mean)
            beta = spla.solve_triangular(obsv_chol, cand_cross, lower=True)

            # Predict the marginal means and variances at candidates.
            func_m = np.dot(cand_cross.T, alpha) + self.mean
            func_v = self.amp2*(1+1e-6) - np.sum(beta**2, axis=0)

            # Expected improvement
            func_s = np.sqrt(func_v)
            u = (best - func_m) / func_s
            ncdf = sps.norm.cdf(u)
            npdf = sps.norm.pdf(u)
            ei = func_s*( u*ncdf + npdf)

            if not compute_grad:
                return ei

            # Gradients of ei w.r.t. mean and variance
            g_ei_m = -ncdf
            g_ei_s2 = 0.5*npdf / func_s

            # Apply covariance function
            grad_cross = np.squeeze(cand_cross_grad)

            grad_xp_m = np.dot(alpha.transpose(), grad_cross)
            grad_xp_v = np.dot(-2*spla.cho_solve(
                    (obsv_chol, True), cand_cross).transpose(), grad_cross)

            grad_xp = 0.5*self.amp2*(grad_xp_m*g_ei_m + grad_xp_v*g_ei_s2)
            # Negate: the optimizer minimizes, we want to maximize EI.
            ei = -np.sum(ei)

            return ei, grad_xp.flatten()
        else:
            # If there are pending experiments, fantasize their outcomes.
            cand = np.reshape(cand, (-1, comp.shape[1]))

            # Create a composite vector of complete and pending.
            comp_pend = np.concatenate((comp, pend))

            # Compute the covariance and Cholesky decomposition.
            comp_pend_cov = (self.cov(comp_pend) +
                             self.noise*np.eye(comp_pend.shape[0]))
            comp_pend_chol = spla.cholesky(comp_pend_cov, lower=True)

            # Compute submatrices.
            pend_cross = self.cov(comp, pend)
            pend_kappa = self.cov(pend)

            # Use the sub-Cholesky.
            obsv_chol = comp_pend_chol[:comp.shape[0], :comp.shape[0]]

            # Solve the linear systems.
            alpha = spla.cho_solve((obsv_chol, True), vals - self.mean)
            beta = spla.cho_solve((obsv_chol, True), pend_cross)

            # Finding predictive means and variances.
            pend_m = np.dot(pend_cross.T, alpha) + self.mean
            pend_K = pend_kappa - np.dot(pend_cross.T, beta)

            # Take the Cholesky of the predictive covariance.
            pend_chol = spla.cholesky(pend_K, lower=True)

            # Make predictions.
            # Fixed RNG state keeps the fantasies identical across calls,
            # so the optimizer sees a deterministic objective.
            npr.set_state(self.randomstate)
            pend_fant = np.dot(pend_chol, npr.randn(pend.shape[0], self.pending_samples)) + self.mean

            # Include the fantasies.
            fant_vals = np.concatenate(
                (np.tile(vals[:, np.newaxis],
                         (1, self.pending_samples)), pend_fant))

            # Compute bests over the fantasies.
            bests = np.min(fant_vals, axis=0)

            # Now generalize from these fantasies.
            cand_cross = self.cov(comp_pend, cand)
            cov_grad_func = getattr(gp, 'grad_' + self.cov_func.__name__)
            cand_cross_grad = cov_grad_func(self.ls, comp_pend, cand)

            # Solve the linear systems.
            alpha = spla.cho_solve((comp_pend_chol, True),
                                   fant_vals - self.mean)
            beta = spla.solve_triangular(comp_pend_chol, cand_cross,
                                         lower=True)

            # Predict the marginal means and variances at candidates.
            func_m = np.dot(cand_cross.T, alpha) + self.mean
            func_v = self.amp2*(1+1e-6) - np.sum(beta**2, axis=0)

            # Expected improvement
            func_s = np.sqrt(func_v[:, np.newaxis])
            u = (bests[np.newaxis, :] - func_m) / func_s
            ncdf = sps.norm.cdf(u)
            npdf = sps.norm.pdf(u)
            ei = func_s*( u*ncdf + npdf)

            # Gradients of ei w.r.t. mean and variance
            g_ei_m = -ncdf
            g_ei_s2 = 0.5*npdf / func_s

            # Apply covariance function
            grad_cross = np.squeeze(cand_cross_grad)

            grad_xp_m = np.dot(alpha.transpose(), grad_cross)
            grad_xp_v = np.dot(-2*spla.cho_solve(
                    (comp_pend_chol, True), cand_cross).transpose(), grad_cross)

            grad_xp = 0.5*self.amp2*(grad_xp_m*np.tile(g_ei_m, (comp.shape[1], 1)).T + (grad_xp_v.T*g_ei_s2).T)
            # Average over fantasies, negate for minimization.
            ei = -np.mean(ei, axis=1)
            grad_xp = np.mean(grad_xp, axis=0)

            return ei, grad_xp.flatten()
def compute_ei(self, comp, pend, cand, vals):
        """Expected improvement (for minimization) of each candidate point.

        comp: completed input locations; pend: pending input locations;
        cand: candidate locations to score; vals: observed values at comp.
        With pending jobs, their outcomes are fantasized from the GP
        posterior (RNG state restored from self.randomstate for
        reproducibility) and EI is averaged over self.pending_samples
        fantasy draws.
        NOTE(review): assumes self.cov returns the (amp2-scaled) kernel
        matrix consistent with self.amp2 below -- confirm against the
        class's cov definition.
        """
        if pend.shape[0] == 0:
            # If there are no pending, don't do anything fancy.
            # Current best.
            best = np.min(vals)
            # The primary covariances for prediction.
            comp_cov = self.cov(comp)
            cand_cross = self.cov(comp, cand)
            # Compute the required Cholesky.
            obsv_cov = comp_cov + self.noise*np.eye(comp.shape[0])
            obsv_chol = spla.cholesky( obsv_cov, lower=True )
            # Solve the linear systems.
            alpha = spla.cho_solve((obsv_chol, True), vals - self.mean)
            beta = spla.solve_triangular(obsv_chol, cand_cross, lower=True)
            # Predict the marginal means and variances at candidates.
            func_m = np.dot(cand_cross.T, alpha) + self.mean
            func_v = self.amp2*(1+1e-6) - np.sum(beta**2, axis=0)
            # Expected improvement: closed form s*(u*Phi(u) + phi(u)).
            func_s = np.sqrt(func_v)
            u = (best - func_m) / func_s
            ncdf = sps.norm.cdf(u)
            npdf = sps.norm.pdf(u)
            ei = func_s*( u*ncdf + npdf)
            return ei
        else:
            # If there are pending experiments, fantasize their outcomes.
            # Create a composite vector of complete and pending.
            comp_pend = np.concatenate((comp, pend))
            # Compute the covariance and Cholesky decomposition.
            comp_pend_cov = (self.cov(comp_pend) +
                             self.noise*np.eye(comp_pend.shape[0]))
            comp_pend_chol = spla.cholesky(comp_pend_cov, lower=True)
            # Compute submatrices.
            pend_cross = self.cov(comp, pend)
            pend_kappa = self.cov(pend)
            # Use the sub-Cholesky.
            obsv_chol = comp_pend_chol[:comp.shape[0],:comp.shape[0]]
            # Solve the linear systems.
            alpha = spla.cho_solve((obsv_chol, True), vals - self.mean)
            beta = spla.cho_solve((obsv_chol, True), pend_cross)
            # Finding predictive means and variances.
            pend_m = np.dot(pend_cross.T, alpha) + self.mean
            pend_K = pend_kappa - np.dot(pend_cross.T, beta)
            # Take the Cholesky of the predictive covariance.
            pend_chol = spla.cholesky(pend_K, lower=True)
            # Make predictions (deterministic: fixed RNG state).
            npr.set_state(self.randomstate)
            pend_fant = np.dot(pend_chol, npr.randn(pend.shape[0],self.pending_samples)) + self.mean
            # Include the fantasies.
            fant_vals = np.concatenate(
                (np.tile(vals[:,np.newaxis],
                         (1,self.pending_samples)), pend_fant))
            # Compute bests over the fantasies.
            bests = np.min(fant_vals, axis=0)
            # Now generalize from these fantasies.
            cand_cross = self.cov(comp_pend, cand)
            # Solve the linear systems.
            alpha = spla.cho_solve((comp_pend_chol, True),
                                   fant_vals - self.mean)
            beta = spla.solve_triangular(comp_pend_chol, cand_cross,
                                         lower=True)
            # Predict the marginal means and variances at candidates.
            func_m = np.dot(cand_cross.T, alpha) + self.mean
            func_v = self.amp2*(1+1e-6) - np.sum(beta**2, axis=0)
            # Expected improvement, one column per fantasy.
            func_s = np.sqrt(func_v[:,np.newaxis])
            u = (bests[np.newaxis,:] - func_m) / func_s
            ncdf = sps.norm.cdf(u)
            npdf = sps.norm.pdf(u)
            ei = func_s*( u*ncdf + npdf)
            # Average EI over the fantasized outcomes.
            return np.mean(ei, axis=1)
def sample_hypers(self, comp, vals):
    """Draw one new slice-sampling step for all GP hyperparameters.

    In the noiseless case the noise level is pinned to a small jitter
    (1e-3) before sampling mean/amplitude.  The resulting state
    (mean, noise, amp2, ls) is appended to self.hyper_samples.
    """
    if self.noiseless:
        # Pin the noise before sampling the remaining hyperparameters.
        self.noise = 1e-3
    # Dispatch to the sampler that matches the noise model.
    sampler = self._sample_noiseless if self.noiseless else self._sample_noisy
    sampler(comp, vals)
    self._sample_ls(comp, vals)
    self.hyper_samples.append((self.mean, self.noise, self.amp2, self.ls))
def _sample_ls(self, comp, vals):
    """Slice-sample the kernel length scales (one component at a time)."""
    def ls_logprob(ls):
        # Reject proposals outside the allowed box [0, max_ls].
        if np.any(ls < 0) or np.any(ls > self.max_ls):
            return -np.inf
        n = comp.shape[0]
        # Kernel plus a small diagonal jitter, plus observation noise.
        cov = (self.amp2 * (self.cov_func(ls, comp, None)
                            + 1e-6 * np.eye(n))
               + self.noise * np.eye(n))
        chol = spla.cholesky(cov, lower=True)
        resid = vals - self.mean
        solve = spla.cho_solve((chol, True), resid)
        # Gaussian log-likelihood up to an additive constant.
        return -np.sum(np.log(np.diag(chol))) - 0.5 * np.dot(resid, solve)
    self.ls = util.slice_sample(self.ls, ls_logprob, compwise=True)
def _sample_noisy(self, comp, vals):
    """Jointly slice-sample (mean, amp2, noise) for the noisy GP model."""
    def hyper_logprob(hypers):
        mean, amp2, noise = hypers[0], hypers[1], hypers[2]
        # This is pretty hacky, but keeps things sane: the mean must lie
        # inside the observed value range, amplitude/noise non-negative.
        if mean > np.max(vals) or mean < np.min(vals):
            return -np.inf
        if amp2 < 0 or noise < 0:
            return -np.inf
        n = comp.shape[0]
        cov = (amp2 * (self.cov_func(self.ls, comp, None)
                       + 1e-6 * np.eye(n))
               + noise * np.eye(n))
        chol = spla.cholesky(cov, lower=True)
        resid = vals - mean
        solve = spla.cho_solve((chol, True), resid)
        lp = -np.sum(np.log(np.diag(chol))) - 0.5 * np.dot(resid, solve)
        # Roll in noise horseshoe prior.
        lp += np.log(np.log(1 + (self.noise_scale / noise) ** 2))
        # Roll in amplitude lognormal prior.
        lp -= 0.5 * (np.log(amp2) / self.amp2_scale) ** 2
        return lp
    start = np.array([self.mean, self.amp2, self.noise])
    sampled = util.slice_sample(start, hyper_logprob, compwise=False)
    self.mean = sampled[0]
    self.amp2 = sampled[1]
    self.noise = sampled[2]
def _sample_noiseless(self, comp, vals):
    """Slice-sample (mean, amp2) with the noise level fixed at 1e-3."""
    def hyper_logprob(hypers):
        mean, amp2 = hypers[0], hypers[1]
        noise = 1e-3  # fixed jitter; the sampled third component is ignored
        # This is pretty hacky, but keeps things sane.
        if mean > np.max(vals) or mean < np.min(vals):
            return -np.inf
        if amp2 < 0:
            return -np.inf
        n = comp.shape[0]
        cov = (amp2 * (self.cov_func(self.ls, comp, None)
                       + 1e-6 * np.eye(n))
               + noise * np.eye(n))
        chol = spla.cholesky(cov, lower=True)
        resid = vals - mean
        solve = spla.cho_solve((chol, True), resid)
        lp = -np.sum(np.log(np.diag(chol))) - 0.5 * np.dot(resid, solve)
        # Roll in amplitude lognormal prior.
        lp -= 0.5 * (np.log(amp2) / self.amp2_scale) ** 2
        return lp
    # NOTE(review): a 3-vector is sampled for parity with _sample_noisy,
    # but the log-prob ignores its third (noise) component; the noise is
    # re-clamped to 1e-3 below.
    sampled = util.slice_sample(
        np.array([self.mean, self.amp2, self.noise]), hyper_logprob,
        compwise=False)
    self.mean = sampled[0]
    self.amp2 = sampled[1]
    self.noise = 1e-3
def optimize_hypers(self, comp, vals):
    """Fit hyperparameters by optimisation using a scratch GP object."""
    helper = gp.GP(self.cov_func.__name__)
    helper.real_init(comp.shape[1], vals)
    helper.optimize_hypers(comp, vals)
    # Copy the optimised values back onto this chooser.
    self.mean, self.ls = helper.mean, helper.ls
    self.amp2, self.noise = helper.amp2, helper.noise
    # Save hyperparameter samples
    self.hyper_samples.append((self.mean, self.noise, self.amp2, self.ls))
    self.dump_hypers()
    return
|
claesenm/optunity-benchmark
|
optimizers/spearmint/spearmint_april2013_mod_src/GPEIOptChooser.py
|
Python
|
gpl-3.0
| 25,968
|
[
"Gaussian"
] |
ab8d33527597a4c8e52bb85803340d4b100e500ddf3f74133ac5cb0db699982a
|
# -*- coding: utf-8 -*-
"""
Created on Fri Mar 20 12:55:12 2015
This file is (slightly modified) from Fabrizio Costa's EDeN.
Copyright 2015 Nicolo' Navarin
This file is part of scikit-learn-graph.
scikit-learn-graph is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
scikit-learn-graph is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with scikit-learn-graph. If not, see <http://www.gnu.org/licenses/>.
"""
import openbabel
import pybel
import json
import pybel
import networkx as nx
from networkx.readwrite import json_graph
import tempfile
import ioskgraph
import unicodedata
def obabel_to_eden(input, file_type = 'sdf',dict_labels={}, counter=[1], **options):
    """
    Takes a string list in sdf format and yields networkx graphs.

    Parameters
    ----------
    input : string
        A pointer to the data source.

    NOTE(review): dict_labels / counter use mutable defaults on purpose so
    the label->id mapping is shared across successive calls -- confirm
    before "fixing" them.
    """
    for raw_line in ioskgraph.read(input):
        if not raw_line.strip():
            continue  # skip blank records
        ascii_line = unicodedata.normalize(
            'NFKD', raw_line).encode('ascii', 'ignore')
        mol = pybel.readstring(file_type, ascii_line)
        graph = obabel_to_networkx(mol, dict_labels, counter)
        if len(graph):
            yield graph
def obabel_to_networkx( mol, dict_labels={}, counter=[1]):
    """
    Takes a pybel molecule object and converts it into a networkx graph.

    Parameters
    ----------
    mol : pybel.Molecule
        Molecule to convert.
    dict_labels : dict
        Shared mapping from label strings to integer ids; mutated in place
        so ids stay consistent across molecules.  (Mutable default is
        intentional shared state -- see obabel_to_eden.)
    counter : list of one int
        Mutable counter holding the next fresh label id.
    """
    g = nx.Graph()
    g.graph['ordered'] = False
    # Atoms: one node per atom, labelled with its atomic number.
    for atom in mol:
        label = str(atom.atomicnum)
        if label not in dict_labels:
            # First time this label is seen: assign a fresh integer id.
            dict_labels[label] = counter[0]
            counter[0] += 1
        g.add_node(atom.idx, label=label, viewpoint=True)
    # Bonds: one edge per bond, labelled with its bond order.
    # (Removed the unused `edges` and `bondorders` accumulators that the
    # original declared but never read.)
    for bond in openbabel.OBMolBondIter(mol.OBMol):
        label = str(bond.GetBO())
        g.add_edge(bond.GetBeginAtomIdx(), bond.GetEndAtomIdx(), label=label)
    return g
|
nickgentoo/scikit-learn-graph
|
skgraph/datasets/obabel.py
|
Python
|
gpl-3.0
| 2,446
|
[
"Pybel"
] |
5a935fc6b7ba4e1cdbc3e1ff5d7b3b0d93c6f7e866031fdfcb4d966eabb2c47b
|
import sys
import traceback
import optparse
import numpy as np
from ase.structure import bulk, estimate_lattice_constant
from ase.atoms import Atoms, string2symbols
from ase.data.molecules import molecule
from ase.visualize import view
from ase.io import read, write
from ase.constraints import FixAtoms
from gpaw.utilities import devnull
from gpaw.utilities.bulk2 import EMTRunner, GPAWRunner
from gpaw.parameters import InputParameters
from gpaw.mpi import world
defaults = InputParameters()
def build_parser():
    """Build the optparse parser for the GPAW command-line runner.

    Returns an OptionParser with three groups (Structure, Behavior,
    Calculator).  One hidden --<key> pass-through option is generated for
    every key of the module-level InputParameters `defaults`.
    """
    description = ('Run GPAW calculation for simple atoms, molecules or '
                   'bulk systems.')
    epilog = 'GPAW options: --%s. ' % ', --'.join(defaults.keys())
    parser = optparse.OptionParser(usage='%prog [options] formula or filename',
                                   version='%prog 0.1',
                                   description=description + ' ' + epilog)
    struct = optparse.OptionGroup(parser, 'Structure')
    struct.add_option('-i', '--identifier',
                      help='String identifier added to filenames.')
    struct.add_option('-x', '--crystal-structure',
                      help='Crystal structure.',
                      choices=['sc', 'fcc', 'bcc', 'diamond', 'hcp',
                               'rocksalt', 'zincblende'])
    struct.add_option('-a', '--lattice-constant', type='float',
                      help='Lattice constant in Angstrom.')
    struct.add_option('--c-over-a', type='float',
                      help='c/a ratio.')
    struct.add_option('-v', '--vacuum', type='float', default=3.0,
                      help='Amount of vacuum to add around isolated systems '
                      '(in Angstrom).')
    struct.add_option('-O', '--orthorhombic', action='store_true',
                      help='Use orthorhombic unit cell.')
    struct.add_option('-C', '--cubic', action='store_true',
                      help='Use cubic unit cell.')
    struct.add_option('-r', '--repeat',
                      help='Repeat unit cell. Use "-r 2" or "-r 2,3,1".')
    struct.add_option('-M', '--magnetic-moment',
                      help='Magnetic moment(s). Use "-M 1" or "-M 2.3,-2.3".')
    parser.add_option_group(struct)
    behavior = optparse.OptionGroup(parser, 'Behavior')
    # BUG FIX: help text previously read "Don't alculate anything".
    behavior.add_option('--read', action='store_true',
                        help="Don't calculate anything - read from file.")
    behavior.add_option('-p', '--plot', action='store_true',
                        help='Plot results.')
    behavior.add_option('-G', '--gui', action='store_true',
                        help="Pop up ASE's GUI.")
    behavior.add_option('-w', '--write-to-file', metavar='FILENAME',
                        help="Write configuration to file.")
    behavior.add_option('-F', '--fit', action='store_true',
                        help='Find optimal volume or bondlength.')
    behavior.add_option('-R', '--relax', type='float', metavar='FMAX',
                        help='Relax internal coordinates using L-BFGS '
                        'algorithm.')
    behavior.add_option('--constrain-tags', type='str', metavar='T1,T2,...',
                        help='Constrain atoms with tags T1, T2, ...')
    behavior.add_option('--parameters',
                        help='read input parameters from this file')
    behavior.add_option('-E', '--effective-medium-theory',
                        action='store_true',
                        help='Use EMT calculator.')
    parser.add_option_group(behavior)
    calc_opts = optparse.OptionGroup(parser, 'Calculator')
    # One hidden pass-through option per GPAW input parameter.
    for key in defaults:
        calc_opts.add_option('--%s' % key, type=str,
                             help=optparse.SUPPRESS_HELP)
    calc_opts.add_option('--write-gpw-file', metavar='MODE',
                         help='Write gpw file.')
    parser.add_option_group(calc_opts)
    return parser
def run(argv=None):
    """Parse arguments, build the requested atomic system and run it.

    argv: list of CLI arguments, a single string to split, or None to use
    sys.argv[1:].  Returns the Runner instance (EMTRunner or GPAWRunner).
    """
    if argv is None:
        argv = sys.argv[1:]
    elif isinstance(argv, str):
        argv = argv.split()
    parser = build_parser()
    opt, args = parser.parse_args(argv)
    if len(args) != 1:
        parser.error("incorrect number of arguments")
    name = args[0]
    # Only the MPI master writes output; other ranks get a null stream.
    if world.rank == 0:
        out = sys.stdout#open('%s-%s.results' % (name, opt.identifier), 'w')
    else:
        out = devnull
    a = None
    try:
        symbols = string2symbols(name)
    except ValueError:
        # name was not a chemical formula - must be a file name:
        atoms = read(name)
    else:
        if opt.crystal_structure:
            a = opt.lattice_constant
            if a is None:
                # Estimate the lattice constant when not given explicitly.
                a = estimate_lattice_constant(name, opt.crystal_structure,
                                              opt.c_over_a)
                out.write('Using an estimated lattice constant of %.3f Ang\n' %
                          a)
            atoms = bulk(name, opt.crystal_structure, a, covera=opt.c_over_a,
                         orthorhombic=opt.orthorhombic, cubic=opt.cubic)
        else:
            try:
                # Molecule?
                atoms = molecule(name)
            except NotImplementedError:
                if len(symbols) == 1:
                    # Atom
                    atoms = Atoms(name)
                elif len(symbols) == 2:
                    # Dimer
                    # NOTE(review): build_parser defines no --bond-length
                    # option, so opt.bond_length looks like it would raise
                    # AttributeError here -- confirm upstream.
                    atoms = Atoms(name, positions=[(0, 0, 0),
                                                   (opt.bond_length, 0, 0)])
                else:
                    raise ValueError('Unknown molecule: ' + name)
    if opt.magnetic_moment:
        magmom = opt.magnetic_moment.split(',')
        # Tile the given moments over all atoms.
        atoms.set_initial_magnetic_moments(np.tile(magmom,
                                                   len(atoms) // len(magmom)))
    if opt.repeat is not None:
        r = opt.repeat.split(',')
        if len(r) == 1:
            # "-r 2" means repeat 2x2x2.
            r = 3 * r
        atoms = atoms.repeat([int(c) for c in r])
    if opt.gui:
        view(atoms)
        return
    if opt.write_to_file:
        write(opt.write_to_file, atoms)
        return
    if opt.effective_medium_theory:
        Runner = EMTRunner
    else:
        Runner = GPAWRunner
    if opt.fit:
        # Five strain points around the relaxed volume for an EOS fit.
        strains = np.linspace(0.98, 1.02, 5)
    else:
        strains = None
    if opt.constrain_tags:
        tags = [int(t) for t in opt.constrain_tags.split(',')]
        constrain = FixAtoms(mask=[t in tags for t in atoms.get_tags()])
        atoms.constraints = [constrain]
    runner = Runner(name, atoms, strains, tag=opt.identifier,
                    clean=not opt.read,
                    fmax=opt.relax, out=out)
    if not opt.effective_medium_theory:
        # Import stuff that eval() may need to know:
        from gpaw.wavefunctions.pw import PW
        from gpaw.occupations import FermiDirac, MethfesselPaxton
        if opt.parameters:
            # NOTE(review): eval() on a user-supplied file -- acceptable for
            # a local CLI tool, but never expose this to untrusted input.
            input_parameters = eval(open(opt.parameters).read())
        else:
            input_parameters = {}
        for key in defaults:
            value = getattr(opt, key)
            if value is not None:
                try:
                    # Option strings are eval'd so "(2,2,2)" etc. work.
                    input_parameters[key] = eval(value)
                except (NameError, SyntaxError):
                    input_parameters[key] = value
        runner.set_parameters(vacuum=opt.vacuum,
                              write_gpw_file=opt.write_gpw_file,
                              **input_parameters)
    runner.run()
    runner.summary(plot=opt.plot, a0=a)
    return runner
def main():
    """Command-line entry point: run() plus top-level error reporting.

    Ctrl-C exits with status 1; any other exception prints a bug-report
    notice to stderr and re-raises.  (Python 2 print statements.)
    """
    try:
        run(sys.argv[1:])
    except KeyboardInterrupt:
        print 'Killed!'
        raise SystemExit(1)
    except SystemExit:
        raise
    except Exception:
        #traceback.print_exc()
        print >> sys.stderr, """
An exception occurred! Please report the issue to
gridpaw-developer@listserv.fysik.dtu.dk - thanks! Please also report this
if it was a user error, so that a better error message can be provided
next time."""
        raise
|
qsnake/gpaw
|
gpaw/utilities/gpawscript.py
|
Python
|
gpl-3.0
| 7,953
|
[
"ASE",
"CRYSTAL",
"GPAW"
] |
f86c0b03d88a44ad70c191e534aae21ba11755e3ae08b7bc2a3086ac93f55cf0
|
import operator
from glob import glob
import os
import re
import warnings
import numpy as np
import dask.array as da
import xray
from xray import Variable
from xray.backends.common import AbstractDataStore
from xray.core.utils import NDArrayMixin
from xray.core.pycompat import OrderedDict
from xray.core.indexing import NumpyIndexingAdapter
#from ..conventions import pop_to, cf_encoder
#from ..core import indexing
#from ..core.utils import (FrozenOrderedDict, NDArrayMixin,
# close_on_error, is_remote_uri)
#from ..core.pycompat import iteritems, basestring, OrderedDict
#from .common import AbstractWritableDataStore, robust_getitem
# This lookup table maps from dtype.byteorder to a readable endian
# string used by netCDF4.
# '|' means "not applicable" (e.g. 1-byte types) and is treated as native.
_endian_lookup = {'=': 'native',
                  '>': 'big',
                  '<': 'little',
                  '|': 'native'}
# the variable metadata will be stored in dicts of the form
#_variable[varname] = (dimensions, description, units)
# Each entry: name = (dimension tuple, description, units).
_grid_variables = OrderedDict(
    # horizontal grid
    X= (('X',), "X-coordinate of cell center", "meters"),
    Y= (('Y',), "Y-coordinate of cell center", "meters"),
    Xp1= (('Xp1',), "X-coordinate of cell corner", "meters"),
    Yp1= (('Yp1',), "Y-coordinate of cell corner", "meters"),
    # 2d versions
    XC= (('Y','X'), "X coordinate of cell center (T-P point)", "degree_east"),
    YC= (('Y','X'), "Y coordinate of cell center (T-P point)", "degree_north"),
    XG= (('Yp1','Xp1'), "X coordinate of cell corner (Vorticity point)", "degree_east"),
    YG= (('Yp1','Xp1'), "Y coordinate of cell corner (Vorticity point)", "degree_north"),
    # vertical grid
    Z= (('Z',), "vertical coordinate of cell center", "meters"),
    Zp1= (('Zp1',), "vertical coordinate of cell interface", "meters"),
    Zu= (('Zu',), "vertical coordinate of lower cell interface", "meters"),
    Zl= (('Zl',), "vertical coordinate of upper cell interface", "meters"),
    # (for some reason, the netCDF files use both R and Z notation )
    # 'RC': (('Z',), "R coordinate of cell center", "m"),
    # 'RF': (('Zp1',), "R coordinate of cell interface", "m"),
    # 'RU': (('Zu',), "R coordinate of lower cell interface", "m"),
    # 'RL': (('Zl',), "R coordinate of upper cell interface", "m"),
    # horiz. differentials
    dxC= (('Y','Xp1'), "x cell center separation", "meters"),
    dyC= (('Yp1','X'), "y cell center separation", "meters"),
    dxG= (('Yp1','X'), "x cell corner separation", "meters"),
    dyG= (('Y','Xp1'), "y cell corner separation", "meters"),
    # vert. differentials
    drC= (('Zp1',), "r cell center separation", "m"),
    drF= (('Z',), "r cell face separation", "m"),
    # areas
    rA= (('Y','X'), "r-face area at cell center", "m^2"),
    rAw= (('Y','Xp1'), "r-face area at U point", "m^2"),
    rAs= (('Yp1','X'), "r-face area at V point", "m^2"),
    rAz= (('Yp1','Xp1'), "r-face area at cell corner", "m^2"),
    # depth
    Depth=(('Y','X'), "fluid thickness in r coordinates (at rest)", "meters"),
    # thickness factors
    HFacC=(('Z','Y','X'),
           "vertical fraction of open cell at cell center", "none (0-1)"),
    HFacW=(('Z','Y','Xp1'),
           "vertical fraction of open cell at West face", "none (0-1)"),
    HFacS=(('Z','Yp1','X'),
           "vertical fraction of open cell at South face", "none (0-1)")
)
# Maps a dataset variable name to (mds file basename, slice to extract)
# where a grid variable is stored under a different name / needs slicing
# out of a larger array; slice None means "use the whole (squeezed) array".
_grid_special_mapping = {
    'Z': ('RC', (slice(None),0,0)),
    'Zp1': ('RF', (slice(None),0,0)),
    'Zu': ('RF', (slice(1,None),0,0)),
    'Zl': ('RF', (slice(None,-1),0,0)),
    'X': ('XC', (0,slice(None))),
    'Y': ('YC', (slice(None),0)),
    'Xp1': ('XG', (0,slice(None))),
    'Yp1': ('YG', (slice(None),0)),
    'rA': ('RAC', None),
    'HFacC': ('hFacC', None),
    'HFacW': ('hFacW', None),
    'HFacS': ('hFacS', None),
}
# Each entry: name = (dimension tuple, description, units).
_state_variables = OrderedDict(
    # state
    U= (('Z','Y','Xp1'), 'Zonal Component of Velocity', 'm/s'),
    V= (('Z','Yp1','X'), 'Meridional Component of Velocity', 'm/s'),
    W= (('Zl','Y','X'), 'Vertical Component of Velocity', 'm/s'),
    T= (('Z','Y','X'), 'Potential Temperature', 'degC'),
    S= (('Z','Y','X'), 'Salinity', 'psu'),
    PH= (('Z','Y','X'), 'Hydrostatic Pressure Pot.(p/rho) Anomaly', 'm^2/s^2'),
    PHL=(('Y','X'), 'Bottom Pressure Pot.(p/rho) Anomaly', 'm^2/s^2'),
    Eta=(('Y','X'), 'Surface Height Anomaly', 'm'),
    # tave
    uVeltave=(('Z','Y','Xp1'), 'Zonal Component of Velocity', 'm/s'),
    vVeltave=(('Z','Yp1','X'), 'Meridional Component of Velocity', 'm/s'),
    wVeltave=(('Zl','Y','X'), 'Vertical Component of Velocity', 'm/s'),
    Ttave=(('Z','Y','X'), 'Potential Temperature', 'degC'),
    Stave=(('Z','Y','X'), 'Salinity', 'psu'),
    PhHytave=(('Z','Y','X'), 'Hydrostatic Pressure Pot.(p/rho) Anomaly', 'm^2/s^2'),
    PHLtave=(('Y','X'), 'Bottom Pressure Pot.(p/rho) Anomaly', 'm^2/s^2'),
    ETAtave=(('Y','X'), 'Surface Height Anomaly', 'm'),
    Convtave=(('Zl','Y','X'), "Convective Adjustment Index", "none [0-1]"),
    Eta2tave=(('Y','X'), "Square of Surface Height Anomaly", "m^2"),
    PHL2tave=(('Y','X'), 'Square of Hyd. Pressure Pot.(p/rho) Anomaly', 'm^4/s^4'),
    sFluxtave=(('Y','X'), 'total salt flux (match salt-content variations), >0 increases salt', 'g/m^2/s'),
    Tdiftave=(('Zl','Y','X'), "Vertical Diffusive Flux of Pot.Temperature", "degC.m^3/s"),
    tFluxtave=(('Y','X'), "Total heat flux (match heat-content variations), >0 increases theta", "W/m^2"),
    TTtave=(('Z','Y','X'), 'Squared Potential Temperature', 'degC^2'),
    uFluxtave=(('Y','Xp1'), 'surface zonal momentum flux, positive -> increase u', 'N/m^2'),
    UStave=(('Z','Y','Xp1'), "Zonal Transport of Salinity", "psu m/s"),
    UTtave=(('Z','Y','Xp1'), "Zonal Transport of Potenial Temperature", "degC m/s"),
    UUtave=(('Z','Y','Xp1'), "Zonal Transport of Zonal Momentum", "m^2/s^2"),
    UVtave=(('Z','Yp1','Xp1'), 'Product of meridional and zonal velocity', 'm^2/s^2'),
    vFluxtave=(('Yp1','X'), 'surface meridional momentum flux, positive -> increase v', 'N/m^2'),
    VStave=(('Z','Yp1','X'), "Meridional Transport of Salinity", "psu m/s"),
    VTtave=(('Z','Yp1','X'), "Meridional Transport of Potential Temperature", "degC m/s"),
    VVtave=(('Z','Yp1','X'), 'Zonal Transport of Zonal Momentum', 'm^2/s^2'),
    WStave=(('Zl','Y','X'), 'Vertical Transport of Salinity', "psu m/s"),
    WTtave=(('Zl','Y','X'), 'Vertical Transport of Potential Temperature', "degC m/s")
)
def _force_native_endianness(var):
    """Return *var* with its data converted to native byte order.

    possible values for dtype.byteorder are:
      =  native, <  little-endian, >  big-endian, |  not applicable
    Raises NotImplementedError if the encoding still requests a
    non-native endianness afterwards.
    """
    # Below we check if the data type is not native or NA
    if var.dtype.byteorder not in ['=', '|']:
        # if endianness is specified explicitly, convert to the native type
        data = var.data.astype(var.dtype.newbyteorder('='))
        var = Variable(var.dims, data, var.attrs, var.encoding)
        # if endian exists, remove it from the encoding.
        var.encoding.pop('endian', None)
    # check to see if encoding has a value for endian other than 'native'.
    # BUG FIX: the original used `is 'native'` -- an identity comparison
    # against a string literal, which is implementation-dependent.
    if var.encoding.get('endian', 'native') != 'native':
        raise NotImplementedError("Attempt to write non-native endian type, "
                                  "this is not supported by the netCDF4 python "
                                  "library.")
    return var
def _parse_available_diagnostics(fname):
    """Parse MITgcm's available_diagnostics.log.

    Returns {key: (dims tuple, description, units)} for every diagnostic
    listed in the file.
    """
    all_diags = {}
    # add default diagnostics for grid, tave, and state
    with open(fname) as f:
        # will automatically skip first four header lines
        for line in f:
            cols = re.split('\|', line)
            # Data rows have exactly 7 '|'-separated columns; skip headers.
            if len(cols) != 7 or cols[0].strip() == 'Num':
                continue
            key = cols[1].strip()
            levs = int(cols[2].strip())
            mate = cols[3].strip()
            if mate:
                mate = int(mate)
            dds = MITgcmDiagnosticDescription(
                key, cols[4], cols[5].strip(), cols[6].strip(), levs, mate)
            # return dimensions, description, units
            all_diags[key] = (dds.coords(), dds.desc, dds.units)
    return all_diags
class MITgcmDiagnosticDescription(object):
    """Metadata for a single MITgcm diagnostic parsed from its code string."""

    def __init__(self, key, code, units=None, desc=None, levs=None, mate=None):
        self.key = key
        self.levs = levs
        self.mate = mate
        self.code = code
        self.units = units
        self.desc = desc

    def coords(self):
        """Parse code to determine coordinates."""
        # Relevant characters of the parser code string.
        hpoint, rpoint, rlev = self.code[1], self.code[8], self.code[9]
        xcoords = {'U': 'Xp1', 'V': 'X', 'M': 'X', 'Z': 'Xp1'}
        ycoords = {'U': 'Y', 'V': 'Yp1', 'M': 'Y', 'Z': 'Yp1'}
        rcoords = {'M': 'Z', 'U': 'Zu', 'L': 'Zl'}
        if rlev == '1' and self.levs == 1:
            # Single-level (2D) diagnostic.
            return (ycoords[hpoint], xcoords[hpoint])
        if rlev != 'R':
            warnings.warn("Not sure what to do with rlev = " + rlev)
        return (rcoords[rpoint], ycoords[hpoint], xcoords[hpoint])
def _parse_meta(fname):
"""Get the metadata as a dict out of the mitGCM mds .meta file."""
flds = {}
basename = re.match('(^.+?)\..+', os.path.basename(fname)).groups()[0]
flds['basename'] = basename
with open(fname) as f:
text = f.read()
# split into items
for item in re.split(';', text):
# remove whitespace at beginning
item = re.sub('^\s+', '', item)
#match = re.match('(\w+) = ', item)
match = re.match('(\w+) = (\[|\{)(.*)(\]|\})', item, re.DOTALL)
if match:
key, _, value, _ = match.groups()
# remove more whitespace
value = re.sub('^\s+', '', value)
value = re.sub('\s+$', '', value)
#print key,':', value
flds[key] = value
# now check the needed things are there
needed_keys = ['dimList','nDims','nrecords','dataprec']
for k in needed_keys:
assert flds.has_key(k)
# transform datatypes
flds['nDims'] = int(flds['nDims'])
flds['nrecords'] = int(flds['nrecords'])
# use big endian always
flds['dataprec'] = np.dtype(re.sub("'",'',flds['dataprec'])).newbyteorder('>')
flds['dimList'] = [[int(h) for h in
re.split(',', g)] for g in
re.split(',\n',flds['dimList'])]
if flds.has_key('fldList'):
flds['fldList'] = [re.match("'*(\w+)",g).groups()[0] for g in
re.split("'\s+'",flds['fldList'])]
assert flds['nrecords'] == len(flds['fldList'])
return flds
def _read_mds(fname, iternum=None, use_mmap=True,
              force_dict=True, convert_big_endian=False):
    """Read an MITgcm .meta / .data file pair.

    fname: path prefix of the pair (no extension); iternum: optional
    iteration number inserted as '.%010d'; use_mmap: memory-map the data
    instead of reading eagerly; force_dict: always return {name: array};
    convert_big_endian: convert the (big-endian) data to native order.
    Returns a dict of record arrays, or a bare array when nrecords == 1
    and force_dict is False.
    """
    if iternum is None:
        istr = ''
    else:
        assert isinstance(iternum, int)
        istr = '.%010d' % iternum
    datafile = fname + istr + '.data'
    metafile = fname + istr + '.meta'
    # get metadata
    meta = _parse_meta(metafile)
    # why does the .meta file contain so much repeated info?
    # just get the part we need
    # and reverse order (numpy uses C order, mds is fortran)
    shape = [g[0] for g in meta['dimList']][::-1]
    assert len(shape) == meta['nDims']
    # now add an extra for number of recs
    nrecs = meta['nrecords']
    shape.insert(0, nrecs)
    # load and shape data
    if use_mmap:
        d = np.memmap(datafile, meta['dataprec'], 'r')
    else:
        d = np.fromfile(datafile, meta['dataprec'])
    if convert_big_endian:
        dtnew = d.dtype.newbyteorder('=')
        d = d.astype(dtnew)
    d.shape = shape
    if nrecs == 1:
        # (was meta.has_key('fldList') -- Python-2 only; `in` works on 2 and 3)
        if 'fldList' in meta:
            name = meta['fldList'][0]
        else:
            name = meta['basename']
        if force_dict:
            return {name: d[0]}
        else:
            return d[0]
    else:
        # need record names
        out = {}
        for n, name in enumerate(meta['fldList']):
            out[name] = d[n]
        return out
class MDSArrayWrapper(NDArrayMixin):
    """Thin NDArrayMixin wrapper around a raw mds record array."""
    def __init__(self, array):
        self.array = array

    @property
    def dtype(self):
        # BUG FIX: the original computed `dtype = self.array.dtype` but
        # never returned it, so the property always yielded None.
        return self.array.dtype
def _list_all_mds_files(dirname):
"""Find all the meta / data files"""
files = glob(os.path.join(dirname, '*.meta'))
# strip the suffix
return [f[:-5] for f in files]
#class MemmapArrayWrapper(NumpyIndexingAdapter):
class MemmapArrayWrapper(NDArrayMixin):
    """Lazy wrapper exposing a numpy memmap through the NDArrayMixin API."""
    def __init__(self, memmap_array):
        # Keep only a reference; bytes are pulled from disk on indexing.
        self._memmap_array = memmap_array
    @property
    def array(self):
        # We can't store the actual netcdf_variable object or its data array,
        # because otherwise scipy complains about variables or files still
        # referencing mmapped arrays when we try to close datasets without
        # having read all data in the file.
        return self._memmap_array
    @property
    def dtype(self):
        return self._memmap_array.dtype
    def __getitem__(self, key):
        # Materialize the requested slice as an in-memory ndarray.
        data = self._memmap_array.__getitem__(key)
        return np.asarray(data)
# Geometries accepted by _MDSDataStore.
_valid_geometry = ['Cartesian', 'SphericalPolar']
def open_mdsdataset(dirname, iters=None, deltaT=1,
                    prefix=None, ref_date=None, calendar=None,
                    ignore_pickup=True, geometry='Cartesian'):
    """Open MITgcm-style mds file output as an xray dataset.

    dirname: directory holding the .meta/.data pairs; iters: iteration
    numbers to load (None loads grid variables only); deltaT: model
    timestep used to build the time coordinate; prefix: restrict to these
    file prefixes; geometry: one of _valid_geometry.
    """
    store = _MDSDataStore(dirname, iters, deltaT,
                          prefix, ref_date, calendar,
                          ignore_pickup, geometry)
    return xray.Dataset.load_store(store)
class _MDSDataStore(AbstractDataStore):
    """Represents the entire directory of MITgcm mds output
    including all grid variables. Similar in some ways to
    netCDF.Dataset."""
    def __init__(self, dirname, iters=None, deltaT=1,
                 prefix=None, ref_date=None, calendar=None,
                 ignore_pickup=True, geometry='Cartesian'):
        """iters: list of iteration numbers
        deltaT: timestep
        prefix: list of file prefixes (if None use all)
        """
        assert geometry in _valid_geometry
        self.geometry = geometry
        # the directory where the files live
        self.dirname = dirname
        # storage dicts for variables and attributes
        self._variables = OrderedDict()
        self._attributes = OrderedDict()
        self._dimensions = []
        ### figure out the mapping between diagnostics names and variable properties
        # all possible diagnostics
        diag_meta = _parse_available_diagnostics(
            os.path.join(dirname, 'available_diagnostics.log'))
        ### read grid files
        for k in _grid_variables:
            # NOTE(review): dict.has_key is Python-2 only throughout this
            # class; a py3 port would replace it with `k in dict`.
            if _grid_special_mapping.has_key(k):
                fname = _grid_special_mapping[k][0]
                sl = _grid_special_mapping[k][1]
            else:
                fname = k
                sl = None
            data = None
            try:
                data = _read_mds(os.path.join(dirname, fname), force_dict=False)
            except IOError:
                # Some setups store grid files under upper-case names.
                try:
                    data = _read_mds(os.path.join(dirname, fname.upper()),
                                     force_dict=False)
                except IOError:
                    warnings.warn("Couldn't load grid variable " + k)
            if data is not None:
                # Extract the mapped slice, or drop singleton dimensions.
                data = data[sl] if sl is not None else data.squeeze()
                dims, desc, units = _grid_variables[k]
                self._variables[k] = Variable(
                    dims, MemmapArrayWrapper(data), {'description': desc, 'units': units})
                self._dimensions.append(k)
        # now get variables from our iters
        if iters is not None:
            # create iteration array
            iterdata = np.asarray(iters)
            self._variables['iter'] = Variable(('time',), iterdata,
                                               {'description': 'model timestep number'})
            # create time array
            timedata = np.asarray(iters)*deltaT
            time_attrs = {'description': 'model time'}
            if ref_date is not None:
                time_attrs['units'] = 'seconds since %s' % ref_date
            else:
                time_attrs['units'] = 'seconds'
            if calendar is not None:
                time_attrs['calendar'] = calendar
            self._variables['time'] = Variable(
                ('time',), timedata, time_attrs)
            self._dimensions.append('time')
            varnames = []
            fnames = []
            _data_vars = OrderedDict()
            # look at first iter to get variable metadata
            for f in glob(os.path.join(dirname, '*.%010d.meta' % iters[0])):
                if ignore_pickup and re.search('pickup', f):
                    pass
                else:
                    go = True
                    if prefix is not None:
                        # strip '.%010d.meta' (16 chars) to get the prefix
                        bname = os.path.basename(f[:-16])
                        matches = [bname==p for p in prefix]
                        if not any(matches):
                            go = False
                    if go:
                        meta = _parse_meta(f)
                        if meta.has_key('fldList'):
                            flds = meta['fldList']
                            [varnames.append(fl) for fl in flds]
                        else:
                            varnames.append(meta['basename'])
                        fnames.append(os.path.join(dirname,meta['basename']))
            # read data as dask arrays (should be an option)
            vardata = {}
            for k in varnames:
                vardata[k] = []
            for i in iters:
                for f in fnames:
                    try:
                        data = _read_mds(f, i, force_dict=True)
                        for k in data.keys():
                            mwrap = MemmapArrayWrapper(data[k])
                            vardata[k].append(
                                da.from_array(mwrap, mwrap.shape))
                    except IOError:
                        # couldn't find the variable, remove it from the list
                        #print 'Removing %s from list (iter %g)' % (k, i)
                        # NOTE(review): `k` here is the leftover loop variable
                        # from the previous successful read, not necessarily
                        # the variable that failed -- confirm intent upstream.
                        varnames.remove(k)
            # final loop to create Variable objects
            for k in varnames:
                try:
                    dims, desc, units = _state_variables[k]
                except KeyError:
                    # fall back to the parsed diagnostics metadata
                    dims, desc, units = diag_meta[k]
                # check for shape compatability
                varshape = vardata[k][0].shape
                varndims = len(varshape)
                if len(dims) != varndims:
                    warnings.warn("Shape of variable data is not compatible "
                                  "with expected number of dimensions. This "
                                  "can arise if the 'levels' option is used "
                                  "in data.diagnostics. Right now we have no "
                                  "way to infer the level, so the variable is "
                                  "skipped: " + k)
                else:
                    # add time to dimension
                    dims_time = ('time',) + dims
                    # wrap variable in dask array
                    vardask = da.stack([da.from_array(d, varshape) for d in vardata[k]])
                    self._variables[k] = Variable( dims_time, vardask,
                                                   {'description': desc, 'units': units})
        self._attributes = {'history': 'Some made up attribute'}
    def get_variables(self):
        # Mapping of variable name -> xray Variable.
        return self._variables
    def get_attrs(self):
        # Global dataset attributes.
        return self._attributes
    def get_dimensions(self):
        # Names of dimension variables discovered so far.
        return self._dimensions
    def close(self):
        # Nothing to release; memmaps are closed on garbage collection.
        pass
# from MITgcm netCDF grid file
# dimensions:
# Z = 30 ;
# Zp1 = 31 ;
# Zu = 30 ;
# Zl = 30 ;
# X = 25 ;
# Y = 40 ;
# Xp1 = 26 ;
# Yp1 = 41 ;
# variables:
# double Z(Z) ;
# Z:long_name = "vertical coordinate of cell center" ;
# Z:units = "meters" ;
# Z:positive = "up" ;
# double RC(Z) ;
# RC:description = "R coordinate of cell center" ;
# RC:units = "m" ;
# double Zp1(Zp1) ;
# Zp1:long_name = "vertical coordinate of cell interface" ;
# Zp1:units = "meters" ;
# Zp1:positive = "up" ;
# double RF(Zp1) ;
# RF:description = "R coordinate of cell interface" ;
# RF:units = "m" ;
# double Zu(Zu) ;
# Zu:long_name = "vertical coordinate of lower cell interface" ;
# Zu:units = "meters" ;
# Zu:positive = "up" ;
# double RU(Zu) ;
# RU:description = "R coordinate of upper interface" ;
# RU:units = "m" ;
# double Zl(Zl) ;
# Zl:long_name = "vertical coordinate of upper cell interface" ;
# Zl:units = "meters" ;
# Zl:positive = "up" ;
# double RL(Zl) ;
# RL:description = "R coordinate of lower interface" ;
# RL:units = "m" ;
# double drC(Zp1) ;
# drC:description = "r cell center separation" ;
# double drF(Z) ;
# drF:description = "r cell face separation" ;
# double X(X) ;
# X:long_name = "X-coordinate of cell center" ;
# X:units = "meters" ;
# double Y(Y) ;
# Y:long_name = "Y-Coordinate of cell center" ;
# Y:units = "meters" ;
# double XC(Y, X) ;
# XC:description = "X coordinate of cell center (T-P point)" ;
# XC:units = "degree_east" ;
# double YC(Y, X) ;
# YC:description = "Y coordinate of cell center (T-P point)" ;
# YC:units = "degree_north" ;
# double Xp1(Xp1) ;
# Xp1:long_name = "X-Coordinate of cell corner" ;
# Xp1:units = "meters" ;
# double Yp1(Yp1) ;
# Yp1:long_name = "Y-Coordinate of cell corner" ;
# Yp1:units = "meters" ;
# double XG(Yp1, Xp1) ;
# XG:description = "X coordinate of cell corner (Vorticity point)" ;
# XG:units = "degree_east" ;
# double YG(Yp1, Xp1) ;
# YG:description = "Y coordinate of cell corner (Vorticity point)" ;
# YG:units = "degree_north" ;
# double dxC(Y, Xp1) ;
# dxC:description = "x cell center separation" ;
# double dyC(Yp1, X) ;
# dyC:description = "y cell center separation" ;
# double dxF(Y, X) ;
# dxF:description = "x cell face separation" ;
# double dyF(Y, X) ;
# dyF:description = "y cell face separation" ;
# double dxG(Yp1, X) ;
# dxG:description = "x cell corner separation" ;
# double dyG(Y, Xp1) ;
# dyG:description = "y cell corner separation" ;
# double dxV(Yp1, Xp1) ;
# dxV:description = "x v-velocity separation" ;
# double dyU(Yp1, Xp1) ;
# dyU:description = "y u-velocity separation" ;
# double rA(Y, X) ;
# rA:description = "r-face area at cell center" ;
# double rAw(Y, Xp1) ;
# rAw:description = "r-face area at U point" ;
# double rAs(Yp1, X) ;
# rAs:description = "r-face area at V point" ;
# double rAz(Yp1, Xp1) ;
# rAz:description = "r-face area at cell corner" ;
# double fCori(Y, X) ;
# fCori:description = "Coriolis f at cell center" ;
# double fCoriG(Yp1, Xp1) ;
# fCoriG:description = "Coriolis f at cell corner" ;
# double R_low(Y, X) ;
# R_low:description = "base of fluid in r-units" ;
# double Ro_surf(Y, X) ;
# Ro_surf:description = "surface reference (at rest) position" ;
# double Depth(Y, X) ;
# Depth:description = "fluid thickness in r coordinates (at rest)" ;
# double HFacC(Z, Y, X) ;
# HFacC:description = "vertical fraction of open cell at cell center" ;
# double HFacW(Z, Y, Xp1) ;
# HFacW:description = "vertical fraction of open cell at West face" ;
# double HFacS(Z, Yp1, X) ;
# HFacS:description = "vertical fraction of open cell at South face" ;
|
rabernat/xgcm-examples
|
_xgcm/mdsxray.py
|
Python
|
mit
| 23,927
|
[
"NetCDF"
] |
16568499d99c520923baa813f4ade448f9b940ada39ebfdf36e55d8e0e7e82de
|
#!/usr/bin/env python
# ----------------------------------------------------------------------------
# Copyright 2015-2016 Nervana Systems Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ----------------------------------------------------------------------------
"""
alexnet model adapted for serialization testing
has subset_pct set so that there are a low number of iterations per epoch
and no partial minibatches, dropout is turned off for reproducibility on gpu
and the learning rate is scaled to handle the reduced dropout percentage.
"""
from neon.util.argparser import NeonArgparser
from neon.initializers import Constant, Gaussian
from neon.layers import Conv, Dropout, Pooling, GeneralizedCost, Affine
from neon.optimizers import GradientDescentMomentum, MultiOptimizer, Schedule
from neon.transforms import Rectlin, Softmax, CrossEntropyMulti, TopKMisclassification
from neon.models import Model
from neon.data import ImageLoader
from neon.callbacks.callbacks import Callbacks
# parse the command line arguments (generates the backend)
parser = NeonArgparser(__doc__)
parser.add_argument('--direct', action='store_true',
                    help='do not initialize layers, deserialize directly')
args = parser.parse_args()
# setup data provider
# subset_pct trims the dataset so each epoch has only a few full minibatches
# and no partial one (see module docstring) -- keeps serialization tests fast.
img_set_options = dict(repo_dir=args.data_dir,
                       inner_size=224,
                       subset_pct=0.09990891117239205)
# shuffling and random transforms are disabled for reproducibility
train = ImageLoader(set_name='train', scale_range=(256, 256), shuffle=False,
                    do_transforms=False, **img_set_options)
test = ImageLoader(set_name='validation', scale_range=(256, 384), shuffle=False,
                   do_transforms=False, **img_set_options)
# AlexNet topology; Dropout layers use keep=1.0 (dropout effectively off) so
# results are reproducible on GPU, as noted in the module docstring.
layers = [Conv((11, 11, 64), init=Gaussian(scale=0.01), bias=Constant(0),
               activation=Rectlin(), padding=3, strides=4),
          Pooling(3, strides=2),
          Conv((5, 5, 192), init=Gaussian(scale=0.01), bias=Constant(1),
               activation=Rectlin(), padding=2),
          Pooling(3, strides=2),
          Conv((3, 3, 384), init=Gaussian(scale=0.03), bias=Constant(0),
               activation=Rectlin(), padding=1),
          Conv((3, 3, 256), init=Gaussian(scale=0.03), bias=Constant(1),
               activation=Rectlin(), padding=1),
          Conv((3, 3, 256), init=Gaussian(scale=0.03), bias=Constant(1),
               activation=Rectlin(), padding=1),
          Pooling(3, strides=2),
          Affine(nout=4096, init=Gaussian(scale=0.01), bias=Constant(1), activation=Rectlin()),
          Dropout(keep=1.0),
          Affine(nout=4096, init=Gaussian(scale=0.01), bias=Constant(1), activation=Rectlin()),
          Dropout(keep=1.0),
          Affine(nout=1000, init=Gaussian(scale=0.01), bias=Constant(-7), activation=Softmax())]
if args.direct:
    # deserialize the model directly from a previously saved checkpoint
    assert args.model_file is not None
    model = Model(args.model_file)
    args.model_file = None
else:
    model = Model(layers=layers)
# drop weights LR by 1/250**(1/3) at epochs (23, 45, 66), drop bias LR by 1/10 at epoch 45
weight_sched = Schedule([22, 44, 65], (1 / 250.)**(1 / 3.))
# learning rates are divided by 10 to compensate for the disabled dropout
# (module docstring: "learning rate is scaled to handle the reduced dropout")
opt_gdm = GradientDescentMomentum(0.01 / 10, 0.9, wdecay=0.0005, schedule=weight_sched,
                                  stochastic_round=args.rounding)
opt_biases = GradientDescentMomentum(0.02 / 10, 0.9, schedule=Schedule([44], 0.1),
                                     stochastic_round=args.rounding)
# bias parameters get their own optimizer; everything else uses the default
opt = MultiOptimizer({'default': opt_gdm, 'Bias': opt_biases})
# configure callbacks
valmetric = TopKMisclassification(k=5)
callbacks = Callbacks(model, eval_set=test, metric=valmetric, **args.callback_args)
cost = GeneralizedCost(costfunc=CrossEntropyMulti())
model.fit(train, optimizer=opt, num_epochs=args.epochs, cost=cost, callbacks=callbacks)
|
Jokeren/neon
|
tests/serialization/alexnet.py
|
Python
|
apache-2.0
| 4,215
|
[
"Gaussian"
] |
d02d8cc435f7efbf44db1818a79fa2aba200fc511e6e4d8f9d243c9ed2e0a7ce
|
import logging
import operator
import os
from galaxy import util
from galaxy.util.bunch import Bunch
from galaxy.util.hash_util import new_secure_hash
from galaxy.model.item_attrs import Dictifiable
import tool_shed.repository_types.util as rt_util
from galaxy import eggs
eggs.require( 'mercurial' )
from mercurial import hg
from mercurial import ui
log = logging.getLogger( __name__ )
class APIKeys( object ):
    # Placeholder mapped class for API keys; attributes are supplied by the
    # ORM mapper at runtime.
    pass
class User( object, Dictifiable ):
    """A Tool Shed user account (email/password plus role and group links)."""
    dict_collection_visible_keys = ( 'id', 'username' )
    dict_element_visible_keys = ( 'id', 'username' )
    def __init__( self, email=None, password=None ):
        self.email = email
        self.password = password
        self.external = False
        self.deleted = False
        self.purged = False
        self.username = None
        self.new_repo_alert = False
    def all_roles( self ):
        """Return this user's direct roles plus roles inherited via groups.

        NOTE(review): 'self.roles' and 'self.groups' are not set in __init__;
        presumably relationship collections supplied by the ORM mapper.
        """
        roles = [ ura.role for ura in self.roles ]
        for group in [ uga.group for uga in self.groups ]:
            for role in [ gra.role for gra in group.roles ]:
                if role not in roles:
                    roles.append( role )
        return roles
    def check_password( self, cleartext ):
        """Check if 'cleartext' matches 'self.password' when hashed."""
        return self.password == new_secure_hash( text_type=cleartext )
    def get_disk_usage( self, nice_size=False ):
        # Disk usage is not tracked for Tool Shed users; always report 0.
        return 0
    @property
    def nice_total_disk_usage( self ):
        return 0
    def set_disk_usage( self, bytes ):
        # Intentional no-op (see get_disk_usage).
        pass
    total_disk_usage = property( get_disk_usage, set_disk_usage )
    def set_password_cleartext( self, cleartext ):
        """Set 'self.password' to the digest of 'cleartext'."""
        self.password = new_secure_hash( text_type=cleartext )
class Group( object, Dictifiable ):
    """A named collection of users."""
    dict_collection_visible_keys = ( 'id', 'name' )
    dict_element_visible_keys = ( 'id', 'name' )
    def __init__( self, name = None ):
        self.name = name
        self.deleted = False
class Role( object, Dictifiable ):
    """A security role; 'types' enumerates the recognized role categories."""
    dict_collection_visible_keys = ( 'id', 'name' )
    dict_element_visible_keys = ( 'id', 'name', 'description', 'type' )
    private_id = None
    types = Bunch( PRIVATE = 'private',
                   SYSTEM = 'system',
                   USER = 'user',
                   ADMIN = 'admin',
                   SHARING = 'sharing' )
    def __init__( self, name="", description="", type="system", deleted=False ):
        self.name = name
        self.description = description
        self.type = type
        self.deleted = deleted
    @property
    def is_repository_admin_role( self ):
        # A repository admin role must always be associated with a repository. The mapper returns an
        # empty list for those roles that have no repositories. This method will require changes if
        # new features are introduced that results in more than one role per repository.
        if self.repositories:
            return True
        return False
class UserGroupAssociation( object ):
    """Join object linking a User to a Group."""
    def __init__( self, user, group ):
        self.user = user
        self.group = group
class UserRoleAssociation( object ):
    """Join object linking a User to a Role."""
    def __init__( self, user, role ):
        self.user = user
        self.role = role
class GroupRoleAssociation( object ):
    """Join object linking a Group to a Role."""
    def __init__( self, group, role ):
        self.group = group
        self.role = role
class RepositoryRoleAssociation( object ):
    """Join object linking a Repository to a Role."""
    def __init__( self, repository, role ):
        self.repository = repository
        self.role = role
class GalaxySession( object ):
    """A single interactive (browser) session against the Tool Shed.

    Every constructor argument is copied straight onto the instance with no
    validation; fields default to None (or False for 'is_valid').
    """
    def __init__( self, id=None, user=None, remote_host=None, remote_addr=None,
                  referer=None, current_history=None, session_key=None,
                  is_valid=False, prev_session_id=None ):
        self.id = id
        self.user = user
        self.remote_host = remote_host
        self.remote_addr = remote_addr
        self.referer = referer
        self.current_history = current_history
        self.session_key = session_key
        self.is_valid = is_valid
        self.prev_session_id = prev_session_id
class Repository( object, Dictifiable ):
    """A Tool Shed repository; wraps a mercurial repository on disk."""
    dict_collection_visible_keys = ( 'id', 'name', 'type', 'description', 'user_id', 'private', 'deleted',
                                     'times_downloaded', 'deprecated' )
    dict_element_visible_keys = ( 'id', 'name', 'type', 'description', 'long_description', 'user_id', 'private',
                                  'deleted', 'times_downloaded', 'deprecated' )
    file_states = Bunch( NORMAL = 'n',
                         NEEDS_MERGING = 'm',
                         MARKED_FOR_REMOVAL = 'r',
                         MARKED_FOR_ADDITION = 'a',
                         NOT_TRACKED = '?' )
    def __init__( self, id=None, name=None, type=None, description=None, long_description=None, user_id=None, private=False,
                  deleted=None, email_alerts=None, times_downloaded=0, deprecated=False ):
        self.id = id
        self.name = name or "Unnamed repository"
        self.type = type
        self.description = description
        self.long_description = long_description
        self.user_id = user_id
        self.private = private
        self.deleted = deleted
        self.email_alerts = email_alerts
        self.times_downloaded = times_downloaded
        self.deprecated = deprecated
    @property
    def admin_role( self ):
        """Return the '<name>_<owner>_admin' role; raise if it is missing."""
        admin_role_name = '%s_%s_admin' % ( str( self.name ), str( self.user.username ) )
        for rra in self.roles:
            role = rra.role
            if str( role.name ) == admin_role_name:
                return role
        raise Exception( 'Repository %s owned by %s is not associated with a required administrative role.' % \
            ( str( self.name ), str( self.user.username ) ) )
    def allow_push( self, app ):
        # Read the 'allow_push' entry from the repository's hg web config.
        repo = hg.repository( ui.ui(), self.repo_path( app ) )
        return repo.ui.config( 'web', 'allow_push' )
    def can_change_type( self, app ):
        # Allow changing the type only if the repository has no contents, has never been installed, or has
        # never been changed from the default type.
        if self.is_new( app ):
            return True
        if self.times_downloaded == 0:
            return True
        if self.type == rt_util.UNRESTRICTED:
            return True
        return False
    def can_change_type_to( self, app, new_type_label ):
        """Return True if this repository may be converted to 'new_type_label'."""
        if self.type == new_type_label:
            return False
        if self.can_change_type( app ):
            new_type = app.repository_types_registry.get_class_by_label( new_type_label )
            if new_type.is_valid_for_type( app, self ):
                return True
        return False
    def get_changesets_for_setting_metadata( self, app ):
        # Delegate to the repository's registered type class.
        type_class = self.get_type_class( app )
        return type_class.get_changesets_for_setting_metadata( app, self )
    def get_type_class( self, app ):
        return app.repository_types_registry.get_class_by_label( self.type )
    def is_new( self, app ):
        # A tip revision number < 0 means the hg repo has no changesets yet.
        repo = hg.repository( ui.ui(), self.repo_path( app ) )
        tip_ctx = repo.changectx( repo.changelog.tip() )
        return tip_ctx.rev() < 0
    def repo_path( self, app ):
        # Filesystem location of the hg repo, resolved via the hgweb config.
        return app.hgweb_config_manager.get_entry( os.path.join( "repos", self.user.username, self.name ) )
    def revision( self, app ):
        """Return the tip as 'rev_number:changeset_hash'."""
        repo = hg.repository( ui.ui(), self.repo_path( app ) )
        tip_ctx = repo.changectx( repo.changelog.tip() )
        return "%s:%s" % ( str( tip_ctx.rev() ), str( repo.changectx( repo.changelog.tip() ) ) )
    def set_allow_push( self, app, usernames, remove_auth='' ):
        """Add 'usernames' to (or remove 'remove_auth' from) hgrc's allow_push."""
        allow_push = util.listify( self.allow_push( app ) )
        if remove_auth:
            allow_push.remove( remove_auth )
        else:
            for username in util.listify( usernames ):
                if username not in allow_push:
                    allow_push.append( username )
        allow_push = '%s\n' % ','.join( allow_push )
        repo = hg.repository( ui.ui(), path=self.repo_path( app ) )
        # Why doesn't the following work?
        #repo.ui.setconfig( 'web', 'allow_push', allow_push )
        # Rewrite the hgrc file by hand, replacing the allow_push line.
        lines = repo.opener( 'hgrc', 'rb' ).readlines()
        fp = repo.opener( 'hgrc', 'wb' )
        for line in lines:
            if line.startswith( 'allow_push' ):
                fp.write( 'allow_push = %s' % allow_push )
            else:
                fp.write( line )
        fp.close()
    def tip( self, app ):
        # String form (hash) of the tip changeset.
        repo = hg.repository( ui.ui(), self.repo_path( app ) )
        return str( repo.changectx( repo.changelog.tip() ) )
    def to_dict( self, view='collection', value_mapper=None ):
        rval = super( Repository, self ).to_dict( view=view, value_mapper=value_mapper )
        if 'user_id' in rval:
            # Expose the owner's username alongside the raw user_id.
            rval[ 'owner' ] = self.user.username
        return rval
class RepositoryMetadata( object, Dictifiable ):
    """Metadata (tools, dependencies, test status, ...) recorded for one
    changeset revision of a repository."""
    dict_collection_visible_keys = ( 'id', 'repository_id', 'changeset_revision', 'malicious', 'downloadable', 'missing_test_components',
                                     'tools_functionally_correct', 'do_not_test', 'test_install_error', 'has_repository_dependencies',
                                     'includes_datatypes', 'includes_tools', 'includes_tool_dependencies', 'includes_tools_for_display_in_tool_panel',
                                     'includes_workflows', 'time_last_tested' )
    dict_element_visible_keys = ( 'id', 'repository_id', 'changeset_revision', 'malicious', 'downloadable', 'missing_test_components',
                                  'tools_functionally_correct', 'do_not_test', 'test_install_error', 'time_last_tested', 'tool_test_results',
                                  'has_repository_dependencies', 'includes_datatypes', 'includes_tools', 'includes_tool_dependencies',
                                  'includes_tools_for_display_in_tool_panel', 'includes_workflows' )
    def __init__( self, id=None, repository_id=None, changeset_revision=None, metadata=None, tool_versions=None, malicious=False,
                  downloadable=False, missing_test_components=None, tools_functionally_correct=False, do_not_test=False,
                  test_install_error=False, time_last_tested=None, tool_test_results=None, has_repository_dependencies=False,
                  includes_datatypes=False, includes_tools=False, includes_tool_dependencies=False, includes_workflows=False ):
        self.id = id
        self.repository_id = repository_id
        self.changeset_revision = changeset_revision
        self.metadata = metadata
        self.tool_versions = tool_versions
        self.malicious = malicious
        self.downloadable = downloadable
        self.missing_test_components = missing_test_components
        self.tools_functionally_correct = tools_functionally_correct
        self.do_not_test = do_not_test
        self.test_install_error = test_install_error
        self.time_last_tested = time_last_tested
        self.tool_test_results = tool_test_results
        self.has_repository_dependencies = has_repository_dependencies
        # We don't consider the special case has_repository_dependencies_only_if_compiling_contained_td here.
        self.includes_datatypes = includes_datatypes
        self.includes_tools = includes_tools
        self.includes_tool_dependencies = includes_tool_dependencies
        self.includes_workflows = includes_workflows
    @property
    def includes_tools_for_display_in_tool_panel( self ):
        # True if any tool dict in the metadata is flagged for the tool panel;
        # a missing 'add_to_tool_panel' key defaults to True.
        if self.metadata:
            tool_dicts = self.metadata.get( 'tools', [] )
            for tool_dict in tool_dicts:
                if tool_dict.get( 'add_to_tool_panel', True ):
                    return True
        return False
class SkipToolTest( object, Dictifiable ):
    """Marker that a tool's tests should be skipped from a given changeset on."""
    dict_collection_visible_keys = ( 'id', 'repository_metadata_id', 'initial_changeset_revision' )
    dict_element_visible_keys = ( 'id', 'repository_metadata_id', 'initial_changeset_revision', 'comment' )
    def __init__( self, id=None, repository_metadata_id=None, initial_changeset_revision=None, comment=None ):
        self.id = id
        self.repository_metadata_id = repository_metadata_id
        self.initial_changeset_revision = initial_changeset_revision
        self.comment = comment
    def as_dict( self, value_mapper=None ):
        # Convenience wrapper around Dictifiable.to_dict using the element view.
        return self.to_dict( view='element', value_mapper=value_mapper )
class RepositoryReview( object, Dictifiable ):
    """A user's review of one changeset revision of a repository."""
    dict_collection_visible_keys = ( 'id', 'repository_id', 'changeset_revision', 'user_id', 'rating', 'deleted' )
    dict_element_visible_keys = ( 'id', 'repository_id', 'changeset_revision', 'user_id', 'rating', 'deleted' )
    approved_states = Bunch( NO='no', YES='yes' )
    def __init__( self, repository_id=None, changeset_revision=None, user_id=None, rating=None, deleted=False ):
        self.repository_id = repository_id
        self.changeset_revision = changeset_revision
        self.user_id = user_id
        self.rating = rating
        self.deleted = deleted
class ComponentReview( object, Dictifiable ):
    """Review of a single component within a RepositoryReview."""
    dict_collection_visible_keys = ( 'id', 'repository_review_id', 'component_id', 'private', 'approved', 'rating', 'deleted' )
    dict_element_visible_keys = ( 'id', 'repository_review_id', 'component_id', 'private', 'approved', 'rating', 'deleted' )
    # Unlike RepositoryReview, components may also be marked not-applicable.
    approved_states = Bunch( NO='no', YES='yes', NA='not_applicable' )
    def __init__( self, repository_review_id=None, component_id=None, comment=None, private=False, approved=False, rating=None, deleted=False ):
        self.repository_review_id = repository_review_id
        self.component_id = component_id
        self.comment = comment
        self.private = private
        self.approved = approved
        self.rating = rating
        self.deleted = deleted
class Component( object ):
    """A reviewable aspect of a repository: a name plus a free-text description."""
    def __init__( self, name=None, description=None ):
        self.description = description
        self.name = name
class ItemRatingAssociation( object ):
    """Base class tying a user's numeric rating and comment to an item.

    Subclasses override set_item() to record the rated object under a more
    specific attribute name.
    """
    def __init__( self, id=None, user=None, item=None, rating=0, comment='' ):
        self.id = id
        self.item = item
        self.user = user
        self.comment = comment
        self.rating = rating
    def set_item( self, item ):
        """Hook for subclasses; the base implementation records nothing."""
        pass
class RepositoryRatingAssociation( ItemRatingAssociation ):
    """Rating association specialized for repositories."""
    def set_item( self, repository ):
        # Record the rated repository under a repository-specific attribute.
        self.repository = repository
class Category( object, Dictifiable ):
    """A browsable category to which repositories may be assigned."""
    dict_collection_visible_keys = ( 'id', 'name', 'description', 'deleted' )
    dict_element_visible_keys = ( 'id', 'name', 'description', 'deleted' )
    def __init__( self, name=None, description=None, deleted=False ):
        self.name = name
        self.description = description
        self.deleted = deleted
class RepositoryCategoryAssociation( object ):
    """Join object linking a Repository to a Category."""
    def __init__( self, repository=None, category=None ):
        self.repository = repository
        self.category = category
class Tag( object ):
    """A tag name in the (possibly hierarchical) tag vocabulary."""
    def __init__( self, id=None, type=None, parent_id=None, name=None ):
        self.id = id
        self.type = type
        self.parent_id = parent_id
        self.name = name
    def __str__ ( self ):
        # Bug fix: 'type' was formatted with %i, which raises TypeError for
        # its default value of None (and any other non-numeric type); use %s.
        return "Tag(id=%s, type=%s, parent_id=%s, name=%s)" % ( self.id, self.type, self.parent_id, self.name )
class ItemTagAssociation( object ):
    """Association of a tag (with an optional value) to an item, per user."""
    def __init__( self, id=None, user=None, item_id=None, tag_id=None, user_tname=None, value=None ):
        self.id = id
        self.user = user
        self.item_id = item_id
        self.tag_id = tag_id
        self.user_tname = user_tname
        # Bug fix: the 'value' argument was previously discarded (the attribute
        # was always set to None); store what the caller passed.
        self.value = value
        self.user_value = None
class PostJobAction( object ):
    """An action to apply to a workflow step's output once its job finishes."""
    def __init__( self, action_type, workflow_step, output_name = None, action_arguments = None):
        self.workflow_step = workflow_step
        self.action_type = action_type
        self.action_arguments = action_arguments
        self.output_name = output_name
class StoredWorkflowAnnotationAssociation( object ):
    # Placeholder mapped class linking an annotation to a stored workflow;
    # attributes are supplied by the ORM mapper at runtime.
    pass
class WorkflowStepAnnotationAssociation( object ):
    # Placeholder mapped class linking an annotation to a workflow step;
    # attributes are supplied by the ORM mapper at runtime.
    pass
class Workflow( object ):
    """In-memory workflow representation: owner, name, validity flags and an
    ordered list of WorkflowStep objects."""
    def __init__( self ):
        # All scalar fields start unset; 'steps' starts as a fresh list.
        self.user = self.name = None
        self.has_cycles = self.has_errors = None
        self.steps = []
class WorkflowStep( object ):
    """A single node of a workflow (typically one tool invocation)."""
    def __init__( self ):
        # Identity and kind.
        self.id = self.type = self.name = None
        # Tool-related fields.
        self.tool_id = self.tool_inputs = self.tool_errors = None
        # Layout position and extra configuration.
        self.position = self.config = None
        # Incoming WorkflowStepConnection objects.
        self.input_connections = []
class WorkflowStepConnection( object ):
    """Directed edge from one step's named output to another step's named input."""
    def __init__( self ):
        # Source endpoint.
        self.output_step = self.output_name = None
        # Destination endpoint.
        self.input_step = self.input_name = None
## ---- Utility methods -------------------------------------------------------
def sort_by_attr( seq, attr ):
    """
    Return a list of the objects in 'seq', stably sorted by attribute 'attr'.
    Arguments:
    seq - the list or any sequence (including immutable one) of objects to sort.
    attr - the name of attribute to sort by
    """
    # sorted() with a key function performs the "Schwartzian transform"
    # internally: each object's attribute is extracted exactly once, only the
    # keys are compared (the objects themselves never are, which matters when
    # object comparison is expensive or undefined), and the sort is guaranteed
    # stable.  This replaces a hand-rolled Python-2-only implementation that
    # relied on map( None, ... ) and xrange.
    return sorted( seq, key=operator.attrgetter( attr ) )
def directory_hash_id( id ):
    """Map an object id to a list of 3-digit path segments.

    Ids 0-999 all map to [ "000" ]; for larger ids the trailing three digits
    are dropped (1000 files per leaf directory) and the remaining digits are
    zero-padded to a multiple of three and split into 3-character chunks.
    """
    id_str = str( id )
    # Shortcut -- ids 0-999 go under ../000/
    if len( id_str ) < 4:
        return [ "000" ]
    # Zero-pad on the left until the length is a multiple of three, then
    # discard the final three digits.
    zero_fill = ( ( 3 - len( id_str ) ) % 3 ) * "0"
    significant = ( zero_fill + id_str )[ :-3 ]
    # Break what remains into chunks of three characters.
    return [ significant[ pos:pos + 3 ] for pos in range( 0, len( significant ), 3 ) ]
|
mikel-egana-aranguren/SADI-Galaxy-Docker
|
galaxy-dist/lib/galaxy/webapps/tool_shed/model/__init__.py
|
Python
|
gpl-3.0
| 18,249
|
[
"Galaxy"
] |
c034bb0a3385f4d531b3cf245f6948f2ca5a8a97de2967000b06ddbb9ea49d38
|
# coding=utf-8
from nose.tools import eq_
from csxj.datasources.parser_tools.utils import extract_plaintext_urls_from_text
class TestPlainTextURLExtractor(object):
    """Tests for extract_plaintext_urls_from_text(): simple, complex,
    schemeless and shortened URLs, plus rejection of e-mail addresses."""
    def setUp(self):
        self.simple_url = 'http://www.foo.com'
        # real-world URL from the Gehol scheduling system: port number, path
        # parameters, percent-escapes and semicolons all in one
        self.complex_url = 'http://164.15.72.157:8080/Reporting/Individual;Student%20Set%20Groups;id;%23SPLUS35F0F1?&template=Ann%E9e%20d%27%E9tude&weeks=1-14&days=1-6&periods=5-33&width=0&height=0'
        self.text = 'This text was written in notepad, hence {0} , fuck you if you like clicking stuff'
        self.text_with_urls = """ Visit my website at http://www.example.com, it's awesome!
        This is shit: http://en.wikipedia.org/wiki/PC_Tools_(Central_Point_Software)
        And this is shit too: http://msdn.microsoft.com/en-us/library/aa752574(VS.85).aspx
        My website (http://www.awesomeexample.com) is awesome. How about lastexample.com?
        """
    def test_simple_url(self):
        """ extract_plaintext_urls_from_text() can extract a simple URL """
        text_with_url = self.text.format(self.simple_url)
        urls = extract_plaintext_urls_from_text(text_with_url)
        eq_(urls, [self.simple_url])
    def test_complex_url(self):
        """ extract_plaintext_urls_from_text() can extract a complex URL (parameters, port, spaces and semicolons) """
        text_with_url = self.text.format(self.complex_url)
        urls = extract_plaintext_urls_from_text(text_with_url)
        eq_(urls, [self.complex_url])
    def test_multiple_urls(self):
        """ extract_plaintext_urls_from_text() can extract several URLs from a piece of text"""
        text = 'this {0} has {1} many {2} links {3}'
        text_with_urls = text.format(self.simple_url, self.complex_url, self.complex_url, self.simple_url)
        urls = extract_plaintext_urls_from_text(text_with_urls)
        eq_(urls, [self.simple_url, self.complex_url, self.complex_url, self.simple_url])
    def test_text_with_urls(self):
        """ extract_plaintext_urls_from_text() finds every URL in multi-line prose, parens included"""
        urls = extract_plaintext_urls_from_text(self.text_with_urls)
        eq_(urls, ['http://www.example.com', 'http://en.wikipedia.org/wiki/PC_Tools_(Central_Point_Software)',
                   'http://msdn.microsoft.com/en-us/library/aa752574(VS.85).aspx', 'http://www.awesomeexample.com',
                   'lastexample.com'])
    def test_no_url(self):
        """ extract_plaintext_urls_from_text() returns an empty list if the text contains no URL"""
        text = self.text.format('not a url')
        urls = extract_plaintext_urls_from_text(text)
        eq_(urls, [])
    def test_schemeless_url(self):
        """ extract_plaintext_urls_from_text() can handle urls with no scheme (e.g. 'www.foo.com') """
        url = "www.foo.com"
        extracted_urls = extract_plaintext_urls_from_text(url)
        eq_([url], extracted_urls, msg=u"(Expected '{0}', got'{1}')".format([url], extracted_urls))
    def test_schemeless_no_www_url(self):
        """ extract_plaintext_urls_from_text() can handle urls with no scheme, no 'www' prefix (e.g. 'foo.com') """
        # NOTE: the extractor is expected to lowercase the matched host.
        urls = ["foo.net", "Foo.net"]
        for url in urls:
            extracted_urls = extract_plaintext_urls_from_text(url)
            eq_([url.lower()], extracted_urls, msg=u"Could not extract schemeless url without 'www' prefix (Expected '{0}', got'{1}')".format([url], extracted_urls))
    def test_schemeless_subdomain_url(self):
        """ extract_plaintext_urls_from_text() can handle urls with no scheme and a subdomain (e.g. 'blog.foo.net') """
        url = "blog.foo.net"
        extracted_urls = extract_plaintext_urls_from_text(url)
        eq_([url], extracted_urls, msg=u"Could not extract schemeless url with subdomain (Expected '{0}', got'{1}')".format([url], extracted_urls))
    def test_tinylinks(self):
        """extract_plaintext_urls_from_text() correctly guesses that things like “bit.ly/foo” and “is.gd/foo/” are URLs"""
        url = "bit.ly/foo"
        extracted_urls = extract_plaintext_urls_from_text(url)
        eq_([url], extracted_urls)
    def test_discard_enails(self):
        """extract_plaintext_urls_from_text() ignores email addresses"""
        urls = ["blah@foo.com", "@foo.com", "ladh.be@gmail.com"]
        for url in urls:
            extracted_urls = extract_plaintext_urls_from_text(url)
            eq_([], extracted_urls, msg=u"{0} was matched as a url: {1}".format(url, extracted_urls))
|
sevas/csxj-crawler
|
tests/datasources/parser_tools/test_url_extraction.py
|
Python
|
mit
| 4,465
|
[
"VisIt"
] |
91b721690b91a98b8d8922bc4f96b08da5fb4c67ccf4b9118d409a9da32dd15e
|
"""Header value parser implementing various email-related RFC parsing rules.
The parsing methods defined in this module implement various email related
parsing rules. Principal among them is RFC 5322, which is the follow-on
to RFC 2822 and primarily a clarification of the former. It also implements
RFC 2047 encoded word decoding.
RFC 5322 goes to considerable trouble to maintain backward compatibility with
RFC 822 in the parse phase, while cleaning up the structure on the generation
phase. This parser supports correct RFC 5322 generation by tagging white space
as folding white space only when folding is allowed in the non-obsolete rule
sets. Actually, the parser is even more generous when accepting input than RFC
5322 mandates, following the spirit of Postel's Law, which RFC 5322 encourages.
Where possible deviations from the standard are annotated on the 'defects'
attribute of tokens that deviate.
The general structure of the parser follows RFC 5322, and uses its terminology
where there is a direct correspondence. Where the implementation requires a
somewhat different structure than that used by the formal grammar, new terms
that mimic the closest existing terms are used. Thus, it really helps to have
a copy of RFC 5322 handy when studying this code.
Input to the parser is a string that has already been unfolded according to
RFC 5322 rules. According to the RFC this unfolding is the very first step, and
this parser leaves the unfolding step to a higher level message parser, which
will have already detected the line breaks that need unfolding while
determining the beginning and end of each header.
The output of the parser is a TokenList object, which is a list subclass. A
TokenList is a recursive data structure. The terminal nodes of the structure
are Terminal objects, which are subclasses of str. These do not correspond
directly to terminal objects in the formal grammar, but are instead more
practical higher level combinations of true terminals.
All TokenList and Terminal objects have a 'value' attribute, which produces the
semantically meaningful value of that part of the parse subtree. The value of
all whitespace tokens (no matter how many sub-tokens they may contain) is a
single space, as per the RFC rules. This includes 'CFWS', which is herein
included in the general class of whitespace tokens. There is one exception to
the rule that whitespace tokens are collapsed into single spaces in values: in
the value of a 'bare-quoted-string' (a quoted-string with no leading or
trailing whitespace), any whitespace that appeared between the quotation marks
is preserved in the returned value. Note that in all Terminal strings quoted
pairs are turned into their unquoted values.
All TokenList and Terminal objects also have a string value, which attempts to
be a "canonical" representation of the RFC-compliant form of the substring that
produced the parsed subtree, including minimal use of quoted pair quoting.
Whitespace runs are not collapsed.
Comment tokens also have a 'content' attribute providing the string found
between the parens (including any nested comments) with whitespace preserved.
All TokenList and Terminal objects have a 'defects' attribute which is a
possibly empty list all of the defects found while creating the token. Defects
may appear on any token in the tree, and a composite list of all defects in the
subtree is available through the 'all_defects' attribute of any node. (For
Terminal notes x.defects == x.all_defects.)
Each object in a parse tree is called a 'token', and each has a 'token_type'
attribute that gives the name from the RFC 5322 grammar that it represents.
Not all RFC 5322 nodes are produced, and there is one non-RFC 5322 node that
may be produced: 'ptext'. A 'ptext' is a string of printable ascii characters.
It is returned in place of lists of (ctext/quoted-pair) and
(qtext/quoted-pair).
XXX: provide complete list of token types.
"""
import re
import urllib # For urllib.parse.unquote
from string import hexdigits
from collections import OrderedDict
from operator import itemgetter
from email import _encoded_words as _ew
from email import errors
from email import utils
#
# Useful constants and functions
#
# Character classes used by the tokenizers below; names follow the RFC 5322
# grammar (and the MIME RFCs for the *SPECIALS variants).
WSP = set(' \t')
# Characters that can begin CFWS: whitespace or the start of a comment.
CFWS_LEADER = WSP | set('(')
SPECIALS = set(r'()<>@,:;.\"[]')
# An atom ends at a special or at whitespace.
ATOM_ENDS = SPECIALS | WSP
# dot-atom additionally allows '.' inside the atom.
DOT_ATOM_ENDS = ATOM_ENDS - set('.')
# '.', '"', and '(' do not end phrases in order to support obs-phrase
PHRASE_ENDS = SPECIALS - set('."(')
# MIME 'tspecials' (minus '.'), used when parsing MIME parameters.
TSPECIALS = (SPECIALS | set('/?=')) - set('.')
TOKEN_ENDS = TSPECIALS | WSP
# Attribute names (RFC 2231 style) additionally exclude '*', "'" and '%'.
ASPECIALS = TSPECIALS | set("*'%")
ATTRIBUTE_ENDS = ASPECIALS | WSP
# Extended attribute values permit '%' (percent-encoded octets).
EXTENDED_ATTRIBUTE_ENDS = ATTRIBUTE_ENDS - set('%')
def quote_string(value):
    """Return *value* as an RFC 5322 quoted-string.

    Backslashes and double quotes are backslash-escaped, and the whole result
    is wrapped in double quotes.
    """
    escaped = str(value).replace('\\', '\\\\').replace('"', '\\"')
    return '"{}"'.format(escaped)
#
# Accumulator for header folding
#
class _Folded:
    """Accumulator used while folding a header into physical lines of at most
    *maxlen* characters, joined with *policy.linesep*."""
    def __init__(self, maxlen, policy):
        self.maxlen = maxlen
        self.policy = policy
        # Length of the text accumulated on the current (unfinished) line.
        self.lastlen = 0
        # Leading folding whitespace held back until we know which line it
        # will land on; None when there is none pending.
        self.stickyspace = None
        self.firstline = True
        # 'done' holds completed lines (with separators appended); 'current'
        # holds the string fragments of the line being built.
        self.done = []
        self.current = []
    def newline(self):
        # Flush 'current' into 'done' and start a fresh, empty line.
        self.done.extend(self.current)
        self.done.append(self.policy.linesep)
        self.current.clear()
        self.lastlen = 0
    def finalize(self):
        # Flush any partial final line.
        if self.current:
            self.newline()
    def __str__(self):
        return ''.join(self.done)
    def append(self, stoken):
        # Append a string fragment without any length bookkeeping; the caller
        # is responsible for line management.
        self.current.append(stoken)
    def append_if_fits(self, token, stoken=None):
        """Try to place *token* (string form *stoken*), starting a new line if
        needed; return True on success, False if it cannot be made to fit."""
        if stoken is None:
            stoken = str(token)
        l = len(stoken)
        if self.stickyspace is not None:
            # There is held-back leading whitespace to place along with the
            # token.
            stickyspace_len = len(self.stickyspace)
            if self.lastlen + stickyspace_len + l <= self.maxlen:
                # Whitespace plus token fit on the current line.
                self.current.append(self.stickyspace)
                self.lastlen += stickyspace_len
                self.current.append(stoken)
                self.lastlen += l
                self.stickyspace = None
                self.firstline = False
                return True
            if token.has_fws:
                # Token has internal fold points; absorb its leading fws into
                # the stickyspace and recurse into the token's own folder.
                ws = token.pop_leading_fws()
                if ws is not None:
                    self.stickyspace += str(ws)
                    stickyspace_len += len(ws)
                token._fold(self)
                return True
            if stickyspace_len and l + 1 <= self.maxlen:
                # Token alone fits on a fresh line; trim the stickyspace down
                # so that whitespace + token will not exceed maxlen.
                margin = self.maxlen - l
                if 0 < margin < stickyspace_len:
                    trim = stickyspace_len - margin
                    self.current.append(self.stickyspace[:trim])
                    self.stickyspace = self.stickyspace[trim:]
                    stickyspace_len = trim
                self.newline()
                self.current.append(self.stickyspace)
                self.current.append(stoken)
                self.lastlen = l + stickyspace_len
                self.stickyspace = None
                self.firstline = False
                return True
            if not self.firstline:
                # Overlong token: start a new line anyway and place it there.
                self.newline()
                self.current.append(self.stickyspace)
                self.current.append(stoken)
                self.stickyspace = None
                self.firstline = False
                return True
        if self.lastlen + l <= self.maxlen:
            # No pending whitespace; token fits on the current line.
            self.current.append(stoken)
            self.lastlen += l
            return True
        if l < self.maxlen:
            # Token fits on a line by itself.
            self.newline()
            self.current.append(stoken)
            self.lastlen = l
            return True
        # Token is longer than maxlen and has no usable fold points here.
        return False
#
# TokenList and its subclasses
#
class TokenList(list):
token_type = None
def __init__(self, *args, **kw):
super().__init__(*args, **kw)
self.defects = []
def __str__(self):
return ''.join(str(x) for x in self)
def __repr__(self):
return '{}({})'.format(self.__class__.__name__,
super().__repr__())
@property
def value(self):
return ''.join(x.value for x in self if x.value)
@property
def all_defects(self):
return sum((x.all_defects for x in self), self.defects)
#
# Folding API
#
# parts():
#
# return a list of objects that constitute the "higher level syntactic
# objects" specified by the RFC as the best places to fold a header line.
# The returned objects must include leading folding white space, even if
# this means mutating the underlying parse tree of the object. Each object
# is only responsible for returning *its* parts, and should not drill down
# to any lower level except as required to meet the leading folding white
# space constraint.
#
# _fold(folded):
#
# folded: the result accumulator. This is an instance of _Folded.
# (XXX: I haven't finished factoring this out yet, the folding code
# pretty much uses this as a state object.) When the folded.current
# contains as much text as will fit, the _fold method should call
# folded.newline.
# folded.lastlen: the current length of the test stored in folded.current.
# folded.maxlen: The maximum number of characters that may appear on a
# folded line. Differs from the policy setting in that "no limit" is
# represented by +inf, which means it can be used in the trivially
# logical fashion in comparisons.
#
# Currently no subclasses implement parts, and I think this will remain
# true. A subclass only needs to implement _fold when the generic version
# isn't sufficient. _fold will need to be implemented primarily when it is
# possible for encoded words to appear in the specialized token-list, since
# there is no generic algorithm that can know where exactly the encoded
# words are allowed. A _fold implementation is responsible for filling
# lines in the same general way that the top level _fold does. It may, and
# should, call the _fold method of sub-objects in a similar fashion to that
# of the top level _fold.
#
# XXX: I'm hoping it will be possible to factor the existing code further
# to reduce redundancy and make the logic clearer.
@property
def parts(self):
klass = self.__class__
this = []
for token in self:
if token.startswith_fws():
if this:
yield this[0] if len(this)==1 else klass(this)
this.clear()
end_ws = token.pop_trailing_ws()
this.append(token)
if end_ws:
yield klass(this)
this = [end_ws]
if this:
yield this[0] if len(this)==1 else klass(this)
def startswith_fws(self):
return self[0].startswith_fws()
def pop_leading_fws(self):
if self[0].token_type == 'fws':
return self.pop(0)
return self[0].pop_leading_fws()
def pop_trailing_ws(self):
if self[-1].token_type == 'cfws':
return self.pop(-1)
return self[-1].pop_trailing_ws()
@property
def has_fws(self):
for part in self:
if part.has_fws:
return True
return False
    def has_leading_comment(self):
        """True if the first token reports a leading comment (recursive)."""
        return self[0].has_leading_comment()
@property
def comments(self):
comments = []
for token in self:
comments.extend(token.comments)
return comments
    def fold(self, *, policy):
        """Return this token list folded into lines according to *policy*."""
        # max_line_length 0/None means no limit, ie: infinitely long.
        maxlen = policy.max_line_length or float("+inf")
        folded = _Folded(maxlen, policy)
        self._fold(folded)
        folded.finalize()
        return str(folded)
    def as_encoded_word(self, charset):
        """Return this token list rendered as an RFC 2047 encoded word.

        Leading fws (if present) and a trailing fws token are kept
        outside the encoded word.  Note: this mutates the list (pops
        the leading/trailing whitespace tokens).
        """
        # This works only for things returned by 'parts', which include
        # the leading fws, if any, that should be used.
        res = []
        ws = self.pop_leading_fws()
        if ws:
            res.append(ws)
        trailer = self.pop(-1) if self[-1].token_type=='fws' else ''
        res.append(_ew.encode(str(self), charset))
        res.append(trailer)
        return ''.join(res)
def cte_encode(self, charset, policy):
res = []
for part in self:
res.append(part.cte_encode(charset, policy))
return ''.join(res)
    def _fold(self, folded):
        """Generic folder: emit self.parts into *folded* line by line.

        Parts that cannot be encoded in the target encoding are CTE
        encoded first; parts too long for a line are recursed into or,
        failing that, emitted overlong on their own line.
        """
        encoding = 'utf-8' if folded.policy.utf8 else 'ascii'
        for part in self.parts:
            tstr = str(part)
            tlen = len(tstr)
            try:
                str(part).encode(encoding)
            except UnicodeEncodeError:
                if any(isinstance(x, errors.UndecodableBytesDefect)
                       for x in part.all_defects):
                    charset = 'unknown-8bit'
                else:
                    # XXX: this should be a policy setting when utf8 is False.
                    charset = 'utf-8'
                tstr = part.cte_encode(charset, folded.policy)
                tlen = len(tstr)
            if folded.append_if_fits(part, tstr):
                continue
            # Peel off the leading whitespace if any and make it sticky, to
            # avoid infinite recursion.
            ws = part.pop_leading_fws()
            if ws is not None:
                folded.stickyspace = str(ws)
                if folded.append_if_fits(part):
                    continue
            if part.has_fws:
                part._fold(folded)
                continue
            # There are no fold points in this one; it is too long for a single
            # line and can't be split...we just have to put it on its own line.
            folded.append(tstr)
            folded.newline()
def pprint(self, indent=''):
print('\n'.join(self._pp(indent='')))
def ppstr(self, indent=''):
return '\n'.join(self._pp(indent=''))
    def _pp(self, indent=''):
        """Yield the lines of the debugging representation, recursively."""
        yield '{}{}/{}('.format(
            indent,
            self.__class__.__name__,
            self.token_type)
        for token in self:
            if not hasattr(token, '_pp'):
                # A raw (non-token) element snuck into the tree; flag it.
                yield (indent + '    !! invalid element in token '
                                        'list: {!r}'.format(token))
            else:
                yield from token._pp(indent+'    ')
        if self.defects:
            extra = ' Defects: {}'.format(self.defects)
        else:
            extra = ''
        yield '{}){}'.format(indent, extra)
class WhiteSpaceTokenList(TokenList):
    """A token list whose semantic value is a single space."""

    @property
    def value(self):
        return ' '

    @property
    def comments(self):
        collected = []
        for token in self:
            if token.token_type == 'comment':
                collected.append(token.content)
        return collected
class UnstructuredTokenList(TokenList):
    """Token list for an unstructured header value (e.g. Subject)."""

    token_type = 'unstructured'

    def _fold(self, folded):
        """Fold the value, using RFC 2047 encoded words where needed.

        last_ew tracks the index in folded.current of the most recent
        encoded word so that adjacent encoded words can be combined
        when the combined form still fits on the line.
        """
        last_ew = None
        encoding = 'utf-8' if folded.policy.utf8 else 'ascii'
        for part in self.parts:
            tstr = str(part)
            is_ew = False
            try:
                str(part).encode(encoding)
            except UnicodeEncodeError:
                if any(isinstance(x, errors.UndecodableBytesDefect)
                       for x in part.all_defects):
                    charset = 'unknown-8bit'
                else:
                    charset = 'utf-8'
                if last_ew is not None:
                    # We've already done an EW, combine this one with it
                    # if there's room.
                    chunk = get_unstructured(
                        ''.join(folded.current[last_ew:]+[tstr])).as_encoded_word(charset)
                    oldlastlen = sum(len(x) for x in folded.current[:last_ew])
                    schunk = str(chunk)
                    lchunk = len(schunk)
                    if oldlastlen + lchunk <= folded.maxlen:
                        del folded.current[last_ew:]
                        folded.append(schunk)
                        folded.lastlen = oldlastlen + lchunk
                        continue
                tstr = part.as_encoded_word(charset)
                is_ew = True
            if folded.append_if_fits(part, tstr):
                if is_ew:
                    last_ew = len(folded.current) - 1
                continue
            if is_ew or last_ew:
                # NOTE(review): last_ew is an index, so an encoded word at
                # position 0 tests falsy here; preserved as-is to avoid
                # changing folding behavior.
                # It's too big to fit on the line, but since we've
                # got encoded words we can use encoded word folding.
                part._fold_as_ew(folded)
                continue
            # Peel off the leading whitespace if any and make it sticky, to
            # avoid infinite recursion.
            ws = part.pop_leading_fws()
            if ws is not None:
                folded.stickyspace = str(ws)
                if folded.append_if_fits(part):
                    continue
            if part.has_fws:
                part._fold(folded)
                continue
            # It can't be split...we just have to put it on its own line.
            folded.append(tstr)
            folded.newline()
            last_ew = None

    def cte_encode(self, charset, policy):
        """Return the value CTE-encoded, merging each run of non-ascii
        parts (and the ascii text between them) into one encoded word."""
        res = []
        last_ew = None
        for part in self:
            spart = str(part)
            try:
                spart.encode('us-ascii')
                res.append(spart)
            except UnicodeEncodeError:
                if last_ew is None:
                    res.append(part.cte_encode(charset, policy))
                    last_ew = len(res)
                else:
                    tl = get_unstructured(''.join(res[last_ew:] + [spart]))
                    # Bug fix: replace the tail that was merged into the
                    # combined encoded word rather than appending a second
                    # copy after it (res.append duplicated the plain-text
                    # tail in the output; compare Phrase.cte_encode).
                    res[last_ew:] = [tl.as_encoded_word(charset)]
        return ''.join(res)
class Phrase(TokenList):
    """Token list for an RFC 5322 phrase (e.g. part of a display name)."""
    token_type = 'phrase'
    def _fold(self, folded):
        # As with Unstructured, we can have pure ASCII with or without
        # surrogateescape encoded bytes, or we could have unicode.  But this
        # case is more complicated, since we have to deal with the various
        # sub-token types and how they can be composed in the face of
        # unicode-that-needs-CTE-encoding, and the fact that a token with a
        # comment becomes a barrier across which we can't compose encoded
        # words.
        last_ew = None
        encoding = 'utf-8' if folded.policy.utf8 else 'ascii'
        for part in self.parts:
            tstr = str(part)
            tlen = len(tstr)
            has_ew = False
            try:
                str(part).encode(encoding)
            except UnicodeEncodeError:
                if any(isinstance(x, errors.UndecodableBytesDefect)
                       for x in part.all_defects):
                    charset = 'unknown-8bit'
                else:
                    charset = 'utf-8'
                if last_ew is not None and not part.has_leading_comment():
                    # We've already done an EW, let's see if we can combine
                    # this one with it.  The last_ew logic ensures that all we
                    # have at this point is atoms, no comments or quoted
                    # strings.  So we can treat the text between the last
                    # encoded word and the content of this token as
                    # unstructured text, and things will work correctly.  But
                    # we have to strip off any trailing comment on this token
                    # first, and if it is a quoted string we have to pull out
                    # the content (we're encoding it, so it no longer needs to
                    # be quoted).
                    if part[-1].token_type == 'cfws' and part.comments:
                        remainder = part.pop(-1)
                    else:
                        remainder = ''
                    for i, token in enumerate(part):
                        if token.token_type == 'bare-quoted-string':
                            part[i] = UnstructuredTokenList(token[:])
                    chunk = get_unstructured(
                        ''.join(folded.current[last_ew:]+[tstr])).as_encoded_word(charset)
                    schunk = str(chunk)
                    lchunk = len(schunk)
                    # NOTE(review): last_ew is a list index, not a length;
                    # comparing index+length against maxlen looks suspect
                    # (UnstructuredTokenList._fold sums the lengths of the
                    # preceding chunks instead).  Verify before changing.
                    if last_ew + lchunk <= folded.maxlen:
                        del folded.current[last_ew:]
                        folded.append(schunk)
                        folded.lastlen = sum(len(x) for x in folded.current)
                        continue
                tstr = part.as_encoded_word(charset)
                tlen = len(tstr)
                has_ew = True
            if folded.append_if_fits(part, tstr):
                if has_ew and not part.comments:
                    last_ew = len(folded.current) - 1
                elif part.comments or part.token_type == 'quoted-string':
                    # If a comment is involved we can't combine EWs.  And if a
                    # quoted string is involved, it's not worth the effort to
                    # try to combine them.
                    last_ew = None
                continue
            part._fold(folded)
    def cte_encode(self, charset, policy):
        """CTE-encode the phrase, combining encoded words across atoms
        but never across comments."""
        res = []
        last_ew = None
        is_ew = False
        for part in self:
            spart = str(part)
            try:
                spart.encode('us-ascii')
                res.append(spart)
            except UnicodeEncodeError:
                is_ew = True
                if last_ew is None:
                    if not part.comments:
                        last_ew = len(res)
                    res.append(part.cte_encode(charset, policy))
                elif not part.has_leading_comment():
                    if part[-1].token_type == 'cfws' and part.comments:
                        remainder = part.pop(-1)
                    else:
                        remainder = ''
                    for i, token in enumerate(part):
                        if token.token_type == 'bare-quoted-string':
                            part[i] = UnstructuredTokenList(token[:])
                    tl = get_unstructured(''.join(res[last_ew:] + [spart]))
                    res[last_ew:] = [tl.as_encoded_word(charset)]
            if part.comments or (not is_ew and part.token_type == 'quoted-string'):
                last_ew = None
        return ''.join(res)
class Word(TokenList):
    """Token list for a parsed 'word' construct."""
    token_type = 'word'
class CFWSList(WhiteSpaceTokenList):
    """Token list for a run of comments and/or folding whitespace."""
    token_type = 'cfws'
    def has_leading_comment(self):
        # A cfws token "leads with a comment" if it contains any comment.
        return bool(self.comments)
class Atom(TokenList):
    """Token list for a parsed atom ([CFWS] atext [CFWS])."""
    token_type = 'atom'
class Token(TokenList):
    """Token list for an RFC 2045 'token' (used in MIME parameters)."""
    token_type = 'token'
class EncodedWord(TokenList):
    """Token list for an RFC 2047 encoded word.

    cte holds the original encoded text when parsed from input;
    charset and lang are filled in by get_encoded_word.
    """
    token_type = 'encoded-word'
    cte = None
    charset = None
    lang = None
    @property
    def encoded(self):
        if self.cte is not None:
            return self.cte
        # Bug fix: the freshly computed encoding was discarded, so this
        # property evaluated to None whenever cte was unset.
        return _ew.encode(str(self), self.charset)
class QuotedString(TokenList):
    """A quoted-string together with any surrounding CFWS."""

    token_type = 'quoted-string'

    @property
    def content(self):
        # Unquoted text of the embedded bare-quoted-string, if any.
        return next((tok.value for tok in self
                     if tok.token_type == 'bare-quoted-string'), None)

    @property
    def quoted_value(self):
        # Rebuild the string, keeping the quote marks on the quoted part.
        pieces = [str(tok) if tok.token_type == 'bare-quoted-string'
                  else tok.value
                  for tok in self]
        return ''.join(pieces)

    @property
    def stripped_value(self):
        return next((tok.value for tok in self
                     if tok.token_type == 'bare-quoted-string'), None)
class BareQuotedString(QuotedString):
    """The part of a quoted-string between the quote marks."""

    token_type = 'bare-quoted-string'

    def __str__(self):
        return quote_string(''.join(map(str, self)))

    @property
    def value(self):
        return ''.join(map(str, self))
class Comment(WhiteSpaceTokenList):
    """An RFC 5322 comment; renders as '(...)' with specials re-escaped."""
    token_type = 'comment'
    def __str__(self):
        return ''.join(sum([
                ["("],
                [self.quote(x) for x in self],
                [")"],
                ], []))
    def quote(self, value):
        """Return *value* with backslash and parens backslash-escaped.

        Nested comments quote their own content and are returned as-is.
        """
        if value.token_type == 'comment':
            return str(value)
        # Fix: the replacements used '\(' and '\)' string literals, which
        # are invalid escape sequences (DeprecationWarning on modern
        # Python).  Raw strings produce the identical two characters.
        return str(value).replace('\\', '\\\\').replace(
            '(', r'\(').replace(
            ')', r'\)')
    @property
    def content(self):
        return ''.join(str(x) for x in self)
    @property
    def comments(self):
        return [self.content]
class AddressList(TokenList):
    """Token list for a parsed address-list."""

    token_type = 'address-list'

    @property
    def addresses(self):
        return [tok for tok in self if tok.token_type == 'address']

    @property
    def mailboxes(self):
        return [mbox for tok in self if tok.token_type == 'address'
                     for mbox in tok.mailboxes]

    @property
    def all_mailboxes(self):
        return [mbox for tok in self if tok.token_type == 'address'
                     for mbox in tok.all_mailboxes]
class Address(TokenList):
    """Token list for a single address (a mailbox or a group)."""
    token_type = 'address'
    @property
    def display_name(self):
        # Only groups carry a display name at this level.
        if self[0].token_type == 'group':
            return self[0].display_name
    @property
    def mailboxes(self):
        # Valid mailboxes only; invalid ones are dropped.
        if self[0].token_type == 'mailbox':
            return [self[0]]
        elif self[0].token_type == 'invalid-mailbox':
            return []
        return self[0].mailboxes
    @property
    def all_mailboxes(self):
        # Valid and invalid mailboxes alike.
        if self[0].token_type == 'mailbox':
            return [self[0]]
        elif self[0].token_type == 'invalid-mailbox':
            return [self[0]]
        return self[0].all_mailboxes
class MailboxList(TokenList):
    """Token list for a parsed mailbox-list."""

    token_type = 'mailbox-list'

    @property
    def mailboxes(self):
        result = []
        for tok in self:
            if tok.token_type == 'mailbox':
                result.append(tok)
        return result

    @property
    def all_mailboxes(self):
        result = []
        for tok in self:
            if tok.token_type in ('mailbox', 'invalid-mailbox'):
                result.append(tok)
        return result
class GroupList(TokenList):
    """Token list for the contents of a group (after the colon)."""

    token_type = 'group-list'

    @property
    def mailboxes(self):
        if self and self[0].token_type == 'mailbox-list':
            return self[0].mailboxes
        return []

    @property
    def all_mailboxes(self):
        if self and self[0].token_type == 'mailbox-list':
            return self[0].all_mailboxes
        return []
class Group(TokenList):
    """Token list for a group address."""
    token_type = "group"
    @property
    def mailboxes(self):
        # self[2] is the group-list when one was parsed.
        if self[2].token_type != 'group-list':
            return []
        return self[2].mailboxes
    @property
    def all_mailboxes(self):
        if self[2].token_type != 'group-list':
            return []
        return self[2].all_mailboxes
    @property
    def display_name(self):
        return self[0].display_name
class NameAddr(TokenList):
    """Token list for name-addr: [display-name] angle-addr."""
    token_type = 'name-addr'
    @property
    def display_name(self):
        # With a single child there is no display name, just the angle-addr.
        if len(self) == 1:
            return None
        return self[0].display_name
    @property
    def local_part(self):
        return self[-1].local_part
    @property
    def domain(self):
        return self[-1].domain
    @property
    def route(self):
        return self[-1].route
    @property
    def addr_spec(self):
        return self[-1].addr_spec
class AngleAddr(TokenList):
    """Token list for angle-addr: [CFWS] "<" [addr-spec] ">" [CFWS]."""
    token_type = 'angle-addr'
    @property
    def local_part(self):
        for x in self:
            if x.token_type == 'addr-spec':
                return x.local_part
    @property
    def domain(self):
        for x in self:
            if x.token_type == 'addr-spec':
                return x.domain
    @property
    def route(self):
        for x in self:
            if x.token_type == 'obs-route':
                return x.domains
    @property
    def addr_spec(self):
        for x in self:
            if x.token_type == 'addr-spec':
                return x.addr_spec
        else:
            # for/else: no addr-spec token found -> the null address.
            return '<>'
class ObsRoute(TokenList):
    """Token list for an obsolete route (list of @domain elements)."""
    token_type = 'obs-route'
    @property
    def domains(self):
        return [x.domain for x in self if x.token_type == 'domain']
class Mailbox(TokenList):
    """Token list for a mailbox (name-addr or bare addr-spec)."""
    token_type = 'mailbox'
    @property
    def display_name(self):
        # A bare addr-spec has no display name.
        if self[0].token_type == 'name-addr':
            return self[0].display_name
    @property
    def local_part(self):
        return self[0].local_part
    @property
    def domain(self):
        return self[0].domain
    @property
    def route(self):
        if self[0].token_type == 'name-addr':
            return self[0].route
    @property
    def addr_spec(self):
        return self[0].addr_spec
class InvalidMailbox(TokenList):
    """Token list for a mailbox that failed to parse.

    All address accessors return None.
    """
    token_type = 'invalid-mailbox'
    @property
    def display_name(self):
        return None
    local_part = domain = route = addr_spec = display_name
class Domain(TokenList):
    """Token list for a domain."""
    token_type = 'domain'
    @property
    def domain(self):
        # Strip all whitespace from the rendered value.
        return ''.join(super().value.split())
class DotAtom(TokenList):
    """Token list for a dot-atom ([CFWS] dot-atom-text [CFWS])."""
    token_type = 'dot-atom'
class DotAtomText(TokenList):
    """Token list for dot-atom-text (atext runs joined by dots)."""
    token_type = 'dot-atom-text'
class AddrSpec(TokenList):
    """Token list for addr-spec: local-part ["@" domain]."""
    token_type = 'addr-spec'
    @property
    def local_part(self):
        return self[0].local_part
    @property
    def domain(self):
        # Fewer than three children means there was no '@ domain' part.
        if len(self) < 3:
            return None
        return self[-1].domain
    @property
    def value(self):
        if len(self) < 3:
            return self[0].value
        return self[0].value.rstrip()+self[1].value+self[2].value.lstrip()
    @property
    def addr_spec(self):
        # Re-quote the local part if it contains dot-atom-ending specials.
        nameset = set(self.local_part)
        if len(nameset) > len(nameset-DOT_ATOM_ENDS):
            lp = quote_string(self.local_part)
        else:
            lp = self.local_part
        if self.domain is not None:
            return lp + '@' + self.domain
        return lp
class ObsLocalPart(TokenList):
    """Token list for an obsolete-syntax local part."""
    token_type = 'obs-local-part'
class DisplayName(Phrase):
    """Phrase subclass for a display-name."""
    token_type = 'display-name'
    @property
    def display_name(self):
        # Render the name with leading/trailing cfws removed.  Works on a
        # copy so the parse tree itself is not mutated.
        res = TokenList(self)
        if res[0].token_type == 'cfws':
            res.pop(0)
        else:
            if res[0][0].token_type == 'cfws':
                res[0] = TokenList(res[0][1:])
        if res[-1].token_type == 'cfws':
            res.pop()
        else:
            if res[-1][-1].token_type == 'cfws':
                res[-1] = TokenList(res[-1][:-1])
        return res.value
    @property
    def value(self):
        # Quote the whole name if parsing recorded defects or any part was
        # a quoted-string; single spaces stand in for the stripped cfws.
        # NOTE(review): the self[0]/self[-1] indexing assumes a non-empty
        # display name -- an empty one would raise IndexError; verify
        # against callers before relying on empty input.
        quote = False
        if self.defects:
            quote = True
        else:
            for x in self:
                if x.token_type == 'quoted-string':
                    quote = True
        if quote:
            pre = post = ''
            if self[0].token_type=='cfws' or self[0][0].token_type=='cfws':
                pre = ' '
            if self[-1].token_type=='cfws' or self[-1][-1].token_type=='cfws':
                post = ' '
            return pre+quote_string(self.display_name)+post
        else:
            return super().value
class LocalPart(TokenList):
    """Token list for the local part of an addr-spec."""
    token_type = 'local-part'
    @property
    def value(self):
        if self[0].token_type == "quoted-string":
            return self[0].quoted_value
        else:
            return self[0].value
    @property
    def local_part(self):
        # Strip whitespace from front, back, and around dots.  Sentinel
        # DOTs are added at both ends so every real token has a neighbor,
        # then removed by the final slice.
        res = [DOT]
        last = DOT
        last_is_tl = False
        for tok in self[0] + [DOT]:
            if tok.token_type == 'cfws':
                continue
            if (last_is_tl and tok.token_type == 'dot' and
                    last[-1].token_type == 'cfws'):
                res[-1] = TokenList(last[:-1])
            is_tl = isinstance(tok, TokenList)
            if (is_tl and last.token_type == 'dot' and
                    tok[0].token_type == 'cfws'):
                res.append(TokenList(tok[1:]))
            else:
                res.append(tok)
            last = res[-1]
            last_is_tl = is_tl
        res = TokenList(res[1:-1])
        return res.value
class DomainLiteral(TokenList):
    """Token list for a domain-literal ('[...]' form)."""
    token_type = 'domain-literal'
    @property
    def domain(self):
        # Strip all whitespace from the rendered value.
        return ''.join(super().value.split())
    @property
    def ip(self):
        # The ptext child holds the literal's content (e.g. the address).
        for x in self:
            if x.token_type == 'ptext':
                return x.value
class MIMEVersion(TokenList):
    """Token list for a MIME-Version header value."""
    token_type = 'mime-version'
    # major/minor default to None (presumably filled in by the parser,
    # which is not visible in this chunk -- verify).
    major = None
    minor = None
class Parameter(TokenList):
    """Token list for one MIME parameter (attribute [*section] = value)."""
    token_type = 'parameter'
    sectioned = False
    extended = False
    charset = 'us-ascii'
    @property
    def section_number(self):
        # Because the first token, the attribute (name) eats CFWS, the second
        # token is always the section if there is one.
        return self[1].number if self.sectioned else 0
    @property
    def param_value(self):
        # This is part of the "handle quoted extended parameters" hack:
        # the value token may be nested inside a quoted-string /
        # bare-quoted-string pair, so we drill down to find it.
        for token in self:
            if token.token_type == 'value':
                return token.stripped_value
            if token.token_type == 'quoted-string':
                for token in token:
                    if token.token_type == 'bare-quoted-string':
                        for token in token:
                            if token.token_type == 'value':
                                return token.stripped_value
        return ''
class InvalidParameter(Parameter):
    """Parameter subclass marking a parameter that failed to parse."""
    token_type = 'invalid-parameter'
class Attribute(TokenList):
    """Token list for a parameter attribute (name)."""

    token_type = 'attribute'

    @property
    def stripped_value(self):
        # First *attrtext token's value, ignoring any surrounding cfws.
        return next((tok.value for tok in self
                     if tok.token_type.endswith('attrtext')), None)
class Section(TokenList):
    """Token list for an RFC 2231 section marker (*N)."""
    token_type = 'section'
    # number is None when the section number could not be parsed.
    number = None
class Value(TokenList):
    """Token list for a parameter value."""
    token_type = 'value'
    @property
    def stripped_value(self):
        # Skip leading cfws, then delegate to the real value token if it
        # has its own stripped form.
        token = self[0]
        if token.token_type == 'cfws':
            token = self[1]
        if token.token_type.endswith(
                ('quoted-string', 'attribute', 'extended-attribute')):
            return token.stripped_value
        return self.value
class MimeParameters(TokenList):
    """Token list for a MIME parameter list, with RFC 2231 reassembly."""
    token_type = 'mime-parameters'
    @property
    def params(self):
        """Yield (name, value) pairs, reassembling RFC 2231 sections."""
        # The RFC specifically states that the ordering of parameters is not
        # guaranteed and may be reordered by the transport layer.  So we have
        # to assume the RFC 2231 pieces can come in any order.  However, we
        # output them in the order that we first see a given name, which gives
        # us a stable __str__.
        params = OrderedDict()
        for token in self:
            if not token.token_type.endswith('parameter'):
                continue
            if token[0].token_type != 'attribute':
                continue
            name = token[0].value.strip()
            if name not in params:
                params[name] = []
            params[name].append((token.section_number, token))
        for name, parts in params.items():
            parts = sorted(parts, key=itemgetter(0))
            first_param = parts[0][1]
            charset = first_param.charset
            # Our arbitrary error recovery is to ignore duplicate parameters,
            # to use appearance order if there are duplicate rfc 2231 parts,
            # and to ignore gaps.  This mimics the error recovery of get_param.
            if not first_param.extended and len(parts) > 1:
                if parts[1][0] == 0:
                    parts[1][1].defects.append(errors.InvalidHeaderDefect(
                        'duplicate parameter name; duplicate(s) ignored'))
                    parts = parts[:1]
                # Else assume the *0* was missing...note that this is different
                # from get_param, but we registered a defect for this earlier.
            value_parts = []
            i = 0
            for section_number, param in parts:
                if section_number != i:
                    # We could get fancier here and look for a complete
                    # duplicate extended parameter and ignore the second one
                    # seen.  But we're not doing that.  The old code didn't.
                    if not param.extended:
                        param.defects.append(errors.InvalidHeaderDefect(
                            'duplicate parameter name; duplicate ignored'))
                        continue
                    else:
                        param.defects.append(errors.InvalidHeaderDefect(
                            "inconsistent RFC2231 parameter numbering"))
                i += 1
                value = param.param_value
                if param.extended:
                    try:
                        value = urllib.parse.unquote_to_bytes(value)
                    except UnicodeEncodeError:
                        # source had surrogate escaped bytes.  What we do now
                        # is a bit of an open question.  I'm not sure this is
                        # the best choice, but it is what the old algorithm did
                        value = urllib.parse.unquote(value, encoding='latin-1')
                    else:
                        try:
                            value = value.decode(charset, 'surrogateescape')
                        except LookupError:
                            # XXX: there should really be a custom defect for
                            # unknown character set to make it easy to find,
                            # because otherwise unknown charset is a silent
                            # failure.
                            value = value.decode('us-ascii', 'surrogateescape')
                        if utils._has_surrogates(value):
                            param.defects.append(errors.UndecodableBytesDefect())
                value_parts.append(value)
            value = ''.join(value_parts)
            yield name, value
    def __str__(self):
        params = []
        for name, value in self.params:
            if value:
                params.append('{}={}'.format(name, quote_string(value)))
            else:
                params.append(name)
        params = '; '.join(params)
        return ' ' + params if params else ''
class ParameterizedHeaderValue(TokenList):
    """Base for header values that carry a trailing mime-parameters list."""
    @property
    def params(self):
        for token in reversed(self):
            if token.token_type == 'mime-parameters':
                return token.params
        return {}
    @property
    def parts(self):
        if self and self[-1].token_type == 'mime-parameters':
            # We don't want to start a new line if all of the params don't fit
            # after the value, so unwrap the parameter list.
            return TokenList(self[:-1] + self[-1]).parts
        return TokenList(self).parts
class ContentType(ParameterizedHeaderValue):
    """Token list for a Content-Type value; defaults to text/plain."""
    token_type = 'content-type'
    maintype = 'text'
    subtype = 'plain'
class ContentDisposition(ParameterizedHeaderValue):
    """Token list for a Content-Disposition value."""
    token_type = 'content-disposition'
    content_disposition = None
class ContentTransferEncoding(TokenList):
    """Token list for a Content-Transfer-Encoding value; default 7bit."""
    token_type = 'content-transfer-encoding'
    cte = '7bit'
class HeaderLabel(TokenList):
    """Token list for a header's name-plus-colon label."""
    token_type = 'header-label'
class Header(TokenList):
    """Token list for a complete header: label, optional cfws, value."""
    token_type = 'header'
    def _fold(self, folded):
        # NOTE: folding consumes this token list (the children are popped).
        folded.append(str(self.pop(0)))
        folded.lastlen = len(folded.current[0])
        # The first line of the header is different from all others: we don't
        # want to start a new object on a new line if it has any fold points in
        # it that would allow part of it to be on the first header line.
        # Further, if the first fold point would fit on the new line, we want
        # to do that, but if it doesn't we want to put it on the first line.
        # Folded supports this via the stickyspace attribute.  If this
        # attribute is not None, it does the special handling.
        folded.stickyspace = str(self.pop(0)) if self[0].token_type == 'cfws' else ''
        rest = self.pop(0)
        if self:
            raise ValueError("Malformed Header token list")
        rest._fold(folded)
#
# Terminal classes and instances
#
class Terminal(str):
    """Base class for leaf tokens: a str with token metadata attached."""
    def __new__(cls, value, token_type):
        self = super().__new__(cls, value)
        self.token_type = token_type
        self.defects = []
        return self
    def __repr__(self):
        return "{}({})".format(self.__class__.__name__, super().__repr__())
    @property
    def all_defects(self):
        return list(self.defects)
    def _pp(self, indent=''):
        """Return this terminal's debugging representation as a one-line list."""
        return ["{}{}/{}({}){}".format(
            indent,
            self.__class__.__name__,
            self.token_type,
            super().__repr__(),
            '' if not self.defects else ' {}'.format(self.defects),
            )]
    def cte_encode(self, charset, policy):
        """Return self unchanged if pure ascii, else as an encoded word."""
        value = str(self)
        try:
            value.encode('us-ascii')
            return value
        except UnicodeEncodeError:
            return _ew.encode(value, charset)
    def pop_trailing_ws(self):
        # This terminates the recursion.
        return None
    def pop_leading_fws(self):
        # This terminates the recursion.
        return None
    @property
    def comments(self):
        return []
    def has_leading_comment(self):
        return False
    def __getnewargs__(self):
        # Support copy/pickle of this str subclass.
        return(str(self), self.token_type)
class WhiteSpaceTerminal(Terminal):
    """Terminal for runs of whitespace; semantic value is one space."""
    @property
    def value(self):
        return ' '
    def startswith_fws(self):
        return True
    # Whitespace is by definition foldable.
    has_fws = True
class ValueTerminal(Terminal):
    """Terminal for a run of non-whitespace text."""
    @property
    def value(self):
        return self
    def startswith_fws(self):
        return False
    has_fws = False
    def as_encoded_word(self, charset):
        return _ew.encode(str(self), charset)
class EWWhiteSpaceTerminal(WhiteSpaceTerminal):
    """Whitespace between two encoded words.

    Such whitespace is invisible in the decoded value (its value and
    str() are empty) but is preserved in the encoded form.
    """
    @property
    def value(self):
        return ''
    @property
    def encoded(self):
        return self[:]
    def __str__(self):
        return ''
    has_fws = True
# XXX these need to become classes and used as instances so
# that a program can't change them in a parse tree and screw
# up other parse trees.  Maybe should have tests for that, too.
# Shared singleton terminals used throughout the parser.
DOT = ValueTerminal('.', 'dot')
ListSeparator = ValueTerminal(',', 'list-separator')
RouteComponentMarker = ValueTerminal('@', 'route-component-marker')
#
# Parser
#
# Parse strings according to RFC822/2047/2822/5322 rules.
#
# This is a stateless parser. Each get_XXX function accepts a string and
# returns either a Terminal or a TokenList representing the RFC object named
# by the method and a string containing the remaining unparsed characters
# from the input. Thus a parser method consumes the next syntactic construct
# of a given type and returns a token representing the construct plus the
# unparsed remainder of the input string.
#
# For example, if the first element of a structured header is a 'phrase',
# then:
#
# phrase, value = get_phrase(value)
#
# returns the complete phrase from the start of the string value, plus any
# characters left in the string after the phrase is removed.
# Compiled helper matchers.  The "end set" matchers build a negated
# character class, escaping '\' and ']' so the class stays well formed.
# Fix: the ']' escape used the string literal '\]' (an invalid escape
# sequence, a DeprecationWarning on modern Python); the raw strings
# below produce the identical two characters.
_wsp_splitter = re.compile(r'([{}]+)'.format(''.join(WSP))).split
_non_atom_end_matcher = re.compile(r"[^{}]+".format(
    ''.join(ATOM_ENDS).replace('\\','\\\\').replace(']',r'\]'))).match
_non_printable_finder = re.compile(r"[\x00-\x20\x7F]").findall
_non_token_end_matcher = re.compile(r"[^{}]+".format(
    ''.join(TOKEN_ENDS).replace('\\','\\\\').replace(']',r'\]'))).match
_non_attribute_end_matcher = re.compile(r"[^{}]+".format(
    ''.join(ATTRIBUTE_ENDS).replace('\\','\\\\').replace(']',r'\]'))).match
_non_extended_attribute_end_matcher = re.compile(r"[^{}]+".format(
    ''.join(EXTENDED_ATTRIBUTE_ENDS).replace(
        '\\','\\\\').replace(']',r'\]'))).match
def _validate_xtext(xtext):
    """If input token contains ASCII non-printables, register a defect.

    Mutates xtext.defects in place; surrogate (undecodable-byte)
    content also registers a defect.
    """
    non_printables = _non_printable_finder(xtext)
    if non_printables:
        xtext.defects.append(errors.NonPrintableDefect(non_printables))
    if utils._has_surrogates(xtext):
        xtext.defects.append(errors.UndecodableBytesDefect(
            "Non-ASCII characters found in header token"))
def _get_ptext_to_endchars(value, endchars):
    """Scan printables/quoted-pairs until endchars and return unquoted ptext.

    This function turns a run of qcontent, ccontent-without-comments, or
    dtext-with-quoted-printables into a single string by unquoting any
    quoted printables.  It returns the string, the remaining value, and
    a flag that is True iff there were any quoted printables decoded.
    """
    fragment, *remainder = _wsp_splitter(value, 1)
    vchars = []
    escape = False
    had_qp = False
    # NOTE(review): if fragment is empty the loop never binds pos and the
    # else clause below would raise NameError; callers appear to pass
    # non-empty values -- verify before relying on empty input.
    for pos in range(len(fragment)):
        if fragment[pos] == '\\':
            if escape:
                escape = False
                had_qp = True
            else:
                escape = True
                continue
        if escape:
            escape = False
        elif fragment[pos] in endchars:
            break
        vchars.append(fragment[pos])
    else:
        # Consumed the whole fragment without hitting an endchar.
        pos = pos + 1
    return ''.join(vchars), ''.join([fragment[pos:]] + remainder), had_qp
def get_fws(value):
    """FWS = 1*WSP

    Consume leading whitespace from *value* and return it as an 'fws'
    WhiteSpaceTerminal together with the remainder.  (This isn't the RFC
    definition: unfolding has already happened upstream, so no CRLF
    handling is needed here.)
    """
    remainder = value.lstrip()
    ws_len = len(value) - len(remainder)
    fws = WhiteSpaceTerminal(value[:ws_len], 'fws')
    return fws, remainder
def get_encoded_word(value):
    """ encoded-word = "=?" charset "?" encoding "?" encoded-text "?="

    Returns an EncodedWord token (containing the decoded text split into
    fws/vtext children) plus the unparsed remainder of *value*.  Raises
    HeaderParseError if value does not hold a decodable encoded word.
    """
    ew = EncodedWord()
    if not value.startswith('=?'):
        raise errors.HeaderParseError(
            "expected encoded word but found {}".format(value))
    tok, *remainder = value[2:].split('?=', 1)
    if tok == value[2:]:
        raise errors.HeaderParseError(
            "expected encoded word but found {}".format(value))
    remstr = ''.join(remainder)
    if len(remstr) > 1 and remstr[0] in hexdigits and remstr[1] in hexdigits:
        # The ? after the CTE was followed by an encoded word escape (=XX).
        rest, *remainder = remstr.split('?=', 1)
        tok = tok + '?=' + rest
    if len(tok.split()) > 1:
        ew.defects.append(errors.InvalidHeaderDefect(
            "whitespace inside encoded word"))
    ew.cte = value
    value = ''.join(remainder)
    try:
        text, charset, lang, defects = _ew.decode('=?' + tok + '?=')
    except ValueError:
        raise errors.HeaderParseError(
            "encoded word format invalid: '{}'".format(ew.cte))
    ew.charset = charset
    ew.lang = lang
    ew.defects.extend(defects)
    # Split the decoded text into alternating fws and vtext children.
    while text:
        if text[0] in WSP:
            token, text = get_fws(text)
            ew.append(token)
            continue
        chars, *remainder = _wsp_splitter(text, 1)
        vtext = ValueTerminal(chars, 'vtext')
        _validate_xtext(vtext)
        ew.append(vtext)
        text = ''.join(remainder)
    return ew, value
def get_unstructured(value):
    """unstructured = (*([FWS] vchar) *WSP) / obs-unstruct
       obs-unstruct = *((*LF *CR *(obs-utext) *LF *CR)) / FWS)
       obs-utext = %d0 / obs-NO-WS-CTL / LF / CR

    obs-NO-WS-CTL is control characters except WSP/CR/LF.

    So, basically, we have printable runs, plus control characters or nulls in
    the obsolete syntax, separated by whitespace.  Since RFC 2047 uses the
    obsolete syntax in its specification, but requires whitespace on either
    side of the encoded words, I can see no reason to need to separate the
    non-printable-non-whitespace from the printable runs if they occur, so we
    parse this into xtext tokens separated by WSP tokens.

    Because an 'unstructured' value must by definition constitute the entire
    value, this 'get' routine does not return a remaining value, only the
    parsed TokenList.
    """
    # XXX: but what about bare CR and LF?  They might signal the start or
    # end of an encoded word.  YAGNI for now, since our current parsers
    # will never send us strings with bare CR or LF.
    unstructured = UnstructuredTokenList()
    while value:
        if value[0] in WSP:
            token, value = get_fws(value)
            unstructured.append(token)
            continue
        if value.startswith('=?'):
            try:
                token, value = get_encoded_word(value)
            except errors.HeaderParseError:
                # XXX: Need to figure out how to register defects when
                # appropriate here.
                pass
            else:
                have_ws = True
                if len(unstructured) > 0:
                    if unstructured[-1].token_type != 'fws':
                        unstructured.defects.append(errors.InvalidHeaderDefect(
                            "missing whitespace before encoded word"))
                        have_ws = False
                if have_ws and len(unstructured) > 1:
                    if unstructured[-2].token_type == 'encoded-word':
                        # Whitespace between two encoded words is elided
                        # from the decoded value.
                        unstructured[-1] = EWWhiteSpaceTerminal(
                            unstructured[-1], 'fws')
                unstructured.append(token)
                continue
        tok, *remainder = _wsp_splitter(value, 1)
        vtext = ValueTerminal(tok, 'vtext')
        _validate_xtext(vtext)
        unstructured.append(vtext)
        value = ''.join(remainder)
    return unstructured
def get_qp_ctext(value):
    r"""ctext = <printable ascii except \ ( )>

    This is not the RFC ctext, since we are handling nested comments in comment
    and unquoting quoted-pairs here.  We allow anything except the '()'
    characters, but if we find any ASCII other than the RFC defined printable
    ASCII, a NonPrintableDefect is added to the token's defects list.  Since
    quoted pairs are converted to their unquoted values, what is returned is
    a 'ptext' token.  In this case it is a WhiteSpaceTerminal, so it's value
    is ' '.
    """
    ptext, value, _ = _get_ptext_to_endchars(value, '()')
    ptext = WhiteSpaceTerminal(ptext, 'ptext')
    _validate_xtext(ptext)
    return ptext, value
def get_qcontent(value):
    """qcontent = qtext / quoted-pair

    We allow anything except the DQUOTE character, but if we find any ASCII
    other than the RFC defined printable ASCII, a NonPrintableDefect is
    added to the token's defects list.  Any quoted pairs are converted to their
    unquoted values, so what is returned is a 'ptext' token.  In this case it
    is a ValueTerminal.
    """
    ptext, value, _ = _get_ptext_to_endchars(value, '"')
    ptext = ValueTerminal(ptext, 'ptext')
    _validate_xtext(ptext)
    return ptext, value
def get_atext(value):
    """atext = <matches _atext_matcher>

    Consume a maximal run of non-ATOM_ENDS characters.  Characters that
    are not strictly atext are tolerated, but _validate_xtext records an
    InvalidATextDefect-style defect on the returned token.
    """
    match = _non_atom_end_matcher(value)
    if match is None:
        raise errors.HeaderParseError(
            "expected atext but found '{}'".format(value))
    text = match.group()
    token = ValueTerminal(text, 'atext')
    _validate_xtext(token)
    return token, value[len(text):]
def get_bare_quoted_string(value):
    """bare-quoted-string = DQUOTE *([FWS] qcontent) [FWS] DQUOTE

    A quoted-string without the leading or trailing white space.  Its
    value is the text between the quote marks, with whitespace
    preserved and quoted pairs decoded.

    Raises HeaderParseError if value does not start with a DQUOTE.
    Fix: an empty value previously raised IndexError from the value[0]
    subscript; it now raises HeaderParseError like every other parse
    failure (and like get_comment's guard).
    """
    if not value or value[0] != '"':
        raise errors.HeaderParseError(
            "expected '\"' but found '{}'".format(value))
    bare_quoted_string = BareQuotedString()
    value = value[1:]
    while value and value[0] != '"':
        if value[0] in WSP:
            token, value = get_fws(value)
        elif value[:2] == '=?':
            try:
                token, value = get_encoded_word(value)
                bare_quoted_string.defects.append(errors.InvalidHeaderDefect(
                    "encoded word inside quoted string"))
            except errors.HeaderParseError:
                token, value = get_qcontent(value)
        else:
            token, value = get_qcontent(value)
        bare_quoted_string.append(token)
    if not value:
        bare_quoted_string.defects.append(errors.InvalidHeaderDefect(
            "end of header inside quoted string"))
        return bare_quoted_string, value
    return bare_quoted_string, value[1:]
def get_comment(value):
    """comment = "(" *([FWS] ccontent) [FWS] ")"
       ccontent = ctext / quoted-pair / comment

    We handle nested comments here, and quoted-pair in our qp-ctext routine.
    """
    if value and value[0] != '(':
        raise errors.HeaderParseError(
            "expected '(' but found '{}'".format(value))
    comment = Comment()
    value = value[1:]
    while value and value[0] != ")":
        if value[0] in WSP:
            token, value = get_fws(value)
        elif value[0] == '(':
            # Nested comment: recurse.
            token, value = get_comment(value)
        else:
            token, value = get_qp_ctext(value)
        comment.append(token)
    if not value:
        comment.defects.append(errors.InvalidHeaderDefect(
            "end of header inside comment"))
        return comment, value
    return comment, value[1:]
def get_cfws(value):
    """CFWS = (1*([FWS] comment) [FWS]) / FWS

    Collect any run of folding whitespace and comments into a CFWSList.
    """
    cfws = CFWSList()
    while value and value[0] in CFWS_LEADER:
        getter = get_fws if value[0] in WSP else get_comment
        token, value = getter(value)
        cfws.append(token)
    return cfws, value
def get_quoted_string(value):
    """quoted-string = [CFWS] <bare-quoted-string> [CFWS]

    'bare-quoted-string' is an intermediate class defined by this parser
    and not by the RFC grammar; it is the quoted string without any
    attached CFWS.
    """
    quoted_string = QuotedString()
    if value and value[0] in CFWS_LEADER:
        leading, value = get_cfws(value)
        quoted_string.append(leading)
    core, value = get_bare_quoted_string(value)
    quoted_string.append(core)
    if value and value[0] in CFWS_LEADER:
        trailing, value = get_cfws(value)
        quoted_string.append(trailing)
    return quoted_string, value
def get_atom(value):
    """atom = [CFWS] 1*atext [CFWS]

    An atom could be an rfc2047 encoded word.
    """
    atom = Atom()
    # Optional leading CFWS.
    if value and value[0] in CFWS_LEADER:
        token, value = get_cfws(value)
        atom.append(token)
    if value and value[0] in ATOM_ENDS:
        raise errors.HeaderParseError(
            "expected atom but found '{}'".format(value))
    if value.startswith('=?'):
        # Looks like an rfc2047 encoded word; fall back to plain atext
        # if it doesn't actually parse as one.
        try:
            token, value = get_encoded_word(value)
        except errors.HeaderParseError:
            # XXX: need to figure out how to register defects when
            # appropriate here.
            token, value = get_atext(value)
    else:
        token, value = get_atext(value)
    atom.append(token)
    # Optional trailing CFWS.
    if value and value[0] in CFWS_LEADER:
        token, value = get_cfws(value)
        atom.append(token)
    return atom, value
def get_dot_atom_text(value):
    """dot-text = 1*atext *("." 1*atext)
    """
    dot_atom_text = DotAtomText()
    if not value or value[0] in ATOM_ENDS:
        raise errors.HeaderParseError("expected atom at a start of "
            "dot-atom-text but found '{}'".format(value))
    # Alternate runs of atext with single dots; a dot may not come last.
    while value and value[0] not in ATOM_ENDS:
        token, value = get_atext(value)
        dot_atom_text.append(token)
        if value and value[0] == '.':
            dot_atom_text.append(DOT)
            value = value[1:]
    if dot_atom_text[-1] is DOT:
        raise errors.HeaderParseError("expected atom at end of dot-atom-text "
            "but found '{}'".format('.'+value))
    return dot_atom_text, value
def get_dot_atom(value):
    """dot-atom = [CFWS] dot-atom-text [CFWS]

    Any place we can have a dot atom, we could instead have an rfc2047 encoded
    word.
    """
    dot_atom = DotAtom()
    # Optional leading CFWS.
    if value[0] in CFWS_LEADER:
        token, value = get_cfws(value)
        dot_atom.append(token)
    if value.startswith('=?'):
        # Possible rfc2047 encoded word; fall back to dot-atom-text if it
        # fails to parse as an encoded word.
        try:
            token, value = get_encoded_word(value)
        except errors.HeaderParseError:
            # XXX: need to figure out how to register defects when
            # appropriate here.
            token, value = get_dot_atom_text(value)
    else:
        token, value = get_dot_atom_text(value)
    dot_atom.append(token)
    # Optional trailing CFWS.
    if value and value[0] in CFWS_LEADER:
        token, value = get_cfws(value)
        dot_atom.append(token)
    return dot_atom, value
def get_word(value):
    """word = atom / quoted-string

    Either atom or quoted-string may start with CFWS.  We have to peel off this
    CFWS first to determine which type of word to parse.  Afterward we splice
    the leading CFWS, if any, into the parsed sub-token.

    If neither an atom or a quoted-string is found before the next special, a
    HeaderParseError is raised.

    The token returned is either an Atom or a QuotedString, as appropriate.
    This means the 'word' level of the formal grammar is not represented in the
    parse tree; this is because having that extra layer when manipulating the
    parse tree is more confusing than it is helpful.
    """
    if value[0] in CFWS_LEADER:
        leader, value = get_cfws(value)
    else:
        leader = None
    if value[0]=='"':
        token, value = get_quoted_string(value)
    elif value[0] in SPECIALS:
        raise errors.HeaderParseError("Expected 'atom' or 'quoted-string' "
                                      "but found '{}'".format(value))
    else:
        token, value = get_atom(value)
    # Splice the peeled-off CFWS back into the sub-token, so there is no
    # separate 'word' layer in the parse tree.
    if leader is not None:
        token[:0] = [leader]
    return token, value
def get_phrase(value):
    """phrase = 1*word / obs-phrase
    obs-phrase = word *(word / "." / CFWS)

    This means a phrase can be a sequence of words, periods, and CFWS in any
    order as long as it starts with at least one word.  If anything other than
    words is detected, an ObsoleteHeaderDefect is added to the token's defect
    list.  We also accept a phrase that starts with CFWS followed by a dot;
    this is registered as an InvalidHeaderDefect, since it is not supported by
    even the obsolete grammar.
    """
    phrase = Phrase()
    try:
        token, value = get_word(value)
        phrase.append(token)
    except errors.HeaderParseError:
        # Not even the obsolete grammar allows a phrase with no leading word.
        phrase.defects.append(errors.InvalidHeaderDefect(
            "phrase does not start with word"))
    while value and value[0] not in PHRASE_ENDS:
        if value[0]=='.':
            # Periods between words are only valid in the obsolete grammar.
            phrase.append(DOT)
            phrase.defects.append(errors.ObsoleteHeaderDefect(
                "period in 'phrase'"))
            value = value[1:]
        else:
            try:
                token, value = get_word(value)
            except errors.HeaderParseError:
                if value[0] in CFWS_LEADER:
                    # Bare CFWS with no following atom: obsolete but accepted.
                    token, value = get_cfws(value)
                    phrase.defects.append(errors.ObsoleteHeaderDefect(
                        "comment found without atom"))
                else:
                    raise
            phrase.append(token)
    return phrase, value
def get_local_part(value):
    """local-part = dot-atom / quoted-string / obs-local-part
    """
    local_part = LocalPart()
    leader = None
    if value[0] in CFWS_LEADER:
        leader, value = get_cfws(value)
    if not value:
        raise errors.HeaderParseError(
            "expected local-part but found '{}'".format(value))
    try:
        token, value = get_dot_atom(value)
    except errors.HeaderParseError:
        try:
            token, value = get_word(value)
        except errors.HeaderParseError:
            if value[0] != '\\' and value[0] in PHRASE_ENDS:
                raise
            # Otherwise this can still be an obs-local-part; start with an
            # empty token and let the obs parser below take over.
            token = TokenList()
    if leader is not None:
        token[:0] = [leader]
    local_part.append(token)
    if value and (value[0]=='\\' or value[0] not in PHRASE_ENDS):
        # More text follows that a dot-atom/word can't explain: re-parse
        # everything consumed so far plus the remainder as obs-local-part.
        obs_local_part, value = get_obs_local_part(str(local_part) + value)
        if obs_local_part.token_type == 'invalid-obs-local-part':
            local_part.defects.append(errors.InvalidHeaderDefect(
                "local-part is not dot-atom, quoted-string, or obs-local-part"))
        else:
            local_part.defects.append(errors.ObsoleteHeaderDefect(
                "local-part is not a dot-atom (contains CFWS)"))
        local_part[0] = obs_local_part
    try:
        local_part.value.encode('ascii')
    except UnicodeEncodeError:
        local_part.defects.append(errors.NonASCIILocalPartDefect(
                "local-part contains non-ASCII characters)"))
    return local_part, value
def get_obs_local_part(value):
    """obs-local-part = word *("." word)
    """
    obs_local_part = ObsLocalPart()
    last_non_ws_was_dot = False
    while value and (value[0]=='\\' or value[0] not in PHRASE_ENDS):
        if value[0] == '.':
            if last_non_ws_was_dot:
                # Two dots with nothing (or only CFWS) between them.
                obs_local_part.defects.append(errors.InvalidHeaderDefect(
                    "invalid repeated '.'"))
            obs_local_part.append(DOT)
            last_non_ws_was_dot = True
            value = value[1:]
            continue
        elif value[0]=='\\':
            # Bare backslash is only legal inside quoted-string/ccontent;
            # keep it as a misplaced special and continue parsing.
            obs_local_part.append(ValueTerminal(value[0],
                                                'misplaced-special'))
            value = value[1:]
            obs_local_part.defects.append(errors.InvalidHeaderDefect(
                "'\\' character outside of quoted-string/ccontent"))
            last_non_ws_was_dot = False
            continue
        if obs_local_part and obs_local_part[-1].token_type != 'dot':
            obs_local_part.defects.append(errors.InvalidHeaderDefect(
                "missing '.' between words"))
        try:
            token, value = get_word(value)
            last_non_ws_was_dot = False
        except errors.HeaderParseError:
            if value[0] not in CFWS_LEADER:
                raise
            token, value = get_cfws(value)
        obs_local_part.append(token)
    # A dot at either edge (possibly behind CFWS) is invalid even under the
    # obsolete grammar.
    if (obs_local_part[0].token_type == 'dot' or
            obs_local_part[0].token_type=='cfws' and
            obs_local_part[1].token_type=='dot'):
        obs_local_part.defects.append(errors.InvalidHeaderDefect(
            "Invalid leading '.' in local part"))
    if (obs_local_part[-1].token_type == 'dot' or
            obs_local_part[-1].token_type=='cfws' and
            obs_local_part[-2].token_type=='dot'):
        obs_local_part.defects.append(errors.InvalidHeaderDefect(
            "Invalid trailing '.' in local part"))
    if obs_local_part.defects:
        obs_local_part.token_type = 'invalid-obs-local-part'
    return obs_local_part, value
def get_dtext(value):
    """dtext = <printable ascii except backslash, '[', ']'> / obs-dtext

    obs-dtext = obs-NO-WS-CTL / quoted-pair

    We allow anything except the excluded characters, but if we find any
    ASCII other than the RFC defined printable ASCII, a NonPrintableDefect
    is added to the token's defects list.  Quoted pairs are converted to
    their unquoted values, so what is returned is a ptext token (here a
    ValueTerminal).  If there were quoted-printables, an
    ObsoleteHeaderDefect is added to the returned token's defect list.
    """
    text, value, had_qp = _get_ptext_to_endchars(value, '[]')
    token = ValueTerminal(text, 'ptext')
    if had_qp:
        # Quoted pairs in a domain-literal come from the obsolete grammar.
        token.defects.append(errors.ObsoleteHeaderDefect(
            "quoted printable found in domain-literal"))
    _validate_xtext(token)
    return token, value
def _check_for_early_dl_end(value, domain_literal):
    """Return True if input ended inside *domain_literal*, patching it up.

    On early end, a defect and a synthesized closing ']' are appended to
    *domain_literal* so the token is still well formed.
    """
    if not value:
        domain_literal.append(errors.InvalidHeaderDefect(
            "end of input inside domain-literal"))
        domain_literal.append(ValueTerminal(']', 'domain-literal-end'))
        return True
    return False
def get_domain_literal(value):
    """domain-literal = [CFWS] "[" *([FWS] dtext) [FWS] "]" [CFWS]
    """
    domain_literal = DomainLiteral()
    # Optional leading CFWS.
    if value[0] in CFWS_LEADER:
        token, value = get_cfws(value)
        domain_literal.append(token)
    if not value:
        raise errors.HeaderParseError("expected domain-literal")
    if value[0] != '[':
        raise errors.HeaderParseError("expected '[' at start of domain-literal "
                                      "but found '{}'".format(value))
    value = value[1:]
    # Each early-end check appends a defect and a synthetic ']' if the
    # input ran out, letting us return a well-formed token.
    if _check_for_early_dl_end(value, domain_literal):
        return domain_literal, value
    domain_literal.append(ValueTerminal('[', 'domain-literal-start'))
    if value[0] in WSP:
        token, value = get_fws(value)
        domain_literal.append(token)
    token, value = get_dtext(value)
    domain_literal.append(token)
    if _check_for_early_dl_end(value, domain_literal):
        return domain_literal, value
    if value[0] in WSP:
        token, value = get_fws(value)
        domain_literal.append(token)
    if _check_for_early_dl_end(value, domain_literal):
        return domain_literal, value
    if value[0] != ']':
        raise errors.HeaderParseError("expected ']' at end of domain-literal "
                                      "but found '{}'".format(value))
    domain_literal.append(ValueTerminal(']', 'domain-literal-end'))
    value = value[1:]
    # Optional trailing CFWS.
    if value and value[0] in CFWS_LEADER:
        token, value = get_cfws(value)
        domain_literal.append(token)
    return domain_literal, value
def get_domain(value):
    """domain = dot-atom / domain-literal / obs-domain
    obs-domain = atom *("." atom))
    """
    domain = Domain()
    leader = None
    if value[0] in CFWS_LEADER:
        leader, value = get_cfws(value)
    if not value:
        raise errors.HeaderParseError(
            "expected domain but found '{}'".format(value))
    if value[0] == '[':
        token, value = get_domain_literal(value)
        if leader is not None:
            token[:0] = [leader]
        domain.append(token)
        return domain, value
    try:
        token, value = get_dot_atom(value)
    except errors.HeaderParseError:
        token, value = get_atom(value)
    if leader is not None:
        token[:0] = [leader]
    domain.append(token)
    if value and value[0] == '.':
        # A following '.' means this is really an obs-domain (a dot
        # separated list of atoms); flatten any dot-atom and keep going.
        domain.defects.append(errors.ObsoleteHeaderDefect(
            "domain is not a dot-atom (contains CFWS)"))
        if domain[0].token_type == 'dot-atom':
            domain[:] = domain[0]
        while value and value[0] == '.':
            domain.append(DOT)
            token, value = get_atom(value[1:])
            domain.append(token)
    return domain, value
def get_addr_spec(value):
    """addr-spec = local-part "@" domain
    """
    addr_spec = AddrSpec()
    local, value = get_local_part(value)
    addr_spec.append(local)
    if value and value[0] == '@':
        addr_spec.append(ValueTerminal('@', 'address-at-symbol'))
        dom, value = get_domain(value[1:])
        addr_spec.append(dom)
    else:
        # No '@' after the local part; record the defect and stop here.
        addr_spec.defects.append(errors.InvalidHeaderDefect(
            "add-spec local part with no domain"))
    return addr_spec, value
def get_obs_route(value):
    """obs-route = obs-domain-list ":"
    obs-domain-list = *(CFWS / ",") "@" domain *("," [CFWS] ["@" domain])

    Returns an obs-route token with the appropriate sub-tokens (that is,
    there is no obs-domain-list in the parse tree).
    """
    obs_route = ObsRoute()
    # Leading commas and CFWS before the first '@'.
    while value and (value[0]==',' or value[0] in CFWS_LEADER):
        if value[0] in CFWS_LEADER:
            token, value = get_cfws(value)
            obs_route.append(token)
        elif value[0] == ',':
            obs_route.append(ListSeparator)
            value = value[1:]
    if not value or value[0] != '@':
        raise errors.HeaderParseError(
            "expected obs-route domain but found '{}'".format(value))
    obs_route.append(RouteComponentMarker)
    token, value = get_domain(value[1:])
    obs_route.append(token)
    # Remaining comma-separated (possibly empty) route components.
    while value and value[0]==',':
        obs_route.append(ListSeparator)
        value = value[1:]
        if not value:
            break
        if value[0] in CFWS_LEADER:
            token, value = get_cfws(value)
            obs_route.append(token)
        if value[0] == '@':
            obs_route.append(RouteComponentMarker)
            token, value = get_domain(value[1:])
            obs_route.append(token)
    if not value:
        raise errors.HeaderParseError("end of header while parsing obs-route")
    if value[0] != ':':
        raise errors.HeaderParseError( "expected ':' marking end of "
                                       "obs-route but found '{}'".format(value))
    obs_route.append(ValueTerminal(':', 'end-of-obs-route-marker'))
    return obs_route, value[1:]
def get_angle_addr(value):
    """angle-addr = [CFWS] "<" addr-spec ">" [CFWS] / obs-angle-addr
    obs-angle-addr = [CFWS] "<" obs-route addr-spec ">" [CFWS]
    """
    angle_addr = AngleAddr()
    if value[0] in CFWS_LEADER:
        token, value = get_cfws(value)
        angle_addr.append(token)
    if not value or value[0] != '<':
        raise errors.HeaderParseError(
            "expected angle-addr but found '{}'".format(value))
    angle_addr.append(ValueTerminal('<', 'angle-addr-start'))
    value = value[1:]
    # Although it is not legal per RFC5322, SMTP uses '<>' in certain
    # circumstances.
    if value[0] == '>':
        angle_addr.append(ValueTerminal('>', 'angle-addr-end'))
        angle_addr.defects.append(errors.InvalidHeaderDefect(
            "null addr-spec in angle-addr"))
        value = value[1:]
        return angle_addr, value
    try:
        token, value = get_addr_spec(value)
    except errors.HeaderParseError:
        # Not a plain addr-spec; try the obsolete route form
        # ("<@dom1,@dom2:addr@dom>").
        try:
            token, value = get_obs_route(value)
            angle_addr.defects.append(errors.ObsoleteHeaderDefect(
                "obsolete route specification in angle-addr"))
        except errors.HeaderParseError:
            raise errors.HeaderParseError(
                "expected addr-spec or obs-route but found '{}'".format(value))
        angle_addr.append(token)
        token, value = get_addr_spec(value)
    angle_addr.append(token)
    if value and value[0] == '>':
        value = value[1:]
    else:
        # Missing '>' is tolerated; the terminal is synthesized below.
        angle_addr.defects.append(errors.InvalidHeaderDefect(
            "missing trailing '>' on angle-addr"))
    angle_addr.append(ValueTerminal('>', 'angle-addr-end'))
    if value and value[0] in CFWS_LEADER:
        token, value = get_cfws(value)
        angle_addr.append(token)
    return angle_addr, value
def get_display_name(value):
    """display-name = phrase

    Because this is simply a name-rule, we don't return a display-name
    token containing a phrase, but rather a display-name token with the
    content of the phrase.
    """
    display_name = DisplayName()
    phrase, value = get_phrase(value)
    # Adopt the phrase's children and defects directly instead of nesting.
    display_name.extend(phrase[:])
    display_name.defects = list(phrase.defects)
    return display_name, value
def get_name_addr(value):
    """name-addr = [display-name] angle-addr
    """
    name_addr = NameAddr()
    # Both the optional display name and the angle-addr can start with cfws.
    leader = None
    if value[0] in CFWS_LEADER:
        leader, value = get_cfws(value)
        if not value:
            raise errors.HeaderParseError(
                "expected name-addr but found '{}'".format(leader))
    if value[0] != '<':
        if value[0] in PHRASE_ENDS:
            raise errors.HeaderParseError(
                "expected name-addr but found '{}'".format(value))
        token, value = get_display_name(value)
        if not value:
            raise errors.HeaderParseError(
                "expected name-addr but found '{}'".format(token))
        if leader is not None:
            # Splice the leading CFWS into the display-name's first child.
            token[0][:0] = [leader]
            leader = None
        name_addr.append(token)
    token, value = get_angle_addr(value)
    if leader is not None:
        # No display name; the CFWS belongs to the angle-addr instead.
        token[:0] = [leader]
    name_addr.append(token)
    return name_addr, value
def get_mailbox(value):
    """mailbox = name-addr / addr-spec
    """
    # The only way to figure out if we are dealing with a name-addr or an
    # addr-spec is to try parsing each one.
    mailbox = Mailbox()
    try:
        token, value = get_name_addr(value)
    except errors.HeaderParseError:
        try:
            token, value = get_addr_spec(value)
        except errors.HeaderParseError:
            raise errors.HeaderParseError(
                "expected mailbox but found '{}'".format(value))
    # Any invalid defect anywhere in the sub-parse makes the whole mailbox
    # invalid.
    if any(isinstance(x, errors.InvalidHeaderDefect)
                   for x in token.all_defects):
        mailbox.token_type = 'invalid-mailbox'
    mailbox.append(token)
    return mailbox, value
def get_invalid_mailbox(value, endchars):
    """Consume everything up to (but excluding) a char in *endchars*.

    This is outside the formal grammar.  The InvalidMailbox TokenList
    that is returned acts like a Mailbox, but the data attributes are
    None.
    """
    invalid_mailbox = InvalidMailbox()
    while value and value[0] not in endchars:
        if value[0] in PHRASE_ENDS:
            # Specials can't start a phrase; swallow them one at a time.
            invalid_mailbox.append(ValueTerminal(value[0],
                                                 'misplaced-special'))
            value = value[1:]
            continue
        token, value = get_phrase(value)
        invalid_mailbox.append(token)
    return invalid_mailbox, value
def get_mailbox_list(value):
    """mailbox-list = (mailbox *("," mailbox)) / obs-mbox-list
    obs-mbox-list = *([CFWS] ",") mailbox *("," [mailbox / CFWS])

    For this routine we go outside the formal grammar in order to improve error
    handling.  We recognize the end of the mailbox list only at the end of the
    value or at a ';' (the group terminator).  This is so that we can turn
    invalid mailboxes into InvalidMailbox tokens and continue parsing any
    remaining valid mailboxes.  We also allow all mailbox entries to be null,
    and this condition is handled appropriately at a higher level.
    """
    mailbox_list = MailboxList()
    while value and value[0] != ';':
        try:
            token, value = get_mailbox(value)
            mailbox_list.append(token)
        except errors.HeaderParseError:
            # Not a parseable mailbox.  Distinguish an empty list element
            # (obsolete grammar) from genuinely invalid content.
            leader = None
            if value[0] in CFWS_LEADER:
                leader, value = get_cfws(value)
                if not value or value[0] in ',;':
                    mailbox_list.append(leader)
                    mailbox_list.defects.append(errors.ObsoleteHeaderDefect(
                        "empty element in mailbox-list"))
                else:
                    token, value = get_invalid_mailbox(value, ',;')
                    if leader is not None:
                        token[:0] = [leader]
                    mailbox_list.append(token)
                    mailbox_list.defects.append(errors.InvalidHeaderDefect(
                        "invalid mailbox in mailbox-list"))
            elif value[0] == ',':
                mailbox_list.defects.append(errors.ObsoleteHeaderDefect(
                    "empty element in mailbox-list"))
            else:
                token, value = get_invalid_mailbox(value, ',;')
                if leader is not None:
                    token[:0] = [leader]
                mailbox_list.append(token)
                mailbox_list.defects.append(errors.InvalidHeaderDefect(
                    "invalid mailbox in mailbox-list"))
        if value and value[0] not in ',;':
            # Crap after mailbox; treat it as an invalid mailbox.
            # The mailbox info will still be available.
            mailbox = mailbox_list[-1]
            mailbox.token_type = 'invalid-mailbox'
            token, value = get_invalid_mailbox(value, ',;')
            mailbox.extend(token)
            mailbox_list.defects.append(errors.InvalidHeaderDefect(
                "invalid mailbox in mailbox-list"))
        if value and value[0] == ',':
            mailbox_list.append(ListSeparator)
            value = value[1:]
    return mailbox_list, value
def get_group_list(value):
    """group-list = mailbox-list / CFWS / obs-group-list
    obs-group-list = 1*([CFWS] ",") [CFWS]
    """
    group_list = GroupList()
    if not value:
        group_list.defects.append(errors.InvalidHeaderDefect(
            "end of header before group-list"))
        return group_list, value
    leader = None
    if value and value[0] in CFWS_LEADER:
        leader, value = get_cfws(value)
        if not value:
            # This should never happen in email parsing, since CFWS-only is a
            # legal alternative to group-list in a group, which is the only
            # place group-list appears.
            group_list.defects.append(errors.InvalidHeaderDefect(
                "end of header in group-list"))
            group_list.append(leader)
            return group_list, value
        if value[0] == ';':
            # CFWS-only group-list.
            group_list.append(leader)
            return group_list, value
    token, value = get_mailbox_list(value)
    if len(token.all_mailboxes)==0:
        # Only empty elements (obs-group-list); keep the pieces flat.
        if leader is not None:
            group_list.append(leader)
        group_list.extend(token)
        group_list.defects.append(errors.ObsoleteHeaderDefect(
            "group-list with empty entries"))
        return group_list, value
    if leader is not None:
        token[:0] = [leader]
    group_list.append(token)
    return group_list, value
def get_group(value):
    """group = display-name ":" [group-list] ";" [CFWS]

    Raises HeaderParseError if the display name is not followed by ':'
    or the group-list is not followed by ';'.  A header that ends inside
    the group is recorded as a defect rather than raising.
    """
    group = Group()
    token, value = get_display_name(value)
    if not value or value[0] != ':':
        raise errors.HeaderParseError("expected ':' at end of group "
                                      "display name but found '{}'".format(value))
    group.append(token)
    group.append(ValueTerminal(':', 'group-display-name-terminator'))
    value = value[1:]
    if value and value[0] == ';':
        # Empty group: no group-list at all.
        group.append(ValueTerminal(';', 'group-terminator'))
        return group, value[1:]
    token, value = get_group_list(value)
    group.append(token)
    if not value:
        group.defects.append(errors.InvalidHeaderDefect(
            "end of header in group"))
        # Bug fix: previously this fell through to value[0] below, raising
        # IndexError on a truncated header instead of returning the token.
        return group, value
    if value[0] != ';':
        raise errors.HeaderParseError(
            "expected ';' at end of group but found {}".format(value))
    group.append(ValueTerminal(';', 'group-terminator'))
    value = value[1:]
    if value and value[0] in CFWS_LEADER:
        token, value = get_cfws(value)
        group.append(token)
    return group, value
def get_address(value):
    """address = mailbox / group

    Note that counter-intuitively, an address can be either a single address or
    a list of addresses (a group).  This is why the returned Address object has
    a 'mailboxes' attribute which treats a single address as a list of length
    one.  When you need to differentiate between to two cases, extract the single
    element, which is either a mailbox or a group token.
    """
    # The formal grammar isn't very helpful when parsing an address.  mailbox
    # and group, especially when allowing for obsolete forms, start off very
    # similarly.  It is only when you reach one of @, <, or : that you know
    # what you've got.  So, we try each one in turn, starting with the more
    # likely of the two.  We could perhaps make this more efficient by looking
    # for a phrase and then branching based on the next character, but that
    # would be a premature optimization.
    address = Address()
    try:
        token, value = get_group(value)
    except errors.HeaderParseError:
        try:
            token, value = get_mailbox(value)
        except errors.HeaderParseError:
            raise errors.HeaderParseError(
                "expected address but found '{}'".format(value))
    address.append(token)
    return address, value
def get_address_list(value):
    """address_list = (address *("," address)) / obs-addr-list
    obs-addr-list = *([CFWS] ",") address *("," [address / CFWS])

    We depart from the formal grammar here by continuing to parse until the end
    of the input, assuming the input to be entirely composed of an
    address-list.  This is always true in email parsing, and allows us
    to skip invalid addresses to parse additional valid ones.
    """
    address_list = AddressList()
    while value:
        try:
            token, value = get_address(value)
            address_list.append(token)
        except errors.HeaderParseError as err:
            # Not a parseable address.  Distinguish an empty list element
            # (obsolete grammar) from genuinely invalid content.
            leader = None
            if value[0] in CFWS_LEADER:
                leader, value = get_cfws(value)
                if not value or value[0] == ',':
                    address_list.append(leader)
                    address_list.defects.append(errors.ObsoleteHeaderDefect(
                        "address-list entry with no content"))
                else:
                    token, value = get_invalid_mailbox(value, ',')
                    if leader is not None:
                        token[:0] = [leader]
                    address_list.append(Address([token]))
                    address_list.defects.append(errors.InvalidHeaderDefect(
                        "invalid address in address-list"))
            elif value[0] == ',':
                address_list.defects.append(errors.ObsoleteHeaderDefect(
                    "empty element in address-list"))
            else:
                token, value = get_invalid_mailbox(value, ',')
                if leader is not None:
                    token[:0] = [leader]
                address_list.append(Address([token]))
                address_list.defects.append(errors.InvalidHeaderDefect(
                    "invalid address in address-list"))
        if value and value[0] != ',':
            # Crap after address; treat it as an invalid mailbox.
            # The mailbox info will still be available.
            mailbox = address_list[-1][0]
            mailbox.token_type = 'invalid-mailbox'
            token, value = get_invalid_mailbox(value, ',')
            mailbox.extend(token)
            address_list.defects.append(errors.InvalidHeaderDefect(
                "invalid address in address-list"))
        if value:  # Must be a , at this point.
            address_list.append(ValueTerminal(',', 'list-separator'))
            value = value[1:]
    return address_list, value
#
# XXX: As I begin to add additional header parsers, I'm realizing we probably
# have two level of parser routines: the get_XXX methods that get a token in
# the grammar, and parse_XXX methods that parse an entire field value. So
# get_address_list above should really be a parse_ method, as probably should
# be get_unstructured.
#
def parse_mime_version(value):
    """mime-version = [CFWS] 1*digit [CFWS] "." [CFWS] 1*digit [CFWS]

    Unlike the get_XXX routines, this parses an entire field value and
    returns only the token (no remainder).
    """
    # The [CFWS] is implicit in the RFC 2045 BNF.
    # XXX: This routine is a bit verbose, should factor out a get_int method.
    mime_version = MIMEVersion()
    if not value:
        mime_version.defects.append(errors.HeaderMissingRequiredValue(
            "Missing MIME version number (eg: 1.0)"))
        return mime_version
    if value[0] in CFWS_LEADER:
        token, value = get_cfws(value)
        mime_version.append(token)
        if not value:
            mime_version.defects.append(errors.HeaderMissingRequiredValue(
                "Expected MIME version number but found only CFWS"))
    # Collect the major version digits (anything up to '.' or CFWS).
    digits = ''
    while value and value[0] != '.' and value[0] not in CFWS_LEADER:
        digits += value[0]
        value = value[1:]
    if not digits.isdigit():
        mime_version.defects.append(errors.InvalidHeaderDefect(
            "Expected MIME major version number but found {!r}".format(digits)))
        mime_version.append(ValueTerminal(digits, 'xtext'))
    else:
        mime_version.major = int(digits)
        mime_version.append(ValueTerminal(digits, 'digits'))
    if value and value[0] in CFWS_LEADER:
        token, value = get_cfws(value)
        mime_version.append(token)
    if not value or value[0] != '.':
        if mime_version.major is not None:
            mime_version.defects.append(errors.InvalidHeaderDefect(
                "Incomplete MIME version; found only major number"))
        if value:
            mime_version.append(ValueTerminal(value, 'xtext'))
        return mime_version
    mime_version.append(ValueTerminal('.', 'version-separator'))
    value = value[1:]
    if value and value[0] in CFWS_LEADER:
        token, value = get_cfws(value)
        mime_version.append(token)
    if not value:
        if mime_version.major is not None:
            mime_version.defects.append(errors.InvalidHeaderDefect(
                "Incomplete MIME version; found only major number"))
        return mime_version
    # Collect the minor version digits (anything up to CFWS).
    digits = ''
    while value and value[0] not in CFWS_LEADER:
        digits += value[0]
        value = value[1:]
    if not digits.isdigit():
        mime_version.defects.append(errors.InvalidHeaderDefect(
            "Expected MIME minor version number but found {!r}".format(digits)))
        mime_version.append(ValueTerminal(digits, 'xtext'))
    else:
        mime_version.minor = int(digits)
        mime_version.append(ValueTerminal(digits, 'digits'))
    if value and value[0] in CFWS_LEADER:
        token, value = get_cfws(value)
        mime_version.append(token)
    if value:
        # Anything left over is not part of the grammar.
        mime_version.defects.append(errors.InvalidHeaderDefect(
            "Excess non-CFWS text after MIME version"))
        mime_version.append(ValueTerminal(value, 'xtext'))
    return mime_version
def get_invalid_parameter(value):
    """Consume everything up to the next ';'.

    This is outside the formal grammar.  The InvalidParameter TokenList
    that is returned acts like a Parameter, but the data attributes are
    None.
    """
    invalid_parameter = InvalidParameter()
    while value and value[0] != ';':
        if value[0] in PHRASE_ENDS:
            # Specials can't start a phrase; swallow them one at a time.
            invalid_parameter.append(ValueTerminal(value[0],
                                                   'misplaced-special'))
            value = value[1:]
            continue
        token, value = get_phrase(value)
        invalid_parameter.append(token)
    return invalid_parameter, value
def get_ttext(value):
    """ttext = <matches _ttext_matcher>

    We allow any non-TOKEN_ENDS in ttext, but add defects to the token's
    defects list if we find non-ttext characters.  We also register
    defects for *any* non-printables even though the RFC doesn't exclude
    all of them, because we follow the spirit of RFC 5322.
    """
    match = _non_token_end_matcher(value)
    if not match:
        raise errors.HeaderParseError(
            "expected ttext but found '{}'".format(value))
    text = match.group()
    token = ValueTerminal(text, 'ttext')
    _validate_xtext(token)
    return token, value[len(text):]
def get_token(value):
    """token = [CFWS] 1*ttext [CFWS]

    The RFC equivalent of ttext is any US-ASCII chars except space, ctls,
    or tspecials.  We also exclude tabs even though the RFC doesn't.
    The RFC implies the CFWS but is not explicit about it in the BNF.
    """
    mtoken = Token()
    if value and value[0] in CFWS_LEADER:
        cfws, value = get_cfws(value)
        mtoken.append(cfws)
    if value and value[0] in TOKEN_ENDS:
        raise errors.HeaderParseError(
            "expected token but found '{}'".format(value))
    ttext, value = get_ttext(value)
    mtoken.append(ttext)
    if value and value[0] in CFWS_LEADER:
        cfws, value = get_cfws(value)
        mtoken.append(cfws)
    return mtoken, value
def get_attrtext(value):
    """attrtext = 1*(any non-ATTRIBUTE_ENDS character)

    We allow any non-ATTRIBUTE_ENDS in attrtext, but add defects to the
    token's defects list if we find non-attrtext characters.  We also
    register defects for *any* non-printables even though the RFC doesn't
    exclude all of them, because we follow the spirit of RFC 5322.
    """
    match = _non_attribute_end_matcher(value)
    if not match:
        raise errors.HeaderParseError(
            "expected attrtext but found {!r}".format(value))
    text = match.group()
    token = ValueTerminal(text, 'attrtext')
    _validate_xtext(token)
    return token, value[len(text):]
def get_attribute(value):
    """[CFWS] 1*attrtext [CFWS]

    This version of the BNF makes the CFWS explicit, and as usual we use
    a value terminal for the actual run of characters.  The RFC
    equivalent of attrtext is the token characters, with the subtraction
    of '*', "'", and '%'.  We include tab in the excluded set just as we
    do for token.
    """
    attribute = Attribute()
    if value and value[0] in CFWS_LEADER:
        cfws, value = get_cfws(value)
        attribute.append(cfws)
    if value and value[0] in ATTRIBUTE_ENDS:
        raise errors.HeaderParseError(
            "expected token but found '{}'".format(value))
    text, value = get_attrtext(value)
    attribute.append(text)
    if value and value[0] in CFWS_LEADER:
        cfws, value = get_cfws(value)
        attribute.append(cfws)
    return attribute, value
def get_extended_attrtext(value):
    """attrtext = 1*(any non-ATTRIBUTE_ENDS character plus '%')

    This is a special parsing routine so that we get a value that
    includes % escapes as a single string (which we decode as a single
    string later).
    """
    match = _non_extended_attribute_end_matcher(value)
    if not match:
        raise errors.HeaderParseError(
            "expected extended attrtext but found {!r}".format(value))
    text = match.group()
    token = ValueTerminal(text, 'extended-attrtext')
    _validate_xtext(token)
    return token, value[len(text):]
def get_extended_attribute(value):
    """[CFWS] 1*extended_attrtext [CFWS]

    This is like the non-extended version except we allow % characters,
    so that we can pick up an encoded value as a single string.
    """
    # XXX: should we have an ExtendedAttribute TokenList?
    attribute = Attribute()
    if value and value[0] in CFWS_LEADER:
        cfws, value = get_cfws(value)
        attribute.append(cfws)
    if value and value[0] in EXTENDED_ATTRIBUTE_ENDS:
        raise errors.HeaderParseError(
            "expected token but found '{}'".format(value))
    text, value = get_extended_attrtext(value)
    attribute.append(text)
    if value and value[0] in CFWS_LEADER:
        cfws, value = get_cfws(value)
        attribute.append(cfws)
    return attribute, value
def get_section(value):
    """'*' digits

    The formal BNF is more complicated because leading 0s are not allowed.
    We check for that and add a defect.  We also assume no CFWS is allowed
    between the '*' and the digits, though the RFC is not crystal clear on
    that.  The caller should already have dealt with leading CFWS.
    """
    section = Section()
    if not value or value[0] != '*':
        raise errors.HeaderParseError("Expected section but found {}".format(
            value))
    section.append(ValueTerminal('*', 'section-marker'))
    value = value[1:]
    if not value or not value[0].isdigit():
        raise errors.HeaderParseError("Expected section number but "
                                      "found {}".format(value))
    digits = ''
    while value and value[0].isdigit():
        digits += value[0]
        value = value[1:]
    if digits[0] == '0' and digits != '0':
        # Bug fix: errors.InvalidHeaderError does not exist in email.errors
        # (it raised AttributeError at runtime); use InvalidHeaderDefect and
        # restore the missing space in the implicitly-concatenated message.
        section.defects.append(errors.InvalidHeaderDefect(
                "section number has an invalid leading 0"))
    section.number = int(digits)
    section.append(ValueTerminal(digits, 'digits'))
    return section, value
def get_value(value):
    """quoted-string / attribute
    """
    v = Value()
    if not value:
        raise errors.HeaderParseError("Expected value but found end of string")
    leader = None
    if value[0] in CFWS_LEADER:
        leader, value = get_cfws(value)
        if not value:
            raise errors.HeaderParseError("Expected value but found "
                                          "only {}".format(leader))
    # A leading '"' means quoted-string; anything else is an (extended)
    # attribute value.
    getter = get_quoted_string if value[0] == '"' else get_extended_attribute
    token, value = getter(value)
    if leader is not None:
        # Splice the leading CFWS into the parsed sub-token.
        token[:0] = [leader]
    v.append(token)
    return v, value
def get_parameter(value):
    """ attribute [section] ["*"] [CFWS] "=" value

    The CFWS is implied by the RFC but not made explicit in the BNF.  This
    simplified form of the BNF from the RFC is made to conform with the RFC BNF
    through some extra checks.  We do it this way because it makes both error
    recovery and working with the resulting parse tree easier.
    """
    # It is possible CFWS would also be implicitly allowed between the section
    # and the 'extended-attribute' marker (the '*') , but we've never seen that
    # in the wild and we will therefore ignore the possibility.
    param = Parameter()
    token, value = get_attribute(value)
    param.append(token)
    if not value or value[0] == ';':
        param.defects.append(errors.InvalidHeaderDefect("Parameter contains "
            "name ({}) but no value".format(token)))
        return param, value
    if value[0] == '*':
        try:
            token, value = get_section(value)
            param.sectioned = True
            param.append(token)
        except errors.HeaderParseError:
            pass
        if not value:
            raise errors.HeaderParseError("Incomplete parameter")
        if value[0] == '*':
            param.append(ValueTerminal('*', 'extended-parameter-marker'))
            value = value[1:]
            param.extended = True
    if value[0] != '=':
        raise errors.HeaderParseError("Parameter not followed by '='")
    param.append(ValueTerminal('=', 'parameter-separator'))
    value = value[1:]
    leader = None
    if value and value[0] in CFWS_LEADER:
        token, value = get_cfws(value)
        param.append(token)
    remainder = None
    appendto = param
    if param.extended and value and value[0] == '"':
        # Now for some serious hackery to handle the common invalid case of
        # double quotes around an extended value.  We also accept (with defect)
        # a value marked as encoded that isn't really.
        qstring, remainder = get_quoted_string(value)
        inner_value = qstring.stripped_value
        semi_valid = False
        if param.section_number == 0:
            if inner_value and inner_value[0] == "'":
                semi_valid = True
            else:
                token, rest = get_attrtext(inner_value)
                if rest and rest[0] == "'":
                    semi_valid = True
        else:
            try:
                token, rest = get_extended_attrtext(inner_value)
            except errors.HeaderParseError:
                # Fixed: this was a bare 'except:', which would also swallow
                # KeyboardInterrupt/SystemExit; only a parse failure is
                # expected here.
                pass
            else:
                if not rest:
                    semi_valid = True
        if semi_valid:
            param.defects.append(errors.InvalidHeaderDefect(
                "Quoted string value for extended parameter is invalid"))
            param.append(qstring)
            for t in qstring:
                if t.token_type == 'bare-quoted-string':
                    t[:] = []
                    appendto = t
                    break
            value = inner_value
        else:
            remainder = None
            param.defects.append(errors.InvalidHeaderDefect(
                "Parameter marked as extended but appears to have a "
                "quoted string value that is non-encoded"))
    if value and value[0] == "'":
        token = None
    else:
        token, value = get_value(value)
    if not param.extended or param.section_number > 0:
        if not value or value[0] != "'":
            appendto.append(token)
            if remainder is not None:
                assert not value, value
                value = remainder
            return param, value
        param.defects.append(errors.InvalidHeaderDefect(
            "Apparent initial-extended-value but attribute "
            "was not marked as extended or was not initial section"))
    if not value:
        # Assume the charset/lang is missing and the token is the value.
        param.defects.append(errors.InvalidHeaderDefect(
            "Missing required charset/lang delimiters"))
        appendto.append(token)
        if remainder is None:
            return param, value
    else:
        if token is not None:
            for t in token:
                if t.token_type == 'extended-attrtext':
                    break
            # Fixed: the original used '==' here (a no-op comparison), so the
            # token type was never actually rewritten to 'attrtext'.
            t.token_type = 'attrtext'
            appendto.append(t)
            param.charset = t.value
        if value[0] != "'":
            raise errors.HeaderParseError("Expected RFC2231 char/lang encoding "
                                          "delimiter, but found {!r}".format(
                                              value))
        appendto.append(ValueTerminal("'", 'RFC2231 delimiter'))
        value = value[1:]
        if value and value[0] != "'":
            token, value = get_attrtext(value)
            appendto.append(token)
            param.lang = token.value
            if not value or value[0] != "'":
                raise errors.HeaderParseError("Expected RFC2231 char/lang encoding "
                                              "delimiter, but found {}".format(value))
        appendto.append(ValueTerminal("'", 'RFC2231 delimiter'))
        value = value[1:]
    if remainder is not None:
        # Treat the rest of value as bare quoted string content.
        v = Value()
        while value:
            if value[0] in WSP:
                token, value = get_fws(value)
            else:
                token, value = get_qcontent(value)
            v.append(token)
        token = v
    else:
        token, value = get_value(value)
    appendto.append(token)
    if remainder is not None:
        assert not value, value
        value = remainder
    return param, value
def parse_mime_parameters(value):
    """ parameter *( ";" parameter )

    That BNF is meant to indicate this routine should only be called after
    finding and handling the leading ';'.  There is no corresponding rule in
    the formal RFC grammar, but it is more convenient for us for the set of
    parameters to be treated as its own TokenList.

    This is a 'parse' routine because it consumes the remaining value, but it
    would never be called to parse a full header.  Instead it is called to
    parse everything after the non-parameter value of a specific MIME header.
    """
    mime_parameters = MimeParameters()
    while value:
        try:
            token, value = get_parameter(value)
            mime_parameters.append(token)
        except errors.HeaderParseError as err:
            # Error recovery.  'err' itself is unused: we classify the failure
            # by inspecting what is left of the value instead.
            leader = None
            if value[0] in CFWS_LEADER:
                leader, value = get_cfws(value)
            if not value:
                # Only trailing CFWS was left: keep it and stop.
                mime_parameters.append(leader)
                return mime_parameters
            if value[0] == ';':
                # An empty parameter slot (e.g. ";;"): record a defect and
                # continue with the next parameter.
                if leader is not None:
                    mime_parameters.append(leader)
                mime_parameters.defects.append(errors.InvalidHeaderDefect(
                    "parameter entry with no content"))
            else:
                # Unparseable junk: swallow everything up to the next ';'.
                token, value = get_invalid_parameter(value)
                if leader:
                    token[:0] = [leader]
                mime_parameters.append(token)
                mime_parameters.defects.append(errors.InvalidHeaderDefect(
                    "invalid parameter {!r}".format(token)))
        if value and value[0] != ';':
            # Junk after the otherwise valid parameter.  Mark it as
            # invalid, but it will have a value.
            param = mime_parameters[-1]
            param.token_type = 'invalid-parameter'
            token, value = get_invalid_parameter(value)
            param.extend(token)
            mime_parameters.defects.append(errors.InvalidHeaderDefect(
                "parameter with invalid trailing text {!r}".format(token)))
        if value:
            # Must be a ';' at this point.
            mime_parameters.append(ValueTerminal(';', 'parameter-separator'))
            value = value[1:]
    return mime_parameters
def _find_mime_parameters(tokenlist, value):
    """Do our best to find the parameters in an invalid MIME header
    """
    # Skim forward to the first ';', collecting whatever we pass over; once
    # found, hand the remainder to the real parameter parser.
    while value:
        if value[0] == ';':
            tokenlist.append(ValueTerminal(';', 'parameter-separator'))
            tokenlist.append(parse_mime_parameters(value[1:]))
            return
        if value[0] in PHRASE_ENDS:
            tokenlist.append(ValueTerminal(value[0], 'misplaced-special'))
            value = value[1:]
        else:
            phrase, value = get_phrase(value)
            tokenlist.append(phrase)
def parse_content_type_header(value):
    """ maintype "/" subtype *( ";" parameter )

    The maintype and substype are tokens.  Theoretically they could
    be checked against the official IANA list + x-token, but we
    don't do that.
    """
    # (Removed the unused local 'recover = False'; it was assigned but
    # never read anywhere in the function.)
    ctype = ContentType()
    if not value:
        ctype.defects.append(errors.HeaderMissingRequiredValue(
            "Missing content type specification"))
        return ctype
    try:
        token, value = get_token(value)
    except errors.HeaderParseError:
        ctype.defects.append(errors.InvalidHeaderDefect(
            "Expected content maintype but found {!r}".format(value)))
        _find_mime_parameters(ctype, value)
        return ctype
    ctype.append(token)
    # XXX: If we really want to follow the formal grammar we should make
    # mantype and subtype specialized TokenLists here.  Probably not worth it.
    if not value or value[0] != '/':
        ctype.defects.append(errors.InvalidHeaderDefect(
            "Invalid content type"))
        if value:
            _find_mime_parameters(ctype, value)
        return ctype
    ctype.maintype = token.value.strip().lower()
    ctype.append(ValueTerminal('/', 'content-type-separator'))
    value = value[1:]
    try:
        token, value = get_token(value)
    except errors.HeaderParseError:
        ctype.defects.append(errors.InvalidHeaderDefect(
            "Expected content subtype but found {!r}".format(value)))
        _find_mime_parameters(ctype, value)
        return ctype
    ctype.append(token)
    ctype.subtype = token.value.strip().lower()
    if not value:
        return ctype
    if value[0] != ';':
        ctype.defects.append(errors.InvalidHeaderDefect(
            "Only parameters are valid after content type, but "
            "found {!r}".format(value)))
        # The RFC requires that a syntactically invalid content-type be treated
        # as text/plain.  Perhaps we should postel this, but we should probably
        # only do that if we were checking the subtype value against IANA.
        del ctype.maintype, ctype.subtype
        _find_mime_parameters(ctype, value)
        return ctype
    ctype.append(ValueTerminal(';', 'parameter-separator'))
    ctype.append(parse_mime_parameters(value[1:]))
    return ctype
def parse_content_disposition_header(value):
    """ disposition-type *( ";" parameter )
    """
    disp = ContentDisposition()
    if not value:
        disp.defects.append(errors.HeaderMissingRequiredValue(
            "Missing content disposition"))
        return disp
    try:
        tok, value = get_token(value)
    except errors.HeaderParseError:
        disp.defects.append(errors.InvalidHeaderDefect(
            "Expected content disposition but found {!r}".format(value)))
        _find_mime_parameters(disp, value)
        return disp
    disp.append(tok)
    disp.content_disposition = tok.value.strip().lower()
    if not value:
        return disp
    if value[0] == ';':
        # The happy path: a parameter list follows the disposition type.
        disp.append(ValueTerminal(';', 'parameter-separator'))
        disp.append(parse_mime_parameters(value[1:]))
        return disp
    disp.defects.append(errors.InvalidHeaderDefect(
        "Only parameters are valid after content disposition, but "
        "found {!r}".format(value)))
    _find_mime_parameters(disp, value)
    return disp
def parse_content_transfer_encoding_header(value):
    """ mechanism
    """
    # We should probably validate the values, since the list is fixed.
    cte_header = ContentTransferEncoding()
    if not value:
        cte_header.defects.append(errors.HeaderMissingRequiredValue(
            "Missing content transfer encoding"))
        return cte_header
    try:
        tok, value = get_token(value)
    except errors.HeaderParseError:
        cte_header.defects.append(errors.InvalidHeaderDefect(
            "Expected content transfer encoding but found {!r}".format(value)))
    else:
        cte_header.append(tok)
        cte_header.cte = tok.value.strip().lower()
    # Anything left over is junk; record one defect per chunk while still
    # collecting the tokens so nothing is lost.
    while value:
        cte_header.defects.append(errors.InvalidHeaderDefect(
            "Extra text after content transfer encoding"))
        if value[0] in PHRASE_ENDS:
            cte_header.append(ValueTerminal(value[0], 'misplaced-special'))
            value = value[1:]
        else:
            tok, value = get_phrase(value)
            cte_header.append(tok)
    return cte_header
|
Reflexe/doc_to_pdf
|
Windows/program/python-core-3.5.0/lib/email/_header_value_parser.py
|
Python
|
mpl-2.0
| 105,084
|
[
"CRYSTAL"
] |
51bd6cc3b9da39376e2c93dd196ec92d2ecc961ce855826f9077965f480cbcce
|
# ROM locations for in-game dialog text: name -> (offset, max length in bytes).
text_addresses = {'Altar': (0x180300, 256),
                  'Triforce': (0x180400, 256),
                  'Uncle': (0x180500, 256),
                  'Ganon1': (0x180600, 256),
                  'Ganon2': (0x180700, 256),
                  'Blind': (0x180800, 256),
                  'TavernMan': (0x180C00, 256),
                  'Sahasrahla1': (0x180A00, 256),
                  'Sahasrahla2': (0x180B00, 256),
                  'BombShop1': (0x180E00, 256),
                  'BombShop2': (0x180D00, 256),
                  'PyramidFairy': (0x180900, 256),
                  'EtherTablet': (0x180F00, 256),
                  'BombosTablet': (0x181000, 256),
                  'Ganon1Invincible': (0x181100, 256),
                  'Ganon2Invincible': (0x181200, 256)}
# ROM locations for the credits strings: name -> (offset, field width in
# characters).  Note the widths vary per entry, unlike text_addresses.
credits_addresses = {'KingsReturn': (0x76928, 22),
                     'Sanctuary': (0x76964, 16),
                     'Kakariko': (0x76997, 23),
                     'DesertPalace': (0x769D4, 24),
                     'MountainTower': (0x76A12, 24),
                     'LinksHouse': (0x76A52, 19),
                     'Zora': (0x76A85, 20),
                     'MagicShop': (0x76AC5, 23),
                     'Lumberjacks': (0x76AFC, 16),
                     'FluteBoy': (0x76B34, 23),
                     'WishingWell': (0x76B71, 23),
                     'Blacksmiths': (0x76BAC, 23),
                     'SickKid': (0x76BDF, 20),
                     'DeathMountain': (0x76C19, 16),
                     'LostWoods': (0x76C51, 16),
                     'Altar': (0x76C81, 20)}
Uncle_texts = ['Good Luck!\nYou will need it.', 'Forward this message to 10 other people or this seed will be awful.', 'I hope you like your seeds bootless and fluteless.',
'10\n9\n8\n7\n6\n5\n4\n3\n2\n1\nGo!', 'I have crippling depression.', 'I\'m off to visit cousin Fritzl.']
Triforce_texts = ['Product has Hole in center. Bad seller, 0 out of 5.', 'Who stole the fourth triangle?', 'Trifource?\nMore Like Tritrice, am I right?'
'\n Well Done!', 'You just wasted 2 hours of your life.', 'This was meant to be a trapezoid']
BombShop2_texts = ['Bombs!\nBombs!\nBiggest!\nBestest!\nGreatest!\nBoomest!']
PyramidFairy_texts = ['May I talk to you about our lord and savior, Ganon?']
Sahasrahla2_texts = ['You already got my item, idiot.', 'Why are you still talking to me?', 'This text won\'t change.', 'Have you met my brother, Hasarahshla?']
Blind_texts = ['I bet you expected a vision related pun?\n\nNot Today.\n Didn\'t see that coming, did you?', 'What do you call a blind dinosaur?\n A Doyouthinkhe-saurus',
'A blind man walks into a bar...\n\n\n and a table\n\n\n and a door.',
'Why can\'t blind people eat fish?\n Because it\'s see food']
Ganon1_texts = ['\n\n\n\n\n\n\n\n\nWhy are you reading an empty textbox?', 'Hi', 'Hey, can you turn off the lights?', 'Oink Oink',
'Uncle: How do you like my Ganon cosplay?', 'I\'ll try spinning - that\'s a good trick!', 'Did you ever hear the tragedy of Darth Plagueis the Wise?']
TavernMan_texts = ['Did you know that talking to random NPCs wastes time in a race? I hope this information may be of use to you in the future.']
KingsReturn_texts = ['Who is this even', 'The Harem']
Sanctuary_texts = ['A Priest\'s love']
Kakariko_texts = ['Shasschahshahsahahrahsashsa', 'Schaschlik']
Blacksmiths_texts = ['frogs for bread', 'That\'s not a sword', 'The Rupeesmiths']
DeathMountain_texts = ['lost again', 'Alzheimer']
LostWoods_texts = ['thieves\' stump', 'He\'s got wood', 'Dancing pickles']
WishingWell_texts = ['Bottle for Bottle']
DesertPalace_texts = ['literacy moves']
MountainTower_texts = ['up up and away']
LinksHouse_texts = ['Home Sweet Home', 'Only one bed']
Lumberjacks_texts = ['Chop Chop', 'logfellas']
SickKid_texts = ['Next Time Stay Down']
Zora_texts = ['Splashes For Sale', 'Slippery when wet']
MagicShop_texts = ['Drug deal', 'Shrooms for days']
FluteBoy_texts = ['Stumped']
def string_to_credits(s, length):
    """Encode *s* for the credits font, centered in a field of *length*."""
    # Truncate to the field width, then pad evenly; when the padding is odd,
    # the extra space goes on the right.
    text = s[:length]
    pad = length - len(text)
    left = pad // 2
    text = ' ' * left + text + ' ' * (pad - left)
    return bytearray(char_to_credit_char(ch) for ch in text.lower())
def string_to_alttp_text(s, maxbytes=256):
    """Word-wrap *s* into ALttP dialog control bytes, capped at *maxbytes*.

    Lines are at most 14 characters wide; 0x74/0x75/0x76 start the three box
    rows, 0x7E scrolls, 0x73 pages, and 0x7F terminates the message.
    """
    lines = s.upper().split('\n')
    outbuf = bytearray()
    lineindex = 0
    while lines:
        linespace = 14
        line = lines.pop(0)
        words = line.split(' ')
        outbuf.append(0x74 if lineindex == 0 else 0x75 if lineindex == 1 else 0x76) # line starter
        while words:
            word = words.pop(0)
            # sanity check: if the word we have is more than 14 characters, we take as much as we can still fit and push the rest back for later
            if len(word) > 14:
                if linespace < 14:
                    word = ' ' + word
                word_first = word[:linespace]
                words.insert(0, word[linespace:])
                lines.insert(0, ' '.join(words))
                write_word(outbuf, word_first)
                break
            # A word after the first on a line needs one extra column for the
            # separating space, hence the linespace - 1 allowance.
            if len(word) <= (linespace if linespace == 14 else linespace - 1):
                if linespace < 14:
                    word = ' ' + word
                linespace -= len(word)
                write_word(outbuf, word)
            else:
                # ran out of space, push word and lines back and continue with next line
                words.insert(0, word)
                lines.insert(0, ' '.join(words))
                break
        lineindex += 1
        # Every third line needs a scroll (0x7E); from the fourth line on a
        # page marker (0x73) is also emitted when more text follows.
        if lineindex % 3 == 0 and lines:
            outbuf.append(0x7E)
        if lineindex >= 3 and lines:
            outbuf.append(0x73)
    # check for max length
    if len(outbuf) > maxbytes - 1:
        outbuf = outbuf[:maxbytes - 1]
    # make sure we interpret the end of box character
    if outbuf[-1] == 0x00:
        outbuf[-1] = 0x73
    outbuf.append(0x7F)
    return outbuf
def write_word(buf, word):
    """Append *word* to *buf* as (0x00, tile-index) byte pairs."""
    for ch in word:
        buf.append(0x00)
        buf.append(char_to_alttp_char(ch))
# Dialog-font tile indices for punctuation and kana.  Digits and uppercase
# ASCII are handled arithmetically in char_to_alttp_char(); anything not
# found here falls back to 0xFF (blank).
# Fixed: the original listed '~': 0xCE twice; the duplicate key (same value,
# so no behavior change) has been removed.
char_map = {' ': 0xFF,
            '?': 0xC6,
            '!': 0xC7,
            ',': 0xC8,
            '-': 0xC9,
            '…': 0xCC,
            '.': 0xCD,
            '~': 0xCE,
            "'": 0xD8,
            '’': 0xD8,
            '↑': 0xE0,
            '↓': 0xE1,
            '→': 0xE2,
            '←': 0xE3,
            'あ': 0x00,
            'い': 0x01,
            'う': 0x02,
            'え': 0x03,
            'お': 0x04,
            'や': 0x05,
            'ゆ': 0x06,
            'よ': 0x07,
            'か': 0x08,
            'き': 0x09,
            'く': 0x0A,
            'け': 0x0B,
            'こ': 0x0C,
            'わ': 0x0D,
            'を': 0x0E,
            'ん': 0x0F,
            'さ': 0x10,
            'し': 0x11,
            'す': 0x12,
            'せ': 0x13,
            'そ': 0x14,
            'が': 0x15,
            'ぎ': 0x16,
            'ぐ': 0x17,
            'た': 0x18,
            'ち': 0x19,
            'つ': 0x1A,
            'て': 0x1B,
            'と': 0x1C,
            'げ': 0x1D,
            'ご': 0x1E,
            'ざ': 0x1F,
            'な': 0x20,
            'に': 0x21,
            'ぬ': 0x22,
            'ね': 0x23,
            'の': 0x24,
            'じ': 0x25,
            'ず': 0x26,
            'ぜ': 0x27,
            'は': 0x28,
            'ひ': 0x29,
            'ふ': 0x2A,
            'へ': 0x2B,
            'ほ': 0x2C,
            'ぞ': 0x2D,
            'だ': 0x2E,
            'ぢ': 0x2F,
            'ま': 0x30,
            'み': 0x31,
            'む': 0x32,
            'め': 0x33,
            'も': 0x34,
            'づ': 0x35,
            'で': 0x36,
            'ど': 0x37,
            'ら': 0x38,
            'り': 0x39,
            'る': 0x3A,
            'れ': 0x3B,
            'ろ': 0x3C,
            'ば': 0x3D,
            'び': 0x3E,
            'ぶ': 0x3F,
            'べ': 0x40,
            'ぼ': 0x41,
            'ぱ': 0x42,
            'ぴ': 0x43,
            'ぷ': 0x44,
            'ぺ': 0x45,
            'ぽ': 0x46,
            'ゃ': 0x47,
            'ゅ': 0x48,
            'ょ': 0x49,
            'っ': 0x4A,
            'ぁ': 0x4B,
            'ぃ': 0x4C,
            'ぅ': 0x4D,
            'ぇ': 0x4E,
            'ぉ': 0x4F,
            'ア': 0x50,
            'イ': 0x51,
            'ウ': 0x52,
            'エ': 0x53,
            'オ': 0x54,
            'ヤ': 0x55,
            'ユ': 0x56,
            'ヨ': 0x57,
            'カ': 0x58,
            'キ': 0x59,
            'ク': 0x5A,
            'ケ': 0x5B,
            'コ': 0x5C,
            'ワ': 0x5D,
            'ヲ': 0x5E,
            'ン': 0x5F,
            'サ': 0x60,
            'シ': 0x61,
            'ス': 0x62,
            'セ': 0x63,
            'ソ': 0x64,
            'ガ': 0x65,
            'ギ': 0x66,
            'グ': 0x67,
            'タ': 0x68,
            'チ': 0x69,
            'ツ': 0x6A,
            'テ': 0x6B,
            'ト': 0x6C,
            'ゲ': 0x6D,
            'ゴ': 0x6E,
            'ザ': 0x6F,
            'ナ': 0x70,
            'ニ': 0x71,
            'ヌ': 0x72,
            'ネ': 0x73,
            'ノ': 0x74,
            'ジ': 0x75,
            'ズ': 0x76,
            'ゼ': 0x77,
            'ハ': 0x78,
            'ヒ': 0x79,
            'フ': 0x7A,
            'ヘ': 0x7B,
            'ホ': 0x7C,
            'ゾ': 0x7D,
            'ダ': 0x7E,
            'マ': 0x80,
            'ミ': 0x81,
            'ム': 0x82,
            'メ': 0x83,
            'モ': 0x84,
            'ヅ': 0x85,
            'デ': 0x86,
            'ド': 0x87,
            'ラ': 0x88,
            'リ': 0x89,
            'ル': 0x8A,
            'レ': 0x8B,
            'ロ': 0x8C,
            'バ': 0x8D,
            'ビ': 0x8E,
            'ブ': 0x8F,
            'ベ': 0x90,
            'ボ': 0x91,
            'パ': 0x92,
            'ピ': 0x93,
            'プ': 0x94,
            'ペ': 0x95,
            'ポ': 0x96,
            'ャ': 0x97,
            'ュ': 0x98,
            'ョ': 0x99,
            'ッ': 0x9A,
            'ァ': 0x9B,
            'ィ': 0x9C,
            'ゥ': 0x9D,
            'ェ': 0x9E,
            'ォ': 0x9F}
# Credits-font tile indices for punctuation; lowercase letters are handled
# arithmetically in char_to_credit_char(), anything else becomes 0x9F (blank).
credit_char_map = {' ': 0x9F,
                   ',': 0x37,
                   '.': 0x37,
                   '-': 0x36,
                   "'": 0x35}
def char_to_alttp_char(char):
    """Map one character to its ALttP dialog-font tile index."""
    code = ord(char)
    if 0x30 <= code <= 0x39:        # digits '0'-'9'
        return code + 0x70
    if 0x41 <= code <= 0x5A:        # uppercase 'A'-'Z'
        return code + 0x69
    # Everything else goes through the punctuation/kana table; unknown
    # characters render as blank (0xFF).
    return char_map.get(char, 0xFF)
def char_to_credit_char(char):
    """Map one character to its credits-font tile index."""
    code = ord(char)
    if 0x61 <= code <= 0x7A:        # lowercase 'a'-'z'
        return code - 0x47
    # Punctuation table lookup; unknown characters render as blank (0x9F).
    return credit_char_map.get(char, 0x9F)
|
LLCoolDave/ALttPEntranceRandomizer
|
Text.py
|
Python
|
mit
| 10,990
|
[
"VisIt"
] |
00d4543058a3a4c79a53f9dd167293860b9a268bc67d1ff611b5f5972acb92e0
|
#!/usr/bin/env python3
#* This file is part of the MOOSE framework
#* https://www.mooseframework.org
#*
#* All rights reserved, see COPYRIGHT for full restrictions
#* https://github.com/idaholab/moose/blob/master/COPYRIGHT
#*
#* Licensed under LGPL 2.1, please see LICENSE for details
#* https://www.gnu.org/licenses/lgpl-2.1.html
# Plot the MOOSE 'cooling' results against the equivalent GWB simulation.
# (The original comment mentioned gypsum_solubility, which appears to be a
# copy-paste leftover from a sibling script.)
import os
import sys
import matplotlib.pyplot as plt
# Read the MOOSE csv output; the first two lines are headers, so skip them.
# A context manager guarantees the file is closed even if parsing fails.
with open("gold/cooling_out.csv", "r") as f:
    data = [list(map(float, line.strip().split(","))) for line in f.readlines()[2:]]
# Column layout: [time, albite, max_microcline, muscovite, quartz, temperature]
albite = [x[1] for x in data]
max_micro = [x[2] for x in data]
muscovite = [x[3] for x in data]
quartz = [x[4] for x in data]
temp = [x[5] for x in data]
# Reference values from the Geochemist's Workbench run of the same system.
gwb_temp = [300, 245, 217.5, 162.5, 135, 80, 52.5, 25]
gwb_albite = [20.04, 14.63, 12.47, 9.2, 8.046, 6.578, 6.205, 6.016]
gwb_max_micro = [10.00, 15.85, 18.17, 21.73, 23, 24.59, 24.98, 25.19]
gwb_muscovite = [4.999, 5.001, 5.002, 5.002, 5.002, 5.002, 5.002, 5.002]
gwb_quartz = [2.000, 2.040, 2.063, 2.1, 2.114, 2.133, 2.138, 2.140]
plt.figure()
plt.plot(temp, albite, 'k-', linewidth = 2.0, label = 'Albite (MOOSE)')
plt.plot(temp, max_micro, 'r-', linewidth = 2.0, label = 'Microcline (MOOSE)')
plt.plot(temp, muscovite, 'g-', linewidth = 2.0, label = 'Muscovite (MOOSE)')
plt.plot(temp, quartz, 'b-', linewidth = 2.0, label = 'Quartz (MOOSE)')
plt.plot(gwb_temp, gwb_albite, 'ks', label = "Albite (GWB)")
plt.plot(gwb_temp, gwb_max_micro, 'rs', label = "Microcline (GWB)")
plt.plot(gwb_temp, gwb_muscovite, 'gs', label = "Muscovite (GWB)")
plt.plot(gwb_temp, gwb_quartz, 'bs', label = "Quartz (GWB)")
# Reverse the x axis so the plot reads hot -> cold, matching the simulation.
ax = plt.gca()
ax.set_xlim(ax.get_xlim()[::-1])
plt.legend()
plt.xlabel("Temperature (degC)")
plt.ylabel("precipitate volume (cm$^{3}$)")
plt.title("Minerals precipitated in a cooling solution")
plt.savefig("../../../doc/content/media/geochemistry/cooling.png")
sys.exit(0)
|
harterj/moose
|
modules/geochemistry/test/tests/time_dependent_reactions/cooling.py
|
Python
|
lgpl-2.1
| 1,931
|
[
"MOOSE"
] |
612ee009bc3610e227da448a3987562ebcd9a6d5b23e0b462301036b82e8fda7
|
#!/usr/bin/env python
"""snagnblast: fetch gene sequences from NCBI and BLAST them locally.

version 0.1
Minor version changes:
 - pep8 can make output directories directly

USAGE:
 $ python snagnblast.py accessions.txt_or_accessions.csv /BLAST/directory/ /output/directory/
"""
print("Warning! This script is depreciated in favor of snagnblast_multi.py")
import os
#import sys
#import re
import datetime
import subprocess
import argparse
from Bio import SeqIO, Entrez
#from Bio.SeqRecord import SeqRecord
from Bio.Seq import Seq
import pandas as pd
#import numpy as np
#from Bio.Alphabet import IUPAC
#from Bio.Blast import NCBIXML
from Bio.Blast.Applications import NcbiblastnCommandline
from Bio.Blast.Applications import NcbitblastxCommandline
from Bio.Align.Applications import ClustalwCommandline
# When DEBUG is True the hard-coded developer paths below are used instead of
# command-line arguments.
DEBUG = True
#%%
#define inputs
if DEBUG:
    genelist = os.path.expanduser("~/GitHub/FB/Ecoli_comparative_genomics/data/test_virgenes_bp.csv")
    blastdb = os.path.expanduser("~/BLAST/env_Coli")
    output = os.path.expanduser("~/GitHub/FB/Ecoli_comparative_genomics/results/")
    score_min = 70
    blasttype = "tblastx"
else:
    parser = argparse.ArgumentParser(description="This script takes a list of gene accessions \
from either a text file or a csv, grabs the sequencs from NCBI, and proceeds \
to use either blastn or tblastx to detect the presence of the genes in a custom \
database")
    parser.add_argument("genelist", help="file containing gene accessions. if delimited, use \
the headers in the example file as a template")
    parser.add_argument("blastdb", help="blastdb of interest")
    parser.add_argument("-o", "--output", help="directory in which to place the output files")
    parser.add_argument("-s", "--score_min", help="not currently used; will be used to \
determinine a scoring threshold")
    parser.add_argument("-t", "--blast_type", help="blastn or tblastx")
    args = parser.parse_args()
    genelist = args.genelist
    blastdb = args.blastdb
    blasttype = args.blast_type
    output = args.output
    score_min = args.score_min
# Run date is used to prefix all output files.
date = str(datetime.datetime.now().strftime('%Y%m%d'))
if not os.path.isdir(output):
    print("creating %s" % output)
    os.mkdir(output)
#%% open accessions file, determine type, and parse
Entrez.email = "alfredTheDaring@gmail.com"
print("reading in gene list")
# NOTE(review): this handle is never closed, and if the file is not a .csv
# neither 'genelist_type' nor 'genedf'/'genesred' are ever defined, so the
# code further down will fail with a NameError -- confirm intended handling.
genes = open(genelist, "r")
if genes.name.endswith("csv"):
    genelist_type = "delim"
    print("gene list is a comma-deliminated file")
    # 'n' looks like an intended header-names tuple but is never used.
    n = ("accession", "name", "phenotype", "function", "genome", "note", "source")
    genedf = pd.read_csv(genes, sep=",")
    genenames = genedf.iloc[0:, 1].tolist()
    genenames = [x for x in genenames if str(x) != 'nan']
    genesred = genedf.iloc[0:, 1:3]
else:
    print("Reading error; only accepts csv's")
#%% Grab sequences from NCBI, write out resulting fasta file
output_seq_dir = os.path.join(output, str(date+"files_from_grabHits"), "")
os.mkdir(output_seq_dir)
# defaults
gene = "stx1"
db = "nucleotide"
retmax = "100"
field = "[All Fields]"
organism = "Escherichia coli[porgn]"
len_start, len_end = "1", "1000"
use_history = "y" # y or n
#%%
# For (up to) the first five usable rows, esearch NCBI for each accession,
# efetch the matching fasta records into one file per gene, and queue a
# clustalw2 alignment command for that file.
seq_res_list = []
clustalw_comms = []
for i in range(0, len(genesred.index)):
    print(i)
    # NOTE(review): the 'i < 5' cap looks like a leftover debugging limit,
    # and comparing cells to the string "nan" only works if the column dtype
    # is str -- confirm both are intended.
    if i < 5 and genesred.iloc[i, 0] != "nan" and genesred.iloc[i, 1] != "nan":
        with open(os.path.join(output_seq_dir, str(genesred.iloc[i, 1] + "_seqs.fasta")), "w") as outfile:
            acc = genesred.iloc[i, 1]
            print(acc)
            length = genesred.iloc[i, 0]
            print(length)
            # Restrict hits to between len_start and twice the gene length.
            query = str(acc + field + " AND " + organism + " AND " + len_start + "[SLEN] : " +
                        str(round(length*2)) + "[SLEN]")
            esearch_handle = Entrez.esearch(db="nucleotide", term=query, usehistory=True, retmax=30)
            result = Entrez.read(esearch_handle)
            webEnv = result['WebEnv']
            print(webEnv)
            queryKey = result["QueryKey"]
            print(queryKey)
            # Fetch via the history server using the WebEnv/QueryKey pair.
            efetch_handle = Entrez.efetch(db="nucleotide",retmode="text",rettype="fasta",
                                          webenv=webEnv, query_key=queryKey)
            outfile.write(efetch_handle.read())
            efetch_handle.close()
            seq_res_list.append(os.path.join(output_seq_dir, str(genesred.iloc[i, 1] + "_seqs.fasta")))
            clustalw_comms.append(ClustalwCommandline("clustalw2",
                                  infile=os.path.join(output_seq_dir, str(genesred.iloc[i, 1] + "_seqs.fasta"))))
# search_res_list.append(search_handle)
#%%
# NOTE(review): this cell was left mid-edit in the original: the 'for' line
# below was missing its ':' and the Entrez.efetch call was truncated at
# "query_k", so the entire file was a SyntaxError and could not even be
# imported.  The minimal repairs below make the file parseable again, but
# 'search_res_list' and 'request' are never defined anywhere -- presumably
# 'seq_res_list' and 'esearch_handle' were meant; confirm before running.
for i in search_res_list:
    result = Entrez.read(request)
    webEnv = result["WebEnv"]
    queryKey = result["QueryKey"]
    handle = Entrez.efetch(db="nucleotide", retmode="xml", webenv=webEnv,
                           query_key=queryKey)
#%%
# NOTE(review): 'sequence_handle' is not defined at this point; leftover from
# an earlier version of the script.
seqs = SeqIO.parse(sequence_handle, "fasta")
def esearch():
    # Builds an esearch URL but neither returns nor sends it; 'len_range' is
    # never defined anywhere in the file.
    esearchCall = str("https://eutils.ncbi.nlm.nih.gov/entrez/eutils/esearch.fcgi?db=" + db +
                      "&retmax=" + retmax + "&term=" + acc + field + "+" + organism + "+" +
                      len_range + "&usehistory=" + use_history)
def efetch():
    # Same: builds an efetch URL but never uses it; 'webenv' is undefined.
    efetchCall = str("https://eutils.ncbi.nlm.nih.gov/entrez/eutils/efetch.cgi?db=" +
                     db + "&query_key=1&WebEnv=" + webenv + "&rettype=fasta")
# Fixed: enumerate() yields (index, item); the original unpacked the pair in
# reverse order ('for acc, index in enumerate(...)').  'accessions' is still
# undefined at this point -- confirm where it was meant to come from.
for index, acc in enumerate(accessions):
    print("\n\nFetching %i accessions from NCBI" % len(accessions))
    sequence_handle = Entrez.efetch(db="nucleotide", id=accessions, rettype="fasta")
    seqs = SeqIO.parse(sequence_handle, "fasta")
with open(str(os.path.join(output, date)+"_sequences.fa"), "w") as fasta_output:
    SeqIO.write(seqs, fasta_output, "fasta")
#%%
sequences_fasta = open(str(os.path.join(output, date)+"_sequences.fa"), "r")
entrez_results = list(SeqIO.parse(sequences_fasta, "fasta"))
#%%
for i, rec in enumerate(entrez_results):
    if i < 5:
        protein = entrez_results
#%%
print("returned %i accessions from ncbi" % len(entrez_results))
if(len(accessions) != len(entrez_results)):
    print("Warning! not all accessions were found!")
sequences_fasta.close()
#%%
def run_blastn():
    """Run a dc-megablast search of the fetched fasta against blastdb.

    Returns the path of the tab-separated (outfmt 7) results file.
    """
    results_tab = str(os.path.join(output, date) + "_dcmegablast_results.tab")
    cline = NcbiblastnCommandline(query=fasta_output.name, db=blastdb,
                                  evalue=10, outfmt=7, out=results_tab)
    # Options Biopython's wrapper doesn't expose are appended verbatim.
    extra = " -num_threads 4 -max_target_seqs 2000 -task dc-megablast"
    command = str(cline) + extra
    print("Running blastn search...")
    subprocess.call(command, shell=True)
    return results_tab
def run_tblastx():
    """Run a tblastx search of the fetched fasta against blastdb.

    Returns the path of the tab-separated (outfmt 7) results file.
    """
    results_tab = str(os.path.join(output, date) + "_tblastx_results.tab")
    cline = NcbitblastxCommandline(query=fasta_output.name, db=blastdb,
                                   evalue=10, outfmt=7, out=results_tab)
    # Options Biopython's wrapper doesn't expose are appended verbatim.
    extra = " -num_threads 4 -max_target_seqs 2000 -query_gencode 11 -db_gencode 11"
    command = str(cline) + extra
    print("Running tblastx search...")
    subprocess.call(command, shell=True)
    return results_tab
#%% Execute
if blasttype == "blastn":
    output_path_tab = run_blastn()
elif blasttype == "tblastx":
    output_path_tab = run_tblastx()
else:
    # NOTE(review): execution continues after this message and then fails
    # with a NameError on output_path_tab -- consider exiting here instead.
    print("you need to use either blastn or tblastx, sorry!")
#%% parse output
print("cleaning up the csv output")
colnames = ["query_id", "subject_id", "identity_perc", "alignment_length", "mismatches",
            "gap_opens", "q_start", "q_end", "s_start", "s_end", "evalue", "bit_score"]
csv_results = pd.read_csv(open(output_path_tab), comment="#", sep="\t", names=colnames)
#This regex will probably break things rather badly before too long...
# it looks for capital letter and numbers, dot, number, ie SHH11555JJ8.99
csv_results["accession"] = csv_results.query_id.str.extract('(?P<accession>[A-Z _\d]*\.\d*)')
#%% write out results with new headers or with new headers and merged metadat from accessions.tab
output_path_csv = str(os.path.splitext(output_path_tab)[0]+".csv")
# NOTE(review): the open() handles passed to read_csv/to_csv are never
# closed, and 'genelist_type' is undefined when the input was not a csv.
if genelist_type == "delim":
    results_annotated = pd.merge(csv_results, genedf, how="left", on="accession")
    results_annotated.to_csv(open(output_path_csv, "w"))
else:
    csv_results.to_csv(open(output_path_csv, "w"))
|
nickp60/open_utils
|
grabHits/grabHits.py
|
Python
|
mit
| 8,667
|
[
"BLAST"
] |
82c72ec5035d0f9ef981ac154b50ab2037ecd229f1ce443dd0ff99d1d09d7ffb
|
"""rbf - Radial basis functions for interpolation/smoothing scattered N-D data.
Written by John Travers <jtravs@gmail.com>, February 2007
Based closely on Matlab code by Alex Chirokov
Additional, large, improvements by Robert Hetland
Some additional alterations by Travis Oliphant
Interpolation with multi-dimensional target domain by Josua Sassen
Permission to use, modify, and distribute this software is given under the
terms of the SciPy (BSD style) license. See LICENSE.txt that came with
this distribution for specifics.
NO WARRANTY IS EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK.
Copyright (c) 2006-2007, Robert Hetland <hetland@tamu.edu>
Copyright (c) 2007, John Travers <jtravs@gmail.com>
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.
* Neither the name of Robert Hetland nor the names of any
contributors may be used to endorse or promote products derived
from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
from __future__ import division, print_function, absolute_import
import sys
import numpy as np
from scipy import linalg
from scipy._lib.six import callable, get_method_function, get_function_code
from scipy.special import xlogy
from scipy.spatial.distance import cdist, pdist, squareform
__all__ = ['Rbf']
class Rbf(object):
"""
Rbf(*args)
A class for radial basis function interpolation of functions from
N-D scattered data to an M-D domain.
Parameters
----------
*args : arrays
x, y, z, ..., d, where x, y, z, ... are the coordinates of the nodes
and d is the array of values at the nodes
function : str or callable, optional
The radial basis function, based on the radius, r, given by the norm
(default is Euclidean distance); the default is 'multiquadric'::
'multiquadric': sqrt((r/self.epsilon)**2 + 1)
'inverse': 1.0/sqrt((r/self.epsilon)**2 + 1)
'gaussian': exp(-(r/self.epsilon)**2)
'linear': r
'cubic': r**3
'quintic': r**5
'thin_plate': r**2 * log(r)
If callable, then it must take 2 arguments (self, r). The epsilon
parameter will be available as self.epsilon. Other keyword
arguments passed in will be available as well.
epsilon : float, optional
Adjustable constant for gaussian or multiquadrics functions
- defaults to approximate average distance between nodes (which is
a good start).
smooth : float, optional
Values greater than zero increase the smoothness of the
approximation. 0 is for interpolation (default), the function will
always go through the nodal points in this case.
norm : str, callable, optional
A function that returns the 'distance' between two points, with
inputs as arrays of positions (x, y, z, ...), and an output as an
array of distance. E.g., the default: 'euclidean', such that the result
is a matrix of the distances from each point in ``x1`` to each point in
``x2``. For more options, see documentation of
`scipy.spatial.distances.cdist`.
mode : str, optional
Mode of the interpolation, can be '1-D' (default) or 'N-D'. When it is
'1-D' the data `d` will be considered as 1-D and flattened
internally. When it is 'N-D' the data `d` is assumed to be an array of
shape (n_samples, m), where m is the dimension of the target domain.
Attributes
----------
N : int
The number of data points (as determined by the input arrays).
di : ndarray
The 1-D array of data values at each of the data coordinates `xi`.
xi : ndarray
The 2-D array of data coordinates.
function : str or callable
The radial basis function. See description under Parameters.
epsilon : float
Parameter used by gaussian or multiquadrics functions. See Parameters.
smooth : float
Smoothing parameter. See description under Parameters.
norm : str or callable
The distance function. See description under Parameters.
mode : str
Mode of the interpolation. See description under Parameters.
nodes : ndarray
A 1-D array of node values for the interpolation.
A : internal property, do not use
Examples
--------
>>> from scipy.interpolate import Rbf
>>> x, y, z, d = np.random.rand(4, 50)
>>> rbfi = Rbf(x, y, z, d) # radial basis function interpolator instance
>>> xi = yi = zi = np.linspace(0, 1, 20)
>>> di = rbfi(xi, yi, zi) # interpolated values
>>> di.shape
(20,)
"""
# Available radial basis functions that can be selected as strings;
# they all start with _h_ (self._init_function relies on that)
def _h_multiquadric(self, r):
return np.sqrt((1.0/self.epsilon*r)**2 + 1)
def _h_inverse_multiquadric(self, r):
return 1.0/np.sqrt((1.0/self.epsilon*r)**2 + 1)
def _h_gaussian(self, r):
return np.exp(-(1.0/self.epsilon*r)**2)
def _h_linear(self, r):
return r
def _h_cubic(self, r):
return r**3
def _h_quintic(self, r):
return r**5
def _h_thin_plate(self, r):
return xlogy(r**2, r)
# Setup self._function and do smoke test on initial r
def _init_function(self, r):
    """Resolve ``self.function`` into the callable ``self._function``
    and smoke-test it on the distance matrix ``r``.

    ``self.function`` may be the name of a built-in kernel (a ``_h_*``
    method) or a callable taking ``(r)`` or ``(self, r)``.

    Returns the kernel evaluated at ``r``; raises ValueError for an
    unknown name, a callable with the wrong arity, or a callable whose
    output shape does not match ``r``.
    """
    if isinstance(self.function, str):
        self.function = self.function.lower()
        # Accept a few aliases for the built-in kernel names.
        _mapped = {'inverse': 'inverse_multiquadric',
                   'inverse multiquadric': 'inverse_multiquadric',
                   'thin-plate': 'thin_plate'}
        if self.function in _mapped:
            self.function = _mapped[self.function]
        # Built-in kernels are methods named "_h_<kernel>".
        func_name = "_h_" + self.function
        if hasattr(self, func_name):
            self._function = getattr(self, func_name)
        else:
            functionlist = [x[3:] for x in dir(self)
                            if x.startswith('_h_')]
            raise ValueError("function must be a callable or one of " +
                             ", ".join(functionlist))
        # NOTE: the original code re-did the getattr here a second time;
        # that duplicate (unreachable after the raise) has been removed.
    elif callable(self.function):
        allow_one = False
        if hasattr(self.function, 'func_code') or \
           hasattr(self.function, '__code__'):
            # Plain function: may legitimately take a single argument (r).
            val = self.function
            allow_one = True
        elif hasattr(self.function, "im_func"):
            # Bound method (Python 2).
            val = get_method_function(self.function)
        elif hasattr(self.function, "__call__"):
            # Callable object: inspect its __call__.
            val = get_method_function(self.function.__call__)
        else:
            raise ValueError("Cannot determine number of arguments to "
                             "function")
        argcount = get_function_code(val).co_argcount
        if allow_one and argcount == 1:
            self._function = self.function
        elif argcount == 2:
            # Two arguments means (self, r): bind the callable to this
            # instance so it can read self.epsilon etc.
            if sys.version_info[0] >= 3:
                self._function = self.function.__get__(self, Rbf)
            else:
                import new
                self._function = new.instancemethod(self.function, self,
                                                    Rbf)
        else:
            raise ValueError("Function argument must take 1 or 2 "
                             "arguments.")
    # Smoke test: the kernel must map r to an array of the same shape.
    a0 = self._function(r)
    if a0.shape != r.shape:
        raise ValueError("Callable must take array and return array of "
                         "the same shape")
    return a0
def __init__(self, *args, **kwargs):
    """Build the interpolator from coordinate arrays and data values.

    ``args`` is x, y, z, ..., d: coordinate arrays followed by the data
    array.  Keyword options (mode, norm, epsilon, smooth, function) are
    popped from ``kwargs``; anything left over is attached to ``self``
    for use by user-supplied kernel callables.
    """
    # Flatten every coordinate array into one (n_dims, N) array.
    # np.float64 is the dtype formerly spelled np.float_ (that alias
    # was removed in NumPy 2.0; the behavior is identical).
    self.xi = np.asarray([np.asarray(a, dtype=np.float64).flatten()
                          for a in args[:-1]])
    self.N = self.xi.shape[-1]

    self.mode = kwargs.pop('mode', '1-D')
    if self.mode == '1-D':
        # Scalar-valued data: flatten to a 1-D array.
        self.di = np.asarray(args[-1]).flatten()
        self._target_dim = 1
    elif self.mode == 'N-D':
        # Vector-valued data of shape (n_samples, m).
        self.di = np.asarray(args[-1])
        self._target_dim = self.di.shape[-1]
    else:
        raise ValueError("Mode has to be 1-D or N-D.")

    if not all([x.size == self.di.shape[0] for x in self.xi]):
        raise ValueError("All arrays must be equal length.")

    self.norm = kwargs.pop('norm', 'euclidean')
    self.epsilon = kwargs.pop('epsilon', None)
    if self.epsilon is None:
        # Default epsilon: approximate average distance between nodes,
        # estimated from a bounding hypercube (degenerate axes dropped).
        ximax = np.amax(self.xi, axis=1)
        ximin = np.amin(self.xi, axis=1)
        edges = ximax - ximin
        edges = edges[np.nonzero(edges)]
        self.epsilon = np.power(np.prod(edges)/self.N, 1.0/edges.size)

    self.smooth = kwargs.pop('smooth', 0.0)
    self.function = kwargs.pop('function', 'multiquadric')

    # Attach anything left in kwargs to self for use by any user-callable
    # function or to save on the object returned.
    for item, value in kwargs.items():
        setattr(self, item, value)

    # Compute interpolation weights by solving A @ nodes = di.
    if self._target_dim > 1:
        # More than one target dimension: factor A once and reuse the
        # factorization for every column of di.
        self.nodes = np.zeros((self.N, self._target_dim), dtype=self.di.dtype)
        lu, piv = linalg.lu_factor(self.A)
        for i in range(self._target_dim):
            self.nodes[:, i] = linalg.lu_solve((lu, piv), self.di[:, i])
    else:
        self.nodes = linalg.solve(self.A, self.di)
@property
def A(self):
    """Interpolation system matrix: the kernel applied to all pairwise
    node distances, with the smoothing term subtracted on the diagonal.
    """
    # this only exists for backwards compatibility: self.A was available
    # and, at least technically, public.
    r = squareform(pdist(self.xi.T, self.norm))  # Pairwise norm
    return self._init_function(r) - np.eye(self.N)*self.smooth
def _call_norm(self, x1, x2):
return cdist(x1.T, x2.T, self.norm)
def __call__(self, *args):
    """Evaluate the interpolator at the given coordinate arrays.

    All coordinate arrays must share one shape; the result has that
    shape (with an extra trailing axis of size ``_target_dim`` in
    N-D mode).
    """
    coords = [np.asarray(a) for a in args]
    if not all([c.shape == other.shape for c in coords for other in coords]):
        raise ValueError("Array lengths must be equal")
    if self._target_dim > 1:
        out_shape = coords[0].shape + (self._target_dim,)
    else:
        out_shape = coords[0].shape
    # Flatten the query points into an (n_dims, n_points) array.
    flattened = np.asarray([c.flatten() for c in coords], dtype=np.float_)
    distances = self._call_norm(flattened, self.xi)
    return np.dot(self._function(distances), self.nodes).reshape(out_shape)
|
jamestwebber/scipy
|
scipy/interpolate/rbf.py
|
Python
|
bsd-3-clause
| 11,970
|
[
"Gaussian"
] |
f1c4e0d52fb2b9c59cd778d183ea37b89c05ccaa7dbb3efd4a84883a16d41f4a
|
"""Simple VTK scene.
This script displays a simple 3D scene containing a single actor (a
cube) that the user can interact with via the mouse and keyboard. The
purpose of the script is to illustrate the VTK visualization pipeline
and how to use VTK in Python.
You can run the script from the command line by typing
python isosurface.py
"""
import vtk
# Generate polygon data for a cube
cube = vtk.vtkCubeSource()
# Create a mapper and an actor for the cube data
cube_mapper = vtk.vtkPolyDataMapper()
cube_mapper.SetInput(cube.GetOutput())
cube_actor = vtk.vtkActor()
cube_actor.SetMapper(cube_mapper)
cube_actor.GetProperty().SetColor(1.0, 0.0, 0.0) # make the cube red
# Create a renderer and add the cube actor to it
renderer = vtk.vtkRenderer()
renderer.SetBackground(0.0, 0.0, 0.0) # make the background black
renderer.AddActor(cube_actor)
# Create a render window
render_window = vtk.vtkRenderWindow()
render_window.SetWindowName("Simple VTK scene")
render_window.SetSize(400, 400)
render_window.AddRenderer(renderer)
# Create an interactor
interactor = vtk.vtkRenderWindowInteractor()
interactor.SetRenderWindow(render_window)
# Initialize the interactor and start the rendering loop
interactor.Initialize()
render_window.Render()
interactor.Start()
|
RDeckers/ScientificVisualization-1TD389
|
Assignments/getting_started/cube.py
|
Python
|
gpl-3.0
| 1,267
|
[
"VTK"
] |
3738b3895350d3d146bc7c3c3a1d82371994cf8f162d3da902c0b1d618045973
|
#!/bin/env python
# Automatically translated python version of
# OpenSceneGraph example program "osgpointsprite"
# !!! This program will need manual tuning before it will work. !!!
import sys
from osgpypp import osg
from osgpypp import osgDB
from osgpypp import osgViewer
# Translated from file 'osgpointsprite.cpp'
# OpenSceneGraph example, osgpointsprite.
#*
#* Permission is hereby granted, free of charge, to any person obtaining a copy
#* of this software and associated documentation files (the "Software"), to deal
#* in the Software without restriction, including without limitation the rights
#* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
#* copies of the Software, and to permit persons to whom the Software is
#* furnished to do so, subject to the following conditions:
#*
#* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
#* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
#* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
#* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
#* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
#* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
#* THE SOFTWARE.
#
#include <osg/PointSprite>
#include <osg/BlendFunc>
#include <osg/StateAttribute>
#include <osg/Point>
#include <osg/Geometry>
#include <osg/Texture2D>
#include <osg/TexEnv>
#include <osg/GLExtensions>
#include <osg/TexEnv>
#include <osgDB/ReadFile>
#include <osgViewer/Viewer>
def makeGalaxy(nvertices):
    """Build an osg.Geode holding a two-armed spiral "galaxy" point cloud.

    The original auto-translation left C++ syntax in place
    (``osg.Geode *makeGalaxy(unsigned nvertices)``, a C-style ``for``
    loop, ``rand()/RAND_MAX``); this is the faithful Python rendering:
    random jitter in [0, 2) on x/y and [-1.5, 1.5)/(val+1) on z, colors
    fading from yellow (ini) to blue (fin) along the arms.
    """
    import math
    import random

    geode = osg.Geode()
    galaxy = osg.Geometry()
    vertices = osg.Vec3Array()
    colors = osg.Vec4Array()
    ini = osg.Vec4(1, 1, 0, 1)
    fin = osg.Vec4(0, 0, 1, 1)

    # Formula for the two spirals
    for i in range(nvertices // 2):
        val = i * 2 / float(nvertices) * 2 * 3.14159265359
        # random.random() plays the role of rand()/RAND_MAX in [0, 1).
        modx1 = random.random() * 2
        mody1 = random.random() * 2
        modx2 = random.random() * 2
        mody2 = random.random() * 2
        modz1 = (random.random() - 0.5) * 3 / (val + 1)
        modz2 = (random.random() - 0.5) * 3 / (val + 1)
        vertices.push_back(osg.Vec3(math.cos(val) * val + modx1,
                                    math.sin(val) * val + mody1, modz1))
        vertices.push_back(osg.Vec3(-math.cos(val) * val + modx2,
                                    -math.sin(val) * val + mody2, modz2))
        colors.push_back(ini + (fin - ini) * (i * 2 / float(nvertices)))
        colors.push_back(ini + (fin - ini) * (i * 2 / float(nvertices)))

    galaxy.setVertexArray(vertices)
    galaxy.setColorArray(colors, osg.Array.BIND_PER_VERTEX)
    galaxy.addPrimitiveSet(osg.DrawArrays(osg.PrimitiveSet.POINTS, 0, nvertices))
    geode.addDrawable(galaxy)
    return geode
def makeStateSet(size):
    """Create an osg.StateSet that renders points as blended, textured sprites
    of the given pixel ``size``.
    """
    state_set = osg.StateSet()

    # Setup cool blending.
    state_set.setMode(GL_BLEND, osg.StateAttribute.ON)
    blend_func = osg.BlendFunc()
    blend_func.setFunction(osg.BlendFunc.SRC_ALPHA, osg.BlendFunc.DST_ALPHA)
    state_set.setAttributeAndModes(blend_func, osg.StateAttribute.ON)

    # Setup the point sprites.
    sprite_attr = osg.PointSprite()
    state_set.setTextureAttributeAndModes(0, sprite_attr, osg.StateAttribute.ON)

    # Give some size to the points to be able to see the sprite.
    point_attr = osg.Point()
    point_attr.setSize(size)
    state_set.setAttribute(point_attr)

    # Disable depth test to avoid sort problems, and disable lighting.
    state_set.setMode(GL_DEPTH_TEST, osg.StateAttribute.OFF)
    state_set.setMode(GL_LIGHTING, osg.StateAttribute.OFF)

    # The texture for the sprites.
    sprite_tex = osg.Texture2D()
    sprite_tex.setImage(osgDB.readImageFile("Images/particle.rgb"))
    state_set.setTextureAttributeAndModes(0, sprite_tex, osg.StateAttribute.ON)
    return state_set
def main(argv):
    """Show a 5000-point sprite galaxy in an osgViewer window.

    The original auto-translation kept the C++ signature
    (``int main(int, char *[])``); this is the valid Python equivalent.
    ``argv`` is accepted for symmetry with the call site but unused.
    """
    viewer = osgViewer.Viewer()
    # Make the galaxy of points.
    node = makeGalaxy(5000)
    node.setStateSet(makeStateSet(10.0))
    viewer.setSceneData(node)
    return viewer.run()


if __name__ == "__main__":
    main(sys.argv)
|
JaneliaSciComp/osgpyplusplus
|
examples/rough_translated1/osgpointsprite.py
|
Python
|
bsd-3-clause
| 3,976
|
[
"Galaxy"
] |
2e9a6ef25545951a7d610c0557dfe0202d67cac52d49b53af096bedb46549d66
|
from __future__ import print_function
import sys
import numpy as np
from ase import Atoms
from ase.io import write, read
from ase.calculators.singlepoint import SinglePointCalculator
# Round-trip test for the ASE I/O formats: build a small AuH structure,
# write it in every supported format, read it back where possible and
# compare positions/cell/extra arrays/calculator results.

a = 5.0      # cubic cell edge
d = 1.9      # Au-H bond length
c = a / 2    # center the molecule in the cell
atoms = Atoms('AuH',
              positions=[(c, c, 0), (c, c, d)],
              cell=(a, a, 2 * d),
              pbc=(0, 0, 1))
# Per-atom custom array; only some formats (cfg, extxyz) can round-trip it.
extra = np.array([2.3, 4.2])
atoms.set_array('extra', extra)
atoms *= (1, 1, 2)  # repeat along z
images = [atoms.copy(), atoms.copy()]
# Formats that support reading.
r = ['xyz', 'traj', 'cube', 'pdb', 'cfg', 'struct', 'cif', 'gen', 'extxyz']

# attach some results to the Atoms. These are serialised by the extxyz writer.
spc = SinglePointCalculator(atoms,
                            energy=-1.0,
                            stress=[1.0, 2.0, 3.0, 4.0, 5.0, 6.0],
                            forces=-1.0*atoms.get_positions())
atoms.set_calculator(spc)

# json/db formats need the json module.
try:
    import json
except ImportError:
    pass
else:
    r += ['json', 'db']

# etsf needs ScientificPython >= 2.8 for numpy support.
try:
    import Scientific
    version = Scientific.__version__.split('.')
    print('Found ScientificPython version: ', Scientific.__version__)
    if list(map(int, version)) < [2, 8]:
        print('ScientificPython 2.8 or greater required for numpy support')
        raise ImportError
except ImportError:
    print('No Scientific python found. Check your PYTHONPATH')
else:
    r += ['etsf']

# Formats that support writing (readable ones plus write-only ones).
w = r + ['xsf', 'findsym']

# png/eps writers need matplotlib.
try:
    import matplotlib
except ImportError:
    pass
else:
    w += ['png', 'eps']

# cif reading is skipped on Python 3 here.
if sys.version_info[0] == 3:
    r.remove('cif')

# Formats that can only store a single image (no trajectories).
only_one_image = ['cube', 'png', 'eps', 'cfg', 'struct', 'etsf', 'gen',
                  'json', 'db']

# NOTE(review): the loop variable shadows the builtin `format`; kept as-is.
for format in w:
    print(format, 'O', end=' ')
    fname1 = 'io-test.1.' + format
    fname2 = 'io-test.2.' + format
    write(fname1, atoms, format=format)
    if format not in only_one_image:
        write(fname2, images, format=format)

    if format in r:
        print('I')
        a1 = read(fname1)
        # Positions must always survive the round trip.
        assert np.all(np.abs(a1.get_positions() -
                             atoms.get_positions()) < 1e-6)
        # Only some formats preserve the cell.
        if format in ['traj', 'cube', 'cfg', 'struct', 'gen', 'extxyz']:
            assert np.all(np.abs(a1.get_cell() - atoms.get_cell()) < 1e-6)
        # Only cfg/extxyz round-trip custom per-atom arrays.
        if format in ['cfg', 'extxyz']:
            assert np.all(np.abs(a1.get_array('extra') -
                                 atoms.get_array('extra')) < 1e-6)
        # extxyz also serialises pbc and calculator results.
        if format in ['extxyz']:
            assert np.all(a1.get_pbc() == atoms.get_pbc())
            assert np.all(a1.get_potential_energy() == atoms.get_potential_energy())
            assert np.all(a1.get_stress() == atoms.get_stress())
            assert np.all(abs(a1.get_forces() - atoms.get_forces()) < 1e-6)
        if format not in only_one_image:
            a2 = read(fname2)
            a3 = read(fname2, index=0)
            a4 = read(fname2, index=slice(None))
            if format in ['cif'] and sys.platform in ['win32']:
                # Fails on Windows:
                # https://trac.fysik.dtu.dk/projects/ase/ticket/62
                pass
            else:
                assert len(a4) == 2
    else:
        print()
|
suttond/MODOI
|
ase/test/fio/oi.py
|
Python
|
lgpl-3.0
| 3,087
|
[
"ASE"
] |
f2fd3f74cff265de65731cc374e4a6647ee2a7ddf86c138ef281bf53b1c1657e
|
import logging
import os
from django.views.generic import View
from django.http import HttpResponse
from django.conf import settings
from django.utils.decorators import method_decorator
from django.views.decorators.csrf import ensure_csrf_cookie
class FrontendAppView(View):
    """
    Serves the compiled frontend entry point (only works if you have run `yarn
    run build`).
    """

    @method_decorator(ensure_csrf_cookie)
    def dispatch(self, *args, **kwargs):
        # Ensure the CSRF cookie is set on every response so the SPA can
        # make authenticated POST requests.
        return super().dispatch(*args, **kwargs)

    def get(self, request):
        # Serve the pre-built index.html from the React build directory.
        # Assumes settings.REACT_APP_DIR points at the frontend project
        # root -- TODO confirm against settings.
        try:
            with open(os.path.join(str(settings.REACT_APP_DIR), 'build', 'index.html')) as f:
                return HttpResponse(f.read())
        except FileNotFoundError:
            # 501: the production build has not been generated yet.
            logging.exception('Production build of app not found')
            return HttpResponse(
                """
                This URL is only used when you have built the production
                version of the app. Visit http://localhost:3000/ instead, or
                run `yarn run build` to test the production version.
                """,
                status=501,
            )
|
Hawk94/dust
|
backend/views.py
|
Python
|
mit
| 1,140
|
[
"VisIt"
] |
0b52732016196c5c75b116f651d4ac9807a644a60e3fd2e008d8fab02ebbc355
|
#! /usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
import os
import libcst as cst
import pathlib
import sys
from typing import (Any, Callable, Dict, List, Sequence, Tuple)
def partition(
    predicate: Callable[[Any], bool],
    iterator: Sequence[Any]
) -> Tuple[List[Any], List[Any]]:
    """A stable, out-of-place partition.

    Returns ``(true_list, false_list)``: the items for which ``predicate``
    holds, then the rest, each in original order.
    """
    buckets = ([], [])
    for element in iterator:
        # int(bool) indexes bucket 0 (false) or 1 (true).
        buckets[int(predicate(element))].append(element)
    return buckets[1], buckets[0]
class datalabelingCallTransformer(cst.CSTTransformer):
    """libcst transformer that rewrites positional datalabeling client calls
    into the single-``request``-dict calling convention, preserving the
    retry/timeout/metadata control parameters as keyword arguments.
    """
    # Keyword arguments that are control-plane options, not request fields.
    CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata')
    # Map of client method name -> ordered tuple of its request parameters.
    METHOD_TO_PARAMS: Dict[str, Tuple[str]] = {
        'create_annotation_spec_set': ('parent', 'annotation_spec_set', ),
        'create_dataset': ('parent', 'dataset', ),
        'create_evaluation_job': ('parent', 'job', ),
        'create_instruction': ('parent', 'instruction', ),
        'delete_annotated_dataset': ('name', ),
        'delete_annotation_spec_set': ('name', ),
        'delete_dataset': ('name', ),
        'delete_evaluation_job': ('name', ),
        'delete_instruction': ('name', ),
        'export_data': ('name', 'annotated_dataset', 'output_config', 'filter', 'user_email_address', ),
        'get_annotated_dataset': ('name', ),
        'get_annotation_spec_set': ('name', ),
        'get_data_item': ('name', ),
        'get_dataset': ('name', ),
        'get_evaluation': ('name', ),
        'get_evaluation_job': ('name', ),
        'get_example': ('name', 'filter', ),
        'get_instruction': ('name', ),
        'import_data': ('name', 'input_config', 'user_email_address', ),
        'label_image': ('parent', 'basic_config', 'feature', 'image_classification_config', 'bounding_poly_config', 'polyline_config', 'segmentation_config', ),
        'label_text': ('parent', 'basic_config', 'feature', 'text_classification_config', 'text_entity_extraction_config', ),
        'label_video': ('parent', 'basic_config', 'feature', 'video_classification_config', 'object_detection_config', 'object_tracking_config', 'event_config', ),
        'list_annotated_datasets': ('parent', 'filter', 'page_size', 'page_token', ),
        'list_annotation_spec_sets': ('parent', 'filter', 'page_size', 'page_token', ),
        'list_data_items': ('parent', 'filter', 'page_size', 'page_token', ),
        'list_datasets': ('parent', 'filter', 'page_size', 'page_token', ),
        'list_evaluation_jobs': ('parent', 'filter', 'page_size', 'page_token', ),
        'list_examples': ('parent', 'filter', 'page_size', 'page_token', ),
        'list_instructions': ('parent', 'filter', 'page_size', 'page_token', ),
        'pause_evaluation_job': ('name', ),
        'resume_evaluation_job': ('name', ),
        'search_evaluations': ('parent', 'filter', 'page_size', 'page_token', ),
        'search_example_comparisons': ('parent', 'page_size', 'page_token', ),
        'update_evaluation_job': ('evaluation_job', 'update_mask', ),
    }

    def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode:
        """Rewrite a recognized client method call; leave others untouched."""
        try:
            key = original.func.attr.value
            kword_params = self.METHOD_TO_PARAMS[key]
        except (AttributeError, KeyError):
            # Either not a method from the API or too convoluted to be sure.
            return updated

        # If the existing code is valid, keyword args come after positional args.
        # Therefore, all positional args must map to the first parameters.
        args, kwargs = partition(lambda a: not bool(a.keyword), updated.args)
        if any(k.keyword.value == "request" for k in kwargs):
            # We've already fixed this file, don't fix it again.
            return updated

        # Separate control parameters from request-field keywords.
        kwargs, ctrl_kwargs = partition(
            lambda a: a.keyword.value not in self.CTRL_PARAMS,
            kwargs
        )

        # Positionals beyond the request parameters are control parameters.
        args, ctrl_args = args[:len(kword_params)], args[len(kword_params):]
        ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl))
                           for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS))

        request_arg = cst.Arg(
            value=cst.Dict([
                cst.DictElement(
                    cst.SimpleString("'{}'".format(name)),
                    cst.Element(value=arg.value)
                )
                # Note: the args + kwargs looks silly, but keep in mind that
                # the control parameters had to be stripped out, and that
                # those could have been passed positionally or by keyword.
                for name, arg in zip(kword_params, args + kwargs)]),
            keyword=cst.Name("request")
        )

        return updated.with_changes(
            args=[request_arg] + ctrl_kwargs
        )
def fix_files(
    in_dir: pathlib.Path,
    out_dir: pathlib.Path,
    *,
    transformer=datalabelingCallTransformer(),
):
    """Duplicate the input dir to the output dir, fixing file method calls.

    Preconditions:
    * in_dir is a real directory
    * out_dir is a real, empty directory
    """
    python_files = (
        pathlib.Path(os.path.join(root, name))
        for root, _, files in os.walk(in_dir)
        for name in files
        if os.path.splitext(name)[1] == ".py"
    )
    for src_path in python_files:
        with open(src_path, 'r') as handle:
            original_source = handle.read()

        # Parse the source and apply the method-call fixes.
        rewritten = cst.parse_module(original_source).visit(transformer)

        # Mirror the directory structure under out_dir and write the result.
        dest_path = out_dir.joinpath(src_path.relative_to(in_dir))
        dest_path.parent.mkdir(parents=True, exist_ok=True)
        with open(dest_path, 'w') as handle:
            handle.write(rewritten.code)
if __name__ == '__main__':
    # CLI entry point: parse the input/output directories, validate them,
    # then run the fixer over the whole tree.
    parser = argparse.ArgumentParser(
        description="""Fix up source that uses the datalabeling client library.
The existing sources are NOT overwritten but are copied to output_dir with changes made.
Note: This tool operates at a best-effort level at converting positional
parameters in client method calls to keyword based parameters.
Cases where it WILL FAIL include
A) * or ** expansion in a method call.
B) Calls via function or method alias (includes free function calls)
C) Indirect or dispatched calls (e.g. the method is looked up dynamically)
These all constitute false negatives. The tool will also detect false
positives when an API method shares a name with another method.
""")
    parser.add_argument(
        '-d',
        '--input-directory',
        required=True,
        dest='input_dir',
        help='the input directory to walk for python files to fix up',
    )
    parser.add_argument(
        '-o',
        '--output-directory',
        required=True,
        dest='output_dir',
        help='the directory to output files fixed via un-flattening',
    )
    args = parser.parse_args()
    input_dir = pathlib.Path(args.input_dir)
    output_dir = pathlib.Path(args.output_dir)
    # Both directories must already exist; the output must also be empty
    # so we never clobber or mix with prior results.
    if not input_dir.is_dir():
        print(
            f"input directory '{input_dir}' does not exist or is not a directory",
            file=sys.stderr,
        )
        sys.exit(-1)
    if not output_dir.is_dir():
        print(
            f"output directory '{output_dir}' does not exist or is not a directory",
            file=sys.stderr,
        )
        sys.exit(-1)
    if os.listdir(output_dir):
        print(
            f"output directory '{output_dir}' is not empty",
            file=sys.stderr,
        )
        sys.exit(-1)
    fix_files(input_dir, output_dir)
|
googleapis/python-datalabeling
|
scripts/fixup_datalabeling_v1beta1_keywords.py
|
Python
|
apache-2.0
| 8,221
|
[
"VisIt"
] |
6f1ed4daaacf9626f2685b017f174f0aba766601cacc473bc01e353459ccd356
|
__author__ = 'Thomas Rueckstiess, ruecksti@in.tum.de'
from scipy import random
from scipy.ndimage import minimum_position
from scipy import mgrid, zeros, tile, array, floor, sum
from pybrain.structure.modules.module import Module
class KohonenMap(Module):
    """ Implements a Self-Organizing Map (SOM), also known as a Kohonen Map.
        Clusters the inputs in unsupervised fashion while conserving their
        neighbourhood relationship on a 2-dimensional grid. There are two
        versions: With the outputFullMap option set to True, it outputs
        the full Kohonen map to the next layer, set to False it will only
        return 2 values: the x and y coordinate of the winner neuron. """

    def __init__(self, dim, nNeurons, name=None, outputFullMap=False):
        """Create an nNeurons x nNeurons map over dim-dimensional inputs."""
        if outputFullMap:
            outdim = nNeurons ** 2
        else:
            outdim = 2
        Module.__init__(self, dim, outdim, name)

        # switch modes
        self.outputFullMap = outputFullMap

        # create neurons: weight vector per grid cell, uniform in [0, 1)
        self.neurons = random.random((nNeurons, nNeurons, dim))
        self.difference = zeros(self.neurons.shape)
        self.winner = zeros(2)
        self.nInput = dim
        self.nNeurons = nNeurons
        self.neighbours = nNeurons       # neighbourhood radius, decays over time
        self.learningrate = 0.01
        self.neighbourdecay = 0.9999

        # distance matrix: grid coordinates of every neuron
        distx, disty = mgrid[0:self.nNeurons, 0:self.nNeurons]
        self.distmatrix = zeros((self.nNeurons, self.nNeurons, 2))
        self.distmatrix[:, :, 0] = distx
        self.distmatrix[:, :, 1] = disty

    def _forwardImplementation(self, inbuf, outbuf):
        """ assigns one of the neurons to the input given in inbuf and writes
            the neuron's coordinates to outbuf. """
        # calculate the winner neuron with lowest error (square difference)
        self.difference = self.neurons - tile(inbuf, (self.nNeurons, self.nNeurons, 1))
        error = sum(self.difference ** 2, 2)
        self.winner = array(minimum_position(error))
        if not self.outputFullMap:
            outbuf[:] = self.winner

    def _backwardImplementation(self, outerr, inerr, outbuf, inbuf):
        """ trains the kohonen map in unsupervised manner, moving the
            closest neuron and its neighbours closer to the input pattern. """
        # calculate neighbourhood and limit to edge of matrix.
        # Cast to int: floor() returns a float, and float-valued slice
        # bounds are rejected by modern NumPy.
        n = int(floor(self.neighbours))
        self.neighbours *= self.neighbourdecay
        tl = (self.winner - n)
        br = (self.winner + n + 1)
        tl[tl < 0] = 0
        br[br > self.nNeurons + 1] = self.nNeurons + 1

        # calculate distance matrix (linear falloff from the winner),
        # replicated across all input dimensions.
        tempm = 1 - sum(abs(self.distmatrix - self.winner.reshape(1, 1, 2)), 2) / self.nNeurons
        tempm[tempm < 0] = 0
        distm = zeros((self.nNeurons, self.nNeurons, self.nInput))
        for i in range(self.nInput):
            # (the original assigned this line twice; the duplicate is removed)
            distm[:, :, i] = tempm

        # move the neighbourhood window towards the input pattern
        self.neurons[tl[0]:br[0], tl[1]:br[1]] -= self.learningrate * self.difference[tl[0]:br[0], tl[1]:br[1]] * distm[tl[0]:br[0], tl[1]:br[1]]
|
pybrain/pybrain
|
pybrain/structure/modules/kohonen.py
|
Python
|
bsd-3-clause
| 3,079
|
[
"NEURON"
] |
7261efcc126518bff5a96d7235783f96ed0390136078a2e24fc4b08760646d2c
|
import numpy as np
import os
try:
import netCDF4 as netCDF
except:
import netCDF3 as netCDF
import matplotlib.pyplot as plt
import time
from datetime import datetime
from matplotlib.dates import date2num, num2date
import pyroms
import pyroms_toolbox
import _remapping
class nctime(object):
    """Bare namespace object; ``long_name`` and ``units`` attributes are
    attached at runtime and passed to the NetCDF boundary-file writer."""
    pass
def remap_bdry(src_file, src_varname, src_grd, dst_grd, dxy=20, cdepth=0, kk=2, dst_dir='./'):
    """Remap one HYCOM field onto the four open boundaries of a ROMS grid
    and write the result to a new NetCDF boundary file in dst_dir.

    src_varname must be one of 'ssh', 'temp' or 'salt'.  dxy, cdepth and
    kk are forwarded to the flood-fill routine.  Returns the horizontally
    interpolated field for 'ssh', otherwise None.

    NOTE(review): Python 2 code; the weight-file names hard-code the
    GLBa0.08 -> ARCTIC2 grid pair -- confirm before reuse on other grids.
    """
    print src_file

    # get time
    nctime.long_name = 'time'
    nctime.units = 'days since 1900-01-01 00:00:00'

    # create boundary file (name derived from source file + variable + grid)
    dst_file = src_file.rsplit('/')[-1]
    dst_file = dst_dir + dst_file[:-3] + '_' + src_varname + '_bdry_' + dst_grd.name + '.nc'
    print '\nCreating boundary file', dst_file
    if os.path.exists(dst_file) is True:
        os.remove(dst_file)
    pyroms_toolbox.nc_create_roms_bdry_file(dst_file, dst_grd, nctime)

    # open boundary file
    nc = netCDF.Dataset(dst_file, 'a', format='NETCDF3_64BIT')

    #load var
    cdf = netCDF.Dataset(src_file)
    src_var = cdf.variables[src_varname]
    time = cdf.variables['ocean_time'][0]
    print time

    #get missing value
    spval = src_var._FillValue
    src_var = cdf.variables[src_varname][0]

    # determine variable dimension (2-D ssh vs 3-D temp/salt)
    ndim = len(src_var.shape)

    # Per-variable metadata: destination names, dimensions and attributes
    # for the four boundary variables.
    if src_varname == 'ssh':
        pos = 't'
        Cpos = 'rho'
        z = src_grd.z_t
        Mp, Lp = dst_grd.hgrid.mask_rho.shape
        wts_file = 'remap_weights_GLBa0.08_to_ARCTIC2_bilinear_t_to_rho.nc'
        dst_varname = 'zeta'
        dimensions = ('ocean_time', 'eta_rho', 'xi_rho')
        long_name = 'free-surface'
        dst_varname_north = 'zeta_north'
        dimensions_north = ('ocean_time', 'xi_rho')
        long_name_north = 'free-surface north boundary condition'
        field_north = 'zeta_north, scalar, series'
        dst_varname_south = 'zeta_south'
        dimensions_south = ('ocean_time', 'xi_rho')
        long_name_south = 'free-surface south boundary condition'
        field_south = 'zeta_south, scalar, series'
        dst_varname_east = 'zeta_east'
        dimensions_east = ('ocean_time', 'eta_rho')
        long_name_east = 'free-surface east boundary condition'
        field_east = 'zeta_east, scalar, series'
        dst_varname_west = 'zeta_west'
        dimensions_west = ('ocean_time', 'eta_rho')
        long_name_west = 'free-surface west boundary condition'
        field_west = 'zeta_west, scalar, series'
        units = 'meter'
    elif src_varname == 'temp':
        pos = 't'
        Cpos = 'rho'
        z = src_grd.z_t
        Mp, Lp = dst_grd.hgrid.mask_rho.shape
        wts_file = 'remap_weights_GLBa0.08_to_ARCTIC2_bilinear_t_to_rho.nc'
        dst_varname = 'temperature'
        dst_varname_north = 'temp_north'
        dimensions_north = ('ocean_time', 's_rho', 'xi_rho')
        long_name_north = 'potential temperature north boundary condition'
        field_north = 'temp_north, scalar, series'
        dst_varname_south = 'temp_south'
        dimensions_south = ('ocean_time', 's_rho', 'xi_rho')
        long_name_south = 'potential temperature south boundary condition'
        field_south = 'temp_south, scalar, series'
        dst_varname_east = 'temp_east'
        dimensions_east = ('ocean_time', 's_rho', 'eta_rho')
        long_name_east = 'potential temperature east boundary condition'
        field_east = 'temp_east, scalar, series'
        dst_varname_west = 'temp_west'
        dimensions_west = ('ocean_time', 's_rho', 'eta_rho')
        long_name_west = 'potential temperature west boundary condition'
        field_west = 'temp_west, scalar, series'
        units = 'Celsius'
    elif src_varname == 'salt':
        pos = 't'
        Cpos = 'rho'
        z = src_grd.z_t
        Mp, Lp = dst_grd.hgrid.mask_rho.shape
        wts_file = 'remap_weights_GLBa0.08_to_ARCTIC2_bilinear_t_to_rho.nc'
        dst_varname = 'salinity'
        dst_varname_north = 'salt_north'
        dimensions_north = ('ocean_time', 's_rho', 'xi_rho')
        long_name_north = 'salinity north boundary condition'
        field_north = 'salt_north, scalar, series'
        dst_varname_south = 'salt_south'
        dimensions_south = ('ocean_time', 's_rho', 'xi_rho')
        long_name_south = 'salinity south boundary condition'
        field_south = 'salt_south, scalar, series'
        dst_varname_east = 'salt_east'
        dimensions_east = ('ocean_time', 's_rho', 'eta_rho')
        long_name_east = 'salinity east boundary condition'
        field_east = 'salt_east, scalar, series'
        dst_varname_west = 'salt_west'
        dimensions_west = ('ocean_time', 's_rho', 'eta_rho')
        long_name_west = 'salinity west boundary condition'
        field_west = 'salt_west, scalar, series'
        units = 'PSU'
    else:
        raise ValueError, 'Undefined src_varname'

    if ndim == 3:
        # build intermediate zgrid: destination horizontal grid on the
        # source's (reversed) z levels, used for vertical interpolation
        zlevel = -z[::-1,0,0]
        nzlevel = len(zlevel)
        dst_zcoord = pyroms.vgrid.z_coordinate(dst_grd.vgrid.h, zlevel, nzlevel)
        dst_grdz = pyroms.grid.ROMS_Grid(dst_grd.name+'_Z', dst_grd.hgrid, dst_zcoord)

    # create variable in boudary file: one variable per boundary side
    print 'Creating variable', dst_varname_north
    nc.createVariable(dst_varname_north, 'f8', dimensions_north, fill_value=spval)
    nc.variables[dst_varname_north].long_name = long_name_north
    nc.variables[dst_varname_north].units = units
    nc.variables[dst_varname_north].field = field_north
    print 'Creating variable', dst_varname_south
    nc.createVariable(dst_varname_south, 'f8', dimensions_south, fill_value=spval)
    nc.variables[dst_varname_south].long_name = long_name_south
    nc.variables[dst_varname_south].units = units
    nc.variables[dst_varname_south].field = field_south
    print 'Creating variable', dst_varname_east
    nc.createVariable(dst_varname_east, 'f8', dimensions_east, fill_value=spval)
    nc.variables[dst_varname_east].long_name = long_name_east
    nc.variables[dst_varname_east].units = units
    nc.variables[dst_varname_east].field = field_east
    print 'Creating variable', dst_varname_west
    nc.createVariable(dst_varname_west, 'f8', dimensions_west, fill_value=spval)
    nc.variables[dst_varname_west].long_name = long_name_west
    nc.variables[dst_varname_west].units = units
    nc.variables[dst_varname_west].field = field_west

    # remapping
    print 'remapping', dst_varname, 'from', src_grd.name, \
          'to', dst_grd.name
    print 'time =', time

    if ndim == 3:
        # flood the grid: extrapolate over land/masked cells before
        # interpolating, so the boundary values are well defined
        print 'flood the grid'
        src_varz = pyroms_toolbox.Grid_HYCOM.flood_fast(src_var, src_grd, pos=pos, spval=spval, \
                                dxy=dxy, cdepth=cdepth, kk=kk)
    else:
        src_varz = src_var

    # horizontal interpolation using scrip weights
    print 'horizontal interpolation using scrip weights'
    dst_varz = pyroms.remapping.remap(src_varz, wts_file, spval=spval)

    if ndim == 3:
        # vertical interpolation from standard z level to sigma, done only
        # on the one-cell-wide strip along each boundary
        print 'vertical interpolation from standard z level to sigma'
        dst_var_north = pyroms.remapping.z2roms(dst_varz[::-1, Mp-1:Mp, :], \
                          dst_grdz, dst_grd, Cpos=Cpos, spval=spval, \
                          flood=False, irange=(0,Lp), jrange=(Mp-1,Mp))
        dst_var_south = pyroms.remapping.z2roms(dst_varz[::-1, 0:1, :], \
                          dst_grdz, dst_grd, Cpos=Cpos, spval=spval, \
                          flood=False, irange=(0,Lp), jrange=(0,1))
        dst_var_east = pyroms.remapping.z2roms(dst_varz[::-1, :, Lp-1:Lp], \
                          dst_grdz, dst_grd, Cpos=Cpos, spval=spval, \
                          flood=False, irange=(Lp-1,Lp), jrange=(0,Mp))
        dst_var_west = pyroms.remapping.z2roms(dst_varz[::-1, :, 0:1], \
                          dst_grdz, dst_grd, Cpos=Cpos, spval=spval, \
                          flood=False, irange=(0,1), jrange=(0,Mp))
    else:
        # 2-D field: just slice the boundary rows/columns
        dst_var_north = dst_varz[-1, :]
        dst_var_south = dst_varz[0, :]
        dst_var_east = dst_varz[:, -1]
        dst_var_west = dst_varz[:, 0]

    # write data in destination file
    print 'write data in destination file'
    nc.variables['ocean_time'][0] = time
    nc.variables[dst_varname_north][0] = np.squeeze(dst_var_north)
    nc.variables[dst_varname_south][0] = np.squeeze(dst_var_south)
    nc.variables[dst_varname_east][0] = np.squeeze(dst_var_east)
    nc.variables[dst_varname_west][0] = np.squeeze(dst_var_west)

    # close file
    nc.close()
    cdf.close()

    if src_varname == 'ssh':
        return dst_varz
|
kshedstrom/pyroms
|
examples/Arctic_HYCOM/remap_bdry.py
|
Python
|
bsd-3-clause
| 8,604
|
[
"NetCDF"
] |
1a827caea52f98ba30fcacd1ccb08f73f3503e2cb9dcdd91fb1941e56c9e430d
|
#!/usr/bin/env python
#
# Copyright (c) 2010-2011 Appcelerator, Inc. All Rights Reserved.
# Licensed under the Apache Public License (version 2)
#
# parse out Titanium API documentation templates into a
# format that can be used by other documentation generators
# such as PDF, etc.
import os, sys, traceback
import re, optparse
import generators
from common import lazyproperty, dict_has_non_empty_member, not_real_titanium_types
try:
import yaml
except:
print >> sys.stderr, "You don't have pyyaml!\n"
print >> sys.stderr, "You can install it with:\n"
print >> sys.stderr, "> sudo easy_install pyyaml\n"
print >> sys.stderr, ""
sys.exit(1)
this_dir = os.path.dirname(os.path.abspath(__file__))
# We package mako already in support/android/mako.
android_support_dir = os.path.abspath(os.path.join(this_dir, "..", "support", "android"))
sys.path.append(android_support_dir)
from mako.template import Template
# TiLogger is also in support/android
from tilogger import *
log = None
# We package the python markdown module already in /support/module/support/markdown.
module_support_dir = os.path.abspath(os.path.join(this_dir, "..", "support", "module", "support"))
sys.path.append(module_support_dir)
import markdown
# Platforms assumed for a type when its YAML gives no explicit "platforms" list.
DEFAULT_PLATFORMS = ["android", "iphone", "ipad"]
# "since" version assumed when a type/member specifies none.
DEFAULT_SINCE = "0.8"
apis = {} # raw conversion from yaml
annotated_apis = {} # made friendlier for templates, etc.
current_api = None
# Directory/file names skipped while walking the doc tree for .yml files.
ignore_dirs = (".git", ".svn", "CVS")
ignore_files = ("template.yml",)
def has_ancestor(one_type, ancestor_name):
    """Return True if the raw YAML type dict `one_type` is, or (transitively)
    extends, the type named `ancestor_name`.

    Walks the "extends" chain through the module-level `apis` dict; logs a
    warning and answers False when a named parent type cannot be found.
    """
    if one_type["name"] == ancestor_name:
        return True
    if "extends" not in one_type:
        return False
    parent_type_name = one_type["extends"]
    if parent_type_name == ancestor_name:
        return True
    # A missing/non-string/plain-"object" parent terminates the walk.
    if (parent_type_name is None or not isinstance(parent_type_name, basestring) or
            parent_type_name.lower() == "object"):
        return False
    if parent_type_name not in apis:
        log.warn("%s extends %s but %s type information not found" % (one_type["name"],
            parent_type_name, parent_type_name))
        return False
    # Recurse up the inheritance chain.
    return has_ancestor(apis[parent_type_name], ancestor_name)
def is_titanium_module(one_type):
    """True when the raw YAML type descends from Titanium.Module."""
    return has_ancestor(one_type, "Titanium.Module")
def is_titanium_proxy(one_type):
    """True when the raw YAML type descends from Titanium.Proxy.

    When you use this, don't forget that modules are also proxies.
    """
    return has_ancestor(one_type, "Titanium.Proxy")
def pretty_platform_name(name):
    """Map an internal platform id to its display name (iphone -> iPhone, etc.).

    Falls back to returning `name` unchanged for unrecognized platforms;
    previously this function fell off the end and returned None for any
    platform outside the four known ids.
    """
    pretty = {"iphone": "iPhone",
              "ipad": "iPad",
              "blackberry": "Blackberry",
              "android": "Android"}
    return pretty.get(name.lower(), name)
def combine_platforms_and_since(annotated_obj):
    """Build the per-platform availability list for an annotated API object.

    Returns a list of dicts, one per platform, each shaped like
    {"name": ..., "pretty_name": ..., "since": ...}.  Platform and "since"
    values fall back from the object itself, to its parent type (for
    methods/properties/events), to the module defaults.
    """
    obj = annotated_obj.api_obj
    result = []
    platforms = None
    since = DEFAULT_SINCE
    if dict_has_non_empty_member(obj, "platforms"):
        platforms = obj["platforms"]
    # Method/property/event can't have more platforms than the types they belong to.
    if (platforms is None or
            isinstance(annotated_obj, AnnotatedMethod) or isinstance(annotated_obj, AnnotatedProperty) or
            isinstance(annotated_obj, AnnotatedEvent)):
        if annotated_obj.parent is not None:
            if dict_has_non_empty_member(annotated_obj.parent.api_obj, "platforms"):
                # Narrow to the parent's list when it is strictly smaller.
                if platforms is None or len(annotated_obj.parent.api_obj["platforms"]) < len(platforms):
                    platforms = annotated_obj.parent.api_obj["platforms"]
    # Last resort is the default list of platforms
    if platforms is None:
        platforms = DEFAULT_PLATFORMS
    if "since" in obj and len(obj["since"]) > 0:
        since = obj["since"]
    else:
        # If a method/event/property we can check type's "since"
        if (isinstance(annotated_obj, AnnotatedMethod) or isinstance(annotated_obj, AnnotatedProperty) or
                isinstance(annotated_obj, AnnotatedEvent)):
            if (annotated_obj.parent is not None and
                    dict_has_non_empty_member(annotated_obj.parent.api_obj, "since")):
                since = annotated_obj.parent.api_obj["since"]
    # "since" may be a single version string or a per-platform mapping.
    since_is_dict = isinstance(since, dict)
    for name in platforms:
        one_platform = {"name": name, "pretty_name": pretty_platform_name(name)}
        if not since_is_dict:
            one_platform["since"] = since
        else:
            if name in since:
                one_platform["since"] = since[name]
            else:
                one_platform["since"] = DEFAULT_SINCE
        result.append(one_platform)
    return result
def load_one_yaml(filepath):
    """Load every YAML document in `filepath` and return them as a list.

    Returns None (after logging the traceback) when parsing fails.
    KeyboardInterrupt is always re-raised so the user can abort a long run.
    """
    try:
        # `with` guarantees the file is closed, replacing the previous
        # manual try/finally-close (whose close errors were silently swallowed).
        with open(filepath, "r") as f:
            return [the_type for the_type in yaml.load_all(f)]
    except KeyboardInterrupt:
        raise
    except Exception:
        # Previously a bare `except:`, which also trapped SystemExit.
        e = traceback.format_exc()
        log.error("Exception occurred while processing %s:" % filepath)
        for line in e.splitlines():
            log.error(line)
        return None
def generate_output(options):
    """Run every requested generator (from options.formats, comma-separated)
    over the parsed and annotated API dictionaries.

    Exits the process with status 1 when a format has no matching
    generators.<format>_generator module.
    """
    for output_type in options.formats.split(","):
        try:
            # Import by name so unknown formats fail before any generation.
            __import__("generators.%s_generator" % output_type)
        except:
            log.error("Output format %s is not recognized" % output_type)
            sys.exit(1)
        # Lazily annotate on first use so repeated formats share the work.
        if annotated_apis is None or len(annotated_apis) == 0:
            annotate_apis()
        generator = getattr(generators, "%s_generator" % output_type)
        generator.generate(apis, annotated_apis, options)
def process_yaml():
    """Walk this_dir for .yml files and load each into the global `apis`
    dict, keyed by type name.  Duplicate type names are logged and the
    later definition wins.
    """
    global apis
    log.info("Parsing YAML files")
    for root, dirs, files in os.walk(this_dir):
        for name in ignore_dirs:
            if name in dirs:
                dirs.remove(name) # don't visit ignored directories
        for filename in files:
            if os.path.splitext(filename)[-1] != ".yml" or filename in ignore_files:
                continue
            filepath = os.path.join(root, filename)
            log.trace("Processing: %s" % filepath)
            types = None
            types = load_one_yaml(filepath)
            if types is None:
                # load_one_yaml already logged the parse failure.
                log.trace("%s skipped" % filepath)
            else:
                for one_type in types:
                    if one_type["name"] in apis:
                        log.warn("%s has a duplicate" % one_type["name"])
                    apis[one_type["name"]] = one_type
def annotate_apis():
    """Wrap every raw type in `apis` with the matching Annotated* class and
    store it in the global `annotated_apis`, then link each annotated type
    to its annotated parent (by dotted-name prefix).

    Removed an unused local (`one_annotated_api = None`) from the original.
    """
    global apis, annotated_apis
    log.trace("Annotating api objects")
    for name in apis:
        log.trace("annotating %s" % name)
        one_api = apis[name]
        if is_titanium_module(one_api):
            annotated_apis[name] = AnnotatedModule(one_api)
        elif is_titanium_proxy(one_api):
            annotated_apis[name] = AnnotatedProxy(one_api)
        else:
            if one_api["name"].startswith("Ti") and one_api["name"] != "Titanium.Event":
                # Titanium.Event is an exception because it doesn't extend anything and doesn't need
                # to be annotated as a Titanium type.
                log.warn("%s not being annotated as a Titanium type. Is its 'extends' property not set correctly?" % one_api["name"])
            else:
                # Types that are not true Titanium proxies and modules (like pseudo-types)
                # are treated as proxies for documentation generation purposes so that
                # their methods, properties, etc., can be documented.
                annotated_apis[name] = AnnotatedProxy(one_api)
    # Give each annotated api a direct link to its annotated parent
    for name in annotated_apis:
        if "." not in name:
            continue # e.g., "Titanium" has no parent
        else:
            parent_name = ".".join(name.split(".")[:-1])
            if parent_name not in annotated_apis:
                log.warn("%s's parent, %s, cannot be located" % (name, parent_name))
            else:
                annotated_apis[name].parent = annotated_apis[parent_name]
# Takes a documented api (module, proxy, method, property, event, etc.)
# originally from YAML and provides convenience properties and methods to
# assist with outputting to templates or other formats.
class AnnotatedApi(object):
    """Base wrapper around one raw YAML api dict.

    Subclasses override `typestr` and fill in `parent` /
    `yaml_source_folder` as appropriate.
    """

    def __init__(self, api_obj):
        self.api_obj = api_obj
        self.name = api_obj["name"]
        self.parent = None
        self.typestr = "object"
        self.yaml_source_folder = ""
        self.inherited_from = ""
        # None when the YAML entry carries no "deprecated" mapping.
        self.deprecated = api_obj.get("deprecated", None)

    @lazyproperty
    def platforms(self):
        """Per-platform availability list (computed once, then cached)."""
        return combine_platforms_and_since(self)
class AnnotatedProxy(AnnotatedApi):
    """Annotated wrapper for a Titanium proxy type.

    Exposes sorted, inheritance-aware `methods`, `properties` and `events`
    lists built from the raw YAML plus everything inherited through the
    type's "extends" chain.
    """
    def __init__(self, api_obj):
        AnnotatedApi.__init__(self, api_obj)
        self.typestr = "proxy"

    def build_method_list(self):
        """Build this type's own methods plus inherited ones, sorted by name."""
        methods = []
        if dict_has_non_empty_member(self.api_obj, "methods"):
            methods = [AnnotatedMethod(m, self) for m in self.api_obj["methods"]]
        self.append_inherited_methods(methods)
        return sorted(methods, key=lambda item: item.name)

    @lazyproperty
    def methods(self):
        # Removed a stray trailing semicolon from the original.
        return self.build_method_list()

    @lazyproperty
    def properties(self):
        properties = []
        if dict_has_non_empty_member(self.api_obj, "properties"):
            properties = [AnnotatedProperty(p, self) for p in self.api_obj["properties"]]
        self.append_inherited_properties(properties)
        return sorted(properties, key=lambda item: item.name)

    @lazyproperty
    def events(self):
        events = []
        if dict_has_non_empty_member(self.api_obj, "events"):
            events = [AnnotatedEvent(e, self) for e in self.api_obj["events"]]
        self.append_inherited_events(events)
        return sorted(events, key=lambda item: item.name)

    def append_inherited_attributes(self, att_list, att_list_name):
        """Walk the "extends" chain, appending any parent-type members
        (of kind `att_list_name`) not already present, each marked with
        `inherited_from`.  Mutates `att_list` in place.
        """
        if "extends" not in self.api_obj:
            return
        super_type_name = self.api_obj["extends"]
        class_type = {"properties": AnnotatedProperty, "methods": AnnotatedMethod,
                "events": AnnotatedEvent}[att_list_name]
        existing_names = [item.name for item in att_list]
        while (super_type_name is not None and len(super_type_name) > 0
                and super_type_name in apis):
            super_type = apis[super_type_name]
            if dict_has_non_empty_member(super_type, att_list_name):
                for new_item in super_type[att_list_name]:
                    if new_item["name"] in existing_names:
                        continue
                    new_instance = class_type(new_item, self)
                    new_instance.inherited_from = super_type_name
                    att_list.append(new_instance)
                    existing_names.append(new_item["name"])
            # Keep going up supertypes
            if "extends" in super_type:
                super_type_name = super_type["extends"]
            else:
                super_type_name = None

    def append_inherited_methods(self, methods):
        self.append_inherited_attributes(methods, "methods")

    def append_inherited_properties(self, properties):
        self.append_inherited_attributes(properties, "properties")

    def append_inherited_events(self, events):
        self.append_inherited_attributes(events, "events")
class AnnotatedModule(AnnotatedProxy):
    """Annotated wrapper for a Titanium module type.

    Extends AnnotatedProxy with synthesized create<Proxy>() factory
    methods, generated from a Mako template for each createable member
    proxy of the module.
    """
    # Lazily-compiled Mako template shared by all modules.
    __create_proxy_template = None

    @classmethod
    def render_create_proxy_method(cls, method_template_obj):
        """Render the create-proxy Mako template to YAML text for one proxy."""
        if cls.__create_proxy_template is None:
            template_text = open(os.path.join(this_dir, "templates", "create_proxy_method.yml.mako"), "r").read()
            cls.__create_proxy_template = Template(template_text)
        rendered = cls.__create_proxy_template.render(data=method_template_obj)
        return rendered

    def __init__(self, api_obj):
        AnnotatedProxy.__init__(self, api_obj)
        self.typestr = "module"
        # e.g. "Titanium.UI" -> <this_dir>/Titanium/UI
        self.yaml_source_folder = os.path.join(this_dir, self.name.replace(".", os.sep))

    def append_creation_methods(self, methods):
        """Append a create<Proxy>() method for each createable member proxy
        that doesn't already define one.  Mutates `methods` in place.
        """
        proxies = self.member_proxies
        if proxies is None or len(proxies) == 0:
            return
        existing_names = [m.name for m in methods]
        for proxy in proxies:
            if proxy.name in not_real_titanium_types:
                continue
            # Respect an explicit createable: false in the proxy's YAML.
            if "createable" in proxy.api_obj and not proxy.api_obj["createable"]:
                continue
            method_name = "create%s" % proxy.name.split(".")[-1]
            if method_name in existing_names:
                continue
            method_template_obj = {"proxy_name": proxy.name}
            # Propagate the proxy's platforms/since into the generated method.
            if "platforms" in proxy.api_obj:
                method_template_obj["platforms"] = yaml.dump(proxy.api_obj["platforms"])
            if "since" in proxy.api_obj:
                method_template_obj["since"] = yaml.dump(proxy.api_obj["since"])
            generated_method = yaml.load(AnnotatedModule.render_create_proxy_method(method_template_obj))
            methods.append(AnnotatedMethod(generated_method, self))

    @lazyproperty
    def member_proxies(self):
        """Sorted list of annotated proxies whose parent is this module."""
        global annotated_apis
        proxies = []
        for one_annotated_type in annotated_apis.values():
            if one_annotated_type.parent is self and one_annotated_type.typestr == "proxy":
                one_annotated_type.yaml_source_folder = self.yaml_source_folder
                proxies.append(one_annotated_type)
        return sorted(proxies, key=lambda item: item.name)

    @lazyproperty
    def methods(self):
        # Module methods = own + inherited + synthesized create*() factories.
        methods = self.build_method_list()
        self.append_creation_methods(methods)
        return sorted(methods, key=lambda item: item.name)
class AnnotatedMethod(AnnotatedApi):
    """Annotated wrapper for one method of a proxy/module."""

    def __init__(self, api_obj, annotated_parent):
        AnnotatedApi.__init__(self, api_obj)
        self.typestr = "method"
        self.parent = annotated_parent
        self.yaml_source_folder = annotated_parent.yaml_source_folder

    @lazyproperty
    def parameters(self):
        """Annotated parameter list; empty when the YAML declares none."""
        if not dict_has_non_empty_member(self.api_obj, "parameters"):
            return []
        return [AnnotatedMethodParameter(p, self) for p in self.api_obj["parameters"]]
class AnnotatedMethodParameter(AnnotatedApi):
    """Annotated wrapper for one parameter of an annotated method."""

    def __init__(self, api_obj, annotated_parent):
        AnnotatedApi.__init__(self, api_obj)
        self.typestr = "parameter"
        self.parent = annotated_parent
        self.yaml_source_folder = annotated_parent.yaml_source_folder
class AnnotatedProperty(AnnotatedApi):
    """Annotated wrapper for one property of a proxy/module."""

    def __init__(self, api_obj, annotated_parent):
        AnnotatedApi.__init__(self, api_obj)
        self.typestr = "property"
        self.parent = annotated_parent
        self.yaml_source_folder = annotated_parent.yaml_source_folder
class AnnotatedEvent(AnnotatedApi):
    """Annotated wrapper for one event of a proxy/module.

    Event properties are merged with the common ones declared on the
    Titanium.Event pseudo-type.
    """
    def __init__(self, api_obj, annotated_parent):
        AnnotatedApi.__init__(self, api_obj)
        self.typestr = "event"
        self.parent = annotated_parent
        self.yaml_source_folder = self.parent.yaml_source_folder

    @lazyproperty
    def properties(self):
        """Own event properties plus those inherited from Titanium.Event."""
        properties = []
        if dict_has_non_empty_member(self.api_obj, "properties"):
            properties = [AnnotatedProperty(p, self) for p in self.api_obj["properties"]]
        # Append properties from Titanium.Event.yml
        existing_names = [p.name for p in properties]
        event_super_type = apis.get("Titanium.Event")
        if event_super_type is not None and dict_has_non_empty_member(event_super_type, "properties"):
            for prop in event_super_type["properties"]:
                if prop["name"] in existing_names:
                    continue
                new_prop = AnnotatedProperty(prop, self)
                new_prop.inherited_from = "Titanium.Event"
                properties.append(new_prop)
        return sorted(properties, key=lambda item: item.name)
def main():
    """Command-line entry point: parse options, load YAML docs, run generators."""
    global this_dir, log
    titanium_dir = os.path.dirname(this_dir)
    dist_apidoc_dir = os.path.join(titanium_dir, "dist", "apidoc")
    # titanium_version lives in the build directory, so extend sys.path first.
    sys.path.append(os.path.join(titanium_dir, "build"))
    import titanium_version
    parser = optparse.OptionParser()
    parser.add_option("-f", "--formats",
            dest="formats",
            help='Comma-separated list of desired output formats. "html" is default.',
            default="html")
    parser.add_option("--css",
            dest="css",
            help="Path to a custom CSS stylesheet to use in each HTML page",
            default=None)
    parser.add_option("-o", "--output",
            dest="output",
            help="Output directory for generated documentation",
            default=dist_apidoc_dir)
    parser.add_option("-v", "--version",
            dest="version",
            help="Version of the API to generate documentation for",
            default=titanium_version.version)
    parser.add_option("--colorize",
            dest="colorize",
            action="store_true",
            help="Colorize code in examples",
            default=False)
    parser.add_option("--verbose",
            dest="verbose",
            action="store_true",
            help="Display verbose info messages",
            default=False)
    (options, args) = parser.parse_args()
    log_level = TiLogger.INFO
    if options.verbose:
        log_level = TiLogger.TRACE
    # Module-level logger used by every helper in this file.
    log = TiLogger(None, level=log_level, output_stream=sys.stderr)
    process_yaml()
    generate_output(options)
    titanium_apis = [ta for ta in apis.values() if ta["name"].startswith("Ti")]
    log.info("%s Titanium types processed" % len(titanium_apis))
# Script entry point.
if __name__ == "__main__":
    main()
|
arnaudsj/titanium_mobile
|
apidoc/docgen.py
|
Python
|
apache-2.0
| 15,559
|
[
"VisIt"
] |
5a725c1d3b5eb0a84f6b566e612e6ad7f928b0875234c431874a851a46915a8e
|
import unittest
from neural_network_with_connections import *
from unittest import TestCase
class NeuralNetworkTests(TestCase):
    """Unit tests for the Neuron class in neural_network_with_connections."""

    def test_connection_adds_to_incoming_and_outgoing_arrays(self):
        """connect_child should register exactly one link on each side."""
        neuron = Neuron(input=0)
        neuron_2 = Neuron()
        neuron.connect_child(neuron_2, weight=1)
        # Make sure there's only one connection back and forth
        assert [neuron == connection.neuron for connection in neuron_2.incoming_neurons].count(True) == 1
        assert [neuron_2 == connection.neuron for connection in neuron.outgoing_neurons].count(True) == 1

    def test_activate_applies_proper_math(self):
        '''Let's make sure the sigmoid function is working properly, 0 * 0 weight should be 0.5'''
        neuron = Neuron(input=0)
        neuron.activate()
        # sigmoid(0) == 0.5
        assert neuron.output == 0.5

    def test_activate_applies_proper_math_over_many_neurons(self):
        """Chained activation: sigmoid(sigmoid(0) * 1) == sigmoid(0.5) ~= 0.6224593."""
        neuron = Neuron(input=0)
        neuron_2 = Neuron()
        neuron.connect_child(neuron_2, weight=1)
        neuron_2.activate()
        # sucks comparing floats in python apparently, absolute difference check
        assert abs(neuron_2.output - 0.622459331202) < 0.000000001
# Run the test suite when executed directly.
if __name__ == '__main__':
    unittest.main()
|
dev-coop/neural-net-hacking-examples
|
python/Part 1/neural_network_with_connections_tests.py
|
Python
|
mit
| 1,233
|
[
"NEURON"
] |
552c6109c5f1aebca60949cf2f837ee1f90241a4e0e925789ecbab5cbd9a3bfc
|
#!/usr/bin/env python
""" Submission of test jobs for use by Jenkins
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# pylint: disable=wrong-import-position,unused-wildcard-import,wildcard-import
import os.path
from DIRAC.Core.Base.Script import parseCommandLine
parseCommandLine()
from DIRAC import gLogger
from DIRAC.tests.Utilities.utils import find_all
from DIRAC.Interfaces.API.Job import Job
from DIRAC.Interfaces.API.Dirac import Dirac
# from tests.Workflow.Integration.Test_UserJobs import createJob
gLogger.setLevel("DEBUG")
cwd = os.path.realpath(".")
dirac = Dirac()
def base():
    """Build the common hello-world Job targeting the Jenkins test site.

    Returns an unsubmitted DIRAC Job configured with the shared
    exe-script.py executable; helloJob()/helloMP() customize and submit it.
    """
    job = Job()
    job.setName("helloWorld-TEST-TO-Jenkins")
    # find_all returns every match under the tree; take the first hit.
    executablePath = find_all("exe-script.py", "..", "/DIRAC/tests/Workflow/")[0]
    job.setInputSandbox([executablePath])
    job.setExecutable(executablePath, "", "helloWorld.log")
    job.setCPUTime(1780)
    job.setDestination("DIRAC.Jenkins.ch")
    job.setLogLevel("DEBUG")
    return job
def helloJob():
    """Simple Hello Word job to DIRAC.Jenkins.ch"""
    gLogger.info("\n Submitting hello world job targeting DIRAC.Jenkins.ch")
    job = base()
    result = dirac.submitJob(job)
    gLogger.info("Hello world job: ", result)
    if not result["OK"]:
        gLogger.error("Problem submitting job", result["Message"])
        # NOTE(review): relies on the site builtin exit(); sys.exit(1) would
        # be safer, but sys is not imported in this script — confirm before changing.
        exit(1)
def helloMP():
    """Simple Hello Word job to DIRAC.Jenkins.ch, that needs to be matched by a MP WN"""
    gLogger.info("\n Submitting hello world job targeting DIRAC.Jenkins.ch and a MP WN")
    job = base()
    # Requesting 2 processors forces matching by a multi-processor worker node.
    job.setNumberOfProcessors(2)
    result = dirac.submitJob(job)
    gLogger.info("Hello world job MP: ", result)
    if not result["OK"]:
        gLogger.error("Problem submitting job", result["Message"])
        # NOTE(review): relies on the site builtin exit(); see helloJob.
        exit(1)
# Let's submit 6 jobs in total (3 single-processor and 3 multi-processor).
for _ in range(3):
    helloJob()
    helloMP()
|
ic-hep/DIRAC
|
tests/Jenkins/dirac-test-job.py
|
Python
|
gpl-3.0
| 1,896
|
[
"DIRAC"
] |
d5752d8aa6f74654aea3aa2ee8b6caadb2463dc9698673a6384c28cdf74a0142
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
All SI units
## Plot the firing rate vs current injection curve for a Cerebellar Granule Cell neuron
## Author: Aditya Gilra
## Creation Date: 2012-07-12
## Modification Date: 2012-07-12
Wednesday 15 July 2015 09:46:36 AM IST
Added unittest
Modified for testing with ctest.
"""
import os
os.environ['NUMPTHREADS'] = '1'
import sys
sys.path.append('./../../python/')
import moose
from moose.neuroml.NeuroML import NeuroML
import numpy as np
import unittest
soma_ = None
cellSpikeTable_ = None
def loadModel(filename):
    """Load a NeuroML CA1 cell model into MOOSE and wire up spike recording.

    Populates the module globals `soma_` (the soma compartment, used for
    current injection) and `cellSpikeTable_` (a Table capturing spike times).
    """
    global soma_, cellSpikeTable_
    neuromlR = NeuroML()
    neuromlR.readNeuroMLFromFile(filename)
    # Copy the library prototype into a working cell under /cells.
    libcell = moose.Neuron('/library/CA1soma')
    CA1Cellid = moose.copy(libcell,moose.Neutral('/cells'),'CA1')
    CA1Cell = moose.Neuron(CA1Cellid)
    # Spike detector on the soma membrane potential.
    spikeGen = moose.SpikeGen(CA1Cell.path+'/spikeGen')
    spikeGen.threshold = -30e-3 # V
    soma_ = moose.Compartment(CA1Cell.path+'/Seg0_soma_0_0')
    soma_.inject = 0 # by default the cell has a current injection
    moose.connect(soma_,'VmOut',spikeGen,'Vm')
    # Record spike events into a table for later counting.
    table_path = moose.Neutral(CA1Cell.path+'/data').path
    cellSpikeTable_ = moose.Table(table_path+'/spikesTable')
    moose.connect(spikeGen,'spikeOut', cellSpikeTable_,'input')
def applyCurrent(currenti):
    """Inject `currenti` (amperes) into the soma, simulate 1 s, and return
    the number of spikes recorded.

    Always returns an int now; the original returned float 0.0 on the
    empty-spike-list branch but int len(...) otherwise.
    """
    global soma_, cellSpikeTable_
    moose.reinit()
    soma_.inject = currenti
    moose.start(1.0)
    spikesList = np.array(cellSpikeTable_.vector)
    if len(spikesList) > 0:
        # Keep only positive spike times (table may contain padding zeros).
        spikesList = spikesList[np.where(spikesList > 0.0)[0]]
        spikesNow = len(spikesList)
    else:
        spikesNow = 0
    print("For injected current {0}, no of spikes in {1} second: {2}".format(
        currenti, 1.0, spikesNow )
        )
    return spikesNow
def ca1_main(currenti):
    """Entry point used by the test harness: run one current-injection trial."""
    return applyCurrent(currenti)
|
dilawar/moose-full
|
moose-core/tests/python/neuroml/FvsI_CA1.py
|
Python
|
gpl-2.0
| 1,878
|
[
"MOOSE",
"NEURON"
] |
8b7e48685cc2232c59ad07311f8de7a0f00d500cb610845dbf20af3064e0999b
|
#!/usr/bin/env python
#===============================================================================
# Copyright 2015 Geoscience Australia
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#===============================================================================
"""
MosaicContents: database interface class.
These classes provide an interface between the database and the top-level
ingest algorithm (AbstractIngester and its subclasses). They also provide
the implementation of the database and tile store side of the ingest
process. They are expected to be independent of the structure of any
particular dataset, but will change if the database schema or tile store
format changes.
"""
from __future__ import absolute_import
import logging
import os
import re
import shutil
from EOtools.execute import execute
from EOtools.utils import log_multiline
from agdc.cube_util import DatasetError, get_file_size_mb, create_directory
from .ingest_db_wrapper import TC_MOSAIC
from osgeo import gdal
import numpy
# Set up logger.
LOGGER = logging.getLogger(__name__)
LOGGER.setLevel(logging.INFO)
#
# Constants for PQA mosaic formation:
#
PQA_CONTIGUITY = 256 # contiguity = bit 8
#
# Classes
#
class MosaicContents(object):
    """MosaicContents database interface class.

    This class has 'remove' and 'make_permanent' methods, so can be
    used as a tile_contents object with the collection.Collection and
    collection.Transaction classes.

    The mosaic is first written to a temporary location and only moved
    to its final path by make_permanent().
    """
    def __init__(self, tile_record_list, tile_type_dict,
                 level_name, temp_tile_dir):
        """Create the mosaic contents.

        tile_record_list -- list of exactly two tile dicts to mosaic.
        tile_type_dict   -- maps tile_type_id to tile type info dicts.
        level_name       -- processing level; 'PQA' gets a real raster
                            mosaic, everything else gets a GDAL VRT.
        temp_tile_dir    -- directory for the temporary mosaic file.
        """
        assert len(tile_record_list) > 1, \
            "Attempt to make a mosaic out of a single tile."
        assert len(tile_record_list) <= 2, \
            ("Attempt to make a mosaic out of more than 2 tiles.\n" +
             "Handling for this case is not yet implemented.")
        # The first tile acts as the template for the mosaic's metadata.
        tile_dict = tile_record_list[0]
        tile_type_id = tile_dict['tile_type_id']
        tile_type_info = tile_type_dict[tile_type_id]
        if level_name == 'PQA':
            extension = tile_type_info['file_extension']
        else:
            extension = '.vrt'
        (self.mosaic_temp_path, self.mosaic_final_path) = (
            self.__get_mosaic_paths(tile_dict['tile_pathname'],
                                    extension,
                                    temp_tile_dir))
        if level_name == 'PQA':
            self.__make_mosaic_pqa(tile_record_list,
                                   tile_type_info,
                                   self.mosaic_temp_path)
        else:
            self.__make_mosaic_vrt(tile_record_list,
                                   self.mosaic_temp_path)
        # Database record for the mosaic, derived from the template tile.
        self.mosaic_dict = dict(tile_dict)
        self.mosaic_dict['tile_id'] = None
        self.mosaic_dict['tile_pathname'] = self.mosaic_final_path
        self.mosaic_dict['tile_class_id'] = TC_MOSAIC
        self.mosaic_dict['tile_size'] = (
            get_file_size_mb(self.mosaic_temp_path))

    def remove(self):
        """Remove the temporary mosaic file."""
        if os.path.isfile(self.mosaic_temp_path):
            os.remove(self.mosaic_temp_path)

    def make_permanent(self):
        """Move mosaic tile contents to its permanent location."""
        shutil.move(self.mosaic_temp_path, self.mosaic_final_path)

    def get_output_path(self):
        """Return the final location for the mosaic."""
        return self.mosaic_final_path

    def create_record(self, db):
        """Create a record for the mosaic in the database."""
        db.insert_tile_record(self.mosaic_dict)

    @staticmethod
    def __get_mosaic_paths(tile_pathname, extension, temp_tile_dir):
        """Generate the temporary and final pathnames for the mosaic.

        'tile_pathname' is the path to the first tile in the mosaic.
        'extension' is the extension to use for the mosaic filename.
        Returns a tuple (mosaic_temp_path, mosaic_final_path).
        """
        (tile_dir, tile_basename) = os.path.split(tile_pathname)
        mosaic_final_dir = os.path.join(tile_dir, 'mosaic_cache')
        create_directory(mosaic_final_dir)
        mosaic_temp_dir = os.path.join(temp_tile_dir, 'mosaic_cache')
        create_directory(mosaic_temp_dir)
        # Swap the template tile's extension for the mosaic's extension.
        mosaic_basename = re.sub(r'\.\w+$', extension, tile_basename)
        mosaic_temp_path = os.path.join(mosaic_temp_dir, mosaic_basename)
        mosaic_final_path = os.path.join(mosaic_final_dir, mosaic_basename)
        return (mosaic_temp_path, mosaic_final_path)

    @staticmethod
    def __make_mosaic_pqa(tile_record_list, tile_type_info, mosaic_path):
        """From the PQA tiles in tile_record_list, create a mosaic tile
        at mosaic_pathname.

        Contiguous pixels (PQA bit 8 set) are combined with bitwise-AND;
        non-contiguous pixels keep a bitwise-OR of the no-data values.
        """
        LOGGER.info('Creating PQA mosaic file %s', mosaic_path)
        mosaic_file_list = [tr['tile_pathname'] for tr in tile_record_list]
        # The first tile supplies the size, projection and datatype.
        template_dataset = gdal.Open(mosaic_file_list[0])
        gdal_driver = gdal.GetDriverByName(tile_type_info['file_format'])
        # Set datatype formats appropriate to Create() and numpy
        gdal_dtype = template_dataset.GetRasterBand(1).DataType
        numpy_dtype = gdal.GetDataTypeName(gdal_dtype)
        mosaic_dataset = gdal_driver.Create(
            mosaic_path,
            template_dataset.RasterXSize,
            template_dataset.RasterYSize,
            1,
            gdal_dtype,
            tile_type_info['format_options'].split(','),
            )
        if not mosaic_dataset:
            raise DatasetError(
                'Unable to open output dataset %s' % mosaic_dataset)
        mosaic_dataset.SetGeoTransform(template_dataset.GetGeoTransform())
        mosaic_dataset.SetProjection(template_dataset.GetProjection())
        # TODO: make vrt here - not really needed for single-layer file
        # if tile_type_info['file_format'] == 'netCDF':
        #     pass
        output_band = mosaic_dataset.GetRasterBand(1)
        # Set all background values of data_array to FFFF (i.e. all ones)
        data_array = numpy.ones(shape=(template_dataset.RasterYSize,
                                       template_dataset.RasterXSize),
                                dtype=numpy_dtype
                                ) * -1
        # Set all background values of no_data_array to 0 (i.e. all zeroes)
        no_data_array = numpy.zeros(shape=(template_dataset.RasterYSize,
                                           template_dataset.RasterXSize),
                                    dtype=numpy_dtype
                                    )
        # Tracks which pixels received contiguous data from any input tile.
        overall_data_mask = numpy.zeros((mosaic_dataset.RasterYSize,
                                         mosaic_dataset.RasterXSize),
                                        dtype=numpy.bool
                                        )
        del template_dataset
        # Populate data_array with -masked PQA data
        for pqa_dataset_index in range(len(mosaic_file_list)):
            pqa_dataset_path = mosaic_file_list[pqa_dataset_index]
            pqa_dataset = gdal.Open(pqa_dataset_path)
            if not pqa_dataset:
                raise DatasetError('Unable to open %s' % pqa_dataset_path)
            pqa_array = pqa_dataset.ReadAsArray()
            del pqa_dataset
            LOGGER.debug('Opened %s', pqa_dataset_path)
            # Treat contiguous and non-contiguous pixels separately
            # Set all contiguous pixels to true in data_mask
            pqa_data_mask = (pqa_array & PQA_CONTIGUITY).astype(numpy.bool)
            # Expand overall_data_mask to true for any contiguous pixels
            overall_data_mask = overall_data_mask | pqa_data_mask
            # Perform bitwise-and on contiguous pixels in data_array
            data_array[pqa_data_mask] &= pqa_array[pqa_data_mask]
            # Perform bitwise-or on non-contiguous pixels in no_data_array
            no_data_array[~pqa_data_mask] |= pqa_array[~pqa_data_mask]
        # Set all pixels which don't contain data to combined no-data values
        # (should be same as original no-data values)
        data_array[~overall_data_mask] = no_data_array[~overall_data_mask]
        output_band.WriteArray(data_array)
        mosaic_dataset.FlushCache()

    @staticmethod
    def __make_mosaic_vrt(tile_record_list, mosaic_path):
        """From two or more source tiles create a vrt"""
        LOGGER.info('Creating mosaic VRT file %s', mosaic_path)
        source_file_list = [tr['tile_pathname'] for tr in tile_record_list]
        # Build the mosaic with the external gdalbuildvrt tool.
        gdalbuildvrt_cmd = ["gdalbuildvrt",
                            "-q",
                            "-overwrite",
                            "%s" % mosaic_path
                            ]
        gdalbuildvrt_cmd.extend(source_file_list)
        result = execute(gdalbuildvrt_cmd, shell=False)
        if result['stdout']:
            log_multiline(LOGGER.info, result['stdout'],
                          'stdout from %s' % gdalbuildvrt_cmd, '\t')
        if result['stderr']:
            log_multiline(LOGGER.debug, result['stderr'],
                          'stderr from %s' % gdalbuildvrt_cmd, '\t')
        if result['returncode'] != 0:
            raise DatasetError('Unable to perform gdalbuildvrt: ' +
                               '"%s" failed: %s'
                               % (gdalbuildvrt_cmd, result['stderr']))
|
ama-jharrison/agdc
|
agdc/agdc/abstract_ingester/mosaic_contents.py
|
Python
|
apache-2.0
| 9,888
|
[
"NetCDF"
] |
d2d0e63725e759a913d3bed63827e516a6de1d406fc19165314f48f0d75371d9
|
import numpy as np
from ase.lattice import bulk
from gpaw import GPAW
from gpaw.response.df import DielectricFunction
# Part 1: Ground state calculation
atoms = bulk('Al', 'fcc', a=4.043) # Generate fcc crystal structure for aluminum
calc = GPAW(mode='pw', # GPAW calculator initialization (plane-wave mode)
            kpts={'density': 5.0, 'gamma': True})
atoms.set_calculator(calc)
atoms.get_potential_energy() # Ground state calculation is performed
calc.write('Al.gpw','all') # Use 'all' option to write wavefunctions
# Part 2: Spectrum calculation (electron energy loss)
df = DielectricFunction(calc='Al.gpw') # Ground state gpw file as input
q_c = [1.0 / 13, 0, 0] # Momentum transfer, must be the difference between two kpoints !
df.get_eels_spectrum(q_c=q_c) # By default, a file called 'eels.csv' is generated
|
robwarm/gpaw-symm
|
doc/tutorials/dielectric_response/aluminum_EELS.py
|
Python
|
gpl-3.0
| 849
|
[
"ASE",
"CRYSTAL",
"GPAW"
] |
6b8437936a7db1aaf3e0d7fe87087c794dc57fcd49c8b784bd86ccb6bce8f25a
|
"""
# Majic
# Copyright (C) 2014 CEH
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
from netCDF4 import Dataset
import os
from sys import argv
class VariableSplitter(object):
    """
    Takes a netCDF file with a variable which has a pseudo dimension
    and splits that variable into an equivalent number of single layer variables.
    """
    def convert(self, in_path, out_path, variable_prefix='Land Cover Fraction'):
        """
        Convert a netCDF file to have multiple variables (rather than a pseudo dimension)
        :param in_path: File to convert
        :param out_path: Path to output to
        :param variable_prefix: Prefix for created variables
        :return:
        """
        if os.path.exists(out_path):
            os.remove(out_path)
        ds_in = Dataset(in_path, 'r')
        ds_out = Dataset(out_path, 'w')
        # Copy across the latitude, longitude dimensions
        for key, dim in ds_in.dimensions.iteritems():
            dim_len = len(dim)
            ds_out.createDimension(key, dim_len)
        for var_name, variable in ds_in.variables.iteritems():
            if variable.ndim == 3:
                # 3-D variable: first axis is the pseudo dimension.
                # Now split the layer we need and copy it over
                long_name = '%s - Index %s'
                var_to_split = ds_in.variables[var_name]
                dims_to_keep = var_to_split.dimensions[1:]
                datatype = self._get_datatype_string(var_to_split)
                for layer_index in range(len(var_to_split)):
                    # One output variable per layer, named with a 1-based index.
                    layer_name = long_name % (variable_prefix, str(layer_index + 1))
                    layer = ds_out.createVariable(layer_name, datatype, dims_to_keep)
                    self._copy_attrs(variable, layer)
                    layer.long_name = layer_name
                    layer[:] = var_to_split[layer_index]
            else:
                # Non-3D variables are copied through unchanged.
                datatype = self._get_datatype_string(variable)
                copied_variable = ds_out.createVariable(var_name, datatype, variable.dimensions)
                self._copy_attrs(variable, copied_variable)
                if variable.ndim == 0:
                    # NOTE(review): this rebinds the local name instead of
                    # writing the scalar value into the new variable —
                    # looks like scalar data is never copied; confirm.
                    copied_variable = variable
                elif variable.ndim == 1:
                    copied_variable[:] = variable[:]
                elif variable.ndim == 2:
                    copied_variable[:,:] = variable[:,:]
                else:
                    raise Exception("The variable %s in the land_cover_fractional_file contains an unexpected number of dimensions" % var_name)
        # Copy across the attributes
        self._copy_attrs(ds_in, ds_out)

    def _copy_attrs(self, cp_from, cp_to):
        # Copy every netCDF attribute from cp_from onto cp_to.
        for attr in cp_from.ncattrs():
            value = cp_from.getncattr(attr)
            cp_to.setncattr(attr, value)
        return cp_from

    def _get_datatype_string(self, variable):
        # e.g. float64 -> "f8": numpy kind letter + item size in bytes.
        return "".join((variable.datatype.kind, str(variable.datatype.itemsize)))
def insert_before_file_extension(path, string):
    """
    Add a string to a path immediately before the file extension.

    BUG FIX: the original used ``path.rfind('.')``, which (a) corrupts the
    result when a directory name contains a dot but the filename does not,
    and (b) splices the string before the last character when there is no
    dot at all (rfind returns -1).  ``os.path.splitext`` only considers the
    final path component, so the string is appended when no extension exists.

    :param path: File path to modify
    :param string: String to add
    :return: The modified path
    """
    root, extension = os.path.splitext(path)
    return "".join((root, string, extension))
if __name__ == '__main__':
    # Default input filename and the suffix marking a converted copy.
    USER_EDITED_FRACTIONAL_FILENAME = 'user_edited_land_cover_fractional_file.nc'
    MODIFIED_FOR_VISUALISATION_EXTENSION = '_MODIFIED_FOR_VISUALISATION'
    try:
        file_path = USER_EDITED_FRACTIONAL_FILENAME # Default is to try to find a user edited file
        if len(argv) > 1:
            file_path = str(argv[1])
        if os.path.exists(file_path):
            vis_path = insert_before_file_extension(file_path, MODIFIED_FOR_VISUALISATION_EXTENSION)
            # Only convert once; an existing output means work is already done.
            if not os.path.exists(vis_path):
                frac_converter = VariableSplitter()
                frac_converter.convert(file_path, vis_path)
            else:
                # Python 2 print statement - this script targets Python 2.
                print "File already exists and has been converted"
        else:
            print "File does not exist: %s " % file_path
        # NOTE(review): exit(0) is reached for the missing-file case too, so a
        # missing input still reports success - confirm this is intentional.
        exit(0)
    except Exception as e:
        # NOTE(review): e.message is deprecated and empty for many exception
        # types; str(e) would be more reliable.
        print e.message
        pass
    # Only reachable after an exception (SystemExit from exit(0) above is not
    # caught by 'except Exception').
    exit(-1)
|
NERC-CEH/jules-jasmin
|
job_runner/job_runner/post_processing_scripts/convert_fractional_file_for_visualisation.py
|
Python
|
gpl-2.0
| 4,837
|
[
"NetCDF"
] |
35d1af4d318bc1efdba8721db86383598200f79e59bcb43a791e214226f3bc3f
|
#!/usr/bin/env python
##
## @file test.py
## @brief AutoRunner for Python test scripts
## @author Akiya Jouraku
##
##<!---------------------------------------------------------------------------
## This file is part of libSBML. Please visit http://sbml.org for more
## information about SBML, and the latest version of libSBML.
##
## Copyright (C) 2009-2013 jointly by the following organizations:
## 1. California Institute of Technology, Pasadena, CA, USA
## 2. EMBL European Bioinformatics Institute (EBML-EBI), Hinxton, UK
##
## Copyright (C) 2006-2008 by the California Institute of Technology,
## Pasadena, CA, USA
##
## Copyright (C) 2002-2005 jointly by the following organizations:
## 1. California Institute of Technology, Pasadena, CA, USA
## 2. Japan Science and Technology Agency, Japan
##
## This library is free software; you can redistribute it and/or modify it
## under the terms of the GNU Lesser General Public License as published by
## the Free Software Foundation. A copy of the license agreement is provided
## in the file named "LICENSE.txt" included with this software distribution
## and also available online as http://sbml.org/software/libsbml/license.html
##----------------------------------------------------------------------- -->*/
import os
import sys
import re
import glob
import unittest
# Directory (relative to the working directory) holding the test packages.
test_basedir = 'test'
# Sub-directories of test_basedir from which Test*.py modules are collected.
test_subdirs = ['sbml','xml','math','annotation']
# Glob pattern (with leading separator) matching the test modules.
test_files = "/Test*.py"
def suite():
    """Collect every Test*.py module under the configured sub-directories.

    Side effects: appends to sys.path and changes the working directory to
    the parent of *test_basedir* so the test modules can be imported.
    """
    collected = unittest.TestSuite()
    sys.path.append(os.getcwd())
    os.chdir(test_basedir + '/..')
    strip_py = re.compile(r"\.py$")
    for subdir in test_subdirs:
        subdir_path = test_basedir + '/' + subdir
        sys.path.append(subdir_path)
        for script in glob.glob(subdir_path + '/' + test_files):
            # Module name == file name without the .py suffix; each module
            # exposes a TestCase class of the same name.
            module_name = strip_py.sub('', os.path.basename(script))
            test_case = getattr(__import__(module_name), module_name)
            collected.addTest(unittest.makeSuite(test_case))
    return collected
if __name__ == "__main__":
    if len(sys.argv) > 1:
        # Parse additional command line arguments.
        # BUG FIX: the original for-loop assigned to its own index variable to
        # try to skip an option's value; that has no effect in Python, so the
        # value was re-scanned as if it were a flag.  A while-loop consumes the
        # value correctly.
        index = 1
        while index < len(sys.argv):
            current = sys.argv[index]
            hasNext = (index + 1) < len(sys.argv)
            nextIndex = index + 1
            if current == "-b" and hasNext:
                # Allow setting the base path.
                test_basedir = sys.argv[nextIndex]
                index = nextIndex
            elif current == "-p" and hasNext:
                # Add a directory to the module search path.
                sys.path.append(sys.argv[nextIndex])
                index = nextIndex
            elif current == "-a" and hasNext:
                # Test additional directories (consumes the rest of argv).
                test_subdirs = test_subdirs + sys.argv[nextIndex:]
                break
            index += 1
    # Exit status mirrors the test result so callers (e.g. make) see failures.
    if unittest.TextTestRunner(verbosity=1).run(suite()).wasSuccessful():
        sys.exit(0)
    else:
        sys.exit(1)
|
mgaldzic/antimony
|
src/bindings/python/test.py
|
Python
|
bsd-3-clause
| 2,786
|
[
"VisIt"
] |
88dc1c104c0d9c7939413fc525970a2f73a97ff296a9dd5e4493f9039cf628ea
|
#!/usr/bin/env python
# Copyright 2014-2018 The PySCF Developers. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import numpy
from pyscf import lib
from pyscf.pbc import gto as pgto
from pyscf.pbc import scf as pscf
from pyscf.pbc.scf import stability
# Cubic box edge length (Bohr units, unit='B' below) and FFT mesh points
# per axis for the shared test cell.
L = 4
n = 15
cell = pgto.Cell()
cell.build(unit = 'B',
           verbose = 5,
           output = '/dev/null',
           a = ((L,0,0),(0,L,0),(0,0,L)),
           mesh = [n,n,n],
           # Two He atoms displaced along the box diagonal.
           atom = [['He', (L/2.-.5,L/2.,L/2.-.5)],
                   ['He', (L/2. ,L/2.,L/2.+.5)]],
           # Three uncontracted s-type Gaussians per He.
           basis = { 'He': [[0, (0.8, 1.0)],
                            [0, (1.0, 1.0)],
                            [0, (1.2, 1.0)]]})
# Fixed seed so the sampled k-point is reproducible across runs.
numpy.random.seed(4)
kpts = numpy.random.random((1,3))
def tearDownModule():
    """Release module-level resources once all tests in this module finish."""
    global cell
    # Close the log stream opened by cell.build(output='/dev/null').
    cell.stdout.close()
    del cell
class KnownValues(unittest.TestCase):
    """Pin PBC SCF stability-analysis results to previously verified values."""

    def test_hf_stability(self):
        """A tightly converged gamma-point RHF solution is internally stable."""
        rhf = pscf.RHF(cell, exxdiv='ewald').run(conv_tol=1e-12)
        internal, external = rhf.stability(internal=True, external=True)
        # A stable solution returns the (unchanged) converged orbitals.
        self.assertAlmostEqual(abs(rhf.mo_coeff - internal).max(), 0, 9)

    def test_khf_stability(self):
        """KRHF stability, plus fingerprints of the external hessian operator."""
        krhf = pscf.KRHF(cell, kpts, exxdiv='ewald').run(conv_tol=1e-12)
        internal, external = krhf.stability(internal=True, external=True)
        self.assertAlmostEqual(abs(krhf.mo_coeff[0] - internal[0]).max(), 0, 9)
        hop, hdiag = stability._gen_hop_rhf_external(krhf)
        self.assertAlmostEqual(lib.fp(hdiag), 18.528134783454508, 7)
        self.assertAlmostEqual(lib.fp(hop(hdiag)), 108.99683506471919, 5)

    def test_uhf_stability(self):
        """Both spin channels of a converged UHF solution are internally stable."""
        uhf = pscf.UHF(cell, exxdiv='ewald').run(conv_tol=1e-12)
        internal, external = uhf.stability(internal=True, external=True)
        self.assertAlmostEqual(abs(uhf.mo_coeff[0] - internal[0]).max(), 0, 9)
        self.assertAlmostEqual(abs(uhf.mo_coeff[1] - internal[1]).max(), 0, 9)

    def test_kuhf_stability(self):
        """KUHF stability, plus fingerprints of the external hessian operator."""
        kuhf = pscf.KUHF(cell, kpts, exxdiv='ewald').run(conv_tol=1e-12)
        internal, external = kuhf.stability(internal=True, external=True)
        self.assertAlmostEqual(abs(kuhf.mo_coeff[0][0] - internal[0][0]).max(), 0, 9)
        self.assertAlmostEqual(abs(kuhf.mo_coeff[1][0] - internal[1][0]).max(), 0, 9)
        hop, hdiag = stability._gen_hop_uhf_external(kuhf)
        self.assertAlmostEqual(lib.fp(hdiag), 10.977759629315884, 7)
        self.assertAlmostEqual(lib.fp(hop(hdiag)), 86.425042652868, 5)

    def test_rotate_mo(self):
        """_rotate_mo reproduces known fingerprints for a fixed random rotation."""
        numpy.random.seed(4)

        def doubly_occupied(nmo, nocc):
            # Occupation vector with the first nocc orbitals doubly filled.
            occ = numpy.zeros(nmo)
            occ[:nocc] = 2
            return occ

        coeffs = [numpy.random.random((8,8)),
                  numpy.random.random((8,7)),
                  numpy.random.random((8,8))]
        occupations = [doubly_occupied(8, 3), doubly_occupied(7, 3), doubly_occupied(8, 2)]
        rotation = numpy.random.random(15+12+12)
        rotated = stability._rotate_mo(coeffs, occupations, rotation)
        self.assertAlmostEqual(lib.fp(rotated[0]), 1.1090134286653903, 12)
        self.assertAlmostEqual(lib.fp(rotated[1]), 1.0665953580532537, 12)
        self.assertAlmostEqual(lib.fp(rotated[2]), -5.008202013953201, 12)
if __name__ == "__main__":
    # Allow running this module directly as a test script.
    print("Full Tests for stability")
    unittest.main()
|
sunqm/pyscf
|
pyscf/pbc/scf/test/test_stability.py
|
Python
|
apache-2.0
| 3,701
|
[
"PySCF"
] |
8e396df7b39d9540065320525c858296ee33f3bf8fbacbbde4ba21ba33253f1c
|
# -*- coding: UTF-8 -*-
import threading
from django.conf import settings
from django.db import models
from geonode.maps.owslib_csw import CatalogueServiceWeb
from geoserver.catalog import Catalog
from geonode.core.models import PermissionLevelMixin
from geonode.core.models import AUTHENTICATED_USERS, ANONYMOUS_USERS, CUSTOM_GROUP_USERS
from geonode.geonetwork import Catalog as GeoNetwork
from django.db.models import signals
from taggit.managers import TaggableManager
from django.utils import simplejson as json
from django.utils.safestring import mark_safe
import httplib2
import urllib
from urlparse import urlparse
import uuid
from datetime import datetime
from django.contrib.auth.models import User, Permission
from django.utils.translation import ugettext_lazy as _
from django.core.exceptions import ValidationError
from django.core.urlresolvers import reverse
from lxml import etree
from geonode.maps.gs_helpers import cascading_delete, get_postgis_bbox
import logging
from geonode.maps.encode import num_encode
from django.core.cache import cache
import sys
import re
from geonode.maps.encode import despam, XssCleaner
from geonode.flexidates import FlexiDateField, FlexiDateFormField
logger = logging.getLogger("geonode.maps.models")
ows_sub = re.compile(r"[&\?]+SERVICE=WMS|[&\?]+REQUEST=GetCapabilities", re.IGNORECASE)
def bbox_to_wkt(x0, x1, y0, y1, srid="4326"):
    """Return an EWKT polygon for the axis-aligned box [x0,x1] x [y0,y1].

    The ring is closed (first corner repeated) and traced x0y0 -> x0y1 ->
    x1y1 -> x1y0 -> x0y0, matching the original formatting exactly.
    """
    corners = ((x0, y0), (x0, y1), (x1, y1), (x1, y0), (x0, y0))
    ring = ",".join("%s %s" % corner for corner in corners)
    return "SRID=%s;POLYGON((%s))" % (srid, ring)
# Metadata contact role values (ISO 19115 CI_RoleCode-style identifiers).
ROLE_VALUES = [
    'datasetProvider',
    'custodian',
    'owner',
    'user',
    'distributor',
    'originator',
    'pointOfContact',
    'principalInvestigator',
    'processor',
    'publisher',
    'author'
]
COUNTRIES = (
('AFG', _('Afghanistan')),
('ALA', _('Aland Islands')),
('ALB', _('Albania')),
('DZA', _('Algeria')),
('ASM', _('American Samoa')),
('AND', _('Andorra')),
('AGO', _('Angola')),
('AIA', _('Anguilla')),
('ATG', _('Antigua and Barbuda')),
('ARG', _('Argentina')),
('ARM', _('Armenia')),
('ABW', _('Aruba')),
('AUS', _('Australia')),
('AUT', _('Austria')),
('AZE', _('Azerbaijan')),
('BHS', _('Bahamas')),
('BHR', _('Bahrain')),
('BGD', _('Bangladesh')),
('BRB', _('Barbados')),
('BLR', _('Belarus')),
('BEL', _('Belgium')),
('BLZ', _('Belize')),
('BEN', _('Benin')),
('BMU', _('Bermuda')),
('BTN', _('Bhutan')),
('BOL', _('Bolivia')),
('BIH', _('Bosnia and Herzegovina')),
('BWA', _('Botswana')),
('BRA', _('Brazil')),
('VGB', _('British Virgin Islands')),
('BRN', _('Brunei Darussalam')),
('BGR', _('Bulgaria')),
('BFA', _('Burkina Faso')),
('BDI', _('Burundi')),
('KHM', _('Cambodia')),
('CMR', _('Cameroon')),
('CAN', _('Canada')),
('CPV', _('Cape Verde')),
('CYM', _('Cayman Islands')),
('CAF', _('Central African Republic')),
('TCD', _('Chad')),
('CIL', _('Channel Islands')),
('CHL', _('Chile')),
('CHN', _('China')),
('HKG', _('China - Hong Kong')),
('MAC', _('China - Macao')),
('COL', _('Colombia')),
('COM', _('Comoros')),
('COG', _('Congo')),
('COK', _('Cook Islands')),
('CRI', _('Costa Rica')),
('CIV', _('Cote d\'Ivoire')),
('HRV', _('Croatia')),
('CUB', _('Cuba')),
('CYP', _('Cyprus')),
('CZE', _('Czech Republic')),
('PRK', _('Democratic People\'s Republic of Korea')),
('COD', _('Democratic Republic of the Congo')),
('DNK', _('Denmark')),
('DJI', _('Djibouti')),
('DMA', _('Dominica')),
('DOM', _('Dominican Republic')),
('ECU', _('Ecuador')),
('EGY', _('Egypt')),
('SLV', _('El Salvador')),
('GNQ', _('Equatorial Guinea')),
('ERI', _('Eritrea')),
('EST', _('Estonia')),
('ETH', _('Ethiopia')),
('FRO', _('Faeroe Islands')),
('FLK', _('Falkland Islands (Malvinas)')),
('FJI', _('Fiji')),
('FIN', _('Finland')),
('FRA', _('France')),
('GUF', _('French Guiana')),
('PYF', _('French Polynesia')),
('GAB', _('Gabon')),
('GMB', _('Gambia')),
('GEO', _('Georgia')),
('DEU', _('Germany')),
('GHA', _('Ghana')),
('GIB', _('Gibraltar')),
('GRC', _('Greece')),
('GRL', _('Greenland')),
('GRD', _('Grenada')),
('GLP', _('Guadeloupe')),
('GUM', _('Guam')),
('GTM', _('Guatemala')),
('GGY', _('Guernsey')),
('GIN', _('Guinea')),
('GNB', _('Guinea-Bissau')),
('GUY', _('Guyana')),
('HTI', _('Haiti')),
('VAT', _('Holy See (Vatican City)')),
('HND', _('Honduras')),
('HUN', _('Hungary')),
('ISL', _('Iceland')),
('IND', _('India')),
('IDN', _('Indonesia')),
('IRN', _('Iran')),
('IRQ', _('Iraq')),
('IRL', _('Ireland')),
('IMN', _('Isle of Man')),
('ISR', _('Israel')),
('ITA', _('Italy')),
('JAM', _('Jamaica')),
('JPN', _('Japan')),
('JEY', _('Jersey')),
('JOR', _('Jordan')),
('KAZ', _('Kazakhstan')),
('KEN', _('Kenya')),
('KIR', _('Kiribati')),
('KWT', _('Kuwait')),
('KGZ', _('Kyrgyzstan')),
('LAO', _('Lao People\'s Democratic Republic')),
('LVA', _('Latvia')),
('LBN', _('Lebanon')),
('LSO', _('Lesotho')),
('LBR', _('Liberia')),
('LBY', _('Libyan Arab Jamahiriya')),
('LIE', _('Liechtenstein')),
('LTU', _('Lithuania')),
('LUX', _('Luxembourg')),
('MKD', _('Macedonia')),
('MDG', _('Madagascar')),
('MWI', _('Malawi')),
('MYS', _('Malaysia')),
('MDV', _('Maldives')),
('MLI', _('Mali')),
('MLT', _('Malta')),
('MHL', _('Marshall Islands')),
('MTQ', _('Martinique')),
('MRT', _('Mauritania')),
('MUS', _('Mauritius')),
('MYT', _('Mayotte')),
('MEX', _('Mexico')),
('FSM', _('Micronesia, Federated States of')),
('MCO', _('Monaco')),
('MNG', _('Mongolia')),
('MNE', _('Montenegro')),
('MSR', _('Montserrat')),
('MAR', _('Morocco')),
('MOZ', _('Mozambique')),
('MMR', _('Myanmar')),
('NAM', _('Namibia')),
('NRU', _('Nauru')),
('NPL', _('Nepal')),
('NLD', _('Netherlands')),
('ANT', _('Netherlands Antilles')),
('NCL', _('New Caledonia')),
('NZL', _('New Zealand')),
('NIC', _('Nicaragua')),
('NER', _('Niger')),
('NGA', _('Nigeria')),
('NIU', _('Niue')),
('NFK', _('Norfolk Island')),
('MNP', _('Northern Mariana Islands')),
('NOR', _('Norway')),
('PSE', _('Occupied Palestinian Territory')),
('OMN', _('Oman')),
('PAK', _('Pakistan')),
('PLW', _('Palau')),
('PAN', _('Panama')),
('PNG', _('Papua New Guinea')),
('PRY', _('Paraguay')),
('PER', _('Peru')),
('PHL', _('Philippines')),
('PCN', _('Pitcairn')),
('POL', _('Poland')),
('PRT', _('Portugal')),
('PRI', _('Puerto Rico')),
('QAT', _('Qatar')),
('KOR', _('Republic of Korea')),
('MDA', _('Republic of Moldova')),
('REU', _('Reunion')),
('ROU', _('Romania')),
('RUS', _('Russian Federation')),
('RWA', _('Rwanda')),
('BLM', _('Saint-Barthelemy')),
('SHN', _('Saint Helena')),
('KNA', _('Saint Kitts and Nevis')),
('LCA', _('Saint Lucia')),
('MAF', _('Saint-Martin (French part)')),
('SPM', _('Saint Pierre and Miquelon')),
('VCT', _('Saint Vincent and the Grenadines')),
('WSM', _('Samoa')),
('SMR', _('San Marino')),
('STP', _('Sao Tome and Principe')),
('SAU', _('Saudi Arabia')),
('SEN', _('Senegal')),
('SRB', _('Serbia')),
('SYC', _('Seychelles')),
('SLE', _('Sierra Leone')),
('SGP', _('Singapore')),
('SVK', _('Slovakia')),
('SVN', _('Slovenia')),
('SLB', _('Solomon Islands')),
('SOM', _('Somalia')),
('ZAF', _('South Africa')),
('ESP', _('Spain')),
('LKA', _('Sri Lanka')),
('SDN', _('Sudan')),
('SUR', _('Suriname')),
('SJM', _('Svalbard and Jan Mayen Islands')),
('SWZ', _('Swaziland')),
('SWE', _('Sweden')),
('CHE', _('Switzerland')),
('SYR', _('Syrian Arab Republic')),
('TJK', _('Tajikistan')),
('THA', _('Thailand')),
('TLS', _('Timor-Leste')),
('TGO', _('Togo')),
('TKL', _('Tokelau')),
('TON', _('Tonga')),
('TTO', _('Trinidad and Tobago')),
('TUN', _('Tunisia')),
('TUR', _('Turkey')),
('TKM', _('Turkmenistan')),
('TCA', _('Turks and Caicos Islands')),
('TUV', _('Tuvalu')),
('UGA', _('Uganda')),
('UKR', _('Ukraine')),
('ARE', _('United Arab Emirates')),
('GBR', _('United Kingdom')),
('TZA', _('United Republic of Tanzania')),
('USA', _('United States of America')),
('VIR', _('United States Virgin Islands')),
('URY', _('Uruguay')),
('UZB', _('Uzbekistan')),
('VUT', _('Vanuatu')),
('VEN', _('Venezuela (Bolivarian Republic of)')),
('VNM', _('Viet Nam')),
('WLF', _('Wallis and Futuna Islands')),
('ESH', _('Western Sahara')),
('YEM', _('Yemen')),
('ZMB', _('Zambia')),
('ZWE', _('Zimbabwe')),
)
# (code, translated label) choices for geographic-region keywords.
KEYWORD_REGIONS= (
    ('GLO', _('Global')),
    ('NAM', _('North America')),
    ('CAM',_('Central America')),
    ('SAM',_('South America')),
    ('EUR',_('Europe')),
    ('ASI',_('Asia')),
    ('SEA',_('Southeast Asia')),
    ('CTA',_('Central Asia')),
    ('SAS',_('South Asia')),
    ('AFR',_('Africa')),
    ('NAF',_('North Africa')),
    ('EAF',_('East Africa')),
    ('WAF',_('West Africa')),
    ('SAF',_('South Africa')),
    ('MES',_('Middle East')),
    ('ANT',_('Antarctica')),
)
# Taken from http://www.w3.org/WAI/ER/IG/ert/iso639.htm
ALL_LANGUAGES = (
('abk', 'Abkhazian'),
('aar', 'Afar'),
('afr', 'Afrikaans'),
('amh', 'Amharic'),
('ara', 'Arabic'),
('asm', 'Assamese'),
('aym', 'Aymara'),
('aze', 'Azerbaijani'),
('bak', 'Bashkir'),
('ben', 'Bengali'),
('bih', 'Bihari'),
('bis', 'Bislama'),
('bre', 'Breton'),
('bul', 'Bulgarian'),
('bel', 'Byelorussian'),
('cat', 'Catalan'),
('chi', 'Chinese'),
('cos', 'Corsican'),
('dan', 'Danish'),
('dzo', 'Dzongkha'),
('eng', 'English'),
('fra', 'French'),
('epo', 'Esperanto'),
('est', 'Estonian'),
('fao', 'Faroese'),
('fij', 'Fijian'),
('fin', 'Finnish'),
('fry', 'Frisian'),
('glg', 'Gallegan'),
('kal', 'Greenlandic'),
('grn', 'Guarani'),
('guj', 'Gujarati'),
('hau', 'Hausa'),
('heb', 'Hebrew'),
('hin', 'Hindi'),
('hun', 'Hungarian'),
('ind', 'Indonesian'),
('ina', 'Interlingua (International Auxiliary language Association)'),
('iku', 'Inuktitut'),
('ipk', 'Inupiak'),
('ita', 'Italian'),
('jpn', 'Japanese'),
('kan', 'Kannada'),
('kas', 'Kashmiri'),
('kaz', 'Kazakh'),
('khm', 'Khmer'),
('kin', 'Kinyarwanda'),
('kir', 'Kirghiz'),
('kor', 'Korean'),
('kur', 'Kurdish'),
('oci', 'Langue d \'Oc (post 1500)'),
('lao', 'Lao'),
('lat', 'Latin'),
('lav', 'Latvian'),
('lin', 'Lingala'),
('lit', 'Lithuanian'),
('mlg', 'Malagasy'),
('mlt', 'Maltese'),
('mar', 'Marathi'),
('mol', 'Moldavian'),
('mon', 'Mongolian'),
('nau', 'Nauru'),
('nep', 'Nepali'),
('nor', 'Norwegian'),
('ori', 'Oriya'),
('orm', 'Oromo'),
('pan', 'Panjabi'),
('pol', 'Polish'),
('por', 'Portuguese'),
('pus', 'Pushto'),
('que', 'Quechua'),
('roh', 'Rhaeto-Romance'),
('run', 'Rundi'),
('rus', 'Russian'),
('smo', 'Samoan'),
('sag', 'Sango'),
('san', 'Sanskrit'),
('scr', 'Serbo-Croatian'),
('sna', 'Shona'),
('snd', 'Sindhi'),
('sin', 'Singhalese'),
('ssw', 'Siswant'),
('slv', 'Slovenian'),
('som', 'Somali'),
('sot', 'Sotho'),
('spa', 'Spanish'),
('sun', 'Sudanese'),
('swa', 'Swahili'),
('tgl', 'Tagalog'),
('tgk', 'Tajik'),
('tam', 'Tamil'),
('tat', 'Tatar'),
('tel', 'Telugu'),
('tha', 'Thai'),
('tir', 'Tigrinya'),
('tog', 'Tonga (Nyasa)'),
('tso', 'Tsonga'),
('tsn', 'Tswana'),
('tur', 'Turkish'),
('tuk', 'Turkmen'),
('twi', 'Twi'),
('uig', 'Uighur'),
('ukr', 'Ukrainian'),
('urd', 'Urdu'),
('uzb', 'Uzbek'),
('vie', 'Vietnamese'),
('vol', 'Volapük'),
('wol', 'Wolof'),
('xho', 'Xhosa'),
('yid', 'Yiddish'),
('yor', 'Yoruba'),
('zha', 'Zhuang'),
('zul', 'Zulu'),
)
# [stored value, display label] choices for character-set metadata.
CHARSETS = [
    ['', 'None/Unknown'],
    ['UTF-8', 'UTF-8/Unicode'],
    ['ISO-8859-1', 'Latin1/ISO-8859-1'],
    ['ISO-8859-2', 'Latin2/ISO-8859-2'],
    ['ISO-8859-3', 'Latin3/ISO-8859-3'],
    ['ISO-8859-4', 'Latin4/ISO-8859-4'],
    ['ISO-8859-5', 'Latin5/ISO-8859-5'],
    ['ISO-8859-6', 'Latin6/ISO-8859-6'],
    ['ISO-8859-7', 'Latin7/ISO-8859-7'],
    ['ISO-8859-8', 'Latin8/ISO-8859-8'],
    ['ISO-8859-9', 'Latin9/ISO-8859-9'],
    ['ISO-8859-10','Latin10/ISO-8859-10'],
    ['ISO-8859-13','Latin13/ISO-8859-13'],
    ['ISO-8859-14','Latin14/ISO-8859-14'],
    # NOTE(review): 'ISO8859-15' is missing a hyphen relative to the other
    # ISO-8859-* entries; left unchanged because the stored value may already
    # exist in saved data - confirm before normalizing.
    ['ISO8859-15','Latin15/ISO-8859-15'],
    ['Big5', 'BIG5'],
    ['EUC-JP','EUC-JP'],
    ['EUC-KR','EUC-KR'],
    ['GBK','GBK'],
    ['GB18030','GB18030'],
    ['Shift_JIS','Shift_JIS'],
    ['KOI8-R','KOI8-R'],
    ['KOI8-U','KOI8-U'],
    ['windows-874', 'Windows CP874'],
    ['windows-1250', 'Windows CP1250'],
    ['windows-1251', 'Windows CP1251'],
    ['windows-1252', 'Windows CP1252'],
    ['windows-1253', 'Windows CP1253'],
    ['windows-1254', 'Windows CP1254'],
    ['windows-1255', 'Windows CP1255'],
    ['windows-1256', 'Windows CP1256'],
    ['windows-1257', 'Windows CP1257'],
    ['windows-1258', 'Windows CP1258']
]
# [stored value, translated label] choices for metadata maintenance
# frequency (ISO 19115 MD_MaintenanceFrequencyCode-style values).
UPDATE_FREQUENCIES = [
    ['annually', _('Annually')],
    ['asNeeded', _('As Needed')],
    ['biannually', _('Biannually')],
    ['continual', _('Continual')],
    ['daily', _('Daily')],
    ['fortnightly', _('Fortnightly')],
    ['irregular', _('Irregular')],
    ['monthly', _('Monthly')],
    ['notPlanned', _('Not Planned')],
    ['quarterly', _('Quarterly')],
    ['unknown', _('Unknown')],
    ['weekly', _('Weekly')]
]
# [stored value, translated display label] choices for data restriction /
# licensing constraints.
CONSTRAINT_OPTIONS = [
    # Shortcuts added for convenience in Open Data cases.
    ['Public Domain Dedication and License (PDDL)',_('Public Domain Dedication and License (PDDL)')],
    ['Attribution License (ODC-By)', _('Attribution License (ODC-By)')],
    ['Open Database License (ODC-ODbL)',_('Open Database License (ODC-ODbL)')],
    ['CC-BY-SA',_('CC-BY-SA')],
    # ISO standard constraint options.
    ['copyright', _('Copyright')],
    # BUG FIX: display label typo 'Intellectual Porperty Rights' corrected.
    # The stored value (first element) is unchanged, so saved data is safe.
    ['intellectualPropertyRights', _('Intellectual Property Rights')],
    ['license', _('License')],
    ['otherRestrictions', _('Other Restrictions')],
    ['patent', _('patent')],
    ['patentPending', _('Patent Pending')],
    ['restricted', _('Restricted')],
    ['trademark', _('Trademark')],
    ['public', _('Public')],
    ['no restrictions', _('No Restrictions')]
]
# [stored value, display label] choices, modeled on ISO 19115
# MD_SpatialRepresentationTypeCode.
SPATIAL_REPRESENTATION_TYPES = [
    ['grid', _('Grid')],
    # NOTE(review): 'steroModel' looks like a typo for 'stereoModel'; left
    # unchanged because the stored value may already exist in saved data.
    ['steroModel', _('Stereo Model')],
    ['textTable', _('Text Table')],
    ['tin', 'TIN'],
    ['vector', 'Vector']
]
# [field name, translated label] pairs describing a metadata contact.
CONTACT_FIELDS = [
    ["name", _("Name")],
    ["organization", _("Organization")],
    ["position", _("Position")],
    ["voice", _("Voice")],
    ["facsimile", _("Fax")],
    ["delivery_point", _("Delivery Point")],
    ["city", _("City")],
    ["administrative_area", _("Administrative Area")],
    ["postal_code", _("Postal Code")],
    ["country", _("Country")],
    ["email", _("Email")],
    ["role", _("Role")]
]
# Default (empty) supplemental-information text for new records.
DEFAULT_SUPPLEMENTAL_INFORMATION=''
DEFAULT_CONTENT=_(
'<h3>The Harvard WorldMap Project</h3>\
<p>WorldMap is an open source web mapping system that is currently\
under construction. It is built to assist academic research and\
teaching as well as the general public and supports discovery,\
investigation, analysis, visualization, communication and archiving\
of multi-disciplinary, multi-source and multi-format data,\
organized spatially and temporally.</p>\
<p>The first instance of WorldMap, focused on the continent of\
Africa, is called AfricaMap. Since its beta release in November of\
2008, the framework has been implemented in several geographic\
locations with different research foci, including metro Boston,\
East Asia, Vermont, Harvard Forest and the city of Paris. These web\
mapping applications are used in courses as well as by individual\
researchers.</p>\
<h3>Introduction to the WorldMap Project</h3>\
<p>WorldMap solves the problem of discovering where things happen.\
It draws together an array of public maps and scholarly data to\
create a common source where users can:</p>\
<ol>\
<li>Interact with the best available public data for a\
city/region/continent</li>\
<li>See the whole of that area yet also zoom in to particular\
places</li>\
<li>Accumulate both contemporary and historical data supplied by\
researchers and make it permanently accessible online</li>\
<li>Work collaboratively across disciplines and organizations with\
spatial information in an online environment</li>\
</ol>\
<p>The WorldMap project aims to accomplish these goals in stages,\
with public and private support. It draws on the basic insight of\
geographic information systems that spatiotemporal data becomes\
more meaningful as more "layers" are added, and makes use of tiling\
and indexing approaches to facilitate rapid search and\
visualization of large volumes of disparate data.</p>\
<p>WorldMap aims to augment existing initiatives for globally\
sharing spatial data and technology such as <a target="_blank" href="http://www.gsdi.org/">GSDI</a> (Global Spatial Data\
Infrastructure).WorldMap makes use of <a target="_blank" href="http://www.opengeospatial.org/">OGC</a> (Open Geospatial\
Consortium) compliant web services such as <a target="_blank" href="http://en.wikipedia.org/wiki/Web_Map_Service">WMS</a> (Web\
Map Service), emerging open standards such as <a target="_blank" href="http://wiki.osgeo.org/wiki/Tile_Map_Service_Specification">WMS-C</a>\
(cached WMS), and standards-based metadata formats, to enable\
WorldMap data layers to be inserted into existing data\
infrastructures. <br>\
<br>\
All WorldMap source code will be made available as <a target="_blank" href="http://www.opensource.org/">Open Source</a> for others to use\
and improve upon.</p>'
)
class GeoNodeException(Exception):
    """Base exception type for GeoNode-specific errors raised in this module."""
#class ResourceBase(models.Model):
# pass
class Contact(models.Model):
    """Profile/contact details linked (optionally) to a Django User."""
    user = models.ForeignKey(User, blank=True, null=True)
    name = models.CharField(_('Individual Name'), max_length=255, blank=True, null=True)
    organization = models.CharField(_('Organization Name'), max_length=255, blank=True, null=True)
    position = models.CharField(_('Position Name'), max_length=255, blank=True, null=True)
    voice = models.CharField(_('Phone'), max_length=255, blank=True, null=True)
    fax = models.CharField(_('Fax'), max_length=255, blank=True, null=True)
    delivery = models.CharField(_('Address'), max_length=255, blank=True, null=True)
    city = models.CharField(_('City'), max_length=255, blank=True, null=True)
    area = models.CharField(_('State/Province'), max_length=255, blank=True, null=True)
    zipcode = models.CharField(_('Postal Code'), max_length=255, blank=True, null=True)
    country = models.CharField(choices=COUNTRIES, max_length=3, blank=True, null=True)
    email = models.EmailField(blank=True, null=True, unique=False)
    display_email = models.BooleanField(_('Display my email address on my profile'), blank=False, default=False, null=False)
    is_org_member = models.BooleanField(_('Affiliated with Harvard'), blank=True, null=False, default=False)
    # BUG FIX: the original passed default=datetime.today(), which is evaluated
    # once at import time, so every new row silently shared the date the server
    # process started.  Passing the callable defers evaluation to row creation.
    member_expiration_dt = models.DateField(_('Harvard affiliation expires on: '), blank=False, null=False, default=datetime.today)
    keywords = TaggableManager(_('keywords'), help_text=_("A space or comma-separated list of keywords"), blank=True)
    is_certifier = models.BooleanField(_('Allowed to certify maps & layers'), blank=False, null=False, default=False)
    # Creation timestamp, set automatically on first save.
    created_dttm = models.DateTimeField(auto_now_add=True)
    # Modification timestamp, refreshed automatically on every save.
    last_modified = models.DateTimeField(auto_now=True)

    def clean(self):
        """Require a name or an organization, and reject duplicate emails.

        :raises ValidationError: if both name and organization are empty, or
            the email belongs to a different registered user.
        """
        # The specification says that either name or organization should be provided.
        valid_name = (self.name != None and self.name != '')
        valid_organization = (self.organization != None and self.organization != '')
        if not (valid_name or valid_organization):
            raise ValidationError(_('Either name or organization should be provided'))
        # Exclude this profile's own user when checking for duplicate emails.
        if self.email and User.objects.filter(email=self.email).exclude(username=self.user.username if self.user else '').count():
            raise ValidationError(_('The email address is already registered.'))

    def get_absolute_url(self):
        """Reverse to the public profile page of the associated user."""
        return ('profiles_profile_detail', (), {'username': self.user.username})
    get_absolute_url = models.permalink(get_absolute_url)

    def __unicode__(self):
        return u"%s (%s)" % (self.name if self.name else self.user.username, self.organization)

    def username(self):
        """Display name: the contact name if set, otherwise the login name."""
        return u"%s" % (self.name if self.name else self.user.username)
def create_user_profile(sender, instance, created, **kwargs):
    """post_save handler ensuring every User has a matching Contact profile."""
    # NOTE(review): the local name 'created' shadows the signal argument; the
    # get_or_create result is otherwise unused here.
    profile, created = Contact.objects.get_or_create(user=instance, defaults={'name': instance.username})
# Create/sync the profile every time a User row is saved.
signals.post_save.connect(create_user_profile, sender=User)
# Per-SRID projection parameters handed to the map viewer configuration.
_viewer_projection_lookup = {
    "EPSG:900913": {
        "maxResolution": 156543.03390625,
        "units": "m",
        "maxExtent": [-20037508.34,-20037508.34,20037508.34,20037508.34],
    },
    "EPSG:4326": {
        # NOTE(review): this key is snake_case while EPSG:900913 uses
        # camelCase 'maxResolution' - confirm which spelling the viewer
        # actually reads before relying on either entry.
        "max_resolution": (180 - (-180)) / 256,
        "units": "degrees",
        "maxExtent": [-180, -90, 180, 90]
    }
}
def _get_viewer_projection_info(srid):
    """Return viewer projection parameters for *srid*, or {} if unknown."""
    #TODO: Look up projection details in EPSG database
    return _viewer_projection_lookup.get(srid, {})
# Lazily-initialized service handles shared across this module.
_wms = None
_csw = None
# GeoServer admin credentials from Django settings (used by the catalogs below).
_user, _password = settings.GEOSERVER_CREDENTIALS
#def get_wms():
# global _wms
# wms_url = settings.GEOSERVER_BASE_URL + "wms?request=GetCapabilities&version=1.1.0"
# netloc = urlparse(wms_url).netloc
# http = httplib2.Http()
# http.add_credentials(_user, _password)
# http.authorizations.append(
# httplib2.BasicAuthentication(
# (_user, _password),
# netloc,
# wms_url,
# {},
# None,
# None,
# http
# )
# )
# body = http.request(wms_url)[1]
# _wms = WebMapService(wms_url, xml=body)
def get_csw():
    """Create a CSW client for the configured GeoNetwork, cache it, return it."""
    global _csw
    base_url = settings.GEONETWORK_BASE_URL
    _csw = CatalogueServiceWeb("%ssrv/en/csw" % base_url)
    return _csw
class LayerManager(models.Manager):
    """Default manager for Layer: wraps the GeoServer and GeoNetwork catalogs
    and provides bulk synchronization helpers.  (Python 2 code: uses
    ``print >>`` and ``except Exception, e`` syntax.)"""
    def __init__(self):
        models.Manager.__init__(self)
        url = "%srest" % settings.GEOSERVER_BASE_URL
        self.gs_catalog = Catalog(url, _user, _password)
        self.geonetwork = GeoNetwork(settings.GEONETWORK_BASE_URL, settings.GEONETWORK_CREDENTIALS[0], settings.GEONETWORK_CREDENTIALS[1])
    @property
    def gn_catalog(self):
        # check if geonetwork is logged in
        if not self.geonetwork.connected:
            self.geonetwork.login()
        # Make sure to logout after you have finished using it.
        return self.geonetwork
    def admin_contact(self):
        """Return (creating if necessary) the Contact for the first superuser."""
        # this assumes there is at least one superuser
        superusers = User.objects.filter(is_superuser=True).order_by('id')
        if superusers.count() == 0:
            raise RuntimeError('GeoNode needs at least one admin/superuser set')
        contact = Contact.objects.get_or_create(user=superusers[0],
                                                defaults={"name": "Geonode Admin"})[0]
        return contact
    def default_poc(self):
        # Default point-of-contact falls back to the admin contact.
        return self.admin_contact()
    def default_metadata_author(self):
        # Default metadata author falls back to the admin contact.
        return self.admin_contact()
    def drop_incomplete_layers(self, ignore_errors=True, verbosity=1, console=sys.stdout, owner=None, max_views=0):
        """Report layers that look incomplete (no topic category, unused, few views).

        NOTE(review): despite the 'Delete'/'deleted' messages, this method only
        prints - nothing is removed from the database here.
        """
        # NOTE(review): datetime.today is passed uncalled; presumably
        # datetime.today() was intended - confirm before relying on this filter.
        bad_layers = Layer.objects.filter(topic_category_id__isnull=True).filter(created_dttm__lt=datetime.today) \
            .exclude(owner__isnull=True).exclude(owner_id=1)
        lc = 0
        for layer in bad_layers:
            maplayers = MapLayer.objects.filter(name=layer.typename)
            if maplayers.count() == 0:
                stats = LayerStats.objects.filter(layer=layer)
                if len(stats) == 0 or stats[0].visits <= max_views:
                    print >> console, "Delete %s" % layer.typename
                    lc+=1
                else:
                    print >> console, "Skip %s, has been viewed more than %d times" % (layer.typename, max_views)
            else:
                print >> console, "Skip %s, has been included in a map" % (layer.typename)
        print >> console, "%d layers deleted" % lc
    def slurp(self, ignore_errors=True, verbosity=1, console=sys.stdout, owner=None, new_only=False, lnames=None, workspace=None):
        """Configure the layers available in GeoServer in GeoNode.

        It returns a list of dictionaries with the name of the layer,
        the result of the operation and the errors and traceback if it failed.
        """
        if verbosity > 1:
            print >> console, "Inspecting the available layers in GeoServer ..."
        cat = self.gs_catalog
        resources = []
        if workspace is not None:
            print >> console, "Workspace is %s" % workspace
            workspace = cat.get_workspace(workspace)
            resources = cat.get_resources(workspace=workspace)
        output = []
        # check lnames
        if lnames is not None:
            for l in lnames:
                if verbosity > 1:
                    print >> console, "Getting %s" % l
                resource = cat.get_resource(l)
                if resource:
                    resources.append(resource)
        else:
            # NOTE(review): this overwrites any workspace-filtered resource
            # list gathered above - confirm workspace+no-lnames is intended
            # to fetch everything.
            if verbosity > 1:
                print >> console, "Getting all resources"
            resources = cat.get_resources()
        number = len(resources)
        if verbosity > 1:
            msg = "Found %d layers, starting processing" % number
            print >> console, msg
        for i, resource in enumerate(resources):
            name = resource.name
            store = resource.store
            workspace = store.workspace
            if new_only and Layer.objects.filter(name=name).exists():
                continue
            elif lnames is not None and name not in lnames:
                continue
            try:
                layer, created = Layer.objects.get_or_create(name=name, defaults = {
                    "workspace": workspace.name,
                    "store": store.name,
                    "storeType": store.resource_type,
                    "typename": "%s:%s" % (workspace.name, resource.name),
                    "title": resource.title or 'No title provided',
                    "abstract": resource.abstract or 'No abstract provided',
                    "owner": owner,
                    "uuid": str(uuid.uuid4())
                })
                # Backfill spatial metadata from GeoServer when missing.
                if layer is not None and layer.bbox is None:
                    layer._populate_from_gs()
                layer.save()
            except Exception, e:
                if ignore_errors:
                    status = 'failed'
                    exception_type, error, traceback = sys.exc_info()
                else:
                    if verbosity > 0:
                        msg = "Stopping process because --ignore-errors was not set and an error was found."
                        print >> sys.stderr, msg
                    # Python 2 three-argument raise: preserves the traceback.
                    raise Exception('Failed to process %s' % resource.name, e), None, sys.exc_info()[2]
            else:
                if created:
                    layer.set_default_permissions()
                    status = 'created'
                else:
                    status = 'updated'
                #Create layer attributes if they don't already exist
                try:
                    if layer.attribute_names is not None:
                        # NOTE(review): 'iter' shadows the builtin of the same name.
                        iter = 1
                        for field, ftype in layer.attribute_names.iteritems():
                            if field is not None:
                                la, created = LayerAttribute.objects.get_or_create(layer=layer, attribute=field, attribute_type=ftype)
                                if created:
                                    la.attribute_label = field
                                    la.searchable = (ftype == "xsd:string")
                                    la.display_order = iter
                                    la.save()
                                    msg = ("Created [%s] attribute for [%s]", field, layer.name)
                                iter += 1
                        # NOTE(review): msg here may be unbound or stale if no
                        # attribute was newly created - confirm intended output.
                        print >> console, msg
                except Exception, e:
                    msg = ("Could not create attributes for [%s] : [%s]", layer.name, str(e))
                    print >> console, msg
                finally:
                    pass
            msg = "[%s] Layer %s (%d/%d)" % (status, name, i+1, number)
            info = {'name': name, 'status': status}
            if status == 'failed':
                info['traceback'] = traceback
                info['exception_type'] = exception_type
                info['error'] = error
            output.append(info)
            if verbosity > 0:
                print >> console, msg
        return output
    def update_bboxes(self):
        """Re-populate srs/bbox fields from GeoServer for layers missing them."""
        for layer in Layer.objects.all():
            logger.debug('Process %s', layer.name)
            if layer.srs is None or layer.llbbox is None or layer.bbox is None:
                logger.debug('Process %s', layer.name)
                layer._populate_from_gs()
                layer.save()
    def update_stores(self):
        """Sync each Layer's store name with the current GeoServer store."""
        cat = self.gs_catalog
        for layer in Layer.objects.all():
            logger.debug('Process %s', layer.name)
            resource = cat.get_resource(layer.name)
            if resource:
                store = resource.store
                if layer.store != store.name:
                    logger.debug('Change store name of %s from %s to %s', layer.name, layer.store, store.name)
                    layer.store = store.name
                    layer.save()
class LayerCategory(models.Model):
    """Topic category used to group layers (see ``Layer.topic_category``)."""
    name = models.CharField(_('Category Name'), max_length=255, blank=True, null=True, unique=True)
    title = models.CharField(_('Category Title'), max_length=255, blank=True, null=True, unique=True)
    description = models.TextField(_('Category Description'), blank=True, null=True)
    created_dttm = models.DateTimeField(auto_now_add=True)
    """
    The date/time the object was created.
    """
    last_modified = models.DateTimeField(auto_now=True)
    """
    The last time the object was modified.
    """

    def __str__(self):
        # "%s" formatting renders None as "None" rather than failing.
        return "%s" % self.name

    class Meta:
        verbose_name_plural = 'Layer Categories'
class Layer(models.Model, PermissionLevelMixin):
#class Layer(ResourceBase, PermissionLevelMixin):
    """
    Layer Object loosely based on ISO 19115:2003
    """

    VALID_DATE_TYPES = [(x.lower(), _(x)) for x in ['Creation', 'Publication', 'Revision']]

    # internal fields
    objects = LayerManager()
    workspace = models.CharField(max_length=128)
    store = models.CharField(max_length=128)
    storeType = models.CharField(max_length=128)
    name = models.CharField(max_length=128)
    uuid = models.CharField(max_length=36)
    # "workspace:name", unique across the catalog.
    typename = models.CharField(max_length=128, unique=True)

    owner = models.ForeignKey(User, blank=True, null=True)
    contacts = models.ManyToManyField(Contact, through='ContactRole')

    # section 1
    title = models.CharField(_('title'), max_length=255)
    date = models.DateTimeField(_('date'), default = datetime.now) # passing the method itself, not the result
    date_type = models.CharField(_('date type'), max_length=255, choices=VALID_DATE_TYPES, default='publication')
    edition = models.CharField(_('edition'), max_length=255, blank=True, null=True)
    abstract = models.TextField(_('abstract'), blank=False)
    purpose = models.TextField(_('purpose'), null=True, blank=True)
    maintenance_frequency = models.CharField(_('maintenance frequency'), max_length=255, choices=UPDATE_FREQUENCIES, blank=True, null=True)

    # section 2
    # see poc property definition below

    # section 3
    keywords = TaggableManager(_('keywords'), help_text=_("A space or comma-separated list of keywords"), blank=True)
    keywords_region = models.CharField(_('keywords region'), max_length=3, choices=KEYWORD_REGIONS + COUNTRIES, default = 'GLO')
    constraints_use = models.CharField(_('constraints use'), max_length=255, choices=CONSTRAINT_OPTIONS, default='copyright')
    constraints_other = models.TextField(_('constraints other'), blank=True, null=True)
    spatial_representation_type = models.CharField(_('spatial representation type'), max_length=255, choices=SPATIAL_REPRESENTATION_TYPES, blank=True, null=True)

    # Section 4
    language = models.CharField(_('language'), max_length=3, choices=ALL_LANGUAGES, default='eng')
    topic_category = models.ForeignKey(LayerCategory, blank=True, null=True)

    # Section 5
    temporal_extent_start = models.CharField(_('temporal extent start'), max_length=24, blank=True, null=True)
    temporal_extent_end = models.CharField(_('temporal extent end'), max_length=24, blank=True, null=True)
    # WKT polygon; see set_bbox().
    geographic_bounding_box = models.TextField(_('geographic bounding box'))
    supplemental_information = models.TextField(_('supplemental information'), blank=True, null=True, default='')

    # Section 6
    distribution_url = models.TextField(_('distribution URL'), blank=True, null=True)
    distribution_description = models.TextField(_('distribution description'), blank=True, null=True)

    # WMS attributes; bbox/llbbox are str() of a [minx, miny, maxx, maxy] list.
    srs = models.CharField(_('SRS'), max_length=24, blank=True, null=True, default="EPSG:4326")
    bbox = models.TextField(_('bbox'), blank=True, null=True)
    llbbox = models.TextField(_('llbbox'), blank=True, null=True)

    created_dttm = models.DateTimeField(auto_now_add=True)
    """
    The date/time the object was created.
    """
    last_modified = models.DateTimeField(auto_now=True)
    """
    The last time the object was modified.
    """
    downloadable = models.BooleanField(_('Downloadable?'), blank=False, null=False, default=True)
    """
    Is the layer downloadable?
    """
    in_gazetteer = models.BooleanField(_('In Gazetteer?'), blank=False, null=False, default=False)
    """
    Is the layer in the gazetteer?
    """
    gazetteer_project = models.CharField(_("Gazetteer Project"), max_length=128, blank=True, null=True)
    """
    Gazetteer project that the layer is associated with
    """

    # Section 8
    data_quality_statement = models.TextField(_('data quality statement'), blank=True, null=True)

    # Section 9
    # see metadata_author property definition below
def add_as_join_target(self):
if not self.id:
return 'n/a'
admin_url = reverse('admin:datatables_jointarget_add', args=())
add_as_target_link = '%s?layer=%s' % (admin_url, self.id)
return '<a href="%s">Add as Join Target</a>' % (add_as_target_link)
add_as_join_target.allow_tags = True
def llbbox_coords(self):
try:
return [float(n) for n in re.findall('[0-9\.\-]+', self.llbbox)]
except:
return [-180.0,-90.0,180.0,90.0]
def bbox_coords(self):
try:
return [float(n) for n in re.findall('[0-9\.\-]+', self.bbox)]
except:
return self.llbbox_coords
    def download_links(self):
        """Returns a list of (extension, label, URL) tuples for downloads of this
        data in various formats: WFS formats for vectors, WCS for rasters, and
        WMS/KML for both. Returns None when the layer is not downloadable."""
        if not self.downloadable:
            return None
        bbox = self.llbbox_coords()
        # Aspect ratio of the (clamped) extent drives the rendered WMS image size.
        dx = float(min(180,bbox[2])) - float(max(-180,(bbox[0])))
        dy = float(min(90,bbox[3])) - float(max(-90,bbox[1]))
        dataAspect = 1 if dy == 0 else dx / dy
        height = 550
        width = int(height * dataAspect)
        # bbox: this.adjustBounds(widthAdjust, heightAdjust, values.llbbox).toString(),
        srs = 'EPSG:4326' # bbox[4] might be None
        bbox_string = ",".join([str(bbox[0]), str(bbox[1]), str(bbox[2]), str(bbox[3])])
        links = []
        if self.resource.resource_type == "featureType":
            def wfs_link(mime,extra_params,ext):
                # NOTE(review): extra_params is accepted but never merged into the
                # query string, so the per-format options below are unused -- confirm.
                return settings.SITEURL + "download/wfs/" + str(self.id) + "/" + ext + "?" + urllib.urlencode({
                    'service': 'WFS',
                    'version': '1.0.0',
                    'request': 'GetFeature',
                    'typename': self.typename,
                    'outputFormat': mime,
                    'format_options': 'charset:UTF-8' #TODO: make this a settings property?
                })
            # (extension, label, WFS outputFormat, extra params)
            types = [
                ("zip", _("Zipped Shapefile"), "SHAPE-ZIP", {'format_options': 'charset:UTF-8'}),
                ("gml", _("GML 2.0"), "gml2", {}),
                ("gml", _("GML 3.1.1"), "text/xml; subtype=gml/3.1.1", {}),
                ("csv", _("CSV"), "csv", {}),
                ("xls", _("Excel"), "excel", {}),
                ("json", _("GeoJSON"), "json", {})
            ]
            links.extend((ext, name, wfs_link(mime, extra_params, ext)) for ext, name, mime, extra_params in types)
        elif self.resource.resource_type == "coverage":
            try:
                # Ask WCS for the grid envelope so the GetCoverage request can use
                # the coverage's native pixel dimensions.
                client = httplib2.Http()
                description_url = settings.SITEURL + "download/wcs/" + str(self.id) + "/mime" + "?" + urllib.urlencode({
                        "service": "WCS",
                        "version": "1.0.0",
                        "request": "DescribeCoverage",
                        "coverage": self.typename
                    })
                content = client.request(description_url)[1]
                doc = etree.fromstring(content)
                extent = doc.find(".//%(gml)slimits/%(gml)sGridEnvelope" % {"gml": "{http://www.opengis.net/gml}"})
                low = extent.find("{http://www.opengis.net/gml}low").text.split()
                high = extent.find("{http://www.opengis.net/gml}high").text.split()
                w, h = [int(h) - int(l) for (h, l) in zip(high, low)]
                def wcs_link(mime,ext):
                    return settings.SITEURL + "download/wcs/" + str(self.id) + "/" + ext + "?" + urllib.urlencode({
                        "service": "WCS",
                        "version": "1.0.0",
                        "request": "GetCoverage",
                        "CRS": "EPSG:4326",
                        "height": h,
                        "width": w,
                        "coverage": self.typename,
                        "bbox": bbox_string,
                        "format": mime
                    })
                types = [("tif", "GeoTIFF", "geotiff")]
                links.extend([(ext, name, wcs_link(mime,ext)) for (ext, name, mime) in types])
            except Exception, e:
                # if something is wrong with WCS we probably don't want to link
                # to it anyway
                # But at least this indicates a problem
                notiff = mark_safe("<del>GeoTIFF</del>")
                links.extend([("tiff",notiff,"#")])
        # WMS-rendered downloads are available for both vectors and rasters.
        def wms_link(mime, ext):
            return settings.SITEURL + "download/wms/" + str(self.id) + "/" + ext + "?" + urllib.urlencode({
                'service': 'WMS',
                'request': 'GetMap',
                'layers': self.typename,
                'format': mime,
                'height': height,
                'width': width,
                'srs': srs,
                'bbox': bbox_string
            })
        types = [
            ("tiff", _("GeoTIFF"), "image/geotiff"),
            ("jpg", _("JPEG"), "image/jpeg"),
            ("pdf", _("PDF"), "application/pdf"),
            ("png", _("PNG"), "image/png")
        ]
        links.extend((ext, name, wms_link(mime,ext)) for ext, name, mime in types)
        # KML reflector links: one-shot download vs. live network link.
        kml_reflector_link_download = settings.SITEURL + "download/wms_kml/" + str(self.id) + "/kml" + "?" + urllib.urlencode({
            'layers': self.typename,
            'mode': "download"
        })
        kml_reflector_link_view = settings.SITEURL + "download/wms_kml/" + str(self.id) + "/kml" + "?" + urllib.urlencode({
            'layers': self.typename,
            'mode': "refresh"
        })
        links.append(("KML", _("KML"), kml_reflector_link_download))
        links.append(("KML", _("View in Google Earth"), kml_reflector_link_view))
        return links
    def verify(self):
        """Makes sure the state of the layer is consistent in GeoServer and GeoNetwork.

        Raises GeoNodeException when no CSW record can be retrieved for this
        layer's uuid.
        """
        # Check the layer is in the GeoNetwork catalog and points back to get_absolute_url
        if(_csw is None): # Might need to re-cache, nothing equivalent to _wms.contents?
            # NOTE(review): the return value is discarded here; presumably
            # get_csw() sets the module-global _csw as a side effect -- confirm
            # (metadata_csw() below assigns it explicitly).
            get_csw()
        if _csw is not None:
            try:
                _csw.getrecordbyid([self.uuid])
                csw_layer = _csw.records.get(self.uuid)
            except:
                # Any lookup failure is treated as a missing record.
                msg = "CSW Record Missing for layer [%s]" % self.typename
                raise GeoNodeException(msg)
@property
def attributes(self):
"""
Used for table joins. See geonode.contrib.datatables
"""
return self.attribute_set.exclude(attribute='the_geom')
def layer_attributes(self):
attribute_fields = cache.get('layer_searchfields_' + self.typename)
if attribute_fields is None:
logger.debug("Create searchfields for %s", self.typename)
attribute_fields = []
attributes = self.attribute_set.filter(visible=True).order_by('display_order')
for la in attributes:
attribute_fields.append( {"id": la.attribute, "header": la.attribute_label, "searchable" : la.searchable})
cache.add('layer_searchfields_' + self.typename, attribute_fields)
logger.debug("cache created for layer %s", self.typename)
return attribute_fields
def attribute_config(self):
#Get custom attribute sort order and labels if any
cfg = {}
visible_attributes = self.attribute_set.visible()
if (visible_attributes.count() > 0):
cfg["getFeatureInfo"] = {
"fields": [l.attribute for l in visible_attributes],
"propertyNames": dict([(l.attribute,l.attribute_label) for l in visible_attributes])
}
return cfg
def maps(self):
"""Return a list of all the maps that use this layer"""
local_wms = "%swms" % settings.GEOSERVER_BASE_URL
return set([layer.map for layer in MapLayer.objects.filter(ows_url=local_wms, name=self.typename).select_related()])
# def metadata(self):
# if (_wms is None) or (self.typename not in _wms.contents):
# get_wms()
# wms_url = "%swms?request=GetCapabilities" % settings.GEOSERVER_BASE_URL
# netloc = urlparse(wms_url).netloc
# http = httplib2.Http()
# http.add_credentials(_user, _password)
# http.authorizations.append(
# httplib2.BasicAuthentication(
# (_user, _password),
# netloc,
# wms_url,
# {},
# None,
# None,
# http
# )
# )
# response, body = http.request(wms_url)
# _wms = WebMapService(wms_url, xml=body)
# return _wms[self.typename]
    def __setattr__(self, name, value):
        # Pure pass-through to the default implementation; presumably kept as a
        # hook point for intercepting attribute writes -- confirm before removing.
        return super(Layer, self).__setattr__(name, value)
    def metadata_csw(self):
        """Fetch this layer's ISO 19139 (gmd) record from the CSW catalog.

        Lazily initializes and reuses the module-global ``_csw`` client.
        Returns the record, or None when the lookup found nothing.
        """
        global _csw
        if(_csw is None):
            _csw = get_csw()
        _csw.getrecordbyid([self.uuid], outputschema = 'http://www.isotc211.org/2005/gmd')
        return _csw.records.get(self.uuid)
    @property
    def attribute_names(self):
        """Ordered mapping of attribute name -> type, discovered from the OWS
        service: WFS DescribeFeatureType for vector layers, WCS DescribeCoverage
        axis keys for coverages. Returns {} on any request/parse error.

        NOTE(review): falls through (returns None implicitly) for any other
        resource_type -- callers such as slurp() check for None.
        """
        from ordereddict import OrderedDict
        if self.resource.resource_type == "featureType":
            dft_url = settings.GEOSERVER_BASE_URL + "wfs?" + urllib.urlencode({
                "service": "wfs",
                "version": "1.0.0",
                "request": "DescribeFeatureType",
                "typename": self.typename
            })
            try:
                http = httplib2.Http()
                http.add_credentials(_user, _password)
                netloc = urlparse(dft_url).netloc
                http.authorizations.append(
                    httplib2.BasicAuthentication(
                        (_user, _password),
                        netloc,
                        dft_url,
                        {},
                        None,
                        None,
                        http
                    ))
                response, body = http.request(dft_url)
                doc = etree.fromstring(body)
                # Attribute declarations live under the xsd:extension/sequence.
                path = ".//{xsd}extension/{xsd}sequence/{xsd}element".format(xsd="{http://www.w3.org/2001/XMLSchema}")
                atts = OrderedDict({})
                for n in doc.findall(path):
                    logger.info("RESOURCE ATT %s", n.attrib["name"])
                    atts[n.attrib["name"]] = n.attrib["type"]
            except Exception:
                atts = {}
            return atts
        elif self.resource.resource_type == "coverage":
            dc_url = settings.GEOSERVER_BASE_URL + "wcs?" + urllib.urlencode({
                "service": "wcs",
                "version": "1.1.0",
                "request": "DescribeCoverage",
                "identifiers": self.typename
            })
            try:
                http = httplib2.Http()
                http.add_credentials(_user, _password)
                netloc = urlparse(dc_url).netloc
                http.authorizations.append(
                    httplib2.BasicAuthentication(
                        (_user, _password),
                        netloc,
                        dc_url,
                        {},
                        None,
                        None,
                        http
                    ))
                response, body = http.request(dc_url)
                doc = etree.fromstring(body)
                path = ".//{wcs}Axis/{wcs}AvailableKeys/{wcs}Key".format(wcs="{http://www.opengis.net/wcs/1.1.1}")
                atts = OrderedDict({})
                for n in doc.findall(path):
                    atts[n.attrib["name"]] = n.attrib["type"]
            except Exception:
                atts = {}
            return atts
@property
def display_type(self):
return ({
"dataStore" : "Vector Data",
"coverageStore": "Raster Data",
}).get(self.storeType, "Data")
    def delete_from_geoserver(self):
        """Cascade-delete this layer's resource (and its dependents) from GeoServer."""
        cascading_delete(Layer.objects.gs_catalog, self.resource)

    def delete_from_geonetwork(self):
        """Remove this layer's metadata record from GeoNetwork."""
        gn = Layer.objects.gn_catalog
        gn.delete_layer(self)
        gn.logout()

    def save_to_geonetwork(self):
        """Create or update this layer's GeoNetwork record; on create, also
        store the returned metadata link on the GeoServer resource."""
        gn = Layer.objects.gn_catalog
        record = gn.get_by_uuid(self.uuid)
        if record is None:
            md_link = gn.create_from_layer(self)
            self.metadata_links = [("text/xml", "TC211", md_link)]
        else:
            gn.update_layer(self)
        gn.logout()
    @property
    def resource(self):
        """The gsconfig resource backing this layer, cached per instance.

        Returns None (after logging) when the store cannot be found; raises
        RuntimeError when GeoServer itself is unreachable.
        """
        if not hasattr(self, "_resource_cache"):
            cat = Layer.objects.gs_catalog
            try:
                ws = cat.get_workspace(self.workspace)
            except AttributeError:
                # Geoserver is not running
                raise RuntimeError("Geoserver cannot be accessed, are you sure it is running in: %s" %
                                    (settings.GEOSERVER_BASE_URL))
            try:
                store = cat.get_store(self.store, ws)
                self._resource_cache = cat.get_resource(self.name, store)
            except Exception as e:
                logger.error("Store for %s does not exist: %s" % (self.name, str(e)))
                return None
        return self._resource_cache
def _get_metadata_links(self):
return self.resource.metadata_links
def _set_metadata_links(self, md_links):
try:
self.resource.metadata_links = md_links
except Exception, ex:
logger.error("Exception occurred in _set_metadata_links for %s: %s", str(ex))
metadata_links = property(_get_metadata_links, _set_metadata_links)
    def _get_default_style(self):
        # Delegates to the GeoServer publishing configuration.
        return self.publishing.default_style

    def _set_default_style(self, style):
        self.publishing.default_style = style

    # GeoServer-side default style for this layer.
    default_style = property(_get_default_style, _set_default_style)

    def _get_styles(self):
        return self.publishing.styles

    def _set_styles(self, styles):
        self.publishing.styles = styles

    # GeoServer-side alternate styles for this layer.
    styles = property(_get_styles, _set_styles)
@property
def service_type(self):
if self.storeType == 'coverageStore':
return "WCS"
if self.storeType == 'dataStore':
return "WFS"
    @property
    def publishing(self):
        """The gsconfig layer (publishing configuration) for this layer,
        fetched once and cached on the instance."""
        if not hasattr(self, "_publishing_cache"):
            cat = Layer.objects.gs_catalog
            self._publishing_cache = cat.get_layer(self.name)
        return self._publishing_cache
@property
def poc_role(self):
role = Role.objects.get(value='pointOfContact')
return role
@property
def metadata_author_role(self):
role = Role.objects.get(value='author')
return role
def _set_poc(self, poc):
# reset any poc asignation to this layer
ContactRole.objects.filter(role=self.poc_role, layer=self).delete()
#create the new assignation
ContactRole.objects.create(role=self.poc_role, layer=self, contact=poc)
def _get_poc(self):
try:
the_poc = ContactRole.objects.get(role=self.poc_role, layer=self).contact
except ContactRole.DoesNotExist:
the_poc = None
return the_poc
poc = property(_get_poc, _set_poc)
def _set_metadata_author(self, metadata_author):
# reset any metadata_author asignation to this layer
ContactRole.objects.filter(role=self.metadata_author_role, layer=self).delete()
#create the new assignation
ContactRole.objects.create(role=self.metadata_author_role,
layer=self, contact=metadata_author)
def _get_metadata_author(self):
try:
the_ma = ContactRole.objects.get(role=self.metadata_author_role, layer=self).contact
except ContactRole.DoesNotExist:
the_ma = None
return the_ma
metadata_author = property(_get_metadata_author, _set_metadata_author)
    def save_to_geoserver(self):
        """Push title/abstract/name/keywords and the GeoNetwork metadata link
        to the GeoServer resource, and set attribution from the point of
        contact on the publishing config. No-op when the resource is missing."""
        if self.resource is None:
            return
        if hasattr(self, "_resource_cache"):
            gn = Layer.objects.gn_catalog
            self.resource.title = self.title
            self.resource.abstract = self.abstract
            self.resource.name= self.name
            self.resource.metadata_links = [('text/xml', 'TC211', gn.url_for_uuid(self.uuid))]
            self.resource.keywords = self.keyword_list()
            Layer.objects.gs_catalog.save(self._resource_cache)
            gn.logout()
        if self.poc and self.poc.user:
            self.publishing.attribution = str(self.poc.user)
            profile = Contact.objects.get(user=self.poc.user)
            # SITEURL ends with '/'; strip it before appending the absolute path.
            self.publishing.attribution_link = settings.SITEURL[:-1] + profile.get_absolute_url()
            Layer.objects.gs_catalog.save(self.publishing)
def _populate_from_gs(self):
gs_store = Layer.objects.gs_catalog.get_store(self.store)
gs_resource = Layer.objects.gs_catalog.get_resource(self.name, gs_store)
if gs_resource is None:
return
self.srs = gs_resource.projection
self.llbbox = str([ max(-180,float(gs_resource.latlon_bbox[0])),max(-90,float(gs_resource.latlon_bbox[2])),min(180,float(gs_resource.latlon_bbox[1])),min(90,float(gs_resource.latlon_bbox[3]))])
if self.srs == 'EPSG:4326':
self.bbox = self.llbbox
else:
self.bbox = str([ float(gs_resource.native_bbox[0]),float(gs_resource.native_bbox[2]),float(gs_resource.native_bbox[1]),float(gs_resource.native_bbox[3])])
if self.geographic_bounding_box is '' or self.geographic_bounding_box is None:
self.set_bbox(gs_resource.native_bbox, srs=self.srs)
## Save using filter/update to avoid triggering post_save_layer
Layer.objects.filter(id=self.id).update(srs = self.srs, llbbox = self.llbbox, bbox=self.bbox, geographic_bounding_box = self.geographic_bounding_box)
def _autopopulate(self):
if self.poc is None:
self.poc = Layer.objects.default_poc()
if self.metadata_author is None:
self.metadata_author = Layer.objects.default_metadata_author()
if self.abstract == '' or self.abstract is None:
self.abstract = 'No abstract provided'
if self.title == '' or self.title is None:
self.title = self.name
    def _populate_from_gn(self):
        """Pull keywords and distribution link/description from the layer's
        GeoNetwork CSW record; no-op when no record exists."""
        meta = self.metadata_csw()
        if meta is None:
            return
        # Flatten the per-thesaurus keyword lists into a single list.
        kw_list = reduce(
                lambda x, y: x + y["keywords"],
                meta.identification.keywords,
                [])
        kw_list = [l for l in kw_list if l is not None]
        self.keywords.add(*kw_list)
        if hasattr(meta.distribution, 'online'):
            # Only adopt the link when exactly one plain HTTP link is present.
            onlineresources = [r for r in meta.distribution.online if r.protocol == "WWW:LINK-1.0-http--link"]
            if len(onlineresources) == 1:
                res = onlineresources[0]
                self.distribution_url = res.url
                self.distribution_description = res.description
def keyword_list(self):
keywords_qs = self.keywords.all()
if keywords_qs:
return [kw.name for kw in keywords_qs]
else:
return []
def set_bbox(self, box, srs=None):
"""
Sets a bounding box based on the gsconfig native_box param.
"""
if srs:
srid = srs
else:
srid = box[4]
self.geographic_bounding_box = bbox_to_wkt(box[0], box[1], box[2], box[3], srid=srid )
def get_absolute_url(self):
return "/data/%s" % (self.typename)
def __str__(self):
return "%s Layer" % self.typename
    class Meta:
        # custom permissions,
        # change and delete are standard in django
        permissions = (('view_layer', 'Can view'),
                       ('change_layer_permissions', "Can change permissions"), )

    # Permission Level Constants
    # LEVEL_NONE inherited
    LEVEL_READ  = 'layer_readonly'   # may view the layer
    LEVEL_WRITE = 'layer_readwrite'  # may edit the layer
    LEVEL_ADMIN = 'layer_admin'      # full control, incl. permissions
    def set_default_permissions(self):
        """Reset permissions to the defaults: read access for anonymous,
        authenticated and custom-group users; all per-user grants removed;
        admin level for the owner (when set)."""
        self.set_gen_level(ANONYMOUS_USERS, self.LEVEL_READ)
        self.set_gen_level(AUTHENTICATED_USERS, self.LEVEL_READ)
        self.set_gen_level(CUSTOM_GROUP_USERS, self.LEVEL_READ)

        # remove specific user permissions
        current_perms =  self.get_all_level_info()
        for username in current_perms['users'].keys():
            user = User.objects.get(username=username)
            self.set_user_level(user, self.LEVEL_NONE)

        # assign owner admin privs
        if self.owner:
            self.set_user_level(self.owner, self.LEVEL_ADMIN)
def layer_config(self, user):
"""
Generate a dict that can be serialized to a GXP layer configuration
suitable for loading this layer.
The "source" property will be left unset; the layer is not aware of the
name assigned to its source plugin. See
:method:`geonode.maps.models.Map.viewer_json` for an example of
generating a full map configuration.
"""
cfg = dict()
cfg['name'] = self.typename
cfg['title'] =self.title
cfg['transparent'] = True
if self.topic_category:
cfg['group'] = self.topic_category.title
else:
cfg['group'] = 'General'
cfg['url'] = settings.GEOSERVER_BASE_URL + "wms"
cfg['srs'] = self.srs
cfg['bbox'] = json.loads(self.bbox)
cfg['llbbox'] = json.loads(self.llbbox)
cfg['queryable'] = (self.storeType == 'dataStore')
cfg['attributes'] = self.layer_attributes()
cfg['disabled'] = user is not None and not user.has_perm('maps.view_layer', obj=self)
cfg['visibility'] = True
cfg['abstract'] = self.abstract
cfg['styles'] = ''
return cfg
def queue_gazetteer_update(self):
from geonode.queue.models import GazetteerUpdateJob
if GazetteerUpdateJob.objects.filter(layer=self.id).exists() == 0:
newJob = GazetteerUpdateJob(layer=self)
newJob.save()
    def update_gazetteer(self):
        """Sync this layer with the gazetteer: delete it when flagged out,
        otherwise (re)register its gazetteer-enabled attributes plus the
        optional start/end date attributes."""
        from geonode.gazetteer.utils import add_to_gazetteer, delete_from_gazetteer
        if not self.in_gazetteer:
            delete_from_gazetteer(self.name)
        else:
            includedAttributes = []
            gazetteerAttributes = self.attribute_set.filter(in_gazetteer=True)
            for attribute in gazetteerAttributes:
                includedAttributes.append(attribute.attribute)
            # First flagged start/end date attribute, if any. exists() returns a
            # bool, so the ``> 0`` comparison is redundant but harmless.
            startAttribute = self.attribute_set.filter(is_gaz_start_date=True)[0].attribute if self.attribute_set.filter(is_gaz_start_date=True).exists() > 0 else None
            endAttribute = self.attribute_set.filter(is_gaz_end_date=True)[0].attribute if self.attribute_set.filter(is_gaz_end_date=True).exists() > 0 else None
            add_to_gazetteer(self.name, includedAttributes, start_attribute=startAttribute, end_attribute=endAttribute, project=self.gazetteer_project)
def queue_bounds_update(self):
from geonode.queue.models import LayerBoundsUpdateJob
if LayerBoundsUpdateJob.objects.filter(layer=self.id).exists() == 0:
newJob = LayerBoundsUpdateJob(layer=self)
newJob.save()
def update_bounds(self):
#Get extent for layer from PostGIS
bboxes = get_postgis_bbox(self.name)
if len(bboxes) != 1 and len(bboxes[0]) != 2:
return
if bboxes[0][0] is None or bboxes[0][1] is None:
return
bbox = re.findall(r"[+-]?(?:\d+(?:\.\d*)?|\.\d+)(?:[eE][+-]?\d+)?", bboxes[0][0])
llbbox = re.findall(r"[+-]?(?:\d+(?:\.\d*)?|\.\d+)(?:[eE][+-]?\d+)?", bboxes[0][1])
#Assign new bbox to Layer
self.bbox = str([float(l) for l in bbox])
self.llbbox = str([float(l) for l in llbbox])
self.set_bbox(bbox, srs=self.srs)
#Update Geoserver bounding boxes
resource_bbox = list(self.resource.native_bbox)
resource_llbbox = list(self.resource.latlon_bbox)
(resource_bbox[0],resource_bbox[1],resource_bbox[2],resource_bbox[3]) = str(bbox[0]), str(bbox[2]), str(bbox[1]), str(bbox[3])
(resource_llbbox[0],resource_llbbox[1],resource_llbbox[2],resource_llbbox[3]) = str(llbbox[0]), str(llbbox[2]), str(llbbox[1]), str(llbbox[3])
self.resource.native_bbox = tuple(resource_bbox)
self.resource.latlon_bbox = tuple(resource_llbbox)
Layer.objects.gs_catalog.save(self._resource_cache)
# Use update to avoid unnecessary post_save signal
Layer.objects.filter(id=self.id).update(bbox=self.bbox,llbbox=self.llbbox,geographic_bounding_box=self.geographic_bounding_box )
#Update geonetwork record with latest extent
logger.info("Save new bounds to geonetwork")
self.save_to_geonetwork()
class LayerAttributeManager(models.Manager):
    """Helper class to access filtered attributes
    """

    def visible(self):
        """Visible attributes, ordered for display."""
        qs = self.get_query_set().filter(visible=True)
        return qs.order_by('display_order')
class LayerAttribute(models.Model):
    """A single attribute (column) of a Layer, with display, search and
    gazetteer flags (see ``Layer.update_gazetteer``)."""
    objects = LayerAttributeManager()

    layer = models.ForeignKey(Layer, blank=False, null=False, unique=False, related_name='attribute_set')
    #layer = models.ForeignKey(ResourceBase, blank=False, null=False, unique=False, related_name='attribute_set')
    attribute = models.CharField(_('Attribute Name'), max_length=255, blank=False, null=True, unique=False)
    attribute_label = models.CharField(_('Attribute Label'), max_length=255, blank=False, null=True, unique=False)
    attribute_type = models.CharField(_('Attribute Type'), max_length=50, blank=False, null=False, default='xsd:string', unique=False)
    searchable = models.BooleanField(_('Searchable?'), default=False)
    visible = models.BooleanField(_('Visible?'), default=True)
    display_order = models.IntegerField(_('Display Order'), default=1)
    # Gazetteer integration flags.
    in_gazetteer = models.BooleanField(_('In Gazetteer?'), default=False)
    is_gaz_start_date = models.BooleanField(_('Gazetteer Start Date'), default=False)
    is_gaz_end_date = models.BooleanField(_('Gazetteer End Date'), default=False)
    date_format = models.CharField(_('Date Format'), max_length=255, blank=True, null=True)
    created_dttm = models.DateTimeField(auto_now_add=True)
    """
    The date/time the object was created.
    """
    last_modified = models.DateTimeField(auto_now=True)
    """
    The last time the object was modified.
    """

    def __str__(self):
        return "%s" % self.attribute

    def __unicode__(self):
        return self.attribute
class Map(models.Model, PermissionLevelMixin):
    """
    A Map aggregates several layers together and annotates them with a viewport
    configuration.
    """

    title = models.TextField(_('Title'))
    # A display name suitable for search results and page headers

    abstract = models.TextField(_('Abstract'), blank=True)
    # A longer description of the themes in the map.

    # viewer configuration
    zoom = models.IntegerField(_('zoom'))
    # The zoom level to use when initially loading this map.  Zoom levels start
    # at 0 (most zoomed out) and each increment doubles the resolution.

    projection = models.CharField(_('projection'),max_length=32)
    # The projection used for this map.  This is stored as a string with the
    # projection's SRID.

    center_x = models.FloatField(_('center X'))
    # The x coordinate to center on when loading this map.  Its interpretation
    # depends on the projection.

    center_y = models.FloatField(_('center Y'))
    # The y coordinate to center on when loading this map.  Its interpretation
    # depends on the projection.

    owner = models.ForeignKey(User, verbose_name=_('owner'), blank=True, null=True)
    # The user that created/owns this map.

    created_dttm = models.DateTimeField(_("Date Created"), auto_now_add=True)
    """
    The date/time the map was created.
    """

    keywords = TaggableManager(_('keywords'), help_text=_("A space or comma-separated list of keywords"), blank=True)

    last_modified = models.DateTimeField(_("Date Last Modified"),auto_now=True)
    """
    The last time the map was modified.
    """

    urlsuffix = models.CharField(_('Site URL'), max_length=255, blank=True)
    """
    Alphanumeric alternative to referencing maps by id, appended to end of URL instead of id, ie http://domain/maps/someview
    """

    officialurl = models.CharField(_('Official Harvard Site URL'), max_length=255, blank=True)
    """
    Full URL for official/sponsored map view, ie http://domain/someview
    """

    content = models.TextField(_('Site Content'), blank=True, null=True, default=DEFAULT_CONTENT)
    """
    HTML content to be displayed in modal window on 1st visit
    """

    use_custom_template = models.BooleanField(_('Use a custom template'),default=False)
    """
    Whether to show default banner/styles or custom ones.
    """

    group_params = models.TextField(_('Layer Category Parameters'), blank=True)
    """
    Layer categories (names, expanded)
    """

    template_page = models.CharField('Map template page', max_length=255, blank=True)
    """
    The map view template page to use, if different from default
    """
def __unicode__(self):
return '%s by %s' % (self.title, (self.owner.username if self.owner else "<Anonymous>"))
@property
def center(self):
"""
A handy shortcut for the center_x and center_y properties as a tuple
(read only)
"""
return (self.center_x, self.center_y)
@property
def maplayers(self):
layers = cache.get('maplayerset_' + str(self.id))
if layers is None:
logger.debug('maplayerset cache was None')
layers = MapLayer.objects.filter(map=self.id).order_by('stack_order')
cache.add('maplayerset_' + str(self.id), layers)
return [layer for layer in layers]
@property
def snapshots(self):
snapshots = MapSnapshot.objects.exclude(user=None).filter(map=self.id)
return [snapshot for snapshot in snapshots]
@property
def layers(self):
layers = MapLayer.objects.filter(map=self.id)
return [layer for layer in layers]
    @property
    def local_layers(self):
        # Always True here; presumably kept for template/API compatibility
        # with code paths that distinguish local from remote layers -- confirm.
        return True
    def json(self, layer_filter):
        """Return a JSON string describing this map's local layers (used for
        bulk download). *layer_filter*, when truthy, is a predicate that
        narrows the layer list."""
        map_layers = MapLayer.objects.filter(map=self.id)
        layers = []
        for map_layer in map_layers:
            if map_layer.local():
                layer =  Layer.objects.get(typename=map_layer.name)
                layers.append(layer)
            else:
                pass # Remote layers are excluded from the bundle.
        if layer_filter:
            layers = [l for l in layers if layer_filter(l)]
        # Human-readable README text included alongside the data.
        readme = (
            "Title: %s\n" +
            "Author: %s\n"
            "Abstract: %s\n"
        ) % (self.title, "The GeoNode Team", self.abstract)
        def layer_json(lyr):
            return {
                "name": lyr.typename,
                "service": lyr.service_type,
                "serviceURL": "",
                "metadataURL": ""
            }
        map_config = {
            "map" : { "readme": readme },
            "layers" : [layer_json(lyr) for lyr in layers]
        }
        return json.dumps(map_config)
    def viewer_json(self, user=None, *added_layers):
        """
        Convert this map to a nested dictionary structure matching the JSON
        configuration for GXP Viewers.

        The ``added_layers`` parameter list allows a list of extra MapLayer
        instances to append to the Map's layer list when generating the
        configuration. These are not persisted; if you want to add layers you
        should use ``.layer_set.create()``.
        """
        layers = list(self.maplayers) + list(added_layers) #implicitly sorted by stack_order
        sejumps = self.jump_set.all()
        server_lookup = {}
        sources = {'local': settings.DEFAULT_LAYER_SOURCE }

        def uniqify(seq):
            """
            get a list of unique items from the input sequence.

            This relies only on equality tests, so you can use it on most
            things.  If you have a sequence of hashables, list(set(seq)) is
            better.
            """
            results = []
            for x in seq:
                if x not in results: results.append(x)
            return results

        def uniqifydict(seq, item):
            """
            get a list of unique dictionary elements based on a certain item (ie 'group').
            """
            results = []
            items = []
            for x in seq:
                if x[item] not in items:
                    items.append(x[item])
                    results.append(x)
            return results

        configs = [l.source_config() for l in layers]
        configs.append({"ptype":"gxp_gnsource", "url": settings.GEOSERVER_BASE_URL + "wms", "restUrl":"/gs/rest"})

        # Assign each distinct source config the next free small-integer key.
        i = 0
        for source in uniqify(configs):
            while str(i) in sources: i = i + 1
            sources[str(i)] = source
            server_lookup[json.dumps(source)] = str(i)

        def source_lookup(source):
            # Reverse lookup: find the key assigned to this source config.
            for k, v in sources.iteritems():
                if v == source: return k
            return None

        def layer_config(l, user):
            logger.debug("_________CALLING viewer_json.layer_config for %s", l)
            cfg = l.layer_config(user)
            src_cfg = l.source_config()
            source = source_lookup(src_cfg)
            if source: cfg["source"] = source
            # WMS-backed sources get a zero tile buffer.
            if src_cfg.get("ptype", "gxp_wmscsource") == "gxp_wmscsource" or src_cfg.get("ptype", "gxp_gnsource") == "gxp_gnsource" : cfg["buffer"] = 0
            return cfg

        config = {
            'id': self.id,
            'about': {
                'title':    self.title,
                'abstract': self.abstract,
                'urlsuffix': self.urlsuffix,
                'introtext' : self.content,
                'officialurl' : self.officialurl
            },
            'defaultSourceType': "gxp_gnsource",
            'sources': sources,
            'map': {
                'layers': [layer_config(l, user) for l in layers],
                'center': [self.center_x, self.center_y],
                'projection': self.projection,
                'zoom': self.zoom,
            },
            'social_explorer': [se.json() for se in sejumps]
        }

        if self.group_params:
            #config["treeconfig"] = json.loads(self.group_params)
            config["map"]["groups"] = uniqifydict(json.loads(self.group_params), 'group')

        '''
        # Mark the last added layer as selected - important for data page
        '''
        config["map"]["layers"][len(layers)-1]["selected"] = True
        config["map"].update(_get_viewer_projection_info(self.projection))
        return config
def update_from_viewer(self, conf):
    """
    Update this Map's details by parsing a JSON object as produced by
    a GXP Viewer.

    ``conf`` may be either a JSON string or an already-parsed dict.

    This method automatically persists to the database!  It replaces the
    whole layer set and clears the cached layer set entry.
    """
    # NOTE(review): `escape` is imported but never used in this method.
    from django.utils.html import escape
    if isinstance(conf, basestring):
        conf = json.loads(conf)
    # Sanitize user-provided text fields before storing.
    self.title = despam(conf['about']['title'])
    self.abstract = despam(conf['about']['abstract'])
    self.urlsuffix = conf['about']['urlsuffix']
    x = XssCleaner()
    # Strip markup/scripts from the intro text (XSS defense).
    self.content = despam(x.strip(conf['about']['introtext']))
    #self.content = re.sub(r'<script.*(<\/script>|\/>)|javascript:|\$\(|jQuery|Ext\.', r'', conf['about']['introtext']) #Remove any scripts
    #self.keywords = despam(conf['about']['keywords'])
    self.zoom = conf['map']['zoom']
    self.center_x = conf['map']['center'][0]
    self.center_y = conf['map']['center'][1]
    self.projection = conf['map']['projection']
    self.featured = conf['about'].get('featured', False)
    logger.debug("Try to save treeconfig")
    if 'groups' in conf['map']:
        self.group_params = json.dumps(conf['map']['groups'])
        logger.debug("Saved treeconfig")

    def source_for(layer):
        # Resolve a layer's source dict via its "source" key.
        return conf["sources"][layer["source"]]

    layers = [l for l in conf["map"]["layers"]]
    # Drop all existing layers; they are rebuilt from the viewer config below.
    for layer in self.layer_set.all():
        layer.delete()
    self.keywords.add(*conf['map'].get('keywords', []))
    for ordering, layer in enumerate(layers):
        self.layer_set.add(
            self.layer_set.from_viewer_config(
                self, layer, source_for(layer), ordering
            ))
    self.save()
    # Invalidate the cached layer set for this map.
    cache.delete('maplayerset_' + str(self.id))
def keyword_list(self):
    """Return the names of this map's keywords as a plain list (may be empty)."""
    return [keyword.name for keyword in self.keywords.all()]
def get_absolute_url(self):
    """Return the site-relative detail URL for this map."""
    return '/maps/%i' % self.id
class Meta:
    # custom permissions,
    # change and delete are standard in django
    permissions = (('view_map', 'Can view'),
                   ('change_map_permissions', "Can change permissions"), )

# Permission Level Constants
# LEVEL_NONE inherited
# These level strings are passed to set_gen_level/set_user_level below.
LEVEL_READ = 'map_readonly'
LEVEL_WRITE = 'map_readwrite'
LEVEL_ADMIN = 'map_admin'
def set_default_permissions(self):
    """
    Reset this map's permissions to the defaults: read access for the
    anonymous, authenticated and custom-group generic roles, all per-user
    grants removed, and admin rights restored to the owner (when set).
    """
    self.set_gen_level(ANONYMOUS_USERS, self.LEVEL_READ)
    self.set_gen_level(AUTHENTICATED_USERS, self.LEVEL_READ)
    self.set_gen_level(CUSTOM_GROUP_USERS, self.LEVEL_READ)
    # remove specific user permissions
    current_perms = self.get_all_level_info()
    for username in current_perms['users'].keys():
        user = User.objects.get(username=username)
        self.set_user_level(user, self.LEVEL_NONE)
    # assign owner admin privs
    if self.owner:
        self.set_user_level(self.owner, self.LEVEL_ADMIN)
class MapSnapshot(models.Model):
    """A point-in-time copy of a Map's JSON configuration."""
    map = models.ForeignKey(Map, related_name="snapshot_set")
    """
    The ID of the map this snapshot was generated from.
    """
    config = models.TextField(_('JSON Configuration'))
    """
    Map configuration in JSON format
    """
    created_dttm = models.DateTimeField(auto_now_add=True)
    """
    The date/time the snapshot was created.
    """
    user = models.ForeignKey(User, blank=True, null=True)
    """
    The user who created the snapshot.
    """

    def json(self):
        """Return a JSON-serializable summary of this snapshot.

        The "url" value is the snapshot id run through num_encode (a short
        url-safe encoding defined elsewhere in this module).
        """
        return {
            "map": self.map.id,
            "created": self.created_dttm.isoformat(),
            "user": self.user.username if self.user else None,
            "url": num_encode(self.id)
        }
class SocialExplorerLocation(models.Model):
    """A per-map 'jump' link pointing out to a Social Explorer page."""
    map = models.ForeignKey(Map, related_name="jump_set")
    url = models.URLField(_("Jump URL"), blank=False, null=False, default='http://www.socialexplorer.com/pub/maps/map3.aspx?g=0&mapi=SE0012&themei=B23A1CEE3D8D405BA2B079DDF5DE9402')
    title = models.TextField(_("Jump Site"), blank=False, null=False)

    def json(self):
        """Return a JSON-serializable dict with this location's url and title."""
        logger.debug("JSON url: %s", self.url)
        return {
            "url": self.url,
            "title": self.title
        }
class MapLayerManager(models.Manager):
    def from_viewer_config(self, map_model, layer, source, ordering):
        """
        Parse a MapLayer object out of a parsed layer configuration from a GXP
        viewer.

        ``map_model`` is the Map instance to associate with the new layer
        ``layer`` is the parsed dict for the layer
        ``source`` is the parsed dict for the layer's source
        ``ordering`` is the index of the layer within the map's layer list
        """
        # Keys promoted to dedicated model fields; everything else is kept
        # verbatim in the JSON parameter blobs.
        promoted_layer_keys = ("format", "name", "opacity", "styles",
                               "transparent", "fixed", "group", "visibility",
                               "source")
        promoted_source_keys = ("url", "projection")
        layer_cfg = {k: v for k, v in layer.items()
                     if k not in promoted_layer_keys}
        source_cfg = {k: v for k, v in source.items()
                      if k not in promoted_source_keys}
        return self.model(
            map=map_model,
            stack_order=ordering,
            format=layer.get("format", None),
            name=layer.get("name", None),
            opacity=layer.get("opacity", 1),
            styles=layer.get("styles", None),
            transparent=layer.get("transparent", False),
            fixed=layer.get("fixed", False),
            group=layer.get('group', None),
            visibility=layer.get("visibility", True),
            ows_url=source.get("url", None),
            layer_params=json.dumps(layer_cfg),
            source_params=json.dumps(source_cfg)
        )
class MapLayer(models.Model):
    """
    The MapLayer model represents a layer included in a map. This doesn't just
    identify the dataset, but also extra options such as which style to load
    and the file format to use for image tiles.
    """
    objects = MapLayerManager()
    # see :class:`geonode.maps.models.MapLayerManager`
    map = models.ForeignKey(Map, related_name="layer_set")
    # The map containing this layer
    stack_order = models.IntegerField(_('stack order'))
    # The z-index of this layer in the map; layers with a higher stack_order will
    # be drawn on top of others.
    format = models.CharField(_('format'), blank=True, null=True, max_length=200)
    # The mimetype of the image format to use for tiles (image/png, image/jpeg,
    # image/gif...)
    name = models.CharField(_('name'), blank=True, null=True, max_length=200)
    # The name of the layer to load.
    # The interpretation of this name depends on the source of the layer (Google
    # has a fixed set of names, WMS services publish a list of available layers
    # in their capabilities documents, etc.)
    opacity = models.FloatField(_('opacity'), default=1.0)
    # The opacity with which to render this layer, on a scale from 0 to 1.
    styles = models.CharField(_('styles'), blank=True, null=True, max_length=200)
    # The name of the style to use for this layer (only useful for WMS layers.)
    transparent = models.BooleanField(_('transparent'))
    # A boolean value, true if we should request tiles with a transparent background.
    fixed = models.BooleanField(_('fixed'), default=False)
    # A boolean value, true if we should prevent the user from dragging and
    # dropping this layer in the layer chooser.
    group = models.CharField(_('group'), null=True, max_length=200)
    # A group label to apply to this layer. This affects the hierarchy displayed
    # in the map viewer's layer tree.
    visibility = models.BooleanField(_('visibility'), default=True)
    # A boolean value, true if this layer should be visible when the map loads.
    ows_url = models.URLField(_('ows URL'), blank=True, null=True)
    # The URL of the OWS service providing this layer, if any exists.
    layer_params = models.TextField(_('layer params'))
    # A JSON-encoded dictionary of arbitrary parameters for the layer itself when
    # passed to the GXP viewer.
    # If this dictionary conflicts with options that are stored in other fields
    # (such as format, styles, etc.) then the fields override.
    source_params = models.TextField(_('source params'))
    # A JSON-encoded dictionary of arbitrary parameters for the GXP layer source
    # configuration for this layer.
    # If this dictionary conflicts with options that are stored in other fields
    # (such as ows_url) then the fields override.
    created_dttm = models.DateTimeField(auto_now_add=True)
    """
    The date/time the object was created.
    """
    last_modified = models.DateTimeField(auto_now=True)
    """
    The last time the object was modified.
    """

    def local(self):
        """
        Tests whether this layer is served by the GeoServer instance that is
        paired with the GeoNode site.  Currently this is based on heuristics,
        but we try to err on the side of false negatives.

        The result is memoized in the cache under 'islocal_<name>'.
        """
        isLocal = False
        if self.ows_url:
            ows_url = urlparse(self.ows_url)
            settings_url = urlparse(settings.GEOSERVER_BASE_URL + "wms")
            # Same host and path as the configured local GeoServer WMS?
            if settings_url.netloc == ows_url.netloc and settings_url.path == ows_url.path:
                isLocal = cache.get('islocal_' + self.name)
                if isLocal is None:
                    # Cache miss: check whether a Layer row exists for this typename.
                    isLocal = Layer.objects.filter(typename=self.name).count() != 0
                    cache.add('islocal_' + self.name, isLocal)
        return isLocal

    def source_config(self):
        """
        Generate a dict that can be serialized to a GXP layer source
        configuration suitable for loading this layer.
        """
        try:
            cfg = json.loads(self.source_params)
        except Exception:
            # Unparseable/empty source_params: fall back to a GeoNode source.
            cfg = dict(ptype="gxp_gnsource", restUrl="/gs/rest")
        if self.ows_url:
            cfg["url"] = ows_sub.sub('', self.ows_url)
            if "ptype" not in cfg:
                cfg["ptype"] = "gxp_wmscsource"
        if "ptype" in cfg and cfg["ptype"] == "gxp_gnsource":
            cfg["restUrl"] = "/gs/rest"
        return cfg

    def layer_config(self, user):
        """
        Generate a dict that can be serialized to a GXP layer configuration
        suitable for loading this layer.

        The "source" property will be left unset; the layer is not aware of the
        name assigned to its source plugin.  See
        :method:`geonode.maps.models.Map.viewer_json` for an example of
        generating a full map configuration.
        """
        # Caching of maplayer config, per user (due to permissions)
        if self.id is not None:
            cfg = cache.get("maplayer_config_" + str(self.id) + "_" + str(0 if user is None else user.id))
            if cfg is not None:
                logger.debug("Cached cfg: %s", str(cfg))
                return cfg
        try:
            cfg = json.loads(self.layer_params)
        except Exception:
            cfg = dict()
        # Model fields override whatever was stored in layer_params.
        if self.format: cfg['format'] = self.format
        if self.name: cfg["name"] = self.name
        if self.opacity: cfg['opacity'] = self.opacity
        if self.styles: cfg['styles'] = self.styles
        if self.transparent: cfg['transparent'] = True
        cfg["fixed"] = self.fixed
        if 'url' not in cfg:
            cfg['url'] = self.ows_url
        if cfg['url']:
            cfg['url'] = ows_sub.sub('', cfg['url'])
        if self.group: cfg["group"] = self.group
        cfg["visibility"] = self.visibility
        if self.name is not None and self.source_params.find("gxp_gnsource") > -1:
            # Get parameters from GeoNode instead of WMS GetCapabilities
            try:
                gnLayer = Layer.objects.get(typename=self.name)
                if gnLayer.srs: cfg['srs'] = gnLayer.srs
                if gnLayer.bbox: cfg['bbox'] = json.loads(gnLayer.bbox)
                if gnLayer.llbbox: cfg['llbbox'] = json.loads(gnLayer.llbbox)
                cfg['attributes'] = (gnLayer.layer_attributes())
                attribute_cfg = gnLayer.attribute_config()
                if "getFeatureInfo" in attribute_cfg:
                    cfg["getFeatureInfo"] = attribute_cfg["getFeatureInfo"]
                # NOTE(review): the trailing comma makes 'queryable' a 1-tuple,
                # not a bool (serializes as a JSON list) -- confirm intent.
                cfg['queryable'] = (gnLayer.storeType == 'dataStore'),
                cfg['disabled'] = user is not None and not user.has_perm('maps.view_layer', obj=gnLayer)
                #cfg["displayOutsideMaxExtent"] = user is not None and user.has_perm('maps.change_layer', obj=gnLayer)
                cfg['visibility'] = cfg['visibility'] and not cfg['disabled']
                cfg['abstract'] = gnLayer.abstract
                cfg['styles'] = self.styles
            except Exception, e:
                # Give it some default values so it will still show up on the map, but disable it in the layer tree
                cfg['srs'] = 'EPSG:900913'
                cfg['llbbox'] = [-180, -90, 180, 90]
                cfg['attributes'] = []
                # NOTE(review): trailing comma again makes this a 1-tuple.
                cfg['queryable'] = False,
                cfg['disabled'] = True
                cfg['visibility'] = False
                cfg['abstract'] = ''
                cfg['styles'] = ''
                logger.info("Could not retrieve Layer with typename of %s : %s", self.name, str(e))
        elif self.source_params.find("gxp_hglsource") > -1:
            # call HGL ServiceStarter asynchronously to load the layer into HGL geoserver
            from geonode.queue.tasks import loadHGL
            loadHGL.delay(self.name)
        # Create cache of maplayer config that will last for 60 seconds (in case
        # permissions or maplayer properties are changed)
        if self.id is not None:
            cache.set("maplayer_config_" + str(self.id) + "_" + str(0 if user is None else user.id), cfg, 60)
        return cfg

    @property
    def local_link(self):
        # HTML anchor to the local layer page, or a plain span for remote layers.
        if self.local():
            layer = Layer.objects.get(typename=self.name)
            link = "<a href=\"%s\">%s</a>" % (layer.get_absolute_url(), layer.title)
        else:
            link = "<span>%s</span> " % self.name
        return link

    class Meta:
        ordering = ["stack_order"]

    def __unicode__(self):
        # e.g. "http://host/wms?layers=ns:name"
        return '%s?layers=%s' % (self.ows_url, self.name)
def pre_save_maplayer(instance, sender, **kwargs):
    """
    pre_save signal handler for MapLayer.

    For layers served by the paired local GeoServer, force any https:// in the
    stored ``layer_params`` blob back to http:// so stored tile URLs stay
    consistent with GEOSERVER_BASE_URL.
    """
    if instance.local():
        # Use the module logger instead of a bare ``print`` statement, for
        # consistency with the rest of this module and to avoid writing to
        # stdout from a signal handler.
        logger.debug('Fixing layer_params url for layer %s', instance.name)
        instance.layer_params = instance.layer_params.replace('https://', 'http://')

signals.pre_save.connect(pre_save_maplayer, sender=MapLayer)
class Role(models.Model):
    """
    Roles are a generic way to create groups of permissions.
    """
    # One of the fixed ROLE_VALUES; each value doubles as its display label.
    value = models.CharField('Role', choices=[(x, x) for x in ROLE_VALUES], max_length=255, unique=True)
    permissions = models.ManyToManyField(Permission, verbose_name=_('permissions'), blank=True)
    created_dttm = models.DateTimeField(auto_now_add=True)
    """
    The date/time the object was created.
    """
    last_modified = models.DateTimeField(auto_now=True)
    """
    The last time the object was modified.
    """

    def __unicode__(self):
        # Human-readable label via Django's get_FOO_display().
        return self.get_value_display()
class ContactRole(models.Model):
    """
    ContactRole is an intermediate model to bind Contacts and Layers and apply roles.
    """
    contact = models.ForeignKey(Contact)
    layer = models.ForeignKey(Layer)
    role = models.ForeignKey(Role)

    def clean(self):
        """
        Make sure there is only one poc and author per layer.

        Raises ValidationError when the poc/metadata-author uniqueness rule or
        the one-layer-per-unbound-contact rule is violated.
        """
        if (self.role == self.layer.poc_role) or (self.role == self.layer.metadata_author_role):
            contacts = self.layer.contacts.filter(contactrole__role=self.role)
            if contacts.count() == 1:
                # only allow this if we are updating the same contact
                if self.contact != contacts.get():
                    raise ValidationError('There can be only one %s for a given layer' % self.role)
        if self.contact.user is None:
            # verify that any unbound contact is only associated to one layer
            bounds = ContactRole.objects.filter(contact=self.contact).count()
            if bounds > 1:
                # BUG FIX: the original message ended with "% self.role" but
                # contained no format placeholder, so raising it produced a
                # TypeError instead of the intended ValidationError.
                raise ValidationError('There can be one and only one layer linked to an unbound contact')
            elif bounds == 1:
                # verify that if there was one already, it corresponds to this instance
                if ContactRole.objects.filter(contact=self.contact).get().id != self.id:
                    # BUG FIX: same stray "% self.role" removed here.
                    raise ValidationError('There can be one and only one layer linked to an unbound contact')

    class Meta:
        unique_together = (("contact", "layer", "role"),)
def delete_layer(instance, sender, **kwargs):
    """
    Removes the layer from GeoServer and GeoNetwork.

    Connected below as a pre_delete signal handler for Layer.
    """
    instance.delete_from_geoserver()
    instance.delete_from_geonetwork()
def post_save_layer(instance, sender, **kwargs):
    """
    post_save signal handler for Layer: autopopulate metadata and push the
    layer out to GeoServer once its store type is known.
    """
    instance._autopopulate()
    # Don't save to geoserver if storeType isn't populated yet; do it later
    if (re.search("coverageStore|dataStore", instance.storeType)):
        logger.info("Call save_to_geoserver for %s", instance.name)
        instance.save_to_geoserver()
    if kwargs['created']:
        instance._populate_from_gs()

signals.pre_delete.connect(delete_layer, sender=Layer)
signals.post_save.connect(post_save_layer, sender=Layer)
#===================#
# NEW WORLDMAP MODELS #
#===================#
class MapStats(models.Model):
    """Aggregate visit counters for a single Map (one row per map)."""
    map = models.ForeignKey(Map, unique=True)
    visits = models.IntegerField(_("Visits"), default=0)
    uniques = models.IntegerField(_("Unique Visitors"), default=0)
    last_modified = models.DateTimeField(auto_now=True, null=True)

    class Meta:
        verbose_name_plural = 'Map stats'
class LayerStats(models.Model):
    """Aggregate visit/download counters for a single Layer (one row per layer)."""
    layer = models.ForeignKey(Layer, unique=True)
    visits = models.IntegerField(_("Visits"), default=0)
    uniques = models.IntegerField(_("Unique Visitors"), default=0)
    downloads = models.IntegerField(_("Downloads"), default=0)
    last_modified = models.DateTimeField(auto_now=True, null=True)

    class Meta:
        verbose_name_plural = 'Layer stats'
|
mbertrand/cga-worldmap
|
geonode/maps/models.py
|
Python
|
gpl-3.0
| 88,413
|
[
"BWA",
"VisIt"
] |
d944b0a38b685078234438d8d48640133e75fc8acc1551e9803f207025c9112d
|
# Copyright (C) 2003 CAMP
# Please see the accompanying LICENSE file for further information.
import sys
from math import pi, sqrt
import numpy as np
from numpy.linalg import solve, inv
from ase.data import atomic_names
from gpaw.setup_data import SetupData
from gpaw.atom.configurations import configurations
from gpaw.version import version
from gpaw.atom.all_electron import AllElectron, shoot
from gpaw.utilities.lapack import general_diagonalize
from gpaw.utilities import hartree
from gpaw.xc.hybrid import constructX, atomic_exact_exchange
from gpaw.atom.filter import Filter
class Generator(AllElectron):
def __init__(self, symbol, xcname='LDA', scalarrel=False, corehole=None,
             configuration=None,
             nofiles=True, txt='-', gpernode=150):
    """Initialize the PAW setup generator.

    Thin wrapper that forwards all arguments unchanged to
    AllElectron.__init__ (chemical symbol, XC functional name,
    scalar-relativistic flag, core-hole spec, electron configuration,
    output control and radial-grid density).
    """
    AllElectron.__init__(self, symbol, xcname, scalarrel, corehole,
                         configuration, nofiles, txt, gpernode)
def run(self, core='', rcut=1.0, extra=None,
        logderiv=False, vbar=None, exx=False, name=None,
        normconserving='', filter=(0.4, 1.75), rcutcomp=None,
        write_xml=True, use_restart_file=True,
        empty_states=''):
    """Generate a PAW setup and return it as a SetupData object.

    Parameters (as used by the code below):
    - core: frozen-core spec -- optional '[symbol]' noble-gas prefix
      followed by two-character states like '3d' (digit = n, letter in
      'spdf' = l).
    - rcut: single float or per-l list of cutoff radii.
    - extra: dict mapping l -> list of reference energies for extra
      (unbound) projectors; None selects the automatic scheme.
    - logderiv: also compute/write logarithmic derivatives.
    - vbar: ('poly'|'f', rcut) zero-potential recipe; default
      ('poly', 0.9 * rcutmin).
    - exx: also compute exact-exchange data (X_p, ExxC).
    - normconserving: string of l-letters ('spdf') to treat
      norm-conservingly.
    - filter: (hfilter, xfilter) Fourier-filter parameters.
    - empty_states: extra unoccupied bound states, same two-character
      format as ``core``.
    - write_xml: write the setup to disk via SetupData.write_xml().

    Side effects: runs the all-electron calculation, stores many arrays
    on self, writes diagnostic files via self.write, and raises
    RuntimeError on a nodal pseudo wave or a detected ghost state.
    """
    self.name = name
    self.core = core
    if type(rcut) is float:
        rcut_l = [rcut]
    else:
        rcut_l = rcut
    rcutmax = max(rcut_l)
    rcutmin = min(rcut_l)
    self.rcut_l = rcut_l
    if rcutcomp is None:
        rcutcomp = rcutmin
    self.rcutcomp = rcutcomp
    hfilter, xfilter = filter
    Z = self.Z
    n_j = self.n_j
    l_j = self.l_j
    f_j = self.f_j
    e_j = self.e_j
    if vbar is None:
        vbar = ('poly', rcutmin * 0.9)
    vbar_type, rcutvbar = vbar
    normconserving_l = [x in normconserving for x in 'spdf']
    # Parse core string:
    j = 0
    if core.startswith('['):
        a, core = core.split(']')
        core_symbol = a[1:]
        j = len(configurations[core_symbol][1])
    while core != '':
        assert n_j[j] == int(core[0])
        assert l_j[j] == 'spdf'.find(core[1])
        if j != self.jcorehole:
            assert f_j[j] == 2 * (2 * l_j[j] + 1)
        j += 1
        core = core[2:]
    njcore = j
    self.njcore = njcore
    lmaxocc = max(l_j[njcore:])
    # Append requested empty (unoccupied) bound states:
    while empty_states != '':
        n = int(empty_states[0])
        l = 'spdf'.find(empty_states[1])
        assert n == 1 + l + l_j.count(l)
        n_j.append(n)
        l_j.append(l)
        f_j.append(0.0)
        e_j.append(-0.01)
        empty_states = empty_states[2:]
    if 2 in l_j[njcore:]:
        # We have a bound valence d-state.  Add bound s- and
        # p-states if not already there:
        for l in [0, 1]:
            if l not in l_j[njcore:]:
                n_j.append(1 + l + l_j.count(l))
                l_j.append(l)
                f_j.append(0.0)
                e_j.append(-0.01)
    if l_j[njcore:] == [0] and Z > 2:
        # We have only a bound valence s-state and we are not
        # hydrogen and not helium.  Add bound p-state:
        n_j.append(n_j[njcore])
        l_j.append(1)
        f_j.append(0.0)
        e_j.append(-0.01)
    nj = len(n_j)
    self.Nv = sum(f_j[njcore:])
    self.Nc = sum(f_j[:njcore])
    # Do all-electron calculation:
    AllElectron.run(self, use_restart_file)
    # Highest occupied atomic orbital:
    self.emax = max(e_j)
    N = self.N
    r = self.r
    dr = self.dr
    d2gdr2 = self.d2gdr2
    beta = self.beta
    dv = r**2 * dr
    t = self.text
    t()
    t('Generating PAW setup')
    if core != '':
        t('Frozen core:', core)
    # So far - no ghost-states:
    self.ghost = False
    # Calculate the kinetic energy of the core states:
    Ekincore = 0.0
    j = 0
    for f, e, u in zip(f_j[:njcore], e_j[:njcore], self.u_j[:njcore]):
        u = np.where(abs(u) < 1e-160, 0, u)  # XXX Numeric!
        k = e - np.sum((u**2 * self.vr * dr)[1:] / r[1:])
        Ekincore += f * k
        if j == self.jcorehole:
            self.Ekincorehole = k
        j += 1
    # Calculate core density:
    if njcore == 0:
        nc = np.zeros(N)
    else:
        uc_j = self.u_j[:njcore]
        uc_j = np.where(abs(uc_j) < 1e-160, 0, uc_j)  # XXX Numeric!
        nc = np.dot(f_j[:njcore], uc_j**2) / (4 * pi)
        nc[1:] /= r[1:]**2
        nc[0] = nc[1]
    self.nc = nc
    # Calculate core kinetic energy density
    if njcore == 0:
        tauc = np.zeros(N)
    else:
        tauc = self.radial_kinetic_energy_density(f_j[:njcore],
                                                  l_j[:njcore],
                                                  self.u_j[:njcore])
    t('Kinetic energy of the core from tauc =',
      np.dot(tauc * r * r, dr) * 4 * pi)
    lmax = max(l_j[njcore:])
    # Order valence states with respect to angular momentum
    # quantum number:
    self.n_ln = n_ln = []
    self.f_ln = f_ln = []
    self.e_ln = e_ln = []
    for l in range(lmax + 1):
        n_n = []
        f_n = []
        e_n = []
        for j in range(njcore, nj):
            if l_j[j] == l:
                n_n.append(n_j[j])
                f_n.append(f_j[j])
                e_n.append(e_j[j])
        n_ln.append(n_n)
        f_ln.append(f_n)
        e_ln.append(e_n)
    # Add extra projectors:
    if extra is not None:
        if len(extra) == 0:
            lmaxextra = 0
        else:
            lmaxextra = max(extra.keys())
        if lmaxextra > lmax:
            for l in range(lmax, lmaxextra):
                n_ln.append([])
                f_ln.append([])
                e_ln.append([])
            lmax = lmaxextra
        for l in extra:
            nn = -1
            for e in extra[l]:
                # Extra (unbound) projectors get negative pseudo n values.
                n_ln[l].append(nn)
                f_ln[l].append(0.0)
                e_ln[l].append(e)
                nn -= 1
    else:
        # Automatic:
        # Make sure we have two projectors for each occupied channel:
        for l in range(lmaxocc + 1):
            if len(n_ln[l]) < 2 and not normconserving_l[l]:
                # Only one - add one more:
                assert len(n_ln[l]) == 1
                n_ln[l].append(-1)
                f_ln[l].append(0.0)
                e_ln[l].append(1.0 + e_ln[l][0])
        if lmaxocc < 2 and lmaxocc == lmax:
            # Add extra projector for l = lmax + 1:
            n_ln.append([-1])
            f_ln.append([0.0])
            e_ln.append([0.0])
            lmax += 1
    self.lmax = lmax
    rcut_l.extend([rcutmin] * (lmax + 1 - len(rcut_l)))
    t('Cutoffs:')
    for rc, s in zip(rcut_l, 'spdf'):
        t('rc(%s)=%.3f' % (s, rc))
    t('rc(vbar)=%.3f' % rcutvbar)
    t('rc(comp)=%.3f' % rcutcomp)
    t('rc(nct)=%.3f' % rcutmax)
    t()
    t('Kinetic energy of the core states: %.6f' % Ekincore)
    # Allocate arrays:
    self.u_ln = u_ln = []  # phi * r
    self.s_ln = s_ln = []  # phi-tilde * r
    self.q_ln = q_ln = []  # p-tilde * r
    for l in range(lmax + 1):
        nn = len(n_ln[l])
        u_ln.append(np.zeros((nn, N)))
        s_ln.append(np.zeros((nn, N)))
        q_ln.append(np.zeros((nn, N)))
    # Fill in all-electron wave functions:
    for l in range(lmax + 1):
        # Collect all-electron wave functions:
        u_n = [self.u_j[j] for j in range(njcore, nj) if l_j[j] == l]
        for n, u in enumerate(u_n):
            u_ln[l][n] = u
    # Grid-index corresponding to rcut:
    gcut_l = [1 + int(rc * N / (rc + beta)) for rc in rcut_l]
    rcutfilter = xfilter * rcutmax
    self.rcutfilter = rcutfilter
    gcutfilter = 1 + int(rcutfilter * N / (rcutfilter + beta))
    gcutmax = 1 + int(rcutmax * N / (rcutmax + beta))
    # Outward integration of unbound states stops at 3 * rcut:
    gmax = int(3 * rcutmax * N / (3 * rcutmax + beta))
    assert gmax > gcutfilter
    # Calculate unbound extra states:
    c2 = -(r / dr)**2
    c10 = -d2gdr2 * r**2
    for l, (n_n, e_n, u_n) in enumerate(zip(n_ln, e_ln, u_ln)):
        for n, e, u in zip(n_n, e_n, u_n):
            if n < 0:
                u[:] = 0.0
                shoot(u, l, self.vr, e, self.r2dvdr, r, dr, c10, c2,
                      self.scalarrel, gmax=gmax)
                # Normalize so u equals 1 at the cutoff index.
                u *= 1.0 / u[gcut_l[l]]
    charge = Z - self.Nv - self.Nc
    t('Charge: %.1f' % charge)
    t('Core electrons: %.1f' % self.Nc)
    t('Valence electrons: %.1f' % self.Nv)
    # Construct smooth wave functions:
    coefs = []
    for l, (u_n, s_n) in enumerate(zip(u_ln, s_ln)):
        nodeless = True
        gc = gcut_l[l]
        for u, s in zip(u_n, s_n):
            s[:] = u
            if normconserving_l[l]:
                # Norm-conserving channel: fit exp(polynomial) inside rc
                # and adjust the free coefficient (secant iteration below)
                # until the pseudo wave is normalized.
                A = np.zeros((5, 5))
                A[:4, 0] = 1.0
                A[:4, 1] = r[gc - 2:gc + 2]**2
                A[:4, 2] = A[:4, 1]**2
                A[:4, 3] = A[:4, 1] * A[:4, 2]
                A[:4, 4] = A[:4, 2]**2
                A[4, 4] = 1.0
                a = u[gc - 2:gc + 3] / r[gc - 2:gc + 3]**(l + 1)
                a = np.log(a)

                def f(x):
                    # Closure: mutates a[4] and s[:gc]; returns norm error.
                    a[4] = x
                    b = solve(A, a)
                    r1 = r[:gc]
                    r2 = r1**2
                    rl1 = r1**(l + 1)
                    y = b[0] + r2 * (b[1] + r2 * (b[2] + r2 * (b[3] + r2
                                                               * b[4])))
                    y = np.exp(y)
                    s[:gc] = rl1 * y
                    return np.dot(s**2, dr) - 1

                x1 = 0.0
                x2 = 0.001
                f1 = f(x1)
                f2 = f(x2)
                # Secant-method iteration on the norm constraint:
                while abs(f1) > 1e-6:
                    x0 = (x1 / f1 - x2 / f2) / (1 / f1 - 1 / f2)
                    f0 = f(x0)
                    if abs(f1) < abs(f2):
                        x2, f2 = x1, f1
                    x1, f1 = x0, f0
            else:
                # Plain channel: fit an even polynomial times r^(l+1)
                # through four points around rc.
                A = np.ones((4, 4))
                A[:, 0] = 1.0
                A[:, 1] = r[gc - 2:gc + 2]**2
                A[:, 2] = A[:, 1]**2
                A[:, 3] = A[:, 1] * A[:, 2]
                a = u[gc - 2:gc + 2] / r[gc - 2:gc + 2]**(l + 1)
                if 0:  # l < 2 and nodeless:
                    a = np.log(a)
                a = solve(A, a)
                r1 = r[:gc]
                r2 = r1**2
                rl1 = r1**(l + 1)
                y = a[0] + r2 * (a[1] + r2 * (a[2] + r2 * (a[3])))
                if 0:  # l < 2 and nodeless:
                    y = np.exp(y)
                s[:gc] = rl1 * y
                coefs.append(a)
            if nodeless:
                if not np.alltrue(s[1:gc] > 0.0):
                    raise RuntimeError(
                        'Error: The %d%s pseudo wave has a node!' %
                        (n_ln[l][0], 'spdf'[l]))
                # Only the first state for each l must be nodeless:
                nodeless = False
    # Calculate pseudo core density:
    gcutnc = 1 + int(rcutmax * N / (rcutmax + beta))
    self.nct = nct = nc.copy()
    A = np.ones((4, 4))
    A[0] = 1.0
    A[1] = r[gcutnc - 2:gcutnc + 2]**2
    A[2] = A[1]**2
    A[3] = A[1] * A[2]
    a = nc[gcutnc - 2:gcutnc + 2]
    a = solve(np.transpose(A), a)
    r2 = r[:gcutnc]**2
    nct[:gcutnc] = a[0] + r2 * (a[1] + r2 * (a[2] + r2 * a[3]))
    t('Pseudo-core charge: %.6f' % (4 * pi * np.dot(nct, dv)))
    # ... and the pseudo core kinetic energy density:
    tauct = tauc.copy()
    a = tauc[gcutnc - 2:gcutnc + 2]
    a = solve(np.transpose(A), a)
    tauct[:gcutnc] = a[0] + r2 * (a[1] + r2 * (a[2] + r2 * a[3]))
    # ... and the soft valence density:
    nt = np.zeros(N)
    for f_n, s_n in zip(f_ln, s_ln):
        nt += np.dot(f_n, s_n**2) / (4 * pi)
    nt[1:] /= r[1:]**2
    nt[0] = nt[1]
    nt += nct
    self.nt = nt
    # Calculate the shape function:
    x = r / rcutcomp
    gaussian = np.zeros(N)
    self.gamma = gamma = 10.0
    gaussian[:gmax] = np.exp(-gamma * x[:gmax]**2)
    gt = 4 * (gamma / rcutcomp**2)**1.5 / sqrt(pi) * gaussian
    t('Shape function alpha=%.3f' % (gamma / rcutcomp**2))
    norm = np.dot(gt, dv)
    #print norm, norm-1
    assert abs(norm - 1) < 1e-2
    gt /= norm
    # Calculate smooth charge density:
    Nt = np.dot(nt, dv)
    rhot = nt - (Nt + charge / (4 * pi)) * gt
    t('Pseudo-electron charge', 4 * pi * Nt)
    vHt = np.zeros(N)
    hartree(0, rhot * r * dr, self.beta, self.N, vHt)
    vHt[1:] /= r[1:]
    vHt[0] = vHt[1]
    vXCt = np.zeros(N)
    extra_xc_data = {}
    if self.xc.type != 'GLLB':
        Exct = self.xc.calculate_spherical(self.rgd,
                                           nt.reshape((1, -1)),
                                           vXCt.reshape((1, -1)))
    else:
        Exct = self.xc.get_smooth_xc_potential_and_energy_1d(vXCt)
        # Calculate extra-stuff for non-local functionals
        self.xc.get_extra_setup_data(extra_xc_data)
    vt = vHt + vXCt
    # Construct zero potential:
    gc = 1 + int(rcutvbar * N / (rcutvbar + beta))
    if vbar_type == 'f':
        assert lmax == 2
        uf = np.zeros(N)
        l = 3
        # Solve for all-electron f-state:
        eps = 0.0
        shoot(uf, l, self.vr, eps, self.r2dvdr, r, dr, c10, c2,
              self.scalarrel, gmax=gmax)
        uf *= 1.0 / uf[gc]
        # Fit smooth pseudo f-state polynomium:
        A = np.ones((4, 4))
        A[:, 0] = 1.0
        A[:, 1] = r[gc - 2:gc + 2]**2
        A[:, 2] = A[:, 1]**2
        A[:, 3] = A[:, 1] * A[:, 2]
        a = uf[gc - 2:gc + 2] / r[gc - 2:gc + 2]**(l + 1)
        a0, a1, a2, a3 = solve(A, a)
        r1 = r[:gc]
        r2 = r1**2
        rl1 = r1**(l + 1)
        y = a0 + r2 * (a1 + r2 * (a2 + r2 * a3))
        sf = uf.copy()
        sf[:gc] = rl1 * y
        # From 0 to gc, use analytic formula for kinetic energy operator:
        r4 = r2**2
        r6 = r4 * r2
        enumerator = (a0 * l * (l + 1) +
                      a1 * (l + 2) * (l + 3) * r2 +
                      a2 * (l + 4) * (l + 5) * r4 +
                      a3 * (l + 6) * (l + 7) * r6)
        denominator = a0 + a1 * r2 + a2 * r4 + a3 * r6
        ekin_over_phit = - 0.5 * (enumerator / denominator - l * (l + 1))
        ekin_over_phit[1:] /= r2[1:]
        vbar = eps - vt
        vbar[:gc] -= ekin_over_phit
        vbar[0] = vbar[1]  # Actually we can collect the terms into
        # a single fraction without poles, so as to avoid doing this,
        # but this is good enough
        # From gc to gmax, use finite-difference formula for kinetic
        # energy operator:
        vbar[gc:gmax] -= self.kin(l, sf)[gc:gmax] / sf[gc:gmax]
        vbar[gmax:] = 0.0
    else:
        assert vbar_type == 'poly'
        # Two-point even-polynomial fit to the smooth potential at rc.
        A = np.ones((2, 2))
        A[0] = 1.0
        A[1] = r[gc - 1:gc + 1]**2
        a = vt[gc - 1:gc + 1]
        a = solve(np.transpose(A), a)
        r2 = r**2
        vbar = a[0] + r2 * a[1] - vt
        vbar[gc:] = 0.0
    vt += vbar
    # Construct projector functions:
    for l, (e_n, s_n, q_n) in enumerate(zip(e_ln, s_ln, q_ln)):
        for e, s, q in zip(e_n, s_n, q_n):
            q[:] = self.kin(l, s) + (vt - e) * s
            q[gcutmax:] = 0.0
    filter = Filter(r, dr, gcutfilter, hfilter).filter
    vbar = filter(vbar * r)
    # Calculate matrix elements:
    self.dK_lnn = dK_lnn = []
    self.dH_lnn = dH_lnn = []
    self.dO_lnn = dO_lnn = []
    for l, (e_n, u_n, s_n, q_n) in enumerate(zip(e_ln, u_ln,
                                                 s_ln, q_ln)):
        A_nn = np.inner(s_n, q_n * dr)
        # Do a LU decomposition of A:
        nn = len(e_n)
        L_nn = np.identity(nn, float)
        U_nn = A_nn.copy()
        # Keep all bound states normalized
        if sum([n > 0 for n in n_ln[l]]) <= 1:
            for i in range(nn):
                for j in range(i + 1, nn):
                    L_nn[j, i] = 1.0 * U_nn[j, i] / U_nn[i, i]
                    U_nn[j, :] -= U_nn[i, :] * L_nn[j, i]
        dO_nn = (np.inner(u_n, u_n * dr) -
                 np.inner(s_n, s_n * dr))
        e_nn = np.zeros((nn, nn))
        e_nn.ravel()[::nn + 1] = e_n
        dH_nn = np.dot(dO_nn, e_nn) - A_nn
        # Transform states and projectors with the LU factors:
        q_n[:] = np.dot(inv(np.transpose(U_nn)), q_n)
        s_n[:] = np.dot(inv(L_nn), s_n)
        u_n[:] = np.dot(inv(L_nn), u_n)
        dO_nn = np.dot(np.dot(inv(L_nn), dO_nn),
                       inv(np.transpose(L_nn)))
        dH_nn = np.dot(np.dot(inv(L_nn), dH_nn),
                       inv(np.transpose(L_nn)))
        ku_n = [self.kin(l, u, e) for u, e in zip(u_n, e_n)]
        ks_n = [self.kin(l, s) for s in s_n]
        dK_nn = 0.5 * (np.inner(u_n, ku_n * dr) -
                       np.inner(s_n, ks_n * dr))
        dK_nn += np.transpose(dK_nn).copy()
        dK_lnn.append(dK_nn)
        dO_lnn.append(dO_nn)
        dH_lnn.append(dH_nn)
        # Fourier filter the projectors and enforce <s_n|q_n'> = delta:
        for n, q in enumerate(q_n):
            q[:] = filter(q, l) * r**(l + 1)
        A_nn = np.inner(s_n, q_n * dr)
        q_n[:] = np.dot(inv(np.transpose(A_nn)), q_n)
    self.vt = vt
    self.vbar = vbar
    t('state eigenvalue norm')
    t('--------------------------------')
    for l, (n_n, f_n, e_n) in enumerate(zip(n_ln, f_ln, e_ln)):
        for n in range(len(e_n)):
            if n_n[n] > 0:
                f = '(%d)' % f_n[n]
                t('%d%s%-4s: %12.6f %12.6f' % (
                    n_n[n], 'spdf'[l], f, e_n[n],
                    np.dot(s_ln[l][n]**2, dr)))
            else:
                t('*%s : %12.6f' % ('spdf'[l], e_n[n]))
    t('--------------------------------')
    self.logd = {}
    if logderiv:
        ni = 300
        self.elog = np.linspace(-5.0, 1.0, ni)
        # Calculate logarithmic derivatives:
        gld = gcutmax + 10
        self.rlog = r[gld]
        assert gld < gmax
        t('Calculating logarithmic derivatives at r=%.3f' % r[gld])
        t('(skip with [Ctrl-C])')
        try:
            u = np.zeros(N)
            for l in range(4):
                self.logd[l] = (np.empty(ni), np.empty(ni))
                if l <= lmax:
                    dO_nn = dO_lnn[l]
                    dH_nn = dH_lnn[l]
                    q_n = q_ln[l]
                fae = open(self.symbol + '.ae.ld.' + 'spdf'[l], 'w')
                fps = open(self.symbol + '.ps.ld.' + 'spdf'[l], 'w')
                for i, e in enumerate(self.elog):
                    # All-electron logarithmic derivative:
                    u[:] = 0.0
                    shoot(u, l, self.vr, e, self.r2dvdr, r, dr, c10, c2,
                          self.scalarrel, gmax=gld)
                    dudr = 0.5 * (u[gld + 1] - u[gld - 1]) / dr[gld]
                    ld = dudr / u[gld] - 1.0 / r[gld]
                    print >> fae, e, ld
                    self.logd[l][0][i] = ld
                    # PAW logarithmic derivative:
                    s = self.integrate(l, vt, e, gld)
                    if l <= lmax:
                        A_nn = dH_nn - e * dO_nn
                        s_n = [self.integrate(l, vt, e, gld, q)
                               for q in q_n]
                        B_nn = np.inner(q_n, s_n * dr)
                        a_n = np.dot(q_n, s * dr)
                        B_nn = np.dot(A_nn, B_nn)
                        B_nn.ravel()[::len(a_n) + 1] += 1.0
                        c_n = solve(B_nn, np.dot(A_nn, a_n))
                        s -= np.dot(c_n, s_n)
                    dsdr = 0.5 * (s[gld + 1] - s[gld - 1]) / dr[gld]
                    ld = dsdr / s[gld] - 1.0 / r[gld]
                    print >> fps, e, ld
                    self.logd[l][1][i] = ld
        except KeyboardInterrupt:
            pass
    # Write diagnostic radial functions:
    self.write(nc, 'nc')
    self.write(nt, 'nt')
    self.write(nct, 'nct')
    self.write(vbar, 'vbar')
    self.write(vt, 'vt')
    self.write(tauc, 'tauc')
    self.write(tauct, 'tauct')
    for l, (n_n, f_n, u_n, s_n, q_n) in enumerate(zip(n_ln, f_ln,
                                                      u_ln, s_ln, q_ln)):
        for n, f, u, s, q in zip(n_n, f_n, u_n, s_n, q_n):
            if n < 0:
                self.write(u, 'ae', n=n, l=l)
            self.write(s, 'ps', n=n, l=l)
            self.write(q, 'proj', n=n, l=l)
    # Test for ghost states:
    for h in [0.05]:
        self.diagonalize(h)
    # Flatten the per-l lists into per-state (j-indexed) lists, bound
    # states (nn > 0) first, then unbound extra states (nn < 0):
    self.vn_j = vn_j = []
    self.vl_j = vl_j = []
    self.vf_j = vf_j = []
    self.ve_j = ve_j = []
    self.vu_j = vu_j = []
    self.vs_j = vs_j = []
    self.vq_j = vq_j = []
    j_ln = [[0 for f in f_n] for f_n in f_ln]
    j = 0
    for l, n_n in enumerate(n_ln):
        for n, nn in enumerate(n_n):
            if nn > 0:
                vf_j.append(f_ln[l][n])
                vn_j.append(nn)
                vl_j.append(l)
                ve_j.append(e_ln[l][n])
                vu_j.append(u_ln[l][n])
                vs_j.append(s_ln[l][n])
                vq_j.append(q_ln[l][n])
                j_ln[l][n] = j
                j += 1
    for l, n_n in enumerate(n_ln):
        for n, nn in enumerate(n_n):
            if nn < 0:
                vf_j.append(0)
                vn_j.append(nn)
                vl_j.append(l)
                ve_j.append(e_ln[l][n])
                vu_j.append(u_ln[l][n])
                vs_j.append(s_ln[l][n])
                vq_j.append(q_ln[l][n])
                j_ln[l][n] = j
                j += 1
    nj = j
    self.dK_jj = np.zeros((nj, nj))
    for l, j_n in enumerate(j_ln):
        for n1, j1 in enumerate(j_n):
            for n2, j2 in enumerate(j_n):
                self.dK_jj[j1, j2] = self.dK_lnn[l][n1, n2]
    if exx:
        X_p = constructX(self)
        ExxC = atomic_exact_exchange(self, 'core-core')
    else:
        X_p = None
        ExxC = None
    sqrt4pi = sqrt(4 * pi)
    setup = SetupData(self.symbol, self.xc.name, self.name,
                      readxml=False)

    def divide_by_r(x_g, l):
        # Convert r*phi arrays to phi; extrapolate linearly to r=0 for l=0.
        r = self.r
        #for x_g, l in zip(x_jg, l_j):
        p = x_g.copy()
        p[1:] /= self.r[1:]
        # XXXXX go to higher order!!!!!
        if l == 0:  # l_j[self.jcorehole] == 0:
            p[0] = (p[2] +
                    (p[1] - p[2]) * (r[0] - r[2]) / (r[1] - r[2]))
        return p

    def divide_all_by_r(x_jg):
        return [divide_by_r(x_g, l) for x_g, l in zip(x_jg, vl_j)]

    # Populate the SetupData object:
    setup.l_j = vl_j
    setup.n_j = vn_j
    setup.f_j = vf_j
    setup.eps_j = ve_j
    setup.rcut_j = [rcut_l[l] for l in vl_j]
    setup.nc_g = nc * sqrt4pi
    setup.nct_g = nct * sqrt4pi
    setup.nvt_g = (nt - nct) * sqrt4pi
    setup.e_kinetic_core = Ekincore
    setup.vbar_g = vbar * sqrt4pi
    setup.tauc_g = tauc * sqrt4pi
    setup.tauct_g = tauct * sqrt4pi
    setup.extra_xc_data = extra_xc_data
    setup.Z = Z
    setup.Nc = self.Nc
    setup.Nv = self.Nv
    setup.e_kinetic = self.Ekin
    setup.e_xc = self.Exc
    setup.e_electrostatic = self.Epot
    setup.e_total = self.Epot + self.Exc + self.Ekin
    setup.rgd = self.rgd
    setup.rcgauss = self.rcutcomp / sqrt(self.gamma)
    setup.e_kin_jj = self.dK_jj
    setup.ExxC = ExxC
    setup.phi_jg = divide_all_by_r(vu_j)
    setup.phit_jg = divide_all_by_r(vs_j)
    setup.pt_jg = divide_all_by_r(vq_j)
    setup.X_p = X_p
    if self.jcorehole is not None:
        setup.has_corehole = True
        setup.lcorehole = l_j[self.jcorehole]  # l_j or vl_j ????? XXX
        setup.ncorehole = n_j[self.jcorehole]
        setup.phicorehole_g = divide_by_r(self.u_j[self.jcorehole],
                                          setup.lcorehole)
        setup.core_hole_e = self.e_j[self.jcorehole]
        setup.core_hole_e_kin = self.Ekincorehole
        setup.fcorehole = self.fcorehole
    if self.ghost:
        raise RuntimeError('Ghost!')
    if self.scalarrel:
        reltype = 'scalar-relativistic'
    else:
        reltype = 'non-relativistic'
    attrs = [('type', reltype), ('name', 'gpaw-%s' % version)]
    data = 'Frozen core: ' + (self.core or 'none')
    setup.generatorattrs = attrs
    setup.generatordata = data
    self.id_j = []
    for l, n in zip(vl_j, vn_j):
        if n > 0:
            id = '%s-%d%s' % (self.symbol, n, 'spdf'[l])
        else:
            id = '%s-%s%d' % (self.symbol, 'spdf'[l], -n)
        self.id_j.append(id)
    setup.id_j = self.id_j
    if write_xml:
        setup.write_xml()
    return setup
def diagonalize(self, h):
    """Diagonalize the PAW Hamiltonian on a uniform radial grid of spacing h.

    This is a ghost-state test: for each angular momentum l, the lowest
    PAW eigenvalue is compared with the all-electron one (or with
    self.emax when no bound all-electron state exists); a large
    discrepancy sets ``self.ghost = True``.
    """
    ng = 350  # number of uniform-grid points
    t = self.text
    t()
    t('Diagonalizing with gridspacing h=%.3f' % h)
    # Map the uniform grid R onto indices G of the logarithmic radial
    # grid, clipped so the three-point interpolation stencil stays valid.
    R = h * np.arange(1, ng + 1)
    G = (self.N * R / (self.beta + R) + 0.5).astype(int)
    G = np.clip(G, 1, self.N - 2)
    # Three-point (quadratic Lagrange) interpolation weights x1, x2, x3.
    R1 = np.take(self.r, G - 1)
    R2 = np.take(self.r, G)
    R3 = np.take(self.r, G + 1)
    x1 = (R - R2) * (R - R3) / (R1 - R2) / (R1 - R3)
    x2 = (R - R1) * (R - R3) / (R2 - R1) / (R2 - R3)
    x3 = (R - R1) * (R - R2) / (R3 - R1) / (R3 - R2)
    def interpolate(f):
        # Interpolate f from the radial grid onto the uniform grid R.
        f1 = np.take(f, G - 1)
        f2 = np.take(f, G)
        f3 = np.take(f, G + 1)
        return f1 * x1 + f2 * x2 + f3 * x3
    vt = interpolate(self.vt)
    t()
    t('state all-electron PAW')
    t('-------------------------------')
    for l in range(4):
        if l <= self.lmax:
            # Nonlocal PAW corrections projected onto the uniform grid.
            q_n = np.array([interpolate(q) for q in self.q_ln[l]])
            H = np.dot(np.transpose(q_n),
                       np.dot(self.dH_lnn[l], q_n)) * h
            S = np.dot(np.transpose(q_n),
                       np.dot(self.dO_lnn[l], q_n)) * h
        else:
            H = np.zeros((ng, ng))
            S = np.zeros((ng, ng))
        # Add the finite-difference kinetic energy, local potential and
        # centrifugal term to the (generalized) eigenvalue problem.
        H.ravel()[::ng + 1] += vt + 1.0 / h**2 + l * (l + 1) / 2.0 / R**2
        H.ravel()[1::ng + 1] -= 0.5 / h**2
        H.ravel()[ng::ng + 1] -= 0.5 / h**2
        S.ravel()[::ng + 1] += 1.0
        e_n = np.zeros(ng)
        general_diagonalize(H, e_n, S)
        ePAW = e_n[0]  # lowest PAW eigenvalue for this l channel
        if l <= self.lmax and self.n_ln[l][0] > 0:
            # Bound all-electron state exists: compare eigenvalues.
            eAE = self.e_ln[l][0]
            t('%d%s: %12.6f %12.6f' % (self.n_ln[l][0],
                                       'spdf'[l], eAE, ePAW), end='')
            # 0.014 Ha tolerance on the AE/PAW eigenvalue agreement.
            if abs(eAE - ePAW) > 0.014:
                t(' GHOST-STATE!')
                self.ghost = True
            else:
                t()
        else:
            t('*%s: %12.6f' % ('spdf'[l], ePAW), end='')
            # A PAW state below emax with no AE counterpart is a ghost.
            if ePAW < self.emax:
                t(' GHOST-STATE!')
                self.ghost = True
            else:
                t()
    t('-------------------------------')
def integrate(self, l, vt, e, gld, q=None):
    """Integrate the radial equation outward up to grid point ``gld``.

    Uses a three-point finite-difference recurrence on the logarithmic
    grid.  Without ``q`` the homogeneous equation is integrated from the
    regular r**(l+1) start; with ``q`` the inhomogeneous (projector
    source) equation is integrated instead.  Returns the solution array
    of length ``self.N``.
    """
    rad = self.r[1:]
    drad = self.dr[1:]
    wave = np.zeros(self.N)
    # Recurrence coefficients: sub-/super-diagonal and diagonal terms of
    # the discretized radial operator.
    centrifugal = 0.5 * l * (l + 1) / rad**2
    lap1 = -0.5 * self.d2gdr2[1:]
    lap2 = -0.5 * drad**-2
    upper = lap2 + 0.5 * lap1
    lower = lap2 - 0.5 * lap1
    diag = centrifugal - 2 * lap2 + vt[1:] - e
    if q is None:
        # Homogeneous equation: start from the regular solution r^(l+1).
        wave[1] = rad[1]**(l + 1)
        for g in range(gld):
            wave[g + 2] = (-lower[g] * wave[g] - diag[g] * wave[g + 1]) / upper[g]
        return wave
    # Inhomogeneous equation driven by the source term q.
    wave[1] = q[1] / (vt[0] - e)
    for g in range(gld):
        wave[g + 2] = (q[g + 1] - lower[g] * wave[g] - diag[g] * wave[g + 1]) / upper[g]
    return wave
def write_xml(self, vl_j, vn_j, vf_j, ve_j, vu_j, vs_j, vq_j,
              nc, nct, nt, Ekincore, X_p, ExxC, vbar,
              tauc, tauct, extra_xc_data):
    """Write the generated PAW setup to an XML file (DEPRECATED).

    NOTE(review): the very first statement unconditionally raises
    DeprecationWarning, so everything below it is unreachable dead code
    (also still written in Python 2 ``print >>`` syntax).  The live
    implementation is in gpaw/setup_data.py.
    """
    raise DeprecationWarning('use gpaw/setup_data.py')
    # ------- dead code below; kept only for reference -----------------
    xcname = self.xc.name
    if self.name is None:
        xml = open('%s.%s' % (self.symbol, xcname), 'w')
    else:
        xml = open('%s.%s.%s' % (self.symbol, self.name, xcname), 'w')
    if self.ghost:
        raise RuntimeError('Ghost!')
    print >> xml, '<?xml version="1.0"?>'
    print >> xml, '<paw_setup version="0.6">'
    name = atomic_names[self.Z].title()
    comment1 = name + ' setup for the Projector Augmented Wave method.'
    comment2 = 'Units: Hartree and Bohr radii.'
    comment2 += ' ' * (len(comment1) - len(comment2))
    print >> xml, ' <!--', comment1, '-->'
    print >> xml, ' <!--', comment2, '-->'
    print >> xml, (' <atom symbol="%s" Z="%d" core="%.1f" valence="%d"/>'
                   % (self.symbol, self.Z, self.Nc, self.Nv))
    if self.xcname == 'LDA':
        type = 'LDA'
        name = 'PW'
    else:
        type = 'GGA'
        name = self.xcname
    print >> xml, ' <xc_functional type="%s" name="%s"/>' % (type, name)
    if self.scalarrel:
        type = 'scalar-relativistic'
    else:
        type = 'non-relativistic'
    print >> xml, ' <generator type="%s" name="gpaw-%s">' % \
          (type, version)
    print >> xml, '    Frozen core:', self.core or 'none'
    print >> xml, ' </generator>'
    print >> xml, ' <ae_energy kinetic="%f" xc="%f"' % \
          (self.Ekin, self.Exc)
    print >> xml, '            electrostatic="%f" total="%f"/>' % \
          (self.Epot, self.Ekin + self.Exc + self.Epot)
    print >> xml, ' <core_energy kinetic="%f"/>' % Ekincore
    print >> xml, ' <valence_states>'
    ids = []
    # Bound states (n > 0) carry occupations; extra partial waves use a
    # negative n as a label and get the shorter tag.
    line1 = '  <state n="%d" l="%d" f=%s rc="%5.3f" e="%8.5f" id="%s"/>'
    line2 = '  <state       l="%d"        rc="%5.3f" e="%8.5f" id="%s"/>'
    for l, n, f, e in zip(vl_j, vn_j, vf_j, ve_j):
        if n > 0:
            f = '%-4s' % ('"%d"' % f)
            id = '%s-%d%s' % (self.symbol, n, 'spdf'[l])
            print >> xml, line1 % (n, l, f, self.rcut_l[l], e, id)
        else:
            id = '%s-%s%d' % (self.symbol, 'spdf'[l], -n)
            print >> xml, line2 % (l, self.rcut_l[l], e, id)
        ids.append(id)
    print >> xml, ' </valence_states>'
    print >> xml, ('  <radial_grid eq="r=a*i/(n-i)" a="%f" n="%d" ' +
                   'istart="0" iend="%d" id="g1"/>') % \
        (self.beta, self.N, self.N - 1)
    rcgauss = self.rcutcomp / sqrt(self.gamma)
    print >> xml, ('  <shape_function type="gauss" rc="%.12e"/>' %
                   rcgauss)
    r = self.r
    if self.jcorehole != None:
        print "self.jcorehole", self.jcorehole
        print >> xml, (('  <core_hole_state state="%d%s" ' +
                        'removed="%.1f" eig="%.8f" ekin="%.8f">') %
                       (self.ncorehole, 'spdf'[self.lcorehole],
                        self.fcorehole,
                        self.e_j[self.jcorehole],self.Ekincorehole))
        #print 'normalized?', np.dot(self.dr, self.u_j[self.jcorehole]**2)
        # Divide u by r; extrapolate linearly to r=0 for s states.
        p = self.u_j[self.jcorehole].copy()
        p[1:] /= r[1:]
        if self.l_j[self.jcorehole] == 0:
            p[0] = (p[2] +
                    (p[1] - p[2]) * (r[0] - r[2]) / (r[1] - r[2]))
        for x in p:
            print >> xml, '%16.12e' % x,
        print >> xml, '\n  </core_hole_state>'
    # Radial functions are written with the sqrt(4*pi) spherical-harmonic
    # normalization factor.
    for name, a in [('ae_core_density', nc),
                    ('pseudo_core_density', nct),
                    ('pseudo_valence_density', nt - nct),
                    ('zero_potential', vbar),
                    ('ae_core_kinetic_energy_density',tauc),
                    ('pseudo_core_kinetic_energy_density',tauct)]:
        print >> xml, '  <%s grid="g1">\n    ' % name,
        for x in a * sqrt(4 * pi):
            print >> xml, '%16.12e' % x,
        print >> xml, '\n  </%s>' % name
    # Print xc-specific data to setup file (used so for KLI and GLLB)
    for name, a in extra_xc_data.iteritems():
        newname = 'GLLB_'+name
        print >> xml, '  <%s grid="g1">\n    ' % newname,
        for x in a:
            print >> xml, '%16.12e' % x,
        print >> xml, '\n  </%s>' % newname
    # Partial waves and projectors, all divided by r (s states are
    # extrapolated to r=0 as above).
    for l, u, s, q, in zip(vl_j, vu_j, vs_j, vq_j):
        id = ids.pop(0)
        for name, a in [('ae_partial_wave', u),
                        ('pseudo_partial_wave', s),
                        ('projector_function', q)]:
            print >> xml, ('  <%s state="%s" grid="g1">\n    ' %
                           (name, id)),
            p = a.copy()
            p[1:] /= r[1:]
            if l == 0:
                # XXXXX go to higher order!!!!!
                p[0] = (p[2] +
                        (p[1] - p[2]) * (r[0] - r[2]) / (r[1] - r[2]))
            for x in p:
                print >> xml, '%16.12e' % x,
            print >> xml, '\n  </%s>' % name
    print >> xml, '  <kinetic_energy_differences>',
    nj = len(self.dK_jj)
    for j1 in range(nj):
        print >> xml, '\n    ',
        for j2 in range(nj):
            print >> xml, '%16.12e' % self.dK_jj[j1, j2],
    print >> xml, '\n  </kinetic_energy_differences>'
    if X_p is not None:
        print >>xml, '  <exact_exchange_X_matrix>\n    ',
        for x in X_p:
            print >> xml, '%16.12e' % x,
        print >>xml, '\n  </exact_exchange_X_matrix>'
        print >> xml, '  <exact_exchange core-core="%f"/>' % ExxC
    print >> xml, '</paw_setup>'
def construct_smooth_wavefunction(u, l, gc, r, s):
    """Smoothly continue a wave function inward of grid point ``gc``.

    Fits s = r**(l+1) * (a + b*r**2 + c*r**4 + d*r**6) so the polynomial
    matches u / r**(l+1) exactly at the four grid points gc-2 .. gc+1,
    then overwrites ``s[:gc]`` in place with the fitted values.
    """
    window = slice(gc - 2, gc + 2)
    # Build the 4x4 Vandermonde-like system in powers of r**2.
    r2_fit = r[window]**2
    design = np.ones((4, 4))
    design[:, 0] = 1.0
    design[:, 1] = r2_fit
    design[:, 2] = r2_fit**2
    design[:, 3] = r2_fit * r2_fit**2
    rhs = u[window] / r[window]**(l + 1)
    coef = solve(design, rhs)
    # Evaluate the polynomial on the inner region via Horner's scheme.
    rr = r[:gc]
    rr2 = rr**2
    poly = coef[0] + rr2 * (coef[1] + rr2 * (coef[2] + rr2 * (coef[3])))
    s[:gc] = rr**(l + 1) * poly
if __name__ == '__main__':
    # Batch-generate PAW setup files for every configured element and a
    # range of XC functionals, skipping setups that already exist on disk.
    import os
    from gpaw.atom.basis import BasisMaker
    from gpaw.atom.configurations import parameters
    for xcname in ['LDA', 'PBE', 'RPBE', 'revPBE', 'GLLBSC']:
        for symbol, par in parameters.items():
            filename = symbol + '.' + xcname
            # Skip if the setup (possibly gzipped) was generated earlier.
            if os.path.isfile(filename) or os.path.isfile(filename + '.gz'):
                continue
            g = Generator(symbol, xcname, scalarrel=True, nofiles=True)
            g.run(exx=True, logderiv=False, use_restart_file=False, **par)
            if xcname == 'PBE':
                # Also produce the default double-zeta-polarized basis set.
                bm = BasisMaker(g, name='dzp', run=False)
                basis = bm.generate()
                basis.write_xml()
|
robwarm/gpaw-symm
|
gpaw/atom/generator.py
|
Python
|
gpl-3.0
| 36,950
|
[
"ASE",
"GPAW",
"Gaussian"
] |
d97c06ebe0adb66fe426134d5d982db562053b28512640daf3bb36d65509da4f
|
"""
Dashboard view and supporting methods
"""
import datetime
import logging
from collections import defaultdict
from completion.exceptions import UnavailableCompletionData
from completion.utilities import get_key_to_last_completed_course_block
from django.conf import settings
from django.contrib import messages
from django.contrib.auth.decorators import login_required
from django.urls import reverse
from django.shortcuts import redirect
from django.utils.translation import ugettext as _
from django.views.decorators.csrf import ensure_csrf_cookie
from edx_django_utils import monitoring as monitoring_utils
from opaque_keys.edx.keys import CourseKey
from pytz import UTC
from six import text_type, iteritems
import track.views
from bulk_email.models import BulkEmailFlag, Optout # pylint: disable=import-error
from course_modes.models import CourseMode
from courseware.access import has_access
from edxmako.shortcuts import render_to_response, render_to_string
from entitlements.models import CourseEntitlement
from lms.djangoapps.commerce.utils import EcommerceService # pylint: disable=import-error
from lms.djangoapps.verify_student.services import IDVerificationService
from openedx.core.djangoapps.catalog.utils import (
get_programs,
get_pseudo_session_for_entitlement,
get_visible_sessions_for_entitlement
)
from openedx.core.djangoapps.credit.email_utils import get_credit_provider_display_names, make_providers_strings
from openedx.core.djangoapps.programs.models import ProgramsApiConfig
from openedx.core.djangoapps.programs.utils import ProgramDataExtender, ProgramProgressMeter
from openedx.core.djangoapps.site_configuration import helpers as configuration_helpers
from openedx.core.djangoapps.util.maintenance_banner import add_maintenance_banner
from openedx.core.djangoapps.waffle_utils import WaffleFlag, WaffleFlagNamespace
from openedx.core.djangoapps.user_api.accounts.utils import is_secondary_email_feature_enabled_for_user
from openedx.core.djangolib.markup import HTML, Text
from openedx.features.enterprise_support.api import get_dashboard_consent_notification
from openedx.features.enterprise_support.utils import is_enterprise_learner
from openedx.features.journals.api import journals_enabled
from shoppingcart.api import order_history
from shoppingcart.models import CourseRegistrationCode, DonationConfiguration
from openedx.core.djangoapps.user_authn.cookies import set_logged_in_cookies
from student.helpers import cert_info, check_verify_status_by_course
from student.models import (
AccountRecovery,
CourseEnrollment,
CourseEnrollmentAttribute,
DashboardConfiguration,
UserProfile
)
from util.milestones_helpers import get_pre_requisite_courses_not_completed
from xmodule.modulestore.django import modulestore
log = logging.getLogger("edx.student")
def get_org_black_and_whitelist_for_site():
    """
    Return the org whitelist and blacklist for the current site.

    Returns:
        (org_whitelist, org_blacklist): The whitelist takes precedence;
        the blacklist is only populated (with every org configured for
        any other site) when the current site has no org whitelist, which
        is the edx.org case.
    """
    whitelist = configuration_helpers.get_current_site_orgs()
    # Only fall back to a blacklist when no site-specific orgs exist.
    blacklist = None if whitelist else configuration_helpers.get_all_orgs()
    return whitelist, blacklist
def _get_recently_enrolled_courses(course_enrollments):
    """
    Filter a list of enrollments down to the recent, active ones.

    Args:
        course_enrollments (list[CourseEnrollment]): enrollments to filter.
    Returns:
        list[CourseEnrollment]: active enrollments created within the
        configured "recent" window.
    """
    window_seconds = DashboardConfiguration.current().recent_enrollment_time_delta
    cutoff = datetime.datetime.now(UTC) - datetime.timedelta(seconds=window_seconds)
    recent = []
    for enrollment in course_enrollments:
        # Enrollments without a created date are deliberately excluded.
        if enrollment.is_active and enrollment.created > cutoff:
            recent.append(enrollment)
    return recent
def _allow_donation(course_modes, course_id, enrollment):
    """
    Determine whether the dashboard should request donations for a course.

    Checks that donations are enabled for the platform and that the
    learner's current mode in the course is a free one.

    Args:
        course_modes (dict): Mapping of course ID's to course mode dictionaries.
        course_id (str): The unique identifier for the course.
        enrollment (CourseEnrollment): The enrollment object in which the user is enrolled.
    Returns:
        True if the course is allowing donations; False otherwise (including
        when the course cannot be found in ``course_modes``).
    """
    if course_id not in course_modes:
        flat_unexpired_modes = {
            text_type(course_id): [mode for mode in modes]
            for course_id, modes in iteritems(course_modes)
        }
        flat_all_modes = {
            text_type(course_id): [mode.slug for mode in modes]
            for course_id, modes in iteritems(CourseMode.all_modes_for_courses([course_id]))
        }
        log.error(
            u'Can not find `%s` in course modes.`%s`. All modes: `%s`',
            course_id,
            flat_unexpired_modes,
            flat_all_modes
        )
        # Bug fix: previously execution fell through to the expression
        # below and raised KeyError on course_modes[course_id] whenever
        # donations were enabled. A missing course can never donate.
        return False
    donations_enabled = configuration_helpers.get_value(
        'ENABLE_DONATIONS',
        DonationConfiguration.current().enabled
    )
    return (
        donations_enabled and
        enrollment.mode in course_modes[course_id] and
        course_modes[course_id][enrollment.mode].min_price == 0
    )
def _create_recent_enrollment_message(course_enrollments, course_modes):  # pylint: disable=invalid-name
    """
    Build an HTML message for the learner's recent course enrollments.

    Args:
        course_enrollments (list[CourseEnrollment]): a list of course enrollments.
        course_modes (dict): Mapping of course ID's to course mode dictionaries.
    Returns:
        Rendered HTML string, or None when there are no recent enrollments.
    """
    recent = _get_recently_enrolled_courses(course_enrollments)
    if not recent:
        return None
    count = len(recent)
    # Exactly two course names read better joined with 'and'.
    separator = _(' and ') if count == 2 else ', '
    course_names = separator.join(
        enrollment.course_overview.display_name for enrollment in recent
    )
    allow_donations = any(
        _allow_donation(course_modes, enrollment.course_overview.id, enrollment)
        for enrollment in recent
    )
    platform_name = configuration_helpers.get_value('platform_name', settings.PLATFORM_NAME)
    return render_to_string(
        'enrollment/course_enrollment_message.html',
        {
            'course_names': course_names,
            'enrollments_count': count,
            'allow_donations': allow_donations,
            'platform_name': platform_name,
            # Only link a course when there is exactly one recent enrollment.
            'course_id': recent[0].course_overview.id if count == 1 else None
        }
    )
def get_course_enrollments(user, org_whitelist, org_blacklist):
    """
    Yield the user's course enrollments, filtered for the current site.

    Arguments:
        user (User): the user in question.
        org_whitelist (list[str]): If not None, ONLY courses of these orgs will be returned.
        org_blacklist (list[str]): Courses of these orgs will be excluded.
    Returns:
        generator[CourseEnrollment]: enrollments to show on the dashboard.
    """
    for enrollment in CourseEnrollment.enrollments_for_user_with_overviews_preload(user):
        overview = enrollment.course_overview
        # Skip (and log) enrollments whose course data is missing/broken.
        if not overview:
            log.error(
                "User %s enrolled in broken or non-existent course %s",
                user.username,
                enrollment.course_id
            )
            continue
        org = overview.location.org
        # Whitelist takes precedence over the blacklist.
        if org_whitelist and org not in org_whitelist:
            continue
        if org_blacklist and org in org_blacklist:
            continue
        yield enrollment
def get_filtered_course_entitlements(user, org_whitelist, org_blacklist):
    """
    Given a user, return a filtered set of his or her course entitlements.

    Arguments:
        user (User): the user in question.
        org_whitelist (list[str]): If not None, ONLY entitlements of these orgs will be returned.
        org_blacklist (list[str]): CourseEntitlements of these orgs will be excluded.
    Returns:
        (filtered_entitlements, available_sessions_by_uuid,
        unfulfilled_entitlement_pseudo_sessions): entitlements to show on
        the dashboard plus session metadata keyed by entitlement UUID.
    """
    course_entitlement_available_sessions = {}
    unfulfilled_entitlement_pseudo_sessions = {}
    course_entitlements = list(CourseEntitlement.get_active_entitlements_for_user(user))
    filtered_entitlements = []
    for course_entitlement in course_entitlements:
        course_entitlement.update_expired_at()
        available_runs = get_visible_sessions_for_entitlement(course_entitlement)
        # Bug fix: these were initialized once before the loop, so a
        # course_run_key/pseudo_session from a previous entitlement could
        # leak into this iteration's org filtering. Reset them per item.
        pseudo_session = None
        course_run_key = None
        if not course_entitlement.enrollment_course_run:
            # Unfulfilled entitlements need a mock session for metadata.
            pseudo_session = get_pseudo_session_for_entitlement(course_entitlement)
            unfulfilled_entitlement_pseudo_sessions[str(course_entitlement.uuid)] = pseudo_session
        # Determine the course run used to check the org of the entitlement.
        if course_entitlement.enrollment_course_run:
            course_run_key = course_entitlement.enrollment_course_run.course_id
        elif available_runs:
            course_run_key = CourseKey.from_string(available_runs[0]['key'])
        elif pseudo_session:
            course_run_key = CourseKey.from_string(pseudo_session['key'])
        # Without a course_run_key we cannot tell whether the entitlement
        # should be shown, so it is excluded by default.
        if course_run_key:
            if org_whitelist and course_run_key.org not in org_whitelist:
                continue
            elif org_blacklist and course_run_key.org in org_blacklist:
                continue
            course_entitlement_available_sessions[str(course_entitlement.uuid)] = available_runs
            filtered_entitlements.append(course_entitlement)
    return filtered_entitlements, course_entitlement_available_sessions, unfulfilled_entitlement_pseudo_sessions
def complete_course_mode_info(course_id, enrollment, modes=None):
    """
    Derive upsell information from the course modes and current enrollment.

    Returns a dict with:
        - show_upsell: whether to show the verified-upgrade upsell
        - days_for_upsell: days left until the verified mode expires (or None)
        - verified_sku / verified_bulk_sku: only present when upselling
    """
    if modes is None:
        modes = CourseMode.modes_for_course_dict(course_id)
    mode_info = {'show_upsell': False, 'days_for_upsell': None}
    # Upsell only when a verified mode exists and the learner's current
    # mode is one that can upgrade to verified.
    can_upsell = (
        CourseMode.VERIFIED in modes and
        enrollment.mode in CourseMode.UPSELL_TO_VERIFIED_MODES
    )
    if can_upsell:
        verified_mode = modes['verified']
        mode_info['show_upsell'] = True
        mode_info['verified_sku'] = verified_mode.sku
        mode_info['verified_bulk_sku'] = verified_mode.bulk_sku
        # If there is an expiration date, report how far away it is.
        if verified_mode.expiration_datetime:
            today = datetime.datetime.now(UTC).date()
            mode_info['days_for_upsell'] = (verified_mode.expiration_datetime.date() - today).days
    return mode_info
def is_course_blocked(request, redeemed_registration_codes, course_key):
    """
    Check whether the user's registration in this course is blocked.

    A course is blocked when any redeemed, invoice-generated registration
    code points at an invalid (unpaid) invoice; in that case the user is
    also opted out of course emails as a side effect.
    """
    for redeemed_registration in redeemed_registration_codes:
        # Registration codes may come from the Bulk Purchase scenario; we
        # only care about invoice-generated codes with an invalid invoice.
        invoice_item = redeemed_registration.invoice_item
        if not invoice_item or invoice_item.invoice.is_valid:
            continue
        # Disable email notifications for unpaid registration courses.
        Optout.objects.get_or_create(user=request.user, course_id=course_key)
        log.info(
            u"User %s (%s) opted out of receiving emails from course %s",
            request.user.username,
            request.user.email,
            course_key,
        )
        track.views.server_track(
            request,
            "change-email1-settings",
            {"receive_emails": "no", "course": text_type(course_key)},
            page='dashboard',
        )
        return True
    return False
def get_verification_error_reasons_for_display(verification_error_codes):
    """
    Map ID-verification error codes to learner-facing display text.

    Codes with no known mapping are silently dropped.
    """
    verification_error_map = {
        'photos_mismatched': _('Photos are mismatched'),
        'id_image_missing_name': _('Name missing from ID photo'),
        'id_image_missing': _('ID photo not provided'),
        'id_invalid': _('ID is invalid'),
        'user_image_not_clear': _('Learner photo is blurry'),
        'name_mismatch': _('Name on ID does not match name on account'),
        'user_image_missing': _('Learner photo not provided'),
        'id_image_not_clear': _('ID photo is blurry'),
    }
    mapped = (verification_error_map.get(code) for code in verification_error_codes)
    return [text for text in mapped if text]
def reverification_info(statuses):
    """
    Return reverification-related information for *all* of the user's
    enrollments whose reverification status is in ``statuses``.

    Args:
        statuses (list): reverification statuses we want information for,
            e.g. ["must_reverify", "denied"].
    Returns:
        defaultdict(list): one key per requested status; each value list
        is kept sorted by its items' ``date`` attribute.
    """
    info_by_status = defaultdict(list)
    for status in statuses:
        # Accessing the defaultdict creates the key even when empty.
        entries = info_by_status[status]
        if entries:
            # Keep the data ordered by reverification end date.
            entries.sort(key=lambda entry: entry.date)
    return info_by_status
def _credit_statuses(user, course_enrollments):
    """
    Retrieve the status for credit courses.
    A credit course is a course for which a user can purchased
    college credit.  The current flow is:
    1. User becomes eligible for credit (submits verifications, passes the course, etc.)
    2. User purchases credit from a particular credit provider.
    3. User requests credit from the provider, usually creating an account on the provider's site.
    4. The credit provider notifies us whether the user's request for credit has been accepted or rejected.
    The dashboard is responsible for communicating the user's state in this flow.
    Arguments:
        user (User): The currently logged-in user.
        course_enrollments (list[CourseEnrollment]): List of enrollments for the
            user.
    Returns: dict
        The returned dictionary has keys that are `CourseKey`s and values that
        are dictionaries with:
            * eligible (bool): True if the user is eligible for credit in this course.
            * deadline (datetime): The deadline for purchasing and requesting credit for this course.
            * purchased (bool): Whether the user has purchased credit for this course.
            * provider_name (string): The display name of the credit provider.
            * provider_status_url (string): A URL the user can visit to check on their credit request status.
            * request_status (string): Either "pending", "approved", or "rejected"
            * error (bool): If true, an unexpected error occurred when retrieving the credit status,
                so the user should contact the support team.
    Example:
        >>> _credit_statuses(user, course_enrollments)
        {
            CourseKey.from_string("edX/DemoX/Demo_Course"): {
                "course_key": "edX/DemoX/Demo_Course",
                "eligible": True,
                "deadline": 2015-11-23 00:00:00 UTC,
                "purchased": True,
                "provider_name": "Hogwarts",
                "provider_status_url": "http://example.com/status",
                "request_status": "pending",
                "error": False
            }
        }
    """
    # Imported locally to avoid a circular/app-loading-time import.
    from openedx.core.djangoapps.credit import api as credit_api
    # Feature flag off
    if not settings.FEATURES.get("ENABLE_CREDIT_ELIGIBILITY"):
        return {}
    # Latest credit-request status per course, keyed by course key.
    request_status_by_course = {
        request["course_key"]: request["status"]
        for request in credit_api.get_credit_requests_for_user(user.username)
    }
    # Enrollments whose mode is "credit" indicate a credit purchase.
    credit_enrollments = {
        enrollment.course_id: enrollment
        for enrollment in course_enrollments
        if enrollment.mode == "credit"
    }
    # When a user purchases credit in a course, the user's enrollment
    # mode is set to "credit" and an enrollment attribute is set
    # with the ID of the credit provider.  We retrieve *all* such attributes
    # here to minimize the number of database queries.
    purchased_credit_providers = {
        attribute.enrollment.course_id: attribute.value
        for attribute in CourseEnrollmentAttribute.objects.filter(
            namespace="credit",
            name="provider_id",
            enrollment__in=credit_enrollments.values()
        ).select_related("enrollment")
    }
    # Provider metadata (display name, status URL) keyed by provider id.
    provider_info_by_id = {
        provider["id"]: provider
        for provider in credit_api.get_credit_providers()
    }
    statuses = {}
    for eligibility in credit_api.get_eligibilities_for_user(user.username):
        course_key = CourseKey.from_string(text_type(eligibility["course_key"]))
        providers_names = get_credit_provider_display_names(course_key)
        status = {
            "course_key": text_type(course_key),
            "eligible": True,
            "deadline": eligibility["deadline"],
            "purchased": course_key in credit_enrollments,
            "provider_name": make_providers_strings(providers_names),
            "provider_status_url": None,
            "provider_id": None,
            "request_status": request_status_by_course.get(course_key),
            "error": False,
        }
        # If the user has purchased credit, then include information about the credit
        # provider from which the user purchased credit.
        # We retrieve the provider's ID from the an "enrollment attribute" set on the user's
        # enrollment when the user's order for credit is fulfilled by the E-Commerce service.
        if status["purchased"]:
            provider_id = purchased_credit_providers.get(course_key)
            if provider_id is None:
                # Data inconsistency: the purchase happened but the
                # provider attribute is missing; surface as an error.
                status["error"] = True
                log.error(
                    u"Could not find credit provider associated with credit enrollment "
                    u"for user %s in course %s.  The user will not be able to see his or her "
                    u"credit request status on the student dashboard.  This attribute should "
                    u"have been set when the user purchased credit in the course.",
                    user.id, course_key
                )
            else:
                provider_info = provider_info_by_id.get(provider_id, {})
                status["provider_name"] = provider_info.get("display_name")
                status["provider_status_url"] = provider_info.get("status_url")
                status["provider_id"] = provider_id
        statuses[course_key] = status
    return statuses
def _get_urls_for_resume_buttons(user, enrollments):
    """
    Return one "resume course" URL per enrollment.

    The URL jumps to the user's last completed block; an empty string is
    used when no completion data is available for the enrollment.
    """
    resume_button_urls = []
    for enrollment in enrollments:
        try:
            block_key = get_key_to_last_completed_course_block(user, enrollment.course_id)
            url = reverse(
                'jump_to',
                kwargs={'course_id': enrollment.course_id, 'location': block_key}
            )
        except UnavailableCompletionData:
            # No progress recorded for this course.
            url = ''
        resume_button_urls.append(url)
    return resume_button_urls
@login_required
@ensure_csrf_cookie
@add_maintenance_banner
def student_dashboard(request):
    """
    Provides the LMS dashboard view.

    Aggregates everything the dashboard template needs: the user's
    enrollments and entitlements, per-course modes, certificate / credit /
    ID-verification statuses, program associations, e-commerce flags and
    assorted site-configured messaging, then renders ``dashboard.html``.

    TODO: This is lms specific and does not belong in common code.
    Arguments:
        request: The request object.
    Returns:
        The dashboard response.
    """
    # Function-level import -- presumably to avoid a circular import with the
    # student_account feature app; TODO confirm.
    from openedx.features.student_account.helpers import get_non_active_course
    user = request.user
    # A user without a profile cannot use the dashboard; send them to finish
    # setting up their account first.
    if not UserProfile.objects.filter(user=user).exists():
        return redirect(reverse('update_account_settings'))
    # Site-configurable values, each falling back to settings / FEATURES.
    platform_name = configuration_helpers.get_value("platform_name", settings.PLATFORM_NAME)
    enable_verified_certificates = configuration_helpers.get_value(
        'ENABLE_VERIFIED_CERTIFICATES',
        settings.FEATURES.get('ENABLE_VERIFIED_CERTIFICATES')
    )
    display_course_modes_on_dashboard = configuration_helpers.get_value(
        'DISPLAY_COURSE_MODES_ON_DASHBOARD',
        settings.FEATURES.get('DISPLAY_COURSE_MODES_ON_DASHBOARD', True)
    )
    activation_email_support_link = configuration_helpers.get_value(
        'ACTIVATION_EMAIL_SUPPORT_LINK', settings.ACTIVATION_EMAIL_SUPPORT_LINK
    ) or settings.SUPPORT_SITE_LINK
    hide_dashboard_courses_until_activated = configuration_helpers.get_value(
        'HIDE_DASHBOARD_COURSES_UNTIL_ACTIVATED',
        settings.FEATURES.get('HIDE_DASHBOARD_COURSES_UNTIL_ACTIVATED', False)
    )
    empty_dashboard_message = configuration_helpers.get_value(
        'EMPTY_DASHBOARD_MESSAGE', None
    )
    # Get the org whitelist or the org blacklist for the current site
    site_org_whitelist, site_org_blacklist = get_org_black_and_whitelist_for_site()
    course_enrollments = list(get_course_enrollments(user, site_org_whitelist, site_org_blacklist))
    # Get the entitlements for the user and a mapping to all available sessions for that entitlement
    # If an entitlement has no available sessions, pass through a mock course overview object
    (course_entitlements,
     course_entitlement_available_sessions,
     unfulfilled_entitlement_pseudo_sessions) = get_filtered_course_entitlements(
        user,
        site_org_whitelist,
        site_org_blacklist
    )
    # Record how many courses there are so that we can get a better
    # understanding of usage patterns on prod.
    monitoring_utils.accumulate('num_courses', len(course_enrollments))
    # Sort the enrollment pairs by the enrollment date (most recent first).
    course_enrollments.sort(key=lambda x: x.created, reverse=True)
    # Retrieve the course modes for each course
    enrolled_course_ids = [enrollment.course_id for enrollment in course_enrollments]
    __, unexpired_course_modes = CourseMode.all_and_unexpired_modes_for_courses(enrolled_course_ids)
    # Index modes as {course_id: {mode_slug: mode}} for O(1) lookup below.
    course_modes_by_course = {
        course_id: {
            mode.slug: mode
            for mode in modes
        }
        for course_id, modes in iteritems(unexpired_course_modes)
    }
    # Check to see if the student has recently enrolled in a course.
    # If so, display a notification message confirming the enrollment.
    enrollment_message = _create_recent_enrollment_message(
        course_enrollments, course_modes_by_course
    )
    course_optouts = Optout.objects.filter(user=user).values_list('course_id', flat=True)
    # Display activation message
    activate_account_message = ''
    if not user.is_active:
        activate_account_message = Text(_(
            "Check your {email_start}{email}{email_end} inbox for an account activation link from {platform_name}. "
            "If you need help, contact {link_start}{platform_name} Support{link_end}."
        )).format(
            platform_name=platform_name,
            email_start=HTML("<strong>"),
            email_end=HTML("</strong>"),
            email=user.email,
            link_start=HTML("<a target='_blank' href='{activation_email_support_link}'>").format(
                activation_email_support_link=activation_email_support_link,
            ),
            link_end=HTML("</a>"),
        )
    enterprise_message = get_dashboard_consent_notification(request, user, course_enrollments)
    # Secondary ("recovery") email messaging: prompt the user to add one, or
    # to activate one that exists but is not yet confirmed.
    recovery_email_message = recovery_email_activation_message = None
    if is_secondary_email_feature_enabled_for_user(user=user):
        try:
            account_recovery_obj = AccountRecovery.objects.get(user=user)
        except AccountRecovery.DoesNotExist:
            recovery_email_message = Text(
                _(
                    "Add a recovery email to retain access when single-sign on is not available. "
                    "Go to {link_start}your Account Settings{link_end}.")
            ).format(
                link_start=HTML("<a target='_blank' href='{account_setting_page}'>").format(
                    account_setting_page=reverse('account_settings'),
                ),
                link_end=HTML("</a>")
            )
        else:
            if not account_recovery_obj.is_active:
                recovery_email_activation_message = Text(
                    _(
                        "Recovery email is not activated yet. "
                        "Kindly visit your email and follow the instructions to activate it."
                    )
                )
    # Disable lookup of Enterprise consent_required_course due to ENT-727
    # Will re-enable after fixing WL-1315
    consent_required_courses = set()
    enterprise_customer_name = None
    # Account activation message
    account_activation_messages = [
        message for message in messages.get_messages(request) if 'account-activation' in message.tags
    ]
    # Global staff can see what courses encountered an error on their dashboard
    staff_access = False
    errored_courses = {}
    if has_access(user, 'staff', 'global'):
        # Show any courses that encountered an error on load
        staff_access = True
        errored_courses = modulestore().get_errored_courses()
    show_courseware_links_for = {
        enrollment.course_id: has_access(request.user, 'load', enrollment.course_overview)
        for enrollment in course_enrollments
    }
    # Find programs associated with course runs being displayed. This information
    # is passed in the template context to allow rendering of program-related
    # information on the dashboard.
    meter = ProgramProgressMeter(request.site, user, enrollments=course_enrollments)
    ecommerce_service = EcommerceService()
    inverted_programs = meter.invert_programs()
    urls, programs_data = {}, {}
    bundles_on_dashboard_flag = WaffleFlag(WaffleFlagNamespace(name=u'student.experiments'), u'bundles_on_dashboard')
    # TODO: Delete this code and the relevant HTML code after testing LEARNER-3072 is complete
    if bundles_on_dashboard_flag.is_enabled() and inverted_programs and inverted_programs.items():
        if len(course_enrollments) < 4:
            for program in inverted_programs.values():
                try:
                    program_uuid = program[0]['uuid']
                    program_data = get_programs(request.site, uuid=program_uuid)
                    program_data = ProgramDataExtender(program_data, request.user).extend()
                    skus = program_data.get('skus')
                    checkout_page_url = ecommerce_service.get_checkout_page_url(*skus)
                    program_data['completeProgramURL'] = checkout_page_url + '&bundle=' + program_data.get('uuid')
                    programs_data[program_uuid] = program_data
                except:  # pylint: disable=bare-except
                    # Best-effort experiment: any failure simply omits the
                    # program bundle card rather than breaking the dashboard.
                    pass
    # Construct a dictionary of course mode information
    # used to render the course list. We re-use the course modes dict
    # we loaded earlier to avoid hitting the database.
    course_mode_info = {
        enrollment.course_id: complete_course_mode_info(
            enrollment.course_id, enrollment,
            modes=course_modes_by_course[enrollment.course_id]
        )
        for enrollment in course_enrollments
    }
    # Determine the per-course verification status
    # This is a dictionary in which the keys are course locators
    # and the values are one of:
    #
    # VERIFY_STATUS_NEED_TO_VERIFY
    # VERIFY_STATUS_SUBMITTED
    # VERIFY_STATUS_APPROVED
    # VERIFY_STATUS_MISSED_DEADLINE
    #
    # Each of which correspond to a particular message to display
    # next to the course on the dashboard.
    #
    # If a course is not included in this dictionary,
    # there is no verification messaging to display.
    verify_status_by_course = check_verify_status_by_course(user, course_enrollments)
    cert_statuses = {
        enrollment.course_id: cert_info(request.user, enrollment.course_overview)
        for enrollment in course_enrollments
    }
    # only show email settings for Mongo course and when bulk email is turned on
    show_email_settings_for = frozenset(
        enrollment.course_id for enrollment in course_enrollments if (
            BulkEmailFlag.feature_enabled(enrollment.course_id)
        )
    )
    # Verification Attempts
    # Used to generate the "you must reverify for course x" banner
    verification_status = IDVerificationService.user_status(user)
    verification_errors = get_verification_error_reasons_for_display(verification_status['error'])
    # Gets data for midcourse reverifications, if any are necessary or have failed
    statuses = ["approved", "denied", "pending", "must_reverify"]
    reverifications = reverification_info(statuses)
    block_courses = frozenset(
        enrollment.course_id for enrollment in course_enrollments
        if is_course_blocked(
            request,
            CourseRegistrationCode.objects.filter(
                course_id=enrollment.course_id,
                registrationcoderedemption__redeemed_by=request.user
            ),
            enrollment.course_id
        )
    )
    enrolled_courses_either_paid = frozenset(
        enrollment.course_id for enrollment in course_enrollments
        if enrollment.is_paid_course()
    )
    # If there are *any* denied reverifications that have not been toggled off,
    # we'll display the banner
    denied_banner = any(item.display for item in reverifications["denied"])
    # Populate the Order History for the side-bar.
    order_history_list = order_history(
        user,
        course_org_filter=site_org_whitelist,
        org_filter_out_set=site_org_blacklist
    )
    # get list of courses having pre-requisites yet to be completed
    courses_having_prerequisites = frozenset(
        enrollment.course_id for enrollment in course_enrollments
        if enrollment.course_overview.pre_requisite_courses
    )
    courses_requirements_not_met = get_pre_requisite_courses_not_completed(user, courses_having_prerequisites)
    # Redirect messaging: other views bounce to the dashboard with one of
    # these query parameters to explain why the user landed here.
    if 'notlive' in request.GET:
        redirect_message = _("The course you are looking for does not start until {date}.").format(
            date=request.GET['notlive']
        )
    elif 'course_closed' in request.GET:
        redirect_message = _("The course you are looking for is closed for enrollment as of {date}.").format(
            date=request.GET['course_closed']
        )
    elif 'access_response_error' in request.GET:
        # This can be populated in a generalized way with fields from access response errors
        redirect_message = request.GET['access_response_error']
    else:
        redirect_message = ''
    valid_verification_statuses = ['approved', 'must_reverify', 'pending', 'expired']
    display_sidebar_on_dashboard = (len(order_history_list) or
                                    (verification_status['status'] in valid_verification_statuses and
                                     verification_status['should_display']))
    # Filter out any course enrollment course cards that are associated with fulfilled entitlements
    for entitlement in [e for e in course_entitlements if e.enrollment_course_run is not None]:
        course_enrollments = [
            enr for enr in course_enrollments if entitlement.enrollment_course_run.course_id != enr.course_id
        ]
    context = {
        'urls': urls,
        'programs_data': programs_data,
        'enterprise_message': enterprise_message,
        'consent_required_courses': consent_required_courses,
        'enterprise_customer_name': enterprise_customer_name,
        'enrollment_message': enrollment_message,
        'redirect_message': Text(redirect_message),
        'account_activation_messages': account_activation_messages,
        'activate_account_message': activate_account_message,
        'course_enrollments': course_enrollments,
        'course_entitlements': course_entitlements,
        'course_entitlement_available_sessions': course_entitlement_available_sessions,
        'unfulfilled_entitlement_pseudo_sessions': unfulfilled_entitlement_pseudo_sessions,
        'course_optouts': course_optouts,
        'staff_access': staff_access,
        'errored_courses': errored_courses,
        'show_courseware_links_for': show_courseware_links_for,
        'all_course_modes': course_mode_info,
        'cert_statuses': cert_statuses,
        'credit_statuses': _credit_statuses(user, course_enrollments),
        'show_email_settings_for': show_email_settings_for,
        'reverifications': reverifications,
        'verification_display': verification_status['should_display'],
        'verification_status': verification_status['status'],
        'verification_status_by_course': verify_status_by_course,
        'verification_errors': verification_errors,
        'block_courses': block_courses,
        'denied_banner': denied_banner,
        'billing_email': settings.PAYMENT_SUPPORT_EMAIL,
        'user': user,
        'logout_url': reverse('logout'),
        'platform_name': platform_name,
        'enrolled_courses_either_paid': enrolled_courses_either_paid,
        'provider_states': [],
        'order_history_list': order_history_list,
        'courses_requirements_not_met': courses_requirements_not_met,
        'nav_hidden': True,
        'inverted_programs': inverted_programs,
        'show_program_listing': ProgramsApiConfig.is_enabled(),
        'show_journal_listing': journals_enabled(),  # TODO: Dashboard Plugin required
        'show_dashboard_tabs': True,
        'disable_courseware_js': True,
        'display_course_modes_on_dashboard': enable_verified_certificates and display_course_modes_on_dashboard,
        'display_sidebar_on_dashboard': display_sidebar_on_dashboard,
        'display_sidebar_account_activation_message': not(user.is_active or hide_dashboard_courses_until_activated),
        'display_dashboard_courses': (user.is_active or not hide_dashboard_courses_until_activated),
        'empty_dashboard_message': empty_dashboard_message,
        'recovery_email_message': recovery_email_message,
        'recovery_email_activation_message': recovery_email_activation_message,
        'alert_messages': get_non_active_course(user),
        # NOTE(review): reads extended_profile.is_organization_admin --
        # presumably "is_poc" means "point of contact"; confirm.
        'is_poc': user.extended_profile.is_organization_admin,
    }
    if ecommerce_service.is_enabled(request.user):
        context.update({
            'use_ecommerce_payment_flow': True,
            'ecommerce_payment_page': ecommerce_service.payment_page_url(),
        })
    # Gather urls for course card resume buttons.
    # Entitlement cards have no resume state, so pad with empty strings first.
    resume_button_urls = ['' for entitlement in course_entitlements]
    for url in _get_urls_for_resume_buttons(user, course_enrollments):
        resume_button_urls.append(url)
    # There must be enough urls for dashboard.html. Template creates course
    # cards for "enrollments + entitlements".
    context.update({
        'resume_button_urls': resume_button_urls
    })
    response = render_to_response('dashboard.html', context)
    set_logged_in_cookies(request, response, user)
    return response
|
philanthropy-u/edx-platform
|
common/djangoapps/student/views/dashboard.py
|
Python
|
agpl-3.0
| 37,548
|
[
"VisIt"
] |
8c1347b0aef76b9c9bb31c29e4cb54a521f2f66d5401a6db7e545c0eca232be5
|
#
# @BEGIN LICENSE
#
# Psi4: an open-source quantum chemistry software package
#
# Copyright (c) 2007-2017 The Psi4 Developers.
#
# The copyrights for code used from other parties are included in
# the corresponding files.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# @END LICENSE
#
r"""Module to provide mechanism to store and restore option states in driver.
"""
import sys
from .exceptions import *
class OptionState(object):
    """Snapshot of a single Psi4 *option*.

    The option's value and has-changed flag are always recorded for the
    global scope.  When *module* is given, the local-to-*module* and
    used-by-*module* scopes are captured as well; the module-less form is
    used for BASIS keywords.  Instances can be printed and pushed back into
    the option system with :py:meth:`restore`. ::

    >>> OptionState('SCF_TYPE', 'SCF')
    >>> print(OptionState('DF_BASIS_MP2'))

    """
    def __init__(self, option, module=None):
        self.option = option.upper()
        self.module = module.upper() if module else None
        # The global scope is always captured.
        self.value_global = core.get_global_option(option)
        self.haschanged_global = core.has_global_option_changed(option)
        if self.module is None:
            # No module scope requested: leave the per-module slots empty.
            self.value_local = None
            self.haschanged_local = None
            self.value_used = None
            self.haschanged_used = None
        else:
            self.value_local = core.get_local_option(self.module, option)
            self.haschanged_local = core.has_local_option_changed(self.module, option)
            self.value_used = core.get_option(self.module, option)
            self.haschanged_used = core.has_option_changed(self.module, option)
    def __str__(self):
        text = ''
        if self.module:
            text += """   ==> %s Option in Module %s <==\n\n""" % (self.option, self.module)
            text += """    Global (has changed?) value: %7s %s\n""" % ('(' + str(self.haschanged_global) + ')', self.value_global)
            text += """    Local (has changed?) value: %7s %s\n""" % ('(' + str(self.haschanged_local) + ')', self.value_local)
            text += """    Used (has changed?) value: %7s %s\n""" % ('(' + str(self.haschanged_used) + ')', self.value_used)
        else:
            text += """   ==> %s Option in Global Scope <==\n\n""" % (self.option)
            text += """    Global (has changed?) value: %7s %s\n""" % ('(' + str(self.haschanged_global) + ')', self.value_global)
        text += """\n"""
        return text
    def restore(self):
        # Re-impose the saved global value; clear the has-changed flag only
        # if it was clear at capture time.
        core.set_global_option(self.option, self.value_global)
        if not self.haschanged_global:
            core.revoke_global_option_changed(self.option)
        if self.module is None:
            return
        core.set_local_option(self.module, self.option, self.value_local)
        if not self.haschanged_local:
            core.revoke_local_option_changed(self.module, self.option)
        # NOTE(review): the "used" scope is captured but deliberately not
        # restored here, mirroring the original behavior -- confirm intended.
class OptionsState(object):
    """Container for multiple :py:class:`~optproc.OptionState` snapshots.

    Used in python driver functions to collect several options before
    altering them, then restoring them before the function returns. ::

    >>> optstash = OptionsState(
        ['SCF', 'DFT_FUNCTIONAL'],
        ['DF_BASIS_SCF'],
        ['SCF', 'SCF_TYPE'],
        ['SCF', 'REFERENCE'])
    >>> print(optstash)
    >>> optstash.restore()

    """
    def __init__(self, *largs):
        self.data = []
        for spec in largs:
            # One-element spec: a global-scope option.
            # Two-element spec: [module, option].
            if len(spec) == 1:
                saved = OptionState(spec[0])
            elif len(spec) == 2:
                saved = OptionState(spec[1], spec[0])
            else:
                raise ValidationError('Each argument to OptionsState should be an array, the first element of which is the module scope and the second element of which is the module name. Bad argument: %s' % (spec))
            self.data.append(saved)
    def __str__(self):
        # Concatenate each saved option's printable form.
        return ''.join(str(saved) for saved in self.data)
    def restore(self):
        for saved in self.data:
            saved.restore()
|
kratman/psi4public
|
psi4/driver/p4util/optproc.py
|
Python
|
gpl-2.0
| 4,749
|
[
"Psi4"
] |
b10879e0ac0b5419ff74c2231988086359a4a97f16457baed7127f051ed3fbdd
|
#! /usr/bin/env python
"""Build a QM cluster around the crack tip of a 2D silica slab and write
CP2K inputs for minimising the initial/final images and a subsequent NEB run.

Reads ``crack.xyz`` (plus ``tip_4ring.csv`` and, optionally, ``xy_ring.csv``)
from the working directory and writes everything into a
``ring<N>_strain<S>`` subfolder.
"""
import numpy as np
import ase.io
import ase
import os
import sys
sys.path.insert(0, '.')
import shutil
import quippy
from quippy.clusters import (HYBRID_ACTIVE_MARK, HYBRID_NO_MARK, HYBRID_BUFFER_MARK,
                             create_hybrid_weights,
                             create_cluster_simple)
from quippy import Dictionary
from params import initial_strain as strain
from some_tools import relax_structure
##### PARAMETERS TO CHANGE #####
buffer_hops = 8
deltay_tip = 1.05
##### OTHER STUFF, LEAVE UNCHANGED UNLESS NECESSARY #####
cluster_vacuum = 12.0
crack_slab = ase.io.read('crack.xyz', format='extxyz')
ring_size = int(raw_input('Size of ring at crack tip:'))
pre_optim = True
try:
    pre_optim = (sys.argv[1] in [' ', 't', 'T', 'True', 'true', 'y', 'yes'])
except IndexError:
    # No command-line flag given: keep the default (pre-optimise the slab).
    print("")  # it's alright
if pre_optim:
    crack_slab = relax_structure(crack_slab)
folder = 'ring%1d_strain%.03f' % (ring_size, strain)
try:
    os.mkdir(folder)
except OSError:
    print("Folder already existing. Overwrite?")
    sig = raw_input()
    if sig not in ['y', 'yes']:
        # BUG FIX: this used to be a bare ``exit`` (a name reference with no
        # effect), so declining did NOT abort. Call sys.exit() to really stop.
        sys.exit(0)
core_ring = np.loadtxt('tip_4ring.csv', dtype='int')
try:
    xy_ring = np.loadtxt('xy_ring.csv', dtype='int')
    # Grow the XY-plane ring set with every atom within 0.5 A (in x,y) of a
    # listed atom, so column-stacked atoms are marked together.
    temp = set(xy_ring)
    for idx in xy_ring:
        ring_indices = np.where(((crack_slab.positions[:,:2] - crack_slab.positions[idx,:2])**2).sum(axis=1)**0.5 < 0.5)[0]
        temp = temp.union(set(ring_indices))
    xy_ring = temp
except Exception:
    # Missing/unreadable xy_ring.csv is allowed; fall back to an empty set.
    print("WARNING: Breaking ring on XY plane not specified, using very small QM buffers")
    xy_ring = set()
# Mark the QM (hybrid) region: tip ring plus the optional XY-plane ring.
hybrid_mark = np.array([1 if i in set(core_ring).union(set(xy_ring)) else 0 for i in range(len(crack_slab))])
crack_slab.set_array('hybrid_mark', hybrid_mark)
# CALC_ARGS: use first to have radius cutoff, use second for a bond hop construction
# calc_args = Dictionary('little_clusters=F terminate even_electrons cluster_vacuum=12.0 cluster_calc_connect=F buffer_hops=1 transition_hops=0 randomise_buffer=F hysteretic_connect=F nneighb_only cluster_hopping_nneighb_only property_list=species:pos:hybrid_mark:index cluster_box_buffer=20.0 cluster_hopping=F keep_whole_residues=F min_images_only keep_whole_silica_tetrahedra protect_double_bonds=F force_no_fix_termination_clash=F termination_clash_factor=1.8 nneighb_different_z in_out_in=F cluster_hopping_skip_unreachable hysteretic_buffer=T hysteretic_buffer_inner_radius=7.0 hysteretic_buffer_outer_radius=9.0')
calc_args = Dictionary('little_clusters=T terminate even_electrons cluster_vacuum=12.0 cluster_calc_connect=F buffer_hops=%d transition_hops=0 randomise_buffer=F hysteretic_connect=F nneighb_only cluster_hopping_nneighb_only property_list=species:pos:hybrid_mark:index cluster_box_buffer=20.0 cluster_hopping=T keep_whole_residues=F min_images_only keep_whole_silica_tetrahedra protect_double_bonds=F force_no_fix_termination_clash=F termination_clash_factor=1.8 nneighb_different_z in_out_in=F cluster_hopping_skip_unreachable hysteretic_buffer=F hysteretic_buffer_inner_radius=7.0 hysteretic_buffer_outer_radius=9.0 cluster_same_lattice=T' % buffer_hops)
pretty = quippy.Atoms(crack_slab)
pretty.set_pbc([True]*3)
pretty.calc_connect()
cluster_args = calc_args.copy()
create_hybrid_weights_args = calc_args.copy()
if create_hybrid_weights_args['buffer_hops'] == 0:
    create_hybrid_weights_args['buffer_hops'] = 1  # FIXME disable shortcut
create_hybrid_weights_args_str = quippy.util.args_str(create_hybrid_weights_args)
create_hybrid_weights(pretty, args_str=create_hybrid_weights_args_str)
cluster = create_cluster_simple(pretty, args_str=quippy.util.args_str(cluster_args))
# Map cluster atoms back to slab indices (quippy indices are 1-based).
cluster_indices = np.array(cluster.orig_index - 1)
fix_in_dft = np.array([i for i in cluster.indices if (cluster.hybrid_mark[i] not in [1,2])])
cluster_ase = ase.Atoms(np.array(cluster.z), np.array(cluster.positions))
# Centre the cluster in a vacuum box sized to its extent.
shift_cluster = cluster_ase.get_positions().min(axis=0) + 0.5 * cluster_vacuum
cluster_ase.positions -= shift_cluster
cluster_ase.set_cell(cluster_ase.get_positions().ptp(axis=0) + cluster_vacuum)
abc = cluster_ase.cell.diagonal()
with open('sio22d_minim_template.inp', 'r') as fff:
    lines = fff.readlines()
# Write everything to folder
os.chdir(folder)
if True:
    # DFT has interatomic distances of about 1.626 instead of 1.603 Angstrom
    cluster_ase.set_cell(1.014 * cluster_ase.get_cell(), scale_atoms=True)
cluster_ase.set_array('orig_index', cluster_indices)
cluster_ase.write('ase_cluster_00.xyz', format='extxyz')
cluster_ase.write('cp2k_cluster_00.xyz', format='xyz')
# CP2K wants bare coordinates: strip the 2-line xyz header.
os.system('tail -n +3 cp2k_cluster_00.xyz > tmp && mv tmp cp2k_cluster_00.xyz')
abc.tofile('cp2k_ABC.txt', sep=" ", format="%s")
np.savetxt('cluster_shift.csv', shift_cluster)
fix_in_dft.tofile('cp2k_fix_list.txt', sep=" ", format="%s")
np.savetxt('cluster_indices_ase.csv', cluster_indices, fmt='%d')
shutil.copy('../tip_4ring.csv', 'tip_4ring.csv')
np.savetxt('ringcage.csv', list(set(core_ring).union(set(xy_ring))))
shutil.copy('../crack.xyz', 'crack_ase.xyz')
shutil.copy('../swap_topbottom_atoms.csv', 'swap_topbottom_atoms.csv')
# Pull the tip ring open: first four atoms move down, the rest move up.
bottom_core_ring = core_ring[:4]
top_core_ring = core_ring[4:]
displacement_cluster = np.zeros(len(cluster_indices))
displacement_slab = np.zeros(len(crack_slab))
for idx, orig_idx in enumerate(cluster_indices):
    if orig_idx in list(top_core_ring):
        displacement_cluster[idx] = deltay_tip
        displacement_slab[orig_idx - 1] = deltay_tip
    elif orig_idx in list(bottom_core_ring):
        displacement_cluster[idx] = - deltay_tip
        displacement_slab[orig_idx - 1] = - deltay_tip
cluster_ase.positions[:,1] += displacement_cluster
crack_slab.positions[:,1] += displacement_slab
if pre_optim:
    # temp = cluster_ase.copy()
    # temp.set_cell(cluster_ase.get_cell())
    # delete = [i for i, n in enumerate(temp.get_atomic_numbers()) if n == 1]
    # keep = [i for i, n in enumerate(temp.get_atomic_numbers()) if n != 1]
    # del temp[delete]
    # fix_TS = [i-1 for i in fix_in_dft if i-1 not in delete]
    # move_mask_TS = np.array([0 if (i in fix_TS) else 1 for i in range(len(temp))])
    # temp.set_array('move_mask', move_mask_TS)
    # temp = relax_structure(temp)
    # cluster_ase.positions[keep] = temp.get_positions()
    try:
        crack_slab = ase.io.read('../crack_open.xyz', format='extxyz')
    except Exception:
        # Relaxed open-bond slab not cached yet: compute and cache it.
        print('relaxing slab with open bond')
        crack_slab = relax_structure(crack_slab)
        quippy.Atoms(crack_slab).write('../crack_open.xyz', format='extxyz')
pretty.positions[:,:] = crack_slab.get_positions()
cluster10 = create_cluster_simple(pretty, args_str=quippy.util.args_str(cluster_args))
# remap is an essential step: re-indexes the atoms in cluster10 following the order in cluster
# without this, there is no way to do NEB or anything that needs connection between initial and final state
remap = [np.where(idx == cluster10.orig_index)[0].item() for idx in cluster.orig_index]
cluster_ase = ase.Atoms(np.array(cluster10.z)[remap], np.array(cluster10.positions)[remap])
cluster_ase.positions -= shift_cluster
cluster_ase.set_cell(np.diag(abc))
if True:
    # DFT has interatomic distances of about 1.626 instead of 1.603 Angstrom
    cluster_ase.set_cell(1.014 * cluster_ase.get_cell(), scale_atoms=True)
cluster_ase.set_array('orig_index', cluster_indices)
cluster_ase.write('ase_cluster_10.xyz', format='extxyz')
cluster_ase.write('cp2k_cluster_10.xyz', format='xyz')
os.system('tail -n +3 cp2k_cluster_10.xyz > tmp && mv tmp cp2k_cluster_10.xyz')
# Patch the CP2K minimisation template: project name, coordinate file,
# cell vectors and the list of atoms to keep fixed.
line1 = lines[1][:-1]
line81 = lines[81][:-1]
lines[1] = line1 + (' %s_00\n' % folder)
lines[81] = line81 + ' \'cp2k_cluster_00.xyz\'\n'
lines[85] = lines[85][:-1] + ' ' + ' '.join([str(s) for s in abc]) + '\n'
lines[164] = lines[164][:-1] + ' ' + ' '.join([str(s) for s in fix_in_dft]) + '\n'
# Write input file for minimising final image
with open('sio22d-00.inp', 'w') as fff:
    fff.writelines(lines)
lines[1] = line1 + (' %s_10\n' % folder)
lines[81] = line81 + ' \"cp2k_cluster_10.xyz\"\n'
with open('sio22d-10.inp', 'w') as fff:
    fff.writelines(lines)
# Write the NEB input file, to be used AFTER minimising the initial and final images
with open('../neb_template.inp', 'r') as fff:
    lines = fff.readlines()
lines[74] = lines[74][:-1] + ' ' + ' '.join([str(s) for s in abc]) + '\n'
lines[126] = lines[126][:-1] + ' ' + ' '.join([str(s) for s in fix_in_dft]) + '\n'
with open('neb.inp', 'w') as fff:
    fff.writelines(lines)
shutil.copy('../run_neb_cp2k.py', 'run_neb_cp2k.py')
# HERE FOLLOWS DEV STUFF, NO USE YET
# # import silayergraph.py functions
# indices = np.where(a.get_atomic_numbers() == 14)[0]
# # asi = a[indices]
# # cutoff = 3.8
# # graph = atoms_to_nxgraph(asi, cutoff)
# # all_cycles = minimal_cycles(graph, cutoff=9)
# all_cycles = pkl.load('slab_cycles.pkl')
# core_cycle_index = [i for i, c in enumerate(all_cycles) if set(core_cycle) == set(list(indices[list(c)]))][0]
|
marcocaccin/crack2Dglass
|
make_cluster_from_crack.py
|
Python
|
gpl-2.0
| 8,992
|
[
"ASE"
] |
a112fc780b3f1c01afc6cd58c36ca34ce6cc30d0dd616c6fac6d0cfd2bb75fa0
|
# -*- coding: utf-8 -*-
"""
Automatically generate Brian's reference documentation.
Based on sphinx-apidoc, published under a BSD license: http://sphinx-doc.org/
"""
import inspect
import sys
import os
from os import path
from .examplefinder import auto_find_examples
INITPY = '__init__.py'
OPTIONS = ['show-inheritance']
def makename(package, module):
    """Join *package* and *module* names with a dot.

    Either part may be ``None``/empty; only the parts that are present are
    joined, so the result never carries a leading or trailing dot.
    """
    if not package:
        return module
    return f"{package}.{module}" if module else package
def write_file(name, text, destdir, suffix):
    """Write the output file for module/package <name>.

    Creates ``<destdir>/<name>.<suffix>`` (overwriting any existing file)
    and fills it with *text*.
    """
    fname = path.join(destdir, f'{name}.{suffix}')
    print(f'Creating file {fname}.')
    # Context manager replaces the original manual try/finally close and
    # guarantees the handle is released even if write() raises.
    with open(fname, 'w') as f:
        f.write(text)
def format_heading(level, text):
    """Return *text* as a ReST heading of the given *level* (1, 2 or 3)."""
    # Heading levels map to '=', '-' and '~' underline characters.
    underline_char = '=-~'[level - 1]
    return f'{text}\n{underline_char * len(text)}\n\n'
def format_directive(module, destdir, package=None, basename='brian2cuda'):
    """Create the automodule directive and add the options.

    Imports ``<basename>.<module>``, partitions its public members (those
    whose ``__module__`` is this module and whose name has no leading
    underscore) into classes, functions and other objects, emits an
    autosummary section per group and writes one member page each via
    `create_member_file`.  Returns the accumulated directive text.
    """
    directive = f'.. automodule:: {makename(package, module)}\n'
    for option in OPTIONS:
        directive += f'    :{option}:\n'
    directive += '\n'
    # document all the classes in the modules
    full_name = f"{basename}.{module}"
    __import__(full_name)
    mod = sys.modules[full_name]
    dir_members = dir(mod)
    classes = []
    functions = []
    variables = []
    for member in dir_members:
        # Re-import with a fromlist so the attribute is reachable even for
        # names re-exported through intermediate packages.
        _temp = __import__(full_name, {}, {}, [member], 0)
        member_obj = getattr(_temp, member)
        member_module = getattr(member_obj, '__module__', None)
        # only document members that where defined in this module
        if member_module == full_name and not member.startswith('_'):
            if inspect.isclass(member_obj):
                classes.append((member, member_obj))
            elif inspect.isfunction(member_obj):
                functions.append((member, member_obj))
            else:
                variables.append((member, member_obj))
    if classes:
        directive += '**Classes**\n\n'
        for member, member_obj in classes:
            directive += f'.. autosummary:: {member}\n'
            directive += '    :toctree:\n\n'
            create_member_file(full_name, member, member_obj, destdir)
    if functions:
        directive += '**Functions**\n\n'
        for member, member_obj in functions:
            directive += f'.. autosummary:: {member}\n'
            directive += '    :toctree:\n\n'
            create_member_file(full_name, member, member_obj, destdir)
    if variables:
        directive += '**Objects**\n\n'
        for member, member_obj in variables:
            directive += f'.. autosummary:: {member}\n'
            directive += '    :toctree:\n\n'
            create_member_file(full_name, member, member_obj, destdir)
    return directive
def find_shortest_import(module_name, obj_name):
    """Return the shortest importable prefix of *module_name* exposing *obj_name*.

    Tries ``from <prefix> import <obj_name>`` for ever-longer dotted
    prefixes of *module_name* and returns the first prefix for which the
    imported object really originates from *module_name* (checked via its
    ``__module__``).  Raises ``AssertionError`` if no prefix works.
    """
    parts = module_name.split('.')
    for end in range(1, len(parts) + 1):
        candidate = '.'.join(parts[:end])
        try:
            mod = __import__(candidate, globals(), {}, fromlist=[str(obj_name)], level=0)
        except ImportError:
            continue
        obj = getattr(mod, obj_name, None)
        # Guard against same-named objects from unrelated modules.
        if obj is not None and getattr(obj, '__module__', None) == module_name:
            return candidate
    raise AssertionError(f"Couldn't import {module_name}.{obj_name}")
def create_member_file(module_name, member, member_obj, destdir, suffix='rst'):
    """Build the reST page documenting a single module member and write it.

    Chooses autoclass/autofunction/autodata depending on what *member_obj*
    is, prepends the shortest usable import statement, and writes the page
    to ``<destdir>/<module_name>.<member>.<suffix>`` via `write_file`.
    """
    text = f".. currentmodule:: {module_name}\n\n"
    shortest_import = find_shortest_import(module_name, member)
    # BUG FIX: the closing parenthesis used to sit inside the double-backtick
    # code literal ("import X)``"), producing malformed reST output; it now
    # follows the literal.
    import_text = f'(*Shortest import*: ``from {shortest_import} import {member}``)\n\n'
    if inspect.isclass(member_obj):
        text += format_heading(1, f'{member} class')
        text += import_text
        text += f'.. autoclass:: {member}\n\n'
        text += auto_find_examples(member_obj, headersymbol='-')
    elif inspect.isfunction(member_obj):
        text += format_heading(1, f'{member} function')
        text += import_text
        text += f'.. autofunction:: {member}\n\n'
    else:
        text += format_heading(1, f'{member} object')
        text += import_text
        text += f'.. autodata:: {member}\n'
    write_file(makename(module_name, member), text, destdir, suffix)
def create_package_file(
    root, master_package, subroot, py_files, subs, destdir, excludes, suffix='rst'
):
    """Build the text of the file and write the file.

    Emits one reST page for the package at *root*: a section per module in
    *py_files* (skipping empty ones), plus a toctree for any non-excluded
    subpackages in *subs*.  The page is written via `write_file` as
    ``<master_package>.<subroot>.<suffix>``.
    """
    package = path.split(root)[-1]
    text = format_heading(1, f'{package} package')
    # add each module in the package
    for py_file in py_files:
        if shall_skip(path.join(root, py_file)):
            continue
        is_package = py_file == INITPY
        py_file = path.splitext(py_file)[0]
        py_path = makename(subroot, py_file)
        # we don't want an additional header for the package,
        if not is_package:
            heading = f':mod:`{py_file}` module'
            text += format_heading(2, heading)
        # For the package itself (__init__.py) document the package path;
        # otherwise document the individual module path.
        text += format_directive(
            is_package and subroot or py_path, destdir, master_package
        )
        text += '\n'
    # build a list of directories that are packages (contain an INITPY file)
    subs = [sub for sub in subs if path.isfile(path.join(root, sub, INITPY))]
    # if there are some package directories, add a TOC for theses subpackages
    if subs:
        text += format_heading(2, 'Subpackages')
        text += '.. toctree::\n'
        text += '    :maxdepth: 2\n\n'
        for sub in subs:
            if not is_excluded(os.path.join(root, sub), excludes):
                text += f'    {makename(master_package, subroot)}.{sub}\n'
        text += '\n'
    write_file(makename(master_package, subroot), text, destdir, suffix)
def shall_skip(module):
    """Decide whether *module* should be skipped.

    A file holding nothing (or just a ``\\n``/``\\r\\n``) is considered
    empty, i.e. its size is at most 2 bytes.
    """
    size_in_bytes = path.getsize(module)
    return size_in_bytes <= 2
def recurse_tree(rootpath, exclude_dirs, exclude_files, destdir):
    """
    Look for every file in the directory tree and create the corresponding
    ReST files.

    Arguments:
    o rootpath - root of the Python package tree to document.
    o exclude_dirs - normalized directory prefixes (see normalize_excludes)
      whose subtrees are skipped entirely.
    o exclude_files - file names that are never documented.
    o destdir - directory the generated files are written into.

    Returns the list of dotted package names that were documented.
    """
    # use absolute path for root, as relative paths like '../../foo' cause
    # 'if "/." in root ...' to filter out *all* modules otherwise
    rootpath = path.normpath(path.abspath(rootpath))
    # check if the base directory is a package and get its name
    if INITPY in os.listdir(rootpath):
        root_package = rootpath.split(path.sep)[-1]
    else:
        # otherwise, the base is a directory with packages
        root_package = None
    toplevels = []
    for root, subs, files in os.walk(rootpath):
        if is_excluded(root, exclude_dirs):
            # prune in place so os.walk does not descend into excluded dirs
            del subs[:]
            continue
        # document only Python module files
        py_files = sorted(
            [
                f
                for f in files
                if (path.splitext(f)[1] == '.py' and not f in exclude_files)
            ]
        )
        is_pkg = INITPY in py_files
        if is_pkg:
            # move __init__ to the front so the package itself is
            # documented before its modules
            py_files.remove(INITPY)
            py_files.insert(0, INITPY)
        elif root != rootpath:
            # only accept non-package at toplevel
            del subs[:]
            continue
        # remove hidden ('.') and private ('_') directories
        subs[:] = sorted(sub for sub in subs if sub[0] not in ['.', '_'])
        if is_pkg:
            # we are in a package with something to document
            if subs or len(py_files) > 1 or not shall_skip(path.join(root, INITPY)):
                # dotted subpackage name relative to rootpath
                subpackage = (
                    root[len(rootpath) :].lstrip(path.sep).replace(path.sep, '.')
                )
                create_package_file(
                    root,
                    root_package,
                    subpackage,
                    py_files,
                    subs,
                    destdir,
                    exclude_dirs,
                )
                toplevels.append(makename(root_package, subpackage))
        else:
            # non-package dirs below the top level were filtered out above,
            # so reaching here indicates a logic error
            raise AssertionError("Expected it to be a package")
    return toplevels
def normalize_excludes(rootpath, excludes):
    """
    Normalize the excluded directory list:
    * must be either an absolute path or start with rootpath,
    * otherwise it is joined with rootpath
    * with trailing slash
    """
    normalized = []
    for entry in excludes:
        # anchor relative entries (that do not already start with rootpath)
        # under rootpath
        if not (path.isabs(entry) or entry.startswith(rootpath)):
            entry = path.join(rootpath, entry)
        # trailing separator guards against common-prefix false matches
        normalized.append(path.normpath(entry) + path.sep)
    return normalized
def is_excluded(root, excludes):
    """
    Check if the directory is in the exclude list.

    Note: by having trailing slashes, we avoid common prefix issues, like
    e.g. an exclude "foo" also accidentally excluding "foobar".
    """
    # normalize the candidate with a trailing separator before comparing
    candidate = root if root.endswith(path.sep) else root + path.sep
    return any(candidate.startswith(prefix) for prefix in excludes)
def main(rootpath, exclude_dirs, exclude_files, destdir):
    """Generate ReST reference docs for the package tree at *rootpath*,
    writing one file per (sub)package into *destdir*."""
    if not os.path.exists(destdir):
        os.makedirs(destdir)
    exclude_dirs = normalize_excludes(rootpath, exclude_dirs)
    # NOTE(review): 'modules' is unused in the visible code -- presumably
    # consumed further down (e.g. to write a master index); confirm.
    modules = recurse_tree(rootpath, exclude_dirs, exclude_files, destdir)
|
brian-team/brian2cuda
|
brian2cuda/sphinxext/generate_reference.py
|
Python
|
gpl-2.0
| 9,655
|
[
"Brian"
] |
b3f2ac4490a8010374a0303f34b6c75668b6d35952c321cd1404889786763fd4
|
# Clustalw modules
"""
A set of classes to interact with the multiple alignment command
line program clustalw.
Clustalw is the command line version of the graphical Clustalx
alignment program.
This requires clustalw available from:
ftp://ftp-igbmc.u-strasbg.fr/pub/ClustalW/.
functions:
o parse_file
o do_alignment
classes:
o ClustalAlignment
o _AlignCreator
o MultipleAlignCL"""
# standard library
import os
import sys
import string #Obsolete - we should switch to using string object methods instead!
# biopython
from Bio.Seq import Seq
from Bio.SeqRecord import SeqRecord
from Bio import Alphabet
from Bio.Alphabet import IUPAC
import clustal_format
from Bio.Align.Generic import Alignment
# PyXML package
from xml.sax import saxutils
from xml.sax import handler
def parse_file(file_name, alphabet = IUPAC.unambiguous_dna, debug_level = 0):
    """Parse the given clustal (.aln) file into a ClustalAlignment object.

    Arguments:
    o file_name - The name of the file to parse.
    o alphabet - The alphabet type for the alignment sequences; it should
    match the kind of data in the file. Defaults to unambiguous_dna.
    o debug_level - passed through to the Martel parser factory.
    """
    # SAX-style handler that accumulates the alignment while parsing
    creator = _AlignCreator(Alphabet.Gapped(alphabet))
    sax_parser = clustal_format.format.make_parser(debug_level)
    sax_parser.setContentHandler(creator)
    sax_parser.setErrorHandler(handler.ErrorHandler())
    source = open(file_name, 'r')
    sax_parser.parseFile(source)
    source.close()
    return creator.align
def do_alignment(command_line, alphabet=None):
    """Perform an alignment with the given command line.

    Arguments:
    o command_line - A command line object that can give out
    the command line we will input into clustalw.
    o alphabet - the alphabet to use in the created alignment. If not
    specified IUPAC.unambiguous_dna and IUPAC.protein will be used for
    dna and protein alignment respectively.

    Returns:
    o A clustal alignment object corresponding to the created alignment.
    If the alignment type was not a clustal object, None is returned.

    Raises:
    o ValueError - clustalw rejected a command line option.
    o IOError - clustalw could not use the sequence file, or the expected
    output file was not produced.
    """
    # run clustalw as a shell child; popen().close() returns the wait()-style
    # termination status, or None when the exit status was zero
    run_clust = os.popen(str(command_line))
    status = run_clust.close()
    # The exit status is the second byte of the termination status
    # TODO - Check this holds on win32...
    value = 0
    # NOTE: Python 2 integer division -- extracts the exit code byte
    if status: value = status / 256
    # check the return value for errors, as on 1.81 the return value
    # from Clustalw is actually helpful for figuring out errors
    # 1 => bad command line option
    if value == 1:
        raise ValueError("Bad command line option in the command: %s"
                         % str(command_line))
    # 2 => can't open sequence file
    elif value == 2:
        raise IOError("Cannot open sequence file %s"
                      % command_line.sequence_file)
    # 3 => wrong format in sequence file
    elif value == 3:
        raise IOError("Sequence file %s has an invalid format."
                      % command_line.sequence_file)
    # 4 => sequence file only has one sequence
    elif value == 4:
        raise IOError("Sequence file %s has only one sequence present."
                      % command_line.sequence_file)
    # if an output file was specified, we need to grab it
    if command_line.output_file:
        out_file = command_line.output_file
    else:
        # clustalw's default: input basename with an .aln suffix
        out_file = os.path.splitext(command_line.sequence_file)[0] + '.aln'
    # if we can't deal with the format, just return None
    if command_line.output_type and command_line.output_type != 'CLUSTAL':
        return None
    # otherwise parse it into a ClustalAlignment object
    else:
        if not alphabet:
            # index by the bool: False -> dna alphabet, True -> protein
            alphabet = (IUPAC.unambiguous_dna, IUPAC.protein)[
                command_line.type == 'PROTEIN']
        # check if the outfile exists before parsing
        if not(os.path.exists(out_file)):
            raise IOError("Output .aln file %s not produced, commandline: %s"
                          % (out_file, command_line))
        return parse_file(out_file, alphabet)
class ClustalAlignment(Alignment):
    """Work with the clustal alignment format.

    This format is the default output from clustal -- these files normally
    have an extension of .aln.
    """
    # the default version to use if one isn't set
    DEFAULT_VERSION = '1.81'
    def __init__(self, alphabet = Alphabet.Gapped(IUPAC.ambiguous_dna)):
        """Initialize an empty alignment with the given (gapped) alphabet."""
        Alignment.__init__(self, alphabet)
        # represent all of those stars in the aln output format
        self._star_info = ''
        # clustal version string parsed from the file header
        self._version = ''
    def __str__(self):
        """Print out the alignment so it looks pretty.

        The output produced from this should also be formatted in valid
        clustal format (a header line, then the alignment in blocks of
        50 residues with a 36-character id column).
        """
        # if the version isn't set, we need to use the default
        if self._version == '':
            self._version = self.DEFAULT_VERSION
        output = "CLUSTAL X (%s) multiple sequence alignment\n\n\n" % \
                 self._version
        cur_char = 0
        max_length = len(self._records[0].seq)
        # keep displaying sequences until we reach the end
        while cur_char != max_length:
            # calculate the number of sequences to show, which will
            # be less if we are at the end of the sequence
            if (cur_char + 50) > max_length:
                show_num = max_length - cur_char
            else:
                show_num = 50
            # go through all of the records and print out the sequences
            # when we output, we do a nice 80 column output, although this
            # may result in truncation of the ids.
            for record in self._records:
                # ids truncated to 30 chars, padded to a 36-char column
                line = record.description[0:30].ljust(36)
                line = line + record.seq.data[cur_char:(cur_char + show_num)]
                output = output + line + "\n"
            # now we need to print out the star info, if we've got it
            if self._star_info != '':
                output = output + (" " * 36) + \
                         self._star_info[cur_char:(cur_char + show_num)] + "\n"
            output = output + "\n"
            cur_char = cur_char + show_num
        # have a extra newline, so strip two off and add one before returning
        # (string.rstrip is the Python 2 string-module function)
        return string.rstrip(output) + "\n"
    def _add_star_info(self, stars):
        """Add all of the stars, which indicate consensus sequence.
        """
        self._star_info = stars
    def _add_version(self, version):
        """Add the version information about the clustal file being read.
        """
        self._version = version
class _AlignCreator(handler.ContentHandler):
    """Handler to create a ClustalAlignment object from clustal file info.

    This handler is used to accept events coming from a Martel parsing
    stream, and acts like a normal SAX handler.

    After parsing, the alignment object created is available as the
    align attribute of the class.
    """
    def __init__(self, alphabet):
        """Create a new handler ready to deal with output from Martel parsing.

        Arguments:
        o alphabet - The alphabet to create all of the new sequences with.
        """
        self.align = ClustalAlignment(alphabet)
        # store sequence info in a dictionary keyed by sequence id
        self.all_info = {}
        # ids in first-seen order, so the output order is stable
        self.all_keys = []
        # the current id we are working with
        self.cur_id = None
        # info so we know how big the ids and sequences are
        self.id_size = 0
        self.space_size = 0
        self.seq_size = 0
        # flags so we can keep track of where we are during the parse
        self.in_version = 0
        self.in_stars = 0
        self.in_seq_id = 0
        self.in_space = 0
        self.in_seq = 0
        self.all_star_info = ''
    def startElement(self, name, attrs):
        """Check the various tags for the info we are interested in.

        Each branch raises its flag and resets that element's accumulator;
        characters() then appends into whichever accumulator is active.
        """
        if name == "version":
            self.in_version = 1
            self.version_info = ''
        elif name == "seq_id":
            self.in_seq_id = 1
            self.seq_id_info = ''
        elif name == "seq_space":
            self.in_space = 1
            self.space_info = ''
        elif name == "seq_info":
            self.in_seq = 1
            self.seq_info = ''
        elif name == "match_stars":
            self.in_stars = 1
            self.star_info = ''
    def characters(self, content):
        # route text into the accumulator for the currently open element
        if self.in_version:
            self.version_info = self.version_info + content
        elif self.in_seq_id:
            self.seq_id_info = self.seq_id_info + content
        elif self.in_space:
            self.space_info = self.space_info + content
        elif self.in_seq:
            self.seq_info = self.seq_info + content
        elif self.in_stars:
            self.star_info = self.star_info + content
    def endElement(self, name):
        # lower the flag and commit the accumulated text for the element
        if name == "version":
            self.in_version = 0
            self.align._add_version(string.strip(self.version_info))
        elif name == "seq_id":
            self.in_seq_id = 0
            self.id_size = len(self.seq_id_info)
            self.cur_id = self.seq_id_info
        elif name == "seq_space":
            self.in_space = 0
            self.space_size = len(self.space_info)
        elif name == "seq_info":
            self.in_seq = 0
            self.seq_size = len(self.seq_info)
            # if the id is already there, add the sequence info
            if self.cur_id in self.all_info.keys():
                self.all_info[self.cur_id] = self.all_info[self.cur_id] + \
                                             self.seq_info
            else:
                self.all_info[self.cur_id] = self.seq_info
                self.all_keys.append(self.cur_id)
        elif name == "match_stars":
            # the star line has no id column of its own, so slice it using
            # the id and spacing widths measured on the sequence lines
            id_length = self.id_size + self.space_size
            line_length = id_length + self.seq_size
            self.all_star_info = self.all_star_info + \
                                 self.star_info[id_length:line_length]
    def endDocument(self):
        # when we are done parsing add all of the info we need
        self.align._add_star_info(self.all_star_info)
        for id in self.all_keys:
            self.align.add_sequence(id, self.all_info[id])
class MultipleAlignCL:
    """Represent a clustalw multiple alignment command line.

    This is meant to make it easy to code the command line options you
    want to submit to clustalw.

    Clustalw has a ton of options and things to do but this is set up to
    represent a clustalw multiple alignment.

    Warning: I don't use all of these options personally, so if you find
    one to be broken for any reason, please let us know!
    """
    # set the valid options for different parameters
    OUTPUT_TYPES = ['GCG', 'GDE', 'PHYLIP', 'PIR', 'NEXUS', 'FASTA']
    OUTPUT_ORDER = ['INPUT', 'ALIGNED']
    OUTPUT_CASE = ['LOWER', 'UPPER']
    OUTPUT_SEQNOS = ['OFF', 'ON']
    RESIDUE_TYPES = ['PROTEIN', 'DNA']
    PROTEIN_MATRIX = ['BLOSUM', 'PAM', 'GONNET', 'ID']
    DNA_MATRIX = ['IUB', 'CLUSTALW']
    def __init__(self, sequence_file, command = 'clustalw'):
        """Initialize some general parameters that can be set as attributes.

        Arguments:
        o sequence_file - The file to read the sequences for alignment from.
        o command - The command used to run clustalw. This defaults to
        just 'clustalw' (ie. assumes you have it on your path somewhere).

        General attributes that can be set:
        o is_quick - if set as 1, will use a fast algorithm to create
        the alignment guide tree.
        o allow_negative - allow negative values in the alignment matrix.

        Multiple alignment attributes that can be set as attributes:
        o gap_open_pen - Gap opening penalty
        o gap_ext_pen - Gap extension penalty
        o is_no_end_pen - A flag as to whether or not there should be a gap
        separation penalty for the ends.
        o gap_sep_range - The gap separation penalty range.
        o is_no_pgap - A flag to turn off residue specific gaps
        o is_no_hgap - A flag to turn off hydrophilic gaps
        o h_gap_residues - A list of residues to count a hydrophilic
        o max_div - A percent identity to use for delay (? - I don't understand
        this!)
        o trans_weight - The weight to use for transitions
        """
        self.sequence_file = sequence_file
        self.command = command
        self.is_quick = None
        self.allow_negative = None
        self.gap_open_pen = None
        self.gap_ext_pen = None
        self.is_no_end_pen = None
        self.gap_sep_range = None
        self.is_no_pgap = None
        self.is_no_hgap = None
        self.h_gap_residues = []
        self.max_div = None
        self.trans_weight = None
        # other attributes that should be set via various functions
        # 1. output parameters
        self.output_file = None
        self.output_type = None
        self.output_order = None
        self.change_case = None
        self.add_seqnos = None
        # 2. a guide tree to use
        self.guide_tree = None
        self.new_tree = None
        # 3. matrices
        self.protein_matrix = None
        self.dna_matrix = None
        # 4. type of residues
        self.type = None
    def __str__(self):
        """Write out the command line as a string."""
        # NOTE: '!=' here was the Python-2-only '<>' operator
        if sys.platform != "win32" :
            #On Linux with clustalw 1.83, you can do:
            #clustalw input.faa
            #clustalw /full/path/input.faa
            #clustalw -INFILE=input.faa
            #clustalw -INFILE=/full/path/input.faa
            #
            #Note these fail (using DOS style slashes):
            #
            #clustalw /INFILE=input.faa
            #clustalw /INFILE=/full/path/input.faa
            #
            #To keep things simple, and follow the original
            #behaviour of Bio.Clustalw use this:
            cline = self.command + " " + self.sequence_file
        else :
            #On Windows XP with clustalw.exe 1.83, these work at
            #the command prompt:
            #
            #clustalw.exe input.faa
            #clustalw.exe /INFILE=input.faa
            #clustalw.exe /INFILE="input.faa"
            #clustalw.exe /INFILE="with space.faa"
            #clustalw.exe /INFILE=C:\full\path\input.faa
            #clustalw.exe /INFILE="C:\full path\with spaces.faa"
            #
            #Sadly these fail:
            #clustalw.exe "input.faa"
            #clustalw.exe "with space.faa"
            #clustalw.exe C:\full\path\input.faa
            #clustalw.exe "C:\full path\with spaces.faa"
            #
            #These also fail but a minus/dash does seem to
            #work with other options (!):
            #clustalw.exe -INFILE=input.faa
            #clustalw.exe -INFILE=C:\full\path\input.faa
            #
            #Also these fail:
            #clustalw.exe "/INFILE=input.faa"
            #clustalw.exe "/INFILE=C:\full\path\input.faa"
            #
            #Thanks to Emanuel Hey for flagging this on the mailing list.
            #
            #In addtion, both self.command and self.sequence_file
            #may contain spaces, so should be quoted. But clustalw
            #is fussy.
            if self.command.count(" ") > 0 :
                cline = '"%s"' % self.command
            else :
                cline = self.command
            if self.sequence_file.count(" ") > 0 :
                cline += ' /INFILE="%s"' % self.sequence_file
            else :
                cline += ' /INFILE=%s' % self.sequence_file
        # general options
        if self.type:
            cline += " -TYPE=%s" % self.type
        if self.is_quick == 1:
            #Some versions of clustalw are case sensitive,
            #and require -quicktree rather than -QUICKTREE
            cline += " -quicktree"
        if self.allow_negative == 1:
            cline += " -NEGATIVE"
        # output options
        if self.output_file:
            cline += " -OUTFILE=%s" % self.output_file
        if self.output_type:
            cline += " -OUTPUT=%s" % self.output_type
        if self.output_order:
            cline += " -OUTORDER=%s" % self.output_order
        if self.change_case:
            cline += " -CASE=%s" % self.change_case
        if self.add_seqnos:
            cline += " -SEQNOS=%s" % self.add_seqnos
        if self.new_tree:
            # clustal does not work if -align is written -ALIGN
            cline += " -NEWTREE=%s -align" % self.new_tree
        # multiple alignment options
        if self.guide_tree:
            cline += " -USETREE=%s" % self.guide_tree
        if self.protein_matrix:
            cline += " -MATRIX=%s" % self.protein_matrix
        if self.dna_matrix:
            cline += " -DNAMATRIX=%s" % self.dna_matrix
        if self.gap_open_pen:
            cline += " -GAPOPEN=%s" % self.gap_open_pen
        if self.gap_ext_pen:
            cline += " -GAPEXT=%s" % self.gap_ext_pen
        if self.is_no_end_pen == 1:
            cline += " -ENDGAPS"
        if self.gap_sep_range:
            cline += " -GAPDIST=%s" % self.gap_sep_range
        if self.is_no_pgap == 1:
            cline += " -NOPGAP"
        if self.is_no_hgap == 1:
            cline += " -NOHGAP"
        if len(self.h_gap_residues) != 0:
            # stick the list of residues together as one big list o' residues
            residue_list = ''
            for residue in self.h_gap_residues:
                residue_list = residue_list + residue
            cline += " -HGAPRESIDUES=%s" % residue_list
        if self.max_div:
            cline += " -MAXDIV=%s" % self.max_div
        if self.trans_weight:
            cline += " -TRANSWEIGHT=%s" % self.trans_weight
        return cline
    def set_output(self, output_file, output_type = None, output_order = None,
                   change_case = None, add_seqnos = None):
        """Set the output parameters for the command line.

        Raises ValueError when an option value is not one of the valid
        choices, or when change_case/add_seqnos are combined with an
        incompatible output_type.
        """
        # NOTE: string.upper(x) calls replaced by x.upper() throughout,
        # per the module's own TODO about the obsolete string module
        self.output_file = output_file
        if output_type:
            output_type = output_type.upper()
            if output_type not in self.OUTPUT_TYPES:
                raise ValueError("Invalid output type %s. Valid choices are %s"
                                 % (output_type, self.OUTPUT_TYPES))
            else:
                self.output_type = output_type
        if output_order:
            output_order = output_order.upper()
            if output_order not in self.OUTPUT_ORDER:
                raise ValueError("Invalid output order %s. Valid choices are %s"
                                 % (output_order, self.OUTPUT_ORDER))
            else:
                self.output_order = output_order
        if change_case:
            change_case = change_case.upper()
            if output_type != "GDE":
                raise ValueError("Change case only valid for GDE output.")
            # FIX: was self.CHANGE_CASE, which does not exist (the class
            # constant is OUTPUT_CASE) and raised AttributeError
            elif change_case not in self.OUTPUT_CASE:
                raise ValueError("Invalid change case %s. Valid choices are %s"
                                 % (change_case, self.OUTPUT_CASE))
            else:
                self.change_case = change_case
        if add_seqnos:
            add_seqnos = add_seqnos.upper()
            if output_type:
                raise ValueError("Add SeqNos only valid for CLUSTAL output.")
            elif add_seqnos not in self.OUTPUT_SEQNOS:
                raise ValueError("Invalid seqnos option %s. Valid choices: %s"
                                 % (add_seqnos, self.OUTPUT_SEQNOS))
            else:
                self.add_seqnos = add_seqnos
    def set_guide_tree(self, tree_file):
        """Provide a file to use as the guide tree for alignment.

        Raises:
        o IOError - If the tree_file doesn't exist."""
        if not(os.path.exists(tree_file)):
            raise IOError("Could not find the guide tree file %s." %
                          tree_file)
        else:
            self.guide_tree = tree_file
    def set_new_guide_tree(self, tree_file):
        """Set the name of the guide tree file generated in the alignment.
        """
        self.new_tree = tree_file
    def set_protein_matrix(self, protein_matrix):
        """Set the type of protein matrix to use.

        Protein matrix can be either one of the defined types (blosum, pam,
        gonnet or id) or a file with your own defined matrix.
        """
        if protein_matrix.upper() in self.PROTEIN_MATRIX:
            self.protein_matrix = protein_matrix.upper()
        elif os.path.exists(protein_matrix):
            self.protein_matrix = protein_matrix
        else:
            raise ValueError("Invalid matrix %s. Options are %s or a file." %
                             (protein_matrix.upper(),
                              self.PROTEIN_MATRIX))
    def set_dna_matrix(self, dna_matrix):
        """Set the type of DNA matrix to use.

        The dna_matrix can either be one of the defined types (iub or clustalw)
        or a file with the matrix to use."""
        if dna_matrix.upper() in self.DNA_MATRIX:
            self.dna_matrix = dna_matrix.upper()
        elif os.path.exists(dna_matrix):
            self.dna_matrix = dna_matrix
        else:
            raise ValueError("Invalid matrix %s. Options are %s or a file." %
                             (dna_matrix, self.DNA_MATRIX))
    def set_type(self, residue_type):
        """Set the type of residues within the file.

        Clustal tries to guess whether the info is protein or DNA based on
        the number of GATCs, but this can be wrong if you have a messed up
        protein or DNA you are working with, so this allows you to set it
        explicitly.
        """
        residue_type = residue_type.upper()
        if residue_type in self.RESIDUE_TYPES:
            self.type = residue_type
        else:
            raise ValueError("Invalid residue type %s. Valid choices are %s"
                             % (residue_type, self.RESIDUE_TYPES))
|
dbmi-pitt/DIKB-Micropublication
|
scripts/mp-scripts/Bio/Clustalw/__init__.py
|
Python
|
apache-2.0
| 22,263
|
[
"Biopython"
] |
411ad27ed22c484a03672a9ee4d70b16d5b6125c77c94b8c788f0efe3f9ec061
|
# Copyright (c) 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Tests for compute resource tracking."""
import uuid
import mock
from oslo.config import cfg
from oslo.serialization import jsonutils
from oslo.utils import timeutils
from nova.compute import flavors
from nova.compute import resource_tracker
from nova.compute import resources
from nova.compute import task_states
from nova.compute import vm_states
from nova import context
from nova import db
from nova import objects
from nova.objects import base as obj_base
from nova import rpc
from nova import test
from nova.tests.compute.monitors import test_monitors
from nova.tests.objects import test_migration
from nova.tests.pci import pci_fakes
from nova.virt import driver
from nova.virt import hardware
# --- Fixture constants shared by the resource-tracker test cases below ---
# Memory reported by FakeVirtDriver, plus the value after the driver's
# constant per-instance overhead is applied (see estimate_instance_overhead).
FAKE_VIRT_MEMORY_MB = 5
FAKE_VIRT_MEMORY_OVERHEAD = 1
FAKE_VIRT_MEMORY_WITH_OVERHEAD = (
    FAKE_VIRT_MEMORY_MB + FAKE_VIRT_MEMORY_OVERHEAD)
# Two-cell NUMA topology exposed by the fake driver, and a matching
# limit topology for claim tests.
FAKE_VIRT_NUMA_TOPOLOGY = hardware.VirtNUMAHostTopology(
    cells=[hardware.VirtNUMATopologyCellUsage(0, set([1, 2]), 3072),
           hardware.VirtNUMATopologyCellUsage(1, set([3, 4]), 3072)])
FAKE_VIRT_NUMA_TOPOLOGY_OVERHEAD = hardware.VirtNUMALimitTopology(
    cells=[hardware.VirtNUMATopologyCellLimit(
                0, set([1, 2]), 3072, 4, 10240),
           hardware.VirtNUMATopologyCellLimit(
                1, set([3, 4]), 3072, 4, 10240)])
# Disk capacity reported by the fake driver (root + ephemeral).
ROOT_GB = 5
EPHEMERAL_GB = 1
FAKE_VIRT_LOCAL_GB = ROOT_GB + EPHEMERAL_GB
FAKE_VIRT_VCPUS = 1
# Arbitrary driver stats payload, in dict and JSON-serialized form.
FAKE_VIRT_STATS = {'virt_stat': 10}
FAKE_VIRT_STATS_JSON = jsonutils.dumps(FAKE_VIRT_STATS)
# Extension resources plugged into the tracker by BaseTestCase._tracker().
RESOURCE_NAMES = ['vcpu']
CONF = cfg.CONF
class UnsupportedVirtDriver(driver.ComputeDriver):
    """Pretend version of a lame virt driver."""
    def __init__(self):
        super(UnsupportedVirtDriver, self).__init__(None)
    def get_host_ip_addr(self):
        return '127.0.0.1'
    def get_available_resource(self, nodename):
        # no support for getting resource usage info; the empty dict is
        # what makes the resource tracker disable itself (see
        # UnsupportedDriverTestCase.test_disabled)
        return {}
class FakeVirtDriver(driver.ComputeDriver):
    """Fake virt driver reporting a small, fixed pool of resources.

    Can optionally advertise one PCI passthrough device (pci_support=True)
    and arbitrary driver stats, so tests can exercise those paths.
    """
    def __init__(self, pci_support=False, stats=None,
                 numa_topology=FAKE_VIRT_NUMA_TOPOLOGY):
        super(FakeVirtDriver, self).__init__(None)
        self.memory_mb = FAKE_VIRT_MEMORY_MB
        self.local_gb = FAKE_VIRT_LOCAL_GB
        self.vcpus = FAKE_VIRT_VCPUS
        self.numa_topology = numa_topology
        self.memory_mb_used = 0
        self.local_gb_used = 0
        self.pci_support = pci_support
        # a single fake PCI device (and its pool stats) when enabled
        self.pci_devices = [{
            'label': 'forza-napoli',
            'dev_type': 'foo',
            'compute_node_id': 1,
            'address': '0000:00:00.1',
            'product_id': 'p1',
            'vendor_id': 'v1',
            'status': 'available',
            'extra_k1': 'v1'}] if self.pci_support else []
        self.pci_stats = [{
            'count': 1,
            'vendor_id': 'v1',
            'product_id': 'p1'}] if self.pci_support else []
        # only set the attribute when given, so get_available_resource can
        # detect its absence with hasattr()
        if stats is not None:
            self.stats = stats
    def get_host_ip_addr(self):
        return '127.0.0.1'
    def get_available_resource(self, nodename):
        # dict mirrors what a real driver reports to the resource tracker
        d = {
            'vcpus': self.vcpus,
            'memory_mb': self.memory_mb,
            'local_gb': self.local_gb,
            'vcpus_used': 0,
            'memory_mb_used': self.memory_mb_used,
            'local_gb_used': self.local_gb_used,
            'hypervisor_type': 'fake',
            'hypervisor_version': 0,
            'hypervisor_hostname': 'fakehost',
            'cpu_info': '',
            'numa_topology': (
                self.numa_topology.to_json() if self.numa_topology else None),
        }
        if self.pci_support:
            d['pci_passthrough_devices'] = jsonutils.dumps(self.pci_devices)
        if hasattr(self, 'stats'):
            d['stats'] = self.stats
        return d
    def estimate_instance_overhead(self, instance_info):
        instance_info['memory_mb']  # make sure memory value is present
        overhead = {
            'memory_mb': FAKE_VIRT_MEMORY_OVERHEAD
        }
        return overhead  # just return a constant value for testing
class BaseTestCase(test.TestCase):
    """Shared plumbing for the resource-tracker tests.

    Stubs the conductor/db calls the tracker makes, serving them from
    in-memory dicts, and provides helpers for building fake instances,
    flavors, compute nodes and trackers.
    """
    def setUp(self):
        super(BaseTestCase, self).setUp()
        self.flags(reserved_host_disk_mb=0,
                   reserved_host_memory_mb=0)
        self.context = context.get_admin_context()
        self.flags(use_local=True, group='conductor')
        self.conductor = self.start_service('conductor',
                                            manager=CONF.conductor.manager)
        # in-memory stand-ins for the DB tables the stubs below read from
        self._instances = {}
        self._numa_topologies = {}
        self._instance_types = {}
        self.stubs.Set(self.conductor.db,
                       'instance_get_all_by_host_and_node',
                       self._fake_instance_get_all_by_host_and_node)
        self.stubs.Set(db, 'instance_extra_get_by_instance_uuid',
                       self._fake_instance_extra_get_by_instance_uuid)
        self.stubs.Set(self.conductor.db,
                       'instance_update_and_get_original',
                       self._fake_instance_update_and_get_original)
        self.stubs.Set(self.conductor.db,
                       'flavor_get', self._fake_flavor_get)
        self.host = 'fakehost'
    def _create_compute_node(self, values=None):
        """Return a dict shaped like a compute_node DB record.

        *values*, when given, overrides the defaults.
        """
        compute = {
            "id": 1,
            "service_id": 1,
            "vcpus": 1,
            "memory_mb": 1,
            "local_gb": 1,
            "vcpus_used": 1,
            "memory_mb_used": 1,
            "local_gb_used": 1,
            "free_ram_mb": 1,
            "free_disk_gb": 1,
            "current_workload": 1,
            "running_vms": 0,
            "cpu_info": None,
            "numa_topology": None,
            "stats": {
                "num_instances": "1",
            },
            "hypervisor_hostname": "fakenode",
        }
        if values:
            compute.update(values)
        return compute
    def _create_service(self, host="fakehost", compute=None):
        """Return a dict shaped like a nova-compute service record; the
        compute node, when given, is attached as a one-element list."""
        if compute:
            compute = [compute]
        service = {
            "id": 1,
            "host": host,
            "binary": "nova-compute",
            "topic": "compute",
            "compute_node": compute,
        }
        return service
    def _fake_instance_system_metadata(self, instance_type, prefix=''):
        """Return flavor properties as instance system_metadata entries,
        with keys optionally prefixed (e.g. 'new_'/'old_' for resizes)."""
        sys_meta = []
        for key in flavors.system_metadata_flavor_props.keys():
            sys_meta.append({'key': '%sinstance_type_%s' % (prefix, key),
                             'value': instance_type[key]})
        return sys_meta
    def _fake_instance(self, stash=True, flavor=None, **kwargs):
        """Build a fake instance dict, register it in self._instances and
        return it. Extra kwargs override fields; an optional numa_topology
        object is stored as an instance-extra record."""
        # Default to an instance ready to resize to or from the same
        # instance_type
        flavor = flavor or self._fake_flavor_create()
        sys_meta = self._fake_instance_system_metadata(flavor)
        if stash:
            # stash instance types in system metadata.
            sys_meta = (sys_meta +
                        self._fake_instance_system_metadata(flavor, 'new_') +
                        self._fake_instance_system_metadata(flavor, 'old_'))
        instance_uuid = str(uuid.uuid1())
        instance = {
            'uuid': instance_uuid,
            'vm_state': vm_states.RESIZED,
            'task_state': None,
            'ephemeral_key_uuid': None,
            'os_type': 'Linux',
            'project_id': '123456',
            'host': None,
            'node': None,
            'instance_type_id': flavor['id'],
            'memory_mb': flavor['memory_mb'],
            'vcpus': flavor['vcpus'],
            'root_gb': flavor['root_gb'],
            'ephemeral_gb': flavor['ephemeral_gb'],
            'launched_on': None,
            'system_metadata': sys_meta,
            'availability_zone': None,
            'vm_mode': None,
            'reservation_id': None,
            'display_name': None,
            'default_swap_device': None,
            'power_state': None,
            'scheduled_at': None,
            'access_ip_v6': None,
            'access_ip_v4': None,
            'key_name': None,
            'updated_at': None,
            'cell_name': None,
            'locked': None,
            'locked_by': None,
            'launch_index': None,
            'architecture': None,
            'auto_disk_config': None,
            'terminated_at': None,
            'ramdisk_id': None,
            'user_data': None,
            'cleaned': None,
            'deleted_at': None,
            'id': 333,
            'disable_terminate': None,
            'hostname': None,
            'display_description': None,
            'key_data': None,
            'deleted': None,
            'default_ephemeral_device': None,
            'progress': None,
            'launched_at': None,
            'config_drive': None,
            'kernel_id': None,
            'user_id': None,
            'shutdown_terminate': None,
            'created_at': None,
            'image_ref': None,
            'root_device_name': None,
        }
        numa_topology = kwargs.pop('numa_topology', None)
        if numa_topology:
            # wrap the topology object as an instance_extra-style record
            numa_topology = {
                'id': 1, 'created_at': None, 'updated_at': None,
                'deleted_at': None, 'deleted': None,
                'instance_uuid': instance['uuid'],
                'numa_topology': numa_topology.to_json()
            }
        instance.update(kwargs)
        self._instances[instance_uuid] = instance
        self._numa_topologies[instance_uuid] = numa_topology
        return instance
    def _fake_flavor_create(self, **kwargs):
        """Build a fake flavor dict (kwargs override defaults), register it
        by id in self._instance_types and return it."""
        instance_type = {
            'id': 1,
            'created_at': None,
            'updated_at': None,
            'deleted_at': None,
            'deleted': False,
            'disabled': False,
            'is_public': True,
            'name': 'fakeitype',
            'memory_mb': FAKE_VIRT_MEMORY_MB,
            'vcpus': FAKE_VIRT_VCPUS,
            'root_gb': ROOT_GB,
            'ephemeral_gb': EPHEMERAL_GB,
            'swap': 0,
            'rxtx_factor': 1.0,
            'vcpu_weight': 1,
            'flavorid': 'fakeflavor',
            'extra_specs': {},
        }
        instance_type.update(**kwargs)
        id_ = instance_type['id']
        self._instance_types[id_] = instance_type
        return instance_type
    def _fake_instance_get_all_by_host_and_node(self, context, host, nodename):
        # stub: filters the registered instances by host only
        return [i for i in self._instances.values() if i['host'] == host]
    def _fake_instance_extra_get_by_instance_uuid(self, context,
                                                  instance_uuid, columns=None):
        # stub: serves the numa topology records stored by _fake_instance
        return self._numa_topologies.get(instance_uuid)
    def _fake_flavor_get(self, ctxt, id_):
        return self._instance_types[id_]
    def _fake_instance_update_and_get_original(self, context, instance_uuid,
                                               values):
        instance = self._instances[instance_uuid]
        instance.update(values)
        # the test doesn't care what the original instance values are, it's
        # only used in the subsequent notification:
        return (instance, instance)
    def _driver(self):
        # hook overridden by subclasses to swap in a different fake driver
        return FakeVirtDriver()
    def _tracker(self, host=None):
        """Build a ResourceTracker wired to this test's fake driver and the
        extension resources named in RESOURCE_NAMES."""
        if host is None:
            host = self.host
        node = "fakenode"
        driver = self._driver()
        tracker = resource_tracker.ResourceTracker(host, driver, node)
        tracker.ext_resources_handler = \
            resources.ResourceHandler(RESOURCE_NAMES, True)
        return tracker
class UnsupportedDriverTestCase(BaseTestCase):
    """Resource tracking should be disabled when the virt driver doesn't
    support it.

    With a disabled tracker every claim is a no-op NopClaim-style object,
    so each claim below reports zero memory.
    """
    def setUp(self):
        super(UnsupportedDriverTestCase, self).setUp()
        self.tracker = self._tracker()
        # seed tracker with data:
        self.tracker.update_available_resource(self.context)
    def _driver(self):
        # override the BaseTestCase hook: this driver reports no resources
        return UnsupportedVirtDriver()
    def test_disabled(self):
        # disabled = no compute node stats
        self.assertTrue(self.tracker.disabled)
        self.assertIsNone(self.tracker.compute_node)
    def test_disabled_claim(self):
        # basic claim:
        instance = self._fake_instance()
        claim = self.tracker.instance_claim(self.context, instance)
        self.assertEqual(0, claim.memory_mb)
    def test_disabled_instance_claim(self):
        # instance variation:
        instance = self._fake_instance()
        claim = self.tracker.instance_claim(self.context, instance)
        self.assertEqual(0, claim.memory_mb)
    def test_disabled_instance_context_claim(self):
        # instance context manager variation:
        instance = self._fake_instance()
        # NOTE(review): this first claim is immediately shadowed by the
        # context-manager claim below -- looks like a leftover; confirm.
        claim = self.tracker.instance_claim(self.context, instance)
        with self.tracker.instance_claim(self.context, instance) as claim:
            self.assertEqual(0, claim.memory_mb)
    def test_disabled_updated_usage(self):
        # should be a no-op on a disabled tracker
        instance = self._fake_instance(host='fakehost', memory_mb=5,
                                       root_gb=10)
        self.tracker.update_usage(self.context, instance)
    def test_disabled_resize_claim(self):
        instance = self._fake_instance()
        instance_type = self._fake_flavor_create()
        claim = self.tracker.resize_claim(self.context, instance,
                                          instance_type)
        # even a nop resize claim records the migration linkage
        self.assertEqual(0, claim.memory_mb)
        self.assertEqual(instance['uuid'], claim.migration['instance_uuid'])
        self.assertEqual(instance_type['id'],
                         claim.migration['new_instance_type_id'])
    def test_disabled_resize_context_claim(self):
        instance = self._fake_instance()
        instance_type = self._fake_flavor_create()
        with self.tracker.resize_claim(self.context, instance, instance_type) \
                as claim:
            self.assertEqual(0, claim.memory_mb)
class MissingServiceTestCase(BaseTestCase):
    """The tracker should disable itself when no nova-compute service
    record can be found for the host (the DB stub is NOT installed here,
    so the lookup fails)."""
    def setUp(self):
        super(MissingServiceTestCase, self).setUp()
        self.context = context.get_admin_context()
        self.tracker = self._tracker()
    def test_missing_service(self):
        self.tracker.update_available_resource(self.context)
        self.assertTrue(self.tracker.disabled)
class MissingComputeNodeTestCase(BaseTestCase):
    """When the service exists but has no compute node attached, the
    tracker should create one and stay enabled."""
    def setUp(self):
        super(MissingComputeNodeTestCase, self).setUp()
        self.tracker = self._tracker()
        self.stubs.Set(db, 'service_get_by_compute_host',
                       self._fake_service_get_by_compute_host)
        self.stubs.Set(db, 'compute_node_create',
                       self._fake_create_compute_node)
        self.tracker.scheduler_client.update_resource_stats = mock.Mock()
    def _fake_create_compute_node(self, context, values):
        # record that creation happened so the tests can assert on it
        self.created = True
        return self._create_compute_node()
    def _fake_service_get_by_compute_host(self, ctx, host):
        # return a service with no joined compute
        service = self._create_service()
        return service
    def test_create_compute_node(self):
        self.tracker.update_available_resource(self.context)
        self.assertTrue(self.created)
    def test_enabled(self):
        self.tracker.update_available_resource(self.context)
        self.assertFalse(self.tracker.disabled)
class BaseTrackerTestCase(BaseTestCase):
    """Plumbing for a working resource tracker.

    Wires up database stubs and a compatible compute driver so that
    subclasses can exercise claim/audit behaviour against an in-memory
    compute node record (``self.compute``).
    """

    def setUp(self):
        # setup plumbing for a working resource tracker with required
        # database models and a compatible compute driver:
        super(BaseTrackerTestCase, self).setUp()
        self.updated = False
        self.deleted = False
        self.update_call_count = 0

        self.tracker = self._tracker()
        self._migrations = {}
        self.stubs.Set(db, 'service_get_by_compute_host',
                       self._fake_service_get_by_compute_host)
        self.stubs.Set(db, 'compute_node_update',
                       self._fake_compute_node_update)
        self.stubs.Set(db, 'compute_node_delete',
                       self._fake_compute_node_delete)
        self.stubs.Set(db, 'migration_update',
                       self._fake_migration_update)
        self.stubs.Set(db, 'migration_get_in_progress_by_host_and_node',
                       self._fake_migration_get_in_progress_by_host_and_node)

        # Note that this must be called before the call to _init_tracker()
        patcher = pci_fakes.fake_pci_whitelist()
        self.addCleanup(patcher.stop)

        self._init_tracker()
        self.limits = self._limits()

    def _fake_service_get_by_compute_host(self, ctx, host):
        self.compute = self._create_compute_node()
        self.service = self._create_service(host, compute=self.compute)
        return self.service

    def _fake_compute_node_update(self, ctx, compute_node_id, values,
                                  prune_stats=False):
        # track how often (and whether) the tracker persisted an update
        self.update_call_count += 1
        self.updated = True
        self.compute.update(values)
        return self.compute

    def _fake_compute_node_delete(self, ctx, compute_node_id):
        self.deleted = True
        self.compute.update({'deleted': 1})
        return self.compute

    def _fake_migration_get_in_progress_by_host_and_node(self, ctxt, host,
                                                         node):
        # only in-flight migrations are returned; completed or failed
        # ones are filtered out, mirroring the real DB API:
        status = ['confirmed', 'reverted', 'error']
        migrations = []

        for migration in self._migrations.values():
            migration = obj_base.obj_to_primitive(migration)
            if migration['status'] in status:
                continue

            uuid = migration['instance_uuid']
            migration['instance'] = self._instances[uuid]
            migrations.append(migration)

        return migrations

    def _fake_migration_update(self, ctxt, migration_id, values):
        # cheat and assume there's only 1 migration present
        # NOTE: dict.values() is a non-indexable view on Python 3, so
        # materialize it as a list before subscripting.
        migration = list(self._migrations.values())[0]
        migration.update(values)
        return migration

    def _init_tracker(self):
        self.tracker.update_available_resource(self.context)

    def _limits(self, memory_mb=FAKE_VIRT_MEMORY_WITH_OVERHEAD,
                disk_gb=FAKE_VIRT_LOCAL_GB,
                vcpus=FAKE_VIRT_VCPUS,
                numa_topology=FAKE_VIRT_NUMA_TOPOLOGY_OVERHEAD):
        """Create limits dictionary used for oversubscribing resources."""
        return {
            'memory_mb': memory_mb,
            'disk_gb': disk_gb,
            'vcpu': vcpus,
            'numa_topology': numa_topology.to_json() if numa_topology
                             else None,
        }

    def assertEqualNUMAHostTopology(self, expected, got):
        """Assert two NUMA host topologies match cell-by-cell.

        ``None`` is treated as a valid topology and only equals ``None``.
        """
        attrs = ('cpuset', 'memory', 'id', 'cpu_usage', 'memory_usage')
        if None in (expected, got):
            if expected != got:
                raise AssertionError("Topologies don't match. Expected: "
                                     "%(expected)s, but got: %(got)s" %
                                     {'expected': expected, 'got': got})
            else:
                return

        if len(expected) != len(got):
            raise AssertionError("Topologies don't match due to different "
                                 "number of cells. Expected: "
                                 "%(expected)s, but got: %(got)s" %
                                 {'expected': expected, 'got': got})
        for exp_cell, got_cell in zip(expected.cells, got.cells):
            for attr in attrs:
                if getattr(exp_cell, attr) != getattr(got_cell, attr):
                    raise AssertionError("Topologies don't match. Expected: "
                                         "%(expected)s, but got: %(got)s" %
                                         {'expected': expected,
                                          'got': got})

    def _assert(self, value, field, tracker=None):
        """Assert that compute-node *field* equals *value* on *tracker*.

        Defaults to ``self.tracker``; NUMA topologies are compared
        structurally rather than by their JSON serialization.
        """
        if tracker is None:
            tracker = self.tracker

        if field not in tracker.compute_node:
            raise test.TestingException(
                "'%(field)s' not in compute node." % {'field': field})
        x = tracker.compute_node[field]

        if field == 'numa_topology':
            self.assertEqualNUMAHostTopology(
                value, hardware.VirtNUMAHostTopology.from_json(x))
        else:
            self.assertEqual(value, x)
class TrackerTestCase(BaseTrackerTestCase):
    """Sanity checks on a freshly initialized tracker."""

    def test_free_ram_resource_value(self):
        drv = FakeVirtDriver()
        expected_free = drv.memory_mb - drv.memory_mb_used
        self.assertEqual(expected_free,
                         self.tracker.compute_node['free_ram_mb'])

    def test_free_disk_resource_value(self):
        drv = FakeVirtDriver()
        expected_free = drv.local_gb - drv.local_gb_used
        self.assertEqual(expected_free,
                         self.tracker.compute_node['free_disk_gb'])

    def test_update_compute_node(self):
        self.assertFalse(self.tracker.disabled)
        self.assertTrue(self.updated)

    def test_init(self):
        driver = self._driver()
        # total capacity as reported by the fake virt driver:
        self._assert(FAKE_VIRT_MEMORY_MB, 'memory_mb')
        self._assert(FAKE_VIRT_LOCAL_GB, 'local_gb')
        self._assert(FAKE_VIRT_VCPUS, 'vcpus')
        self._assert(FAKE_VIRT_NUMA_TOPOLOGY, 'numa_topology')
        # nothing has been claimed yet, so all usage must be zero:
        for used_field in ('memory_mb_used', 'local_gb_used',
                           'vcpus_used', 'running_vms'):
            self._assert(0, used_field)
        self._assert(FAKE_VIRT_MEMORY_MB, 'free_ram_mb')
        self._assert(FAKE_VIRT_LOCAL_GB, 'free_disk_gb')
        self.assertFalse(self.tracker.disabled)
        self.assertEqual(0, self.tracker.compute_node['current_workload'])
        self.assertEqual(driver.pci_stats,
                         jsonutils.loads(
                             self.tracker.compute_node['pci_stats']))
class SchedulerClientTrackerTestCase(BaseTrackerTestCase):
    """Verify the tracker pushes stats through the scheduler client."""

    def setUp(self):
        super(SchedulerClientTrackerTestCase, self).setUp()
        self.tracker.scheduler_client.update_resource_stats = mock.Mock()

    def _assert_stats_pushed(self, expected):
        # the scheduler client must be told exactly once, keyed by
        # the (host, node) pair:
        update = self.tracker.scheduler_client.update_resource_stats
        update.assert_called_once_with(self.context,
                                       ("fakehost", "fakenode"),
                                       expected)

    def test_create_resource(self):
        self.tracker._write_ext_resources = mock.Mock()
        self.tracker.conductor_api.compute_node_create = mock.Mock(
            return_value=dict(id=1))
        self.tracker._create(self.context,
                             {'stats': {}, 'foo': 'bar', 'baz_count': 0})
        # stats are serialized and the created node id is attached:
        self._assert_stats_pushed({'stats': '{}', 'foo': 'bar',
                                   'baz_count': 0, 'id': 1})

    def test_update_resource(self):
        self.tracker._write_ext_resources = mock.Mock()
        self.tracker._update(self.context,
                             {'stats': {}, 'foo': 'bar', 'baz_count': 0})
        self._assert_stats_pushed({'stats': '{}', 'foo': 'bar',
                                   'baz_count': 0, 'id': 1})
class TrackerPciStatsTestCase(BaseTrackerTestCase):
    """Same initialization checks as TrackerTestCase, with PCI enabled."""

    def test_update_compute_node(self):
        self.assertFalse(self.tracker.disabled)
        self.assertTrue(self.updated)

    def test_init(self):
        driver = self._driver()
        # capacity fields mirror the fake virt driver:
        self._assert(FAKE_VIRT_MEMORY_MB, 'memory_mb')
        self._assert(FAKE_VIRT_LOCAL_GB, 'local_gb')
        self._assert(FAKE_VIRT_VCPUS, 'vcpus')
        self._assert(FAKE_VIRT_NUMA_TOPOLOGY, 'numa_topology')
        # no claims yet, so usage fields are all zero:
        for used_field in ('memory_mb_used', 'local_gb_used',
                           'vcpus_used', 'running_vms'):
            self._assert(0, used_field)
        self._assert(FAKE_VIRT_MEMORY_MB, 'free_ram_mb')
        self._assert(FAKE_VIRT_LOCAL_GB, 'free_disk_gb')
        self.assertFalse(self.tracker.disabled)
        self.assertEqual(0, self.tracker.compute_node['current_workload'])
        self.assertEqual(driver.pci_stats,
                         jsonutils.loads(
                             self.tracker.compute_node['pci_stats']))

    def _driver(self):
        # enable PCI passthrough support in the fake virt driver
        return FakeVirtDriver(pci_support=True)
class TrackerExtraResourcesTestCase(BaseTrackerTestCase):
    """Exercise the extension point for extra resource stats."""

    def setUp(self):
        super(TrackerExtraResourcesTestCase, self).setUp()
        self.driver = self._driver()

    def _driver(self):
        return FakeVirtDriver()

    def test_set_empty_ext_resources(self):
        # _write_ext_resources must always leave a 'stats' key behind,
        # even when the driver reported none:
        res = self.driver.get_available_resource(self.tracker.nodename)
        self.assertNotIn('stats', res)
        self.tracker._write_ext_resources(res)
        self.assertIn('stats', res)

    def test_set_extra_resources(self):
        # a handler-written stat must show up verbatim in 'stats':
        def fake_write_resources(resources):
            resources['stats']['resA'] = '123'
            resources['stats']['resB'] = 12

        self.stubs.Set(self.tracker.ext_resources_handler,
                       'write_resources',
                       fake_write_resources)
        res = self.driver.get_available_resource(self.tracker.nodename)
        self.tracker._write_ext_resources(res)

        expected = {"resA": "123", "resB": 12}
        self.assertEqual(sorted(expected), sorted(res['stats']))
class InstanceClaimTestCase(BaseTrackerTestCase):
    """Claim/audit behaviour of the tracker for instance claims.

    Each test builds claims against the plumbing from BaseTrackerTestCase
    and checks the resulting usage on the in-memory compute node.
    """

    def _instance_topology(self, mem):
        # Build a two-cell instance NUMA topology; mem is scaled by 1024
        # (MB -> KB presumably -- TODO confirm against VirtNUMATopologyCell).
        mem = mem * 1024
        return hardware.VirtNUMAInstanceTopology(
            cells=[hardware.VirtNUMATopologyCell(0, set([1]), mem),
                   hardware.VirtNUMATopologyCell(1, set([3]), mem)])

    def _claim_topology(self, mem, cpus=1):
        """Expected host NUMA topology after a claim of *mem* per cell.

        Returns None when the fake driver reports no NUMA topology.
        """
        if self.tracker.driver.numa_topology is None:
            return None
        mem = mem * 1024
        return hardware.VirtNUMAHostTopology(
            cells=[hardware.VirtNUMATopologyCellUsage(
                       0, set([1, 2]), 3072, cpu_usage=cpus,
                       memory_usage=mem),
                   hardware.VirtNUMATopologyCellUsage(
                       1, set([3, 4]), 3072, cpu_usage=cpus,
                       memory_usage=mem)])

    @mock.patch('nova.objects.InstancePCIRequests.get_by_instance_uuid',
                return_value=objects.InstancePCIRequests(requests=[]))
    def test_update_usage_only_for_tracked(self, mock_get):
        # update_usage() is a no-op until the instance has been claimed;
        # after the claim, usage updates take effect.
        flavor = self._fake_flavor_create()
        claim_mem = flavor['memory_mb'] + FAKE_VIRT_MEMORY_OVERHEAD
        claim_gb = flavor['root_gb'] + flavor['ephemeral_gb']
        claim_topology = self._claim_topology(claim_mem / 2)

        instance_topology = self._instance_topology(claim_mem / 2)

        instance = self._fake_instance(
            flavor=flavor, task_state=None,
            numa_topology=instance_topology)
        self.tracker.update_usage(self.context, instance)
        # untracked instance: nothing changes
        self._assert(0, 'memory_mb_used')
        self._assert(0, 'local_gb_used')
        self._assert(0, 'current_workload')
        self._assert(FAKE_VIRT_NUMA_TOPOLOGY, 'numa_topology')

        claim = self.tracker.instance_claim(self.context, instance,
                                            self.limits)
        self.assertNotEqual(0, claim.memory_mb)
        self._assert(claim_mem, 'memory_mb_used')
        self._assert(claim_gb, 'local_gb_used')
        self._assert(claim_topology, 'numa_topology')

        # now update should actually take effect
        instance['task_state'] = task_states.SCHEDULING
        self.tracker.update_usage(self.context, instance)

        self._assert(claim_mem, 'memory_mb_used')
        self._assert(claim_gb, 'local_gb_used')
        self._assert(claim_topology, 'numa_topology')
        self._assert(1, 'current_workload')

    @mock.patch('nova.objects.InstancePCIRequests.get_by_instance_uuid',
                return_value=objects.InstancePCIRequests(requests=[]))
    def test_claim_and_audit(self, mock_get):
        # a claim survives an audit: usage is derived from instance
        # records, not from what the virt layer reports.
        claim_mem = 3
        claim_mem_total = 3 + FAKE_VIRT_MEMORY_OVERHEAD
        claim_disk = 2
        claim_topology = self._claim_topology(claim_mem_total / 2)

        instance_topology = self._instance_topology(claim_mem_total / 2)

        instance = self._fake_instance(memory_mb=claim_mem,
                                       root_gb=claim_disk,
                                       ephemeral_gb=0,
                                       numa_topology=instance_topology)

        self.tracker.instance_claim(self.context, instance, self.limits)

        self.assertEqual(FAKE_VIRT_MEMORY_MB, self.compute["memory_mb"])
        self.assertEqual(claim_mem_total, self.compute["memory_mb_used"])
        self.assertEqual(FAKE_VIRT_MEMORY_MB - claim_mem_total,
                         self.compute["free_ram_mb"])
        self.assertEqualNUMAHostTopology(
            claim_topology, hardware.VirtNUMAHostTopology.from_json(
                self.compute['numa_topology']))

        self.assertEqual(FAKE_VIRT_LOCAL_GB, self.compute["local_gb"])
        self.assertEqual(claim_disk, self.compute["local_gb_used"])
        self.assertEqual(FAKE_VIRT_LOCAL_GB - claim_disk,
                         self.compute["free_disk_gb"])

        # 1st pretend that the compute operation finished and claimed the
        # desired resources from the virt layer
        driver = self.tracker.driver
        driver.memory_mb_used = claim_mem
        driver.local_gb_used = claim_disk

        self.tracker.update_available_resource(self.context)

        # confirm tracker is adding in host_ip
        self.assertIsNotNone(self.compute.get('host_ip'))

        # confirm that resource usage is derived from instance usages,
        # not virt layer:
        self.assertEqual(claim_mem_total, self.compute['memory_mb_used'])
        self.assertEqual(FAKE_VIRT_MEMORY_MB - claim_mem_total,
                         self.compute['free_ram_mb'])
        self.assertEqualNUMAHostTopology(
            claim_topology, hardware.VirtNUMAHostTopology.from_json(
                self.compute['numa_topology']))

        self.assertEqual(claim_disk, self.compute['local_gb_used'])
        self.assertEqual(FAKE_VIRT_LOCAL_GB - claim_disk,
                         self.compute['free_disk_gb'])

    @mock.patch('nova.objects.InstancePCIRequests.get_by_instance_uuid',
                return_value=objects.InstancePCIRequests(requests=[]))
    def test_claim_and_abort(self, mock_get):
        # aborting a claim returns all claimed resources to the pool.
        claim_mem = 3
        claim_mem_total = 3 + FAKE_VIRT_MEMORY_OVERHEAD
        claim_disk = 2
        claim_topology = self._claim_topology(claim_mem_total / 2)

        instance_topology = self._instance_topology(claim_mem_total / 2)

        instance = self._fake_instance(memory_mb=claim_mem,
                                       root_gb=claim_disk, ephemeral_gb=0,
                                       numa_topology=instance_topology)

        claim = self.tracker.instance_claim(self.context, instance,
                                            self.limits)
        self.assertIsNotNone(claim)

        self.assertEqual(claim_mem_total, self.compute["memory_mb_used"])
        self.assertEqual(FAKE_VIRT_MEMORY_MB - claim_mem_total,
                         self.compute["free_ram_mb"])
        self.assertEqualNUMAHostTopology(
            claim_topology, hardware.VirtNUMAHostTopology.from_json(
                self.compute['numa_topology']))

        self.assertEqual(claim_disk, self.compute["local_gb_used"])
        self.assertEqual(FAKE_VIRT_LOCAL_GB - claim_disk,
                         self.compute["free_disk_gb"])

        claim.abort()

        # everything is back to the initial state:
        self.assertEqual(0, self.compute["memory_mb_used"])
        self.assertEqual(FAKE_VIRT_MEMORY_MB, self.compute["free_ram_mb"])
        self.assertEqualNUMAHostTopology(
            FAKE_VIRT_NUMA_TOPOLOGY,
            hardware.VirtNUMAHostTopology.from_json(
                self.compute['numa_topology']))

        self.assertEqual(0, self.compute["local_gb_used"])
        self.assertEqual(FAKE_VIRT_LOCAL_GB, self.compute["free_disk_gb"])

    @mock.patch('nova.objects.InstancePCIRequests.get_by_instance_uuid',
                return_value=objects.InstancePCIRequests(requests=[]))
    def test_instance_claim_with_oversubscription(self, mock_get):
        # oversubscription limits allow claiming more than physical
        # capacity.
        memory_mb = FAKE_VIRT_MEMORY_MB * 2
        root_gb = ephemeral_gb = FAKE_VIRT_LOCAL_GB
        vcpus = FAKE_VIRT_VCPUS * 2
        claim_topology = self._claim_topology(memory_mb)
        instance_topology = self._instance_topology(memory_mb)

        limits = {'memory_mb': memory_mb + FAKE_VIRT_MEMORY_OVERHEAD,
                  'disk_gb': root_gb * 2,
                  'vcpu': vcpus,
                  'numa_topology': FAKE_VIRT_NUMA_TOPOLOGY_OVERHEAD.to_json()}

        instance = self._fake_instance(memory_mb=memory_mb,
                                       root_gb=root_gb,
                                       ephemeral_gb=ephemeral_gb,
                                       numa_topology=instance_topology)

        self.tracker.instance_claim(self.context, instance, limits)
        self.assertEqual(memory_mb + FAKE_VIRT_MEMORY_OVERHEAD,
                         self.tracker.compute_node['memory_mb_used'])
        self.assertEqualNUMAHostTopology(
            claim_topology,
            hardware.VirtNUMAHostTopology.from_json(
                self.compute['numa_topology']))
        self.assertEqual(root_gb * 2,
                         self.tracker.compute_node['local_gb_used'])

    @mock.patch('nova.objects.InstancePCIRequests.get_by_instance_uuid',
                return_value=objects.InstancePCIRequests(requests=[]))
    def test_additive_claims(self, mock_get):
        # two successive claims accumulate usage.
        self.limits['vcpu'] = 2
        claim_topology = self._claim_topology(2, cpus=2)

        flavor = self._fake_flavor_create(
            memory_mb=1, root_gb=1, ephemeral_gb=0)
        instance_topology = self._instance_topology(1)
        instance = self._fake_instance(
            flavor=flavor, numa_topology=instance_topology)
        with self.tracker.instance_claim(self.context, instance,
                                         self.limits):
            pass
        instance = self._fake_instance(
            flavor=flavor, numa_topology=instance_topology)
        with self.tracker.instance_claim(self.context, instance,
                                         self.limits):
            pass

        self.assertEqual(2 * (flavor['memory_mb'] +
                              FAKE_VIRT_MEMORY_OVERHEAD),
                         self.tracker.compute_node['memory_mb_used'])
        self.assertEqual(2 * (flavor['root_gb'] + flavor['ephemeral_gb']),
                         self.tracker.compute_node['local_gb_used'])
        self.assertEqual(2 * flavor['vcpus'],
                         self.tracker.compute_node['vcpus_used'])

        self.assertEqualNUMAHostTopology(
            claim_topology,
            hardware.VirtNUMAHostTopology.from_json(
                self.compute['numa_topology']))

    @mock.patch('nova.objects.InstancePCIRequests.get_by_instance_uuid',
                return_value=objects.InstancePCIRequests(requests=[]))
    def test_context_claim_with_exception(self, mock_get):
        # an exception inside the claim context aborts the claim.
        instance = self._fake_instance(memory_mb=1, root_gb=1,
                                       ephemeral_gb=1)
        try:
            with self.tracker.instance_claim(self.context, instance):
                # <insert exciting things that utilize resources>
                raise test.TestingException()
        except test.TestingException:
            pass

        self.assertEqual(0, self.tracker.compute_node['memory_mb_used'])
        self.assertEqual(0, self.tracker.compute_node['local_gb_used'])
        self.assertEqual(0, self.compute['memory_mb_used'])
        self.assertEqual(0, self.compute['local_gb_used'])
        self.assertEqualNUMAHostTopology(
            FAKE_VIRT_NUMA_TOPOLOGY,
            hardware.VirtNUMAHostTopology.from_json(
                self.compute['numa_topology']))

    @mock.patch('nova.objects.InstancePCIRequests.get_by_instance_uuid',
                return_value=objects.InstancePCIRequests(requests=[]))
    def test_instance_context_claim(self, mock_get):
        # usage claimed inside the context persists after it exits and
        # after a subsequent audit.
        flavor = self._fake_flavor_create(
            memory_mb=1, root_gb=2, ephemeral_gb=3)
        claim_topology = self._claim_topology(1)

        instance_topology = self._instance_topology(1)

        instance = self._fake_instance(
            flavor=flavor, numa_topology=instance_topology)
        with self.tracker.instance_claim(self.context, instance):
            # <insert exciting things that utilize resources>
            self.assertEqual(flavor['memory_mb'] +
                             FAKE_VIRT_MEMORY_OVERHEAD,
                             self.tracker.compute_node['memory_mb_used'])
            self.assertEqual(flavor['root_gb'] + flavor['ephemeral_gb'],
                             self.tracker.compute_node['local_gb_used'])
            self.assertEqual(flavor['memory_mb'] +
                             FAKE_VIRT_MEMORY_OVERHEAD,
                             self.compute['memory_mb_used'])
            self.assertEqualNUMAHostTopology(
                claim_topology,
                hardware.VirtNUMAHostTopology.from_json(
                    self.compute['numa_topology']))
            self.assertEqual(flavor['root_gb'] + flavor['ephemeral_gb'],
                             self.compute['local_gb_used'])

        # after exiting claim context, build is marked as finished.  usage
        # totals should be same:
        self.tracker.update_available_resource(self.context)
        self.assertEqual(flavor['memory_mb'] + FAKE_VIRT_MEMORY_OVERHEAD,
                         self.tracker.compute_node['memory_mb_used'])
        self.assertEqual(flavor['root_gb'] + flavor['ephemeral_gb'],
                         self.tracker.compute_node['local_gb_used'])
        self.assertEqual(flavor['memory_mb'] + FAKE_VIRT_MEMORY_OVERHEAD,
                         self.compute['memory_mb_used'])
        self.assertEqualNUMAHostTopology(
            claim_topology,
            hardware.VirtNUMAHostTopology.from_json(
                self.compute['numa_topology']))
        self.assertEqual(flavor['root_gb'] + flavor['ephemeral_gb'],
                         self.compute['local_gb_used'])

    @mock.patch('nova.objects.InstancePCIRequests.get_by_instance_uuid',
                return_value=objects.InstancePCIRequests(requests=[]))
    def test_update_load_stats_for_instance(self, mock_get):
        # workload is counted while an instance is in a task state and
        # drops back to zero once it settles.
        instance = self._fake_instance(task_state=task_states.SCHEDULING)
        with self.tracker.instance_claim(self.context, instance):
            pass

        self.assertEqual(1, self.tracker.compute_node['current_workload'])

        instance['vm_state'] = vm_states.ACTIVE
        instance['task_state'] = None
        instance['host'] = 'fakehost'

        self.tracker.update_usage(self.context, instance)
        self.assertEqual(0, self.tracker.compute_node['current_workload'])

    @mock.patch('nova.objects.InstancePCIRequests.get_by_instance_uuid',
                return_value=objects.InstancePCIRequests(requests=[]))
    def test_cpu_stats(self, mock_get):
        # vcpu usage tracks claims and instance lifecycle, not arbitrary
        # update_usage() calls.
        limits = {'disk_gb': 100, 'memory_mb': 100}
        self.assertEqual(0, self.tracker.compute_node['vcpus_used'])

        vcpus = 1
        instance = self._fake_instance(vcpus=vcpus)

        # should not do anything until a claim is made:
        self.tracker.update_usage(self.context, instance)
        self.assertEqual(0, self.tracker.compute_node['vcpus_used'])

        with self.tracker.instance_claim(self.context, instance, limits):
            pass
        self.assertEqual(vcpus, self.tracker.compute_node['vcpus_used'])

        # instance state can change without modifying vcpus in use:
        instance['task_state'] = task_states.SCHEDULING
        self.tracker.update_usage(self.context, instance)
        self.assertEqual(vcpus, self.tracker.compute_node['vcpus_used'])

        add_vcpus = 10
        vcpus += add_vcpus
        instance = self._fake_instance(vcpus=add_vcpus)
        with self.tracker.instance_claim(self.context, instance, limits):
            pass
        self.assertEqual(vcpus, self.tracker.compute_node['vcpus_used'])

        # a deleted instance releases its vcpus on the next update:
        instance['vm_state'] = vm_states.DELETED
        self.tracker.update_usage(self.context, instance)
        vcpus -= add_vcpus
        self.assertEqual(vcpus, self.tracker.compute_node['vcpus_used'])

    def test_skip_deleted_instances(self):
        # ensure that the audit process skips instances that have vm_state
        # DELETED, but the DB record is not yet deleted.
        self._fake_instance(vm_state=vm_states.DELETED, host=self.host)
        self.tracker.update_available_resource(self.context)

        self.assertEqual(0, self.tracker.compute_node['memory_mb_used'])
        self.assertEqual(0, self.tracker.compute_node['local_gb_used'])
class ResizeClaimTestCase(BaseTrackerTestCase):
    """Claim/audit behaviour of the tracker for resize (migration) claims.

    Fixes applied relative to the original:
      * ``src_dict.iteritems()`` replaced with ``src_dict.items()`` so the
        test also runs on Python 3 (``items()`` is valid on Python 2 too).
      * ``_fake_migration_create`` now returns the migration it builds;
        existing callers ignore the return value, so this is
        backward-compatible.
    """

    def setUp(self):
        super(ResizeClaimTestCase, self).setUp()

        def _fake_migration_create(mig_self, ctxt):
            # register the migration so the fake DB queries can see it
            self._migrations[mig_self.instance_uuid] = mig_self
            mig_self.obj_reset_changes()

        self.stubs.Set(objects.Migration, 'create',
                       _fake_migration_create)

        self.instance = self._fake_instance()
        self.instance_type = self._fake_flavor_create()

    def _fake_migration_create(self, context, values=None):
        """Build a fake migration record and register it via the stub."""
        instance_uuid = str(uuid.uuid1())
        mig_dict = test_migration.fake_db_migration()
        mig_dict.update({
            'id': 1,
            'source_compute': 'host1',
            'source_node': 'fakenode',
            'dest_compute': 'host2',
            'dest_node': 'fakenode',
            'dest_host': '127.0.0.1',
            'old_instance_type_id': 1,
            'new_instance_type_id': 2,
            'instance_uuid': instance_uuid,
            'status': 'pre-migrating',
            'updated_at': timeutils.utcnow()
        })
        if values:
            mig_dict.update(values)

        migration = objects.Migration()
        migration.update(mig_dict)
        # This hits the stub in setUp()
        migration.create('fake')
        return migration

    @mock.patch('nova.objects.InstancePCIRequests.get_by_instance_uuid',
                return_value=objects.InstancePCIRequests(requests=[]))
    def test_claim(self, mock_get):
        self.tracker.resize_claim(self.context, self.instance,
                                  self.instance_type, self.limits)
        self._assert(FAKE_VIRT_MEMORY_WITH_OVERHEAD, 'memory_mb_used')
        self._assert(FAKE_VIRT_LOCAL_GB, 'local_gb_used')
        self._assert(FAKE_VIRT_VCPUS, 'vcpus_used')
        self.assertEqual(1, len(self.tracker.tracked_migrations))

    @mock.patch('nova.objects.InstancePCIRequests.get_by_instance_uuid',
                return_value=objects.InstancePCIRequests(requests=[]))
    def test_abort(self, mock_get):
        # an exception inside the resize-claim context aborts the claim:
        try:
            with self.tracker.resize_claim(self.context, self.instance,
                                           self.instance_type, self.limits):
                raise test.TestingException("abort")
        except test.TestingException:
            pass

        self._assert(0, 'memory_mb_used')
        self._assert(0, 'local_gb_used')
        self._assert(0, 'vcpus_used')
        self.assertEqual(0, len(self.tracker.tracked_migrations))

    @mock.patch('nova.objects.InstancePCIRequests.get_by_instance_uuid',
                return_value=objects.InstancePCIRequests(requests=[]))
    def test_additive_claims(self, mock_get):
        # two resize claims accumulate usage:
        limits = self._limits(
            2 * FAKE_VIRT_MEMORY_WITH_OVERHEAD,
            2 * FAKE_VIRT_LOCAL_GB,
            2 * FAKE_VIRT_VCPUS)
        self.tracker.resize_claim(self.context, self.instance,
                                  self.instance_type, limits)
        instance2 = self._fake_instance()
        self.tracker.resize_claim(self.context, instance2,
                                  self.instance_type, limits)

        self._assert(2 * FAKE_VIRT_MEMORY_WITH_OVERHEAD, 'memory_mb_used')
        self._assert(2 * FAKE_VIRT_LOCAL_GB, 'local_gb_used')
        self._assert(2 * FAKE_VIRT_VCPUS, 'vcpus_used')

    @mock.patch('nova.objects.InstancePCIRequests.get_by_instance_uuid',
                return_value=objects.InstancePCIRequests(requests=[]))
    def test_claim_and_audit(self, mock_get):
        # a resize claim survives an audit pass:
        self.tracker.resize_claim(self.context, self.instance,
                                  self.instance_type, self.limits)

        self.tracker.update_available_resource(self.context)

        self._assert(FAKE_VIRT_MEMORY_WITH_OVERHEAD, 'memory_mb_used')
        self._assert(FAKE_VIRT_LOCAL_GB, 'local_gb_used')
        self._assert(FAKE_VIRT_VCPUS, 'vcpus_used')

    @mock.patch('nova.objects.InstancePCIRequests.get_by_instance_uuid',
                return_value=objects.InstancePCIRequests(requests=[]))
    def test_same_host(self, mock_get):
        # resizing on the same host holds resources for both the old and
        # the new flavor until the resize is confirmed/reverted:
        self.limits['vcpu'] = 3

        src_dict = {
            'memory_mb': 1, 'root_gb': 1, 'ephemeral_gb': 0, 'vcpus': 1}
        # NOTE: items() instead of the Python-2-only iteritems()
        dest_dict = dict((k, v + 1) for (k, v) in src_dict.items())
        src_type = self._fake_flavor_create(
            id=10, name="srcflavor", **src_dict)
        dest_type = self._fake_flavor_create(
            id=11, name="destflavor", **dest_dict)

        # make an instance of src_type:
        instance = self._fake_instance(flavor=src_type)
        instance['system_metadata'] = self._fake_instance_system_metadata(
            dest_type)

        self.tracker.instance_claim(self.context, instance, self.limits)

        # resize to dest_type:
        claim = self.tracker.resize_claim(self.context, instance,
                                          dest_type, self.limits)

        self._assert(src_dict['memory_mb'] + dest_dict['memory_mb']
                     + 2 * FAKE_VIRT_MEMORY_OVERHEAD, 'memory_mb_used')
        self._assert(src_dict['root_gb'] + src_dict['ephemeral_gb']
                     + dest_dict['root_gb'] + dest_dict['ephemeral_gb'],
                     'local_gb_used')
        self._assert(src_dict['vcpus'] + dest_dict['vcpus'], 'vcpus_used')

        self.tracker.update_available_resource(self.context)
        claim.abort()

        # only the original instance should remain, not the migration:
        self._assert(src_dict['memory_mb'] + FAKE_VIRT_MEMORY_OVERHEAD,
                     'memory_mb_used')
        self._assert(src_dict['root_gb'] + src_dict['ephemeral_gb'],
                     'local_gb_used')
        self._assert(src_dict['vcpus'], 'vcpus_used')
        self.assertEqual(1, len(self.tracker.tracked_instances))
        self.assertEqual(0, len(self.tracker.tracked_migrations))

    @mock.patch('nova.objects.InstancePCIRequests.get_by_instance_uuid',
                return_value=objects.InstancePCIRequests(requests=[]))
    def test_revert(self, mock_get):
        # dropping the resize claim releases everything:
        self.tracker.resize_claim(self.context, self.instance,
                                  self.instance_type, {}, self.limits)
        self.tracker.drop_resize_claim(self.context, self.instance)

        self.assertEqual(0, len(self.tracker.tracked_instances))
        self.assertEqual(0, len(self.tracker.tracked_migrations))
        self._assert(0, 'memory_mb_used')
        self._assert(0, 'local_gb_used')
        self._assert(0, 'vcpus_used')

    @mock.patch('nova.objects.InstancePCIRequests.get_by_instance_uuid',
                return_value=objects.InstancePCIRequests(requests=[]))
    def test_revert_reserve_source(self, mock_get):
        # if a revert has started at the API and audit runs on
        # the source compute before the instance flips back to source,
        # resources should still be held at the source based on the
        # migration:
        dest = "desthost"
        dest_tracker = self._tracker(host=dest)
        dest_tracker.update_available_resource(self.context)

        self.instance = self._fake_instance(memory_mb=FAKE_VIRT_MEMORY_MB,
                                            root_gb=FAKE_VIRT_LOCAL_GB,
                                            ephemeral_gb=0,
                                            vcpus=FAKE_VIRT_VCPUS,
                                            instance_type_id=1)

        values = {'source_compute': self.host, 'dest_compute': dest,
                  'old_instance_type_id': 1, 'new_instance_type_id': 1,
                  'status': 'post-migrating',
                  'instance_uuid': self.instance['uuid']}
        self._fake_migration_create(self.context, values)

        # attach an instance to the destination host tracker:
        dest_tracker.instance_claim(self.context, self.instance)

        self._assert(FAKE_VIRT_MEMORY_WITH_OVERHEAD,
                     'memory_mb_used', tracker=dest_tracker)
        self._assert(FAKE_VIRT_LOCAL_GB, 'local_gb_used',
                     tracker=dest_tracker)
        self._assert(FAKE_VIRT_VCPUS, 'vcpus_used',
                     tracker=dest_tracker)

        # audit and recheck to confirm migration doesn't get double counted
        # on dest:
        dest_tracker.update_available_resource(self.context)

        self._assert(FAKE_VIRT_MEMORY_WITH_OVERHEAD,
                     'memory_mb_used', tracker=dest_tracker)
        self._assert(FAKE_VIRT_LOCAL_GB, 'local_gb_used',
                     tracker=dest_tracker)
        self._assert(FAKE_VIRT_VCPUS, 'vcpus_used',
                     tracker=dest_tracker)

        # apply the migration to the source host tracker:
        self.tracker.update_available_resource(self.context)

        self._assert(FAKE_VIRT_MEMORY_WITH_OVERHEAD, 'memory_mb_used')
        self._assert(FAKE_VIRT_LOCAL_GB, 'local_gb_used')
        self._assert(FAKE_VIRT_VCPUS, 'vcpus_used')

        # flag the instance and migration as reverting and re-audit:
        self.instance['vm_state'] = vm_states.RESIZED
        self.instance['task_state'] = task_states.RESIZE_REVERTING
        self.tracker.update_available_resource(self.context)

        self._assert(FAKE_VIRT_MEMORY_MB + 1, 'memory_mb_used')
        self._assert(FAKE_VIRT_LOCAL_GB, 'local_gb_used')
        self._assert(FAKE_VIRT_VCPUS, 'vcpus_used')

    def test_resize_filter(self):
        # only RESIZED vm_state or an explicit resize task_state counts
        # as "in resize":
        instance = self._fake_instance(vm_state=vm_states.ACTIVE,
                                       task_state=task_states.SUSPENDING)
        self.assertFalse(self.tracker._instance_in_resize_state(instance))

        instance = self._fake_instance(vm_state=vm_states.RESIZED,
                                       task_state=task_states.SUSPENDING)
        self.assertTrue(self.tracker._instance_in_resize_state(instance))

        states = [task_states.RESIZE_PREP, task_states.RESIZE_MIGRATING,
                  task_states.RESIZE_MIGRATED, task_states.RESIZE_FINISH]
        for vm_state in [vm_states.ACTIVE, vm_states.STOPPED]:
            for task_state in states:
                instance = self._fake_instance(vm_state=vm_state,
                                               task_state=task_state)
                result = self.tracker._instance_in_resize_state(instance)
                self.assertTrue(result)

    def test_dupe_filter(self):
        # two migrations for the same instance must only be tracked once:
        instance = self._fake_instance(host=self.host)

        values = {'source_compute': self.host, 'dest_compute': self.host,
                  'instance_uuid': instance['uuid'],
                  'new_instance_type_id': 2}
        self._fake_flavor_create(id=2)
        self._fake_migration_create(self.context, values)
        self._fake_migration_create(self.context, values)

        self.tracker.update_available_resource(self.context)
        self.assertEqual(1, len(self.tracker.tracked_migrations))

    @mock.patch('nova.objects.InstancePCIRequests.get_by_instance_uuid',
                return_value=objects.InstancePCIRequests(requests=[]))
    def test_set_instance_host_and_node(self, mock_get):
        # a successful claim stamps host/node/launched_on on the instance:
        instance = self._fake_instance()
        self.assertIsNone(instance['host'])
        self.assertIsNone(instance['launched_on'])
        self.assertIsNone(instance['node'])

        claim = self.tracker.instance_claim(self.context, instance)
        self.assertNotEqual(0, claim.memory_mb)

        self.assertEqual('fakehost', instance['host'])
        self.assertEqual('fakehost', instance['launched_on'])
        self.assertEqual('fakenode', instance['node'])
class NoInstanceTypesInSysMetadata(ResizeClaimTestCase):
    """Make sure we handle the case where the following are true:

    #) Compute node C gets upgraded to code that looks for instance types
       in system metadata. AND
    #) C already has instances in the process of migrating that do not
       have stashed instance types.

    bug 1164110
    """

    def setUp(self):
        super(NoInstanceTypesInSysMetadata, self).setUp()
        self.instance = self._fake_instance(stash=False)

    def test_get_instance_type_stash_false(self):
        # with nothing stashed, the tracker must fall back to a flavor
        # lookup by id:
        with mock.patch.object(objects.Flavor, 'get_by_id',
                               return_value=self.instance_type):
            flavor = self.tracker._get_instance_type(self.context,
                                                     self.instance, "new_")
        self.assertEqual(self.instance_type, flavor)
class OrphanTestCase(BaseTrackerTestCase):
    """Instances reported by the hypervisor but unknown to the DB."""

    def _driver(self):
        class OrphanVirtDriver(FakeVirtDriver):
            def get_per_instance_usage(self):
                # two instances the database knows nothing about:
                orphan_uuids = ('1-2-3-4-5', '2-3-4-5-6')
                return dict((u, {'memory_mb': FAKE_VIRT_MEMORY_MB,
                                 'uuid': u})
                            for u in orphan_uuids)

        return OrphanVirtDriver()

    def test_usage(self):
        # both orphans count against memory usage:
        self.assertEqual(2 * FAKE_VIRT_MEMORY_WITH_OVERHEAD,
                         self.tracker.compute_node['memory_mb_used'])

    def test_find(self):
        # create one legit instance and verify the 2 orphans remain
        self._fake_instance()
        orphans = self.tracker._find_orphaned_instances()
        self.assertEqual(2, len(orphans))
class ComputeMonitorTestCase(BaseTestCase):
    """Behaviour of the resource-monitor plug-in machinery."""

    def setUp(self):
        super(ComputeMonitorTestCase, self).setUp()
        fake_monitors = [
            'nova.tests.compute.monitors.test_monitors.FakeMonitorClass1',
            'nova.tests.compute.monitors.test_monitors.FakeMonitorClass2']
        self.flags(compute_available_monitors=fake_monitors)
        self.tracker = self._tracker()
        self.node_name = 'nodename'
        self.user_id = 'fake'
        self.project_id = 'fake'
        self.info = {}
        self.context = context.RequestContext(self.user_id,
                                              self.project_id)

    def test_get_host_metrics_none(self):
        # no loaded monitors -> no metrics:
        self.flags(compute_monitors=['FakeMontorClass1', 'FakeMonitorClass4'])
        self.tracker.monitors = []
        metrics = self.tracker._get_host_metrics(self.context,
                                                 self.node_name)
        self.assertEqual(len(metrics), 0)

    def test_get_host_metrics_one_failed(self):
        # a failing monitor must not prevent the others from reporting:
        self.flags(compute_monitors=['FakeMonitorClass1', 'FakeMonitorClass4'])
        good_monitor = test_monitors.FakeMonitorClass1(self.tracker)
        bad_monitor = test_monitors.FakeMonitorClass4(self.tracker)
        self.tracker.monitors = [good_monitor, bad_monitor]
        metrics = self.tracker._get_host_metrics(self.context,
                                                 self.node_name)
        self.assertTrue(len(metrics) > 0)

    def test_get_host_metrics(self):
        self.flags(compute_monitors=['FakeMonitorClass1', 'FakeMonitorClass2'])
        monitor_one = test_monitors.FakeMonitorClass1(self.tracker)
        monitor_two = test_monitors.FakeMonitorClass2(self.tracker)
        self.tracker.monitors = [monitor_one, monitor_two]

        mock_notifier = mock.Mock()
        with mock.patch.object(rpc, 'get_notifier',
                               return_value=mock_notifier) as mock_get:
            metrics = self.tracker._get_host_metrics(self.context,
                                                     self.node_name)
            # the notifier must be scoped to this compute node:
            mock_get.assert_called_once_with(service='compute',
                                             host=self.node_name)

        expected_metrics = [{
            'timestamp': 1232,
            'name': 'key1',
            'value': 2600,
            'source': 'libvirt'
        }, {
            'name': 'key2',
            'source': 'libvirt',
            'timestamp': 123,
            'value': 1600
        }]

        payload = {
            'metrics': expected_metrics,
            'host': self.tracker.host,
            'host_ip': CONF.my_ip,
            'nodename': self.node_name
        }
        # metrics are both returned and emitted as a notification:
        mock_notifier.info.assert_called_once_with(
            self.context, 'compute.metrics.update', payload)
        self.assertEqual(metrics, expected_metrics)
class TrackerPeriodicTestCase(BaseTrackerTestCase):
    """Periodic resource-update behaviour of the tracker."""

    def test_periodic_status_update(self):
        # verify update called on instantiation
        self.assertEqual(1, self.update_call_count)
        # verify update not called if no change to resources
        self.tracker.update_available_resource(self.context)
        self.assertEqual(1, self.update_call_count)
        # verify update is called when resources change
        driver = self.tracker.driver
        driver.memory_mb += 1
        self.tracker.update_available_resource(self.context)
        self.assertEqual(2, self.update_call_count)

    def test_update_available_resource_calls_locked_inner(self):
        # Decorators are applied bottom-up, so the mock arguments of
        # _test() are in the reverse order of the @mock.patch lines.
        @mock.patch.object(self.tracker, 'driver')
        @mock.patch.object(self.tracker,
                           '_update_available_resource')
        @mock.patch.object(self.tracker, '_verify_resources')
        @mock.patch.object(self.tracker, '_report_hypervisor_resource_view')
        def _test(mock_rhrv, mock_vr, mock_uar, mock_driver):
            resources = {'there is someone in my head': 'but it\'s not me'}
            mock_driver.get_available_resource.return_value = resources
            self.tracker.update_available_resource(self.context)
            # The locked inner helper must receive the driver's resources.
            mock_uar.assert_called_once_with(self.context, resources)

        _test()
class StatsDictTestCase(BaseTrackerTestCase):
    """Stats handling when the virt driver reports stats as a dict."""

    def _driver(self):
        return FakeVirtDriver(stats=FAKE_VIRT_STATS)

    def _get_stats(self):
        return jsonutils.loads(self.tracker.compute_node['stats'])

    def test_virt_stats(self):
        # Initially only the virt driver stats are present.
        self.assertEqual(FAKE_VIRT_STATS, self._get_stats())

        # Adding an instance keeps the virt driver stats and mixes in
        # the tracker's own stats.
        self._fake_instance(vm_state=vm_states.ACTIVE, host=self.host)
        self.tracker.update_available_resource(self.context)
        combined = dict(FAKE_VIRT_STATS)
        combined.update(self.tracker.stats)
        self.assertEqual(combined, self._get_stats())

        # Removing the instances leaves only the virt driver stats.
        self._instances = {}
        self.tracker.update_available_resource(self.context)
        self.assertEqual(FAKE_VIRT_STATS, self._get_stats())
class StatsJsonTestCase(BaseTrackerTestCase):
    """Stats handling when the virt driver reports stats as a JSON string."""

    def _driver(self):
        return FakeVirtDriver(stats=FAKE_VIRT_STATS_JSON)

    def _get_stats(self):
        return jsonutils.loads(self.tracker.compute_node['stats'])

    def test_virt_stats(self):
        # Initially only the virt driver stats are present.
        self.assertEqual(FAKE_VIRT_STATS, self._get_stats())

        # Adding an instance keeps the virt driver stats and mixes in
        # the resource tracker's own stats.
        self._fake_instance(vm_state=vm_states.ACTIVE, host=self.host)
        self.tracker.update_available_resource(self.context)
        combined = dict(FAKE_VIRT_STATS)
        combined.update(self.tracker.stats)
        self.assertEqual(combined, self._get_stats())

        # Removing the instances leaves only the virt driver stats.
        self._instances = {}
        self.tracker.update_available_resource(self.context)
        self.assertEqual(FAKE_VIRT_STATS, self._get_stats())
class StatsInvalidJsonTestCase(BaseTrackerTestCase):
    """Stats handling when the virt driver reports unparseable stats."""

    def _driver(self):
        return FakeVirtDriver(stats='this is not json')

    def _init_tracker(self):
        # Skip the initial update done in setUp; the test triggers it
        # itself so that it can assert on the raised exception.
        pass

    def test_virt_stats(self):
        # A stats string that does not parse as JSON raises ValueError.
        self.assertRaises(ValueError,
                          self.tracker.update_available_resource,
                          context=self.context)
class StatsInvalidTypeTestCase(BaseTrackerTestCase):
    """Stats handling when the virt driver reports a bad stats type."""

    def _driver(self):
        return FakeVirtDriver(stats=10)

    def _init_tracker(self):
        # Skip the initial update done in setUp; the test triggers it
        # itself so that it can assert on the raised exception.
        pass

    def test_virt_stats(self):
        # An integer is not an acceptable stats value; expect ValueError.
        self.assertRaises(ValueError,
                          self.tracker.update_available_resource,
                          context=self.context)
|
angdraug/nova
|
nova/tests/compute/test_resource_tracker.py
|
Python
|
apache-2.0
| 61,231
|
[
"exciting"
] |
5b00ed313e1b81ed450dbe4236f0821dec8b2acbc05a3375a005cef56b2ce684
|
"""
Provide a generic structure to support window functions,
similar to how we have a Groupby object.
"""
from __future__ import division
from collections import defaultdict
from datetime import timedelta
from textwrap import dedent
import warnings
import numpy as np
import pandas._libs.window as libwindow
import pandas.compat as compat
from pandas.compat.numpy import function as nv
from pandas.util._decorators import Appender, Substitution, cache_readonly
from pandas.core.dtypes.common import (
ensure_float64, is_bool, is_float_dtype, is_integer, is_integer_dtype,
is_list_like, is_scalar, is_timedelta64_dtype, needs_i8_conversion)
from pandas.core.dtypes.generic import (
ABCDataFrame, ABCDateOffset, ABCDatetimeIndex, ABCPeriodIndex, ABCSeries,
ABCTimedeltaIndex)
from pandas.core.base import PandasObject, SelectionMixin
import pandas.core.common as com
from pandas.core.generic import _shared_docs
from pandas.core.groupby.base import GroupByMixin
_shared_docs = dict(**_shared_docs)
_doc_template = """
Returns
-------
Series or DataFrame
Return type is determined by the caller.
See Also
--------
Series.%(name)s : Series %(name)s.
DataFrame.%(name)s : DataFrame %(name)s.
"""
class _Window(PandasObject, SelectionMixin):
    """Common machinery for windowed aggregations: argument validation,
    splitting data into blocks, preparing values and wrapping results.
    """

    # Attributes echoed (in this order) by __unicode__.
    _attributes = ['window', 'min_periods', 'center', 'win_type',
                   'axis', 'on', 'closed']
    exclusions = set()

    def __init__(self, obj, window=None, min_periods=None,
                 center=False, win_type=None, axis=0, on=None, closed=None,
                 **kwargs):
        # Any extra keyword arguments are stashed directly on the instance.
        self.__dict__.update(kwargs)
        self.blocks = []
        self.obj = obj
        self.on = on
        self.closed = closed
        self.window = window
        self.min_periods = min_periods
        self.center = center
        self.win_type = win_type
        self.win_freq = None
        self.axis = obj._get_axis_number(axis) if axis is not None else None
        self.validate()

    @property
    def _constructor(self):
        # Default constructor; subclasses override (e.g. _Rolling).
        return Window

    @property
    def is_datetimelike(self):
        # Base class has no opinion; subclasses return a bool.
        return None

    @property
    def _on(self):
        return None

    @property
    def is_freq_type(self):
        return self.win_type == 'freq'

    def validate(self):
        """Validate constructor arguments, raising ValueError on bad input."""
        if self.center is not None and not is_bool(self.center):
            raise ValueError("center must be a boolean")
        if (self.min_periods is not None and
                not is_integer(self.min_periods)):
            raise ValueError("min_periods must be an integer")
        if (self.closed is not None and
                self.closed not in ['right', 'both', 'left', 'neither']):
            raise ValueError("closed must be 'right', 'left', 'both' or "
                             "'neither'")

    def _convert_freq(self):
        """
        Resample according to the how, return a new object.
        """
        obj = self._selected_obj
        index = None
        return obj, index

    def _create_blocks(self):
        """
        Split data into blocks & return conformed data.
        """
        obj, index = self._convert_freq()
        if index is not None:
            index = self._on

        # filter out the on from the object
        if self.on is not None:
            if obj.ndim == 2:
                obj = obj.reindex(columns=obj.columns.difference([self.on]),
                                  copy=False)
        blocks = obj._to_dict_of_blocks(copy=False).values()

        return blocks, obj, index

    def _gotitem(self, key, ndim, subset=None):
        """
        Sub-classes to define. Return a sliced object.

        Parameters
        ----------
        key : str / list of selections
        ndim : 1,2
            requested ndim of result
        subset : object, default None
            subset to act on
        """
        # create a new object to prevent aliasing
        if subset is None:
            subset = self.obj
        self = self._shallow_copy(subset)
        self._reset_cache()
        if subset.ndim == 2:
            # NOTE: `and` binds tighter than `or` here -- list-like keys
            # are accepted unconditionally.
            if is_scalar(key) and key in subset or is_list_like(key):
                self._selection = key
        return self

    def __getattr__(self, attr):
        # Delegate unknown attributes to column selection on the
        # underlying object (e.g. r.colname -> r['colname']).
        if attr in self._internal_names_set:
            return object.__getattribute__(self, attr)
        if attr in self.obj:
            return self[attr]

        raise AttributeError("%r object has no attribute %r" %
                             (type(self).__name__, attr))

    def _dir_additions(self):
        return self.obj._dir_additions()

    def _get_window(self, other=None):
        return self.window

    @property
    def _window_type(self):
        return self.__class__.__name__

    def __unicode__(self):
        """
        Provide a nice str repr of our rolling object.
        """
        attrs = ["{k}={v}".format(k=k, v=getattr(self, k))
                 for k in self._attributes
                 if getattr(self, k, None) is not None]
        return "{klass} [{attrs}]".format(klass=self._window_type,
                                          attrs=','.join(attrs))

    def __iter__(self):
        url = 'https://github.com/pandas-dev/pandas/issues/11704'
        raise NotImplementedError('See issue #11704 {url}'.format(url=url))

    def _get_index(self, index=None):
        """
        Return index as ndarrays.

        Returns
        -------
        tuple of (index, index_as_ndarray)
        """
        if self.is_freq_type:
            if index is None:
                index = self._on
            # asi8 gives the int64 view the Cython kernels expect.
            return index, index.asi8
        return index, index

    def _prep_values(self, values=None, kill_inf=True):
        # Coerce input to a float64 ndarray, optionally replacing +/-inf
        # with NaN so the window kernels never see infinities.
        if values is None:
            values = getattr(self._selected_obj, 'values', self._selected_obj)

        # GH #12373 : rolling functions error on float32 data
        # make sure the data is coerced to float64
        if is_float_dtype(values.dtype):
            values = ensure_float64(values)
        elif is_integer_dtype(values.dtype):
            values = ensure_float64(values)
        elif needs_i8_conversion(values.dtype):
            raise NotImplementedError("ops for {action} for this "
                                      "dtype {dtype} are not "
                                      "implemented".format(
                                          action=self._window_type,
                                          dtype=values.dtype))
        else:
            try:
                values = ensure_float64(values)
            except (ValueError, TypeError):
                raise TypeError("cannot handle this type -> {0}"
                                "".format(values.dtype))

        if kill_inf:
            values = values.copy()
            values[np.isinf(values)] = np.NaN

        return values

    def _wrap_result(self, result, block=None, obj=None):
        """
        Wrap a single result.
        """
        if obj is None:
            obj = self._selected_obj
        index = obj.index

        if isinstance(result, np.ndarray):

            # coerce if necessary
            if block is not None:
                if is_timedelta64_dtype(block.values.dtype):
                    from pandas import to_timedelta
                    result = to_timedelta(
                        result.ravel(), unit='ns').values.reshape(result.shape)

            if result.ndim == 1:
                from pandas import Series
                return Series(result, index, name=obj.name)

            return type(obj)(result, index=index, columns=block.columns)
        return result

    def _wrap_results(self, results, blocks, obj):
        """
        Wrap the results.

        Parameters
        ----------
        results : list of ndarrays
        blocks : list of blocks
        obj : conformed data (may be resampled)
        """
        from pandas import Series, concat
        from pandas.core.index import ensure_index

        final = []
        for result, block in zip(results, blocks):

            result = self._wrap_result(result, block=block, obj=obj)
            # A 1-dim result means a single Series; return it directly.
            if result.ndim == 1:
                return result
            final.append(result)

        # if we have an 'on' column
        # we want to put it back into the results
        # in the same location
        columns = self._selected_obj.columns
        if self.on is not None and not self._on.equals(obj.index):

            name = self._on.name
            final.append(Series(self._on, index=obj.index, name=name))

            if self._selection is not None:

                selection = ensure_index(self._selection)

                # need to reorder to include original location of
                # the on column (if its not already there)
                if name not in selection:
                    columns = self.obj.columns
                    indexer = columns.get_indexer(selection.tolist() + [name])
                    columns = columns.take(sorted(indexer))

        if not len(final):
            return obj.astype('float64')
        return concat(final, axis=1).reindex(columns=columns, copy=False)

    def _center_window(self, result, window):
        """
        Center the result in the window.
        """
        if self.axis > result.ndim - 1:
            raise ValueError("Requested axis is larger then no. of argument "
                             "dimensions")

        offset = _offset(window, True)
        if offset > 0:
            if isinstance(result, (ABCSeries, ABCDataFrame)):
                result = result.slice_shift(-offset, axis=self.axis)
            else:
                lead_indexer = [slice(None)] * result.ndim
                lead_indexer[self.axis] = slice(offset, None)
                result = np.copy(result[tuple(lead_indexer)])
        return result

    def aggregate(self, arg, *args, **kwargs):
        result, how = self._aggregate(arg, *args, **kwargs)
        if result is None:
            # Fall back to applying `arg` row/column-wise.
            return self.apply(arg, raw=False, args=args, kwargs=kwargs)
        return result

    agg = aggregate
_shared_docs['sum'] = dedent("""
Calculate %(name)s sum of given DataFrame or Series.
Parameters
----------
*args, **kwargs
For compatibility with other %(name)s methods. Has no effect
on the computed value.
Returns
-------
Series or DataFrame
Same type as the input, with the same index, containing the
%(name)s sum.
See Also
--------
Series.sum : Reducing sum for Series.
DataFrame.sum : Reducing sum for DataFrame.
Examples
--------
>>> s = pd.Series([1, 2, 3, 4, 5])
>>> s
0 1
1 2
2 3
3 4
4 5
dtype: int64
>>> s.rolling(3).sum()
0 NaN
1 NaN
2 6.0
3 9.0
4 12.0
dtype: float64
>>> s.expanding(3).sum()
0 NaN
1 NaN
2 6.0
3 10.0
4 15.0
dtype: float64
>>> s.rolling(3, center=True).sum()
0 NaN
1 6.0
2 9.0
3 12.0
4 NaN
dtype: float64
For DataFrame, each %(name)s sum is computed column-wise.
>>> df = pd.DataFrame({"A": s, "B": s ** 2})
>>> df
A B
0 1 1
1 2 4
2 3 9
3 4 16
4 5 25
>>> df.rolling(3).sum()
A B
0 NaN NaN
1 NaN NaN
2 6.0 14.0
3 9.0 29.0
4 12.0 50.0
""")
_shared_docs['mean'] = dedent("""
Calculate the %(name)s mean of the values.
Parameters
----------
*args
Under Review.
**kwargs
Under Review.
Returns
-------
Series or DataFrame
Returned object type is determined by the caller of the %(name)s
calculation.
See Also
--------
Series.%(name)s : Calling object with Series data.
DataFrame.%(name)s : Calling object with DataFrames.
Series.mean : Equivalent method for Series.
DataFrame.mean : Equivalent method for DataFrame.
Examples
--------
The below examples will show rolling mean calculations with window sizes of
two and three, respectively.
>>> s = pd.Series([1, 2, 3, 4])
>>> s.rolling(2).mean()
0 NaN
1 1.5
2 2.5
3 3.5
dtype: float64
>>> s.rolling(3).mean()
0 NaN
1 NaN
2 2.0
3 3.0
dtype: float64
""")
class Window(_Window):
    """
    Provides rolling window calculations.

    .. versionadded:: 0.18.0

    Parameters
    ----------
    window : int, or offset
        Size of the moving window. This is the number of observations used for
        calculating the statistic. Each window will be a fixed size.

        If its an offset then this will be the time period of each window. Each
        window will be a variable sized based on the observations included in
        the time-period. This is only valid for datetimelike indexes. This is
        new in 0.19.0
    min_periods : int, default None
        Minimum number of observations in window required to have a value
        (otherwise result is NA). For a window that is specified by an offset,
        `min_periods` will default to 1. Otherwise, `min_periods` will default
        to the size of the window.
    center : bool, default False
        Set the labels at the center of the window.
    win_type : str, default None
        Provide a window type. If ``None``, all points are evenly weighted.
        See the notes below for further information.
    on : str, optional
        For a DataFrame, column on which to calculate
        the rolling window, rather than the index
    axis : int or str, default 0
    closed : str, default None
        Make the interval closed on the 'right', 'left', 'both' or
        'neither' endpoints.
        For offset-based windows, it defaults to 'right'.
        For fixed windows, defaults to 'both'. Remaining cases not implemented
        for fixed windows.

        .. versionadded:: 0.20.0

    Returns
    -------
    a Window or Rolling sub-classed for the particular operation

    See Also
    --------
    expanding : Provides expanding transformations.
    ewm : Provides exponential weighted functions.

    Notes
    -----
    By default, the result is set to the right edge of the window. This can be
    changed to the center of the window by setting ``center=True``.

    To learn more about the offsets & frequency strings, please see `this link
    <http://pandas.pydata.org/pandas-docs/stable/timeseries.html#offset-aliases>`__.

    The recognized win_types are:

    * ``boxcar``
    * ``triang``
    * ``blackman``
    * ``hamming``
    * ``bartlett``
    * ``parzen``
    * ``bohman``
    * ``blackmanharris``
    * ``nuttall``
    * ``barthann``
    * ``kaiser`` (needs beta)
    * ``gaussian`` (needs std)
    * ``general_gaussian`` (needs power, width)
    * ``slepian`` (needs width).

    If ``win_type=None`` all points are evenly weighted. To learn more about
    different window types see `scipy.signal window functions
    <https://docs.scipy.org/doc/scipy/reference/signal.html#window-functions>`__.

    Examples
    --------
    >>> df = pd.DataFrame({'B': [0, 1, 2, np.nan, 4]})
    >>> df
         B
    0  0.0
    1  1.0
    2  2.0
    3  NaN
    4  4.0

    Rolling sum with a window length of 2, using the 'triang'
    window type.

    >>> df.rolling(2, win_type='triang').sum()
         B
    0  NaN
    1  1.0
    2  2.5
    3  NaN
    4  NaN

    Rolling sum with a window length of 2, min_periods defaults
    to the window length.

    >>> df.rolling(2).sum()
         B
    0  NaN
    1  1.0
    2  3.0
    3  NaN
    4  NaN

    Same as above, but explicitly set the min_periods

    >>> df.rolling(2, min_periods=1).sum()
         B
    0  0.0
    1  1.0
    2  3.0
    3  2.0
    4  4.0

    A ragged (meaning not-a-regular frequency), time-indexed DataFrame

    >>> df = pd.DataFrame({'B': [0, 1, 2, np.nan, 4]},
    ...                   index = [pd.Timestamp('20130101 09:00:00'),
    ...                            pd.Timestamp('20130101 09:00:02'),
    ...                            pd.Timestamp('20130101 09:00:03'),
    ...                            pd.Timestamp('20130101 09:00:05'),
    ...                            pd.Timestamp('20130101 09:00:06')])

    >>> df
                           B
    2013-01-01 09:00:00  0.0
    2013-01-01 09:00:02  1.0
    2013-01-01 09:00:03  2.0
    2013-01-01 09:00:05  NaN
    2013-01-01 09:00:06  4.0

    Contrasting to an integer rolling window, this will roll a variable
    length window corresponding to the time period.
    The default for min_periods is 1.

    >>> df.rolling('2s').sum()
                           B
    2013-01-01 09:00:00  0.0
    2013-01-01 09:00:02  1.0
    2013-01-01 09:00:03  3.0
    2013-01-01 09:00:05  NaN
    2013-01-01 09:00:06  4.0
    """

    def validate(self):
        """Additionally validate the window/win_type combination."""
        super(Window, self).validate()

        window = self.window
        if isinstance(window, (list, tuple, np.ndarray)):
            # An explicit array of weights is accepted as-is.
            pass
        elif is_integer(window):
            if window <= 0:
                raise ValueError("window must be > 0 ")
            # scipy is only needed to generate win_type weights.
            try:
                import scipy.signal as sig
            except ImportError:
                raise ImportError('Please install scipy to generate window '
                                  'weight')

            if not isinstance(self.win_type, compat.string_types):
                raise ValueError('Invalid win_type {0}'.format(self.win_type))
            if getattr(sig, self.win_type, None) is None:
                raise ValueError('Invalid win_type {0}'.format(self.win_type))
        else:
            raise ValueError('Invalid window {0}'.format(window))

    def _prep_window(self, **kwargs):
        """
        Provide validation for our window type, return the window
        we have already been validated.
        """
        window = self._get_window()
        if isinstance(window, (list, tuple, np.ndarray)):
            return com.asarray_tuplesafe(window).astype(float)
        elif is_integer(window):
            import scipy.signal as sig

            # the below may pop from kwargs
            def _validate_win_type(win_type, kwargs):
                # Some scipy window types need extra parameters, taken
                # (and removed) from kwargs.
                arg_map = {'kaiser': ['beta'],
                           'gaussian': ['std'],
                           'general_gaussian': ['power', 'width'],
                           'slepian': ['width']}
                if win_type in arg_map:
                    return tuple([win_type] + _pop_args(win_type,
                                                        arg_map[win_type],
                                                        kwargs))
                return win_type

            def _pop_args(win_type, arg_names, kwargs):
                msg = '%s window requires %%s' % win_type
                all_args = []
                for n in arg_names:
                    if n not in kwargs:
                        raise ValueError(msg % n)
                    all_args.append(kwargs.pop(n))
                return all_args

            win_type = _validate_win_type(self.win_type, kwargs)
            # GH #15662. `False` makes symmetric window, rather than periodic.
            return sig.get_window(win_type, window, False).astype(float)

    def _apply_window(self, mean=True, **kwargs):
        """
        Applies a moving window of type ``window_type`` on the data.

        Parameters
        ----------
        mean : bool, default True
            If True computes weighted mean, else weighted sum

        Returns
        -------
        y : same type as input argument
        """
        window = self._prep_window(**kwargs)
        center = self.center

        blocks, obj, index = self._create_blocks()
        results = []
        for b in blocks:
            try:
                values = self._prep_values(b.values)
            except TypeError:
                # Non-numeric block: pass it through unchanged.
                results.append(b.values.copy())
                continue

            if values.size == 0:
                results.append(values.copy())
                continue

            offset = _offset(window, center)
            additional_nans = np.array([np.NaN] * offset)

            # f closes over `window`, `center`, `mean` and the NaN pad.
            def f(arg, *args, **kwargs):
                minp = _use_window(self.min_periods, len(window))
                return libwindow.roll_window(np.concatenate((arg,
                                                             additional_nans))
                                             if center else arg, window, minp,
                                             avg=mean)

            result = np.apply_along_axis(f, self.axis, values)

            if center:
                result = self._center_window(result, window)
            results.append(result)

        return self._wrap_results(results, blocks, obj)

    _agg_see_also_doc = dedent("""
    See Also
    --------
    pandas.DataFrame.rolling.aggregate
    pandas.DataFrame.aggregate
    """)

    _agg_examples_doc = dedent("""
    Examples
    --------
    >>> df = pd.DataFrame(np.random.randn(10, 3), columns=['A', 'B', 'C'])
    >>> df
              A         B         C
    0 -2.385977 -0.102758  0.438822
    1 -1.004295  0.905829 -0.954544
    2  0.735167 -0.165272 -1.619346
    3 -0.702657 -1.340923 -0.706334
    4 -0.246845  0.211596 -0.901819
    5  2.463718  3.157577 -1.380906
    6 -1.142255  2.340594 -0.039875
    7  1.396598 -1.647453  1.677227
    8 -0.543425  1.761277 -0.220481
    9 -0.640505  0.289374 -1.550670

    >>> df.rolling(3, win_type='boxcar').agg('mean')
              A         B         C
    0       NaN       NaN       NaN
    1       NaN       NaN       NaN
    2 -0.885035  0.212600 -0.711689
    3 -0.323928 -0.200122 -1.093408
    4 -0.071445 -0.431533 -1.075833
    5  0.504739  0.676083 -0.996353
    6  0.358206  1.903256 -0.774200
    7  0.906020  1.283573  0.085482
    8 -0.096361  0.818139  0.472290
    9  0.070889  0.134399 -0.031308
    """)

    @Substitution(see_also=_agg_see_also_doc,
                  examples=_agg_examples_doc,
                  versionadded='',
                  klass='Series/DataFrame',
                  axis='')
    @Appender(_shared_docs['aggregate'])
    def aggregate(self, arg, *args, **kwargs):
        result, how = self._aggregate(arg, *args, **kwargs)
        if result is None:

            # these must apply directly
            result = arg(self)

        return result

    agg = aggregate

    @Substitution(name='window')
    @Appender(_shared_docs['sum'])
    def sum(self, *args, **kwargs):
        nv.validate_window_func('sum', args, kwargs)
        return self._apply_window(mean=False, **kwargs)

    @Substitution(name='window')
    @Appender(_shared_docs['mean'])
    def mean(self, *args, **kwargs):
        nv.validate_window_func('mean', args, kwargs)
        return self._apply_window(mean=True, **kwargs)
class _GroupByMixin(GroupByMixin):
    """
    Provide the groupby facilities.
    """

    def __init__(self, obj, *args, **kwargs):
        parent = kwargs.pop('parent', None)  # noqa
        groupby = kwargs.pop('groupby', None)
        if groupby is None:
            groupby, obj = obj, obj.obj
        self._groupby = groupby
        self._groupby.mutated = True
        self._groupby.grouper.mutated = True
        # NOTE: super() is anchored at GroupByMixin (not _GroupByMixin),
        # so the MRO search starts *after* GroupByMixin -- presumably to
        # skip its __init__; confirm before changing.
        super(GroupByMixin, self).__init__(obj, *args, **kwargs)

    # These ops are dispatched straight to the grouped object.
    count = GroupByMixin._dispatch('count')
    corr = GroupByMixin._dispatch('corr', other=None, pairwise=None)
    cov = GroupByMixin._dispatch('cov', other=None, pairwise=None)

    def _apply(self, func, name, window=None, center=None,
               check_minp=None, **kwargs):
        """
        Dispatch to apply; we are stripping all of the _apply kwargs and
        performing the original function call on the grouped object.
        """

        def f(x, name=name, *args):
            x = self._shallow_copy(x)

            if isinstance(name, compat.string_types):
                return getattr(x, name)(*args, **kwargs)

            return x.apply(name, *args, **kwargs)

        return self._groupby.apply(f)
class _Rolling(_Window):
    """Rolling-specific base; routes computations through Cython kernels."""

    @property
    def _constructor(self):
        return Rolling

    def _apply(self, func, name=None, window=None, center=None,
               check_minp=None, **kwargs):
        """
        Rolling statistical measure using supplied function.

        Designed to be used with passed-in Cython array-based functions.

        Parameters
        ----------
        func : str/callable to apply
        name : str, optional
            name of this function
        window : int/array, default to _get_window()
        center : bool, default to self.center
        check_minp : function, default to _use_window

        Returns
        -------
        y : type of input
        """
        if center is None:
            center = self.center
        if window is None:
            window = self._get_window()

        if check_minp is None:
            check_minp = _use_window

        blocks, obj, index = self._create_blocks()
        index, indexi = self._get_index(index=index)
        results = []
        for b in blocks:
            values = self._prep_values(b.values)

            if values.size == 0:
                results.append(values.copy())
                continue

            # if we have a string function name, wrap it
            if isinstance(func, compat.string_types):
                cfunc = getattr(libwindow, func, None)
                if cfunc is None:
                    raise ValueError("we do not support this function "
                                     "in libwindow.{func}".format(func=func))

                # NOTE: deliberately rebinds the `func` parameter with a
                # wrapper closing over cfunc/indexi/kwargs.
                def func(arg, window, min_periods=None, closed=None):
                    minp = check_minp(min_periods, window)
                    # ensure we are only rolling on floats
                    arg = ensure_float64(arg)
                    return cfunc(arg,
                                 window, minp, indexi, closed, **kwargs)

            # calculation function
            if center:
                # Pad with NaN on the right so the kernel sees a full
                # trailing window, then shift back in _center_window.
                offset = _offset(window, center)
                additional_nans = np.array([np.NaN] * offset)

                def calc(x):
                    return func(np.concatenate((x, additional_nans)),
                                window, min_periods=self.min_periods,
                                closed=self.closed)
            else:

                def calc(x):
                    return func(x, window, min_periods=self.min_periods,
                                closed=self.closed)

            with np.errstate(all='ignore'):
                if values.ndim > 1:
                    result = np.apply_along_axis(calc, self.axis, values)
                else:
                    result = calc(values)

            if center:
                result = self._center_window(result, window)

            results.append(result)

        return self._wrap_results(results, blocks, obj)
class _Rolling_and_Expanding(_Rolling):
_shared_docs['count'] = dedent(r"""
The %(name)s count of any non-NaN observations inside the window.
Returns
-------
Series or DataFrame
Returned object type is determined by the caller of the %(name)s
calculation.
See Also
--------
pandas.Series.%(name)s : Calling object with Series data.
pandas.DataFrame.%(name)s : Calling object with DataFrames.
pandas.DataFrame.count : Count of the full DataFrame.
Examples
--------
>>> s = pd.Series([2, 3, np.nan, 10])
>>> s.rolling(2).count()
0 1.0
1 2.0
2 1.0
3 1.0
dtype: float64
>>> s.rolling(3).count()
0 1.0
1 2.0
2 2.0
3 2.0
dtype: float64
>>> s.rolling(4).count()
0 1.0
1 2.0
2 2.0
3 3.0
dtype: float64
""")
def count(self):
    """Count non-NaN observations inside each window."""
    blocks, obj, index = self._create_blocks()
    # Validate the index up front; its return value is not needed here.
    self._get_index(index=index)

    window = self._get_window()
    if not self.center:
        # For trailing windows, clip the window to the data length.
        window = min(window, len(obj))

    results = []
    for block in blocks:
        counts = block.notna().astype(int)
        rolled = self._constructor(counts, window=window, min_periods=0,
                                   center=self.center,
                                   closed=self.closed).sum()
        results.append(rolled)

    return self._wrap_results(results, blocks, obj)
_shared_docs['apply'] = dedent(r"""
The %(name)s function's apply function.
Parameters
----------
func : function
Must produce a single value from an ndarray input if ``raw=True``
or a Series if ``raw=False``.
raw : bool, default None
* ``False`` : passes each row or column as a Series to the
function.
* ``True`` or ``None`` : the passed function will receive ndarray
objects instead.
If you are just applying a NumPy reduction function this will
achieve much better performance.
The `raw` parameter is required and will show a FutureWarning if
not passed. In the future `raw` will default to False.
.. versionadded:: 0.23.0
*args, **kwargs
Arguments and keyword arguments to be passed into func.
Returns
-------
Series or DataFrame
Return type is determined by the caller.
See Also
--------
Series.%(name)s : Series %(name)s.
DataFrame.%(name)s : DataFrame %(name)s.
""")
def apply(self, func, raw=None, args=(), kwargs=None):
    """
    Apply ``func`` over each window.

    Parameters
    ----------
    func : callable
        Must produce a single value from an ndarray input if ``raw=True``
        or a Series if ``raw=False``.
    raw : bool, default None
        Pass ndarrays (True) or Series (False) to ``func``. ``None``
        currently behaves like True but emits a FutureWarning.
    args : tuple, default ()
        Positional arguments forwarded to ``func``.
    kwargs : dict, optional
        Keyword arguments forwarded to ``func``.

    Returns
    -------
    Series or DataFrame
        Return type is determined by the caller.
    """
    from pandas import Series

    # BUG FIX: the previous signature used a mutable default argument
    # (``kwargs={}``) which the ``kwargs.pop`` below then mutated, so
    # state could leak across calls sharing the default. Use a None
    # sentinel and copy the caller's dict so it is never mutated either.
    kwargs = {} if kwargs is None else dict(kwargs)
    # TODO: _level is unused?
    _level = kwargs.pop('_level', None)  # noqa
    window = self._get_window()
    offset = _offset(window, self.center)
    index, indexi = self._get_index()

    # TODO: default is for backward compat
    # change to False in the future
    if raw is None:
        warnings.warn(
            "Currently, 'apply' passes the values as ndarrays to the "
            "applied function. In the future, this will change to passing "
            "it as Series objects. You need to specify 'raw=True' to keep "
            "the current behaviour, and you can pass 'raw=False' to "
            "silence this warning", FutureWarning, stacklevel=3)
        raw = True

    def f(arg, window, min_periods, closed):
        minp = _use_window(min_periods, window)
        if not raw:
            # Re-wrap the ndarray as a Series for the user's function.
            arg = Series(arg, index=self.obj.index)
        return libwindow.roll_generic(
            arg, window, minp, indexi,
            closed, offset, func, raw, args, kwargs)

    return self._apply(f, func, args=args, kwargs=kwargs,
                       center=False, raw=raw)
def sum(self, *args, **kwargs):
    # Extra positional/keyword args are validated for numpy
    # compatibility only; they never reach the kernel.
    nv.validate_window_func('sum', args, kwargs)
    return self._apply('roll_sum', 'sum', **kwargs)
# Shared docstring for the `max` aggregation.
_shared_docs['max'] = dedent("""
    Calculate the %(name)s maximum.

    Parameters
    ----------
    *args, **kwargs
        Arguments and keyword arguments to be passed into func.
    """)

def max(self, *args, **kwargs):
    # numpy-compat validation only; extra args never reach the kernel.
    nv.validate_window_func('max', args, kwargs)
    return self._apply('roll_max', 'max', **kwargs)
# Shared docstring for the `min` aggregation.
_shared_docs['min'] = dedent("""
    Calculate the %(name)s minimum.

    Parameters
    ----------
    **kwargs
        Under Review.

    Returns
    -------
    Series or DataFrame
        Returned object type is determined by the caller of the %(name)s
        calculation.

    See Also
    --------
    Series.%(name)s : Calling object with a Series.
    DataFrame.%(name)s : Calling object with a DataFrame.
    Series.min : Similar method for Series.
    DataFrame.min : Similar method for DataFrame.

    Examples
    --------
    Performing a rolling minimum with a window size of 3.

    >>> s = pd.Series([4, 3, 5, 2, 6])
    >>> s.rolling(3).min()
    0    NaN
    1    NaN
    2    3.0
    3    2.0
    4    2.0
    dtype: float64
    """)

def min(self, *args, **kwargs):
    # numpy-compat validation only; extra args never reach the kernel.
    nv.validate_window_func('min', args, kwargs)
    return self._apply('roll_min', 'min', **kwargs)

def mean(self, *args, **kwargs):
    # numpy-compat validation only; extra args never reach the kernel.
    nv.validate_window_func('mean', args, kwargs)
    return self._apply('roll_mean', 'mean', **kwargs)
# Shared docstring for the `median` aggregation.
_shared_docs['median'] = dedent("""
    Calculate the %(name)s median.

    Parameters
    ----------
    **kwargs
        For compatibility with other %(name)s methods. Has no effect
        on the computed median.

    Returns
    -------
    Series or DataFrame
        Returned type is the same as the original object.

    See Also
    --------
    Series.%(name)s : Calling object with Series data.
    DataFrame.%(name)s : Calling object with DataFrames.
    Series.median : Equivalent method for Series.
    DataFrame.median : Equivalent method for DataFrame.

    Examples
    --------
    Compute the rolling median of a series with a window size of 3.

    >>> s = pd.Series([0, 1, 2, 3, 4])
    >>> s.rolling(3).median()
    0    NaN
    1    NaN
    2    1.0
    3    2.0
    4    3.0
    dtype: float64
    """)

def median(self, **kwargs):
    # 'roll_median_c' is the Cython rolling-median kernel in libwindow.
    return self._apply('roll_median_c', 'median', **kwargs)
# Shared docstring for the `std` aggregation.
_shared_docs['std'] = dedent("""
    Calculate %(name)s standard deviation.

    Normalized by N-1 by default. This can be changed using the `ddof`
    argument.

    Parameters
    ----------
    ddof : int, default 1
        Delta Degrees of Freedom. The divisor used in calculations
        is ``N - ddof``, where ``N`` represents the number of elements.
    *args, **kwargs
        For NumPy compatibility. No additional arguments are used.

    Returns
    -------
    Series or DataFrame
        Returns the same object type as the caller of the %(name)s calculation.

    See Also
    --------
    Series.%(name)s : Calling object with Series data.
    DataFrame.%(name)s : Calling object with DataFrames.
    Series.std : Equivalent method for Series.
    DataFrame.std : Equivalent method for DataFrame.
    numpy.std : Equivalent method for Numpy array.

    Notes
    -----
    The default `ddof` of 1 used in Series.std is different than the default
    `ddof` of 0 in numpy.std.

    A minimum of one period is required for the rolling calculation.

    Examples
    --------
    >>> s = pd.Series([5, 5, 6, 7, 5, 5, 5])
    >>> s.rolling(3).std()
    0         NaN
    1         NaN
    2    0.577350
    3    1.000000
    4    1.000000
    5    1.154701
    6    0.000000
    dtype: float64

    >>> s.expanding(3).std()
    0         NaN
    1         NaN
    2    0.577350
    3    0.957427
    4    0.894427
    5    0.836660
    6    0.786796
    dtype: float64
    """)

def std(self, ddof=1, *args, **kwargs):
    # Computed as the square root of the rolling variance (see _zsqrt);
    # shares the min_periods handling with `var`.
    nv.validate_window_func('std', args, kwargs)
    window = self._get_window()
    index, indexi = self._get_index()

    def f(arg, *args, **kwargs):
        minp = _require_min_periods(1)(self.min_periods, window)
        return _zsqrt(libwindow.roll_var(arg, window, minp, indexi,
                                         self.closed, ddof))

    return self._apply(f, 'std', check_minp=_require_min_periods(1),
                       ddof=ddof, **kwargs)
# Shared docstring template for the rolling/expanding ``var`` methods;
# ``%(name)s`` is filled in per subclass by the @Substitution decorator.
_shared_docs['var'] = dedent("""
Calculate unbiased %(name)s variance.
Normalized by N-1 by default. This can be changed using the `ddof`
argument.
Parameters
----------
ddof : int, default 1
Delta Degrees of Freedom. The divisor used in calculations
is ``N - ddof``, where ``N`` represents the number of elements.
*args, **kwargs
For NumPy compatibility. No additional arguments are used.
Returns
-------
Series or DataFrame
Returns the same object type as the caller of the %(name)s calculation.
See Also
--------
Series.%(name)s : Calling object with Series data.
DataFrame.%(name)s : Calling object with DataFrames.
Series.var : Equivalent method for Series.
DataFrame.var : Equivalent method for DataFrame.
numpy.var : Equivalent method for Numpy array.
Notes
-----
The default `ddof` of 1 used in :meth:`Series.var` is different than the
default `ddof` of 0 in :func:`numpy.var`.
A minimum of 1 period is required for the rolling calculation.
Examples
--------
>>> s = pd.Series([5, 5, 6, 7, 5, 5, 5])
>>> s.rolling(3).var()
0 NaN
1 NaN
2 0.333333
3 1.000000
4 1.000000
5 1.333333
6 0.000000
dtype: float64
>>> s.expanding(3).var()
0 NaN
1 NaN
2 0.333333
3 0.916667
4 0.800000
5 0.700000
6 0.619048
dtype: float64
""")
def var(self, ddof=1, *args, **kwargs):
    """
    Compute the windowed (unbiased by default) variance via the
    'roll_var' Cython kernel; at least one observation is required.
    """
    nv.validate_window_func('var', args, kwargs)
    min_periods_check = _require_min_periods(1)
    return self._apply('roll_var', 'var', ddof=ddof,
                       check_minp=min_periods_check, **kwargs)
# Shared docstring template for the rolling/expanding ``skew`` methods.
_shared_docs['skew'] = """
Unbiased %(name)s skewness.
Parameters
----------
**kwargs
Keyword arguments to be passed into func.
"""
def skew(self, **kwargs):
    """
    Compute the windowed unbiased skewness via the 'roll_skew' kernel;
    the third moment needs at least three observations per window.
    """
    minp_check = _require_min_periods(3)
    return self._apply('roll_skew', 'skew', check_minp=minp_check, **kwargs)
# Shared docstring template for the rolling/expanding ``kurt`` methods.
_shared_docs['kurt'] = dedent("""
Calculate unbiased %(name)s kurtosis.
This function uses Fisher's definition of kurtosis without bias.
Parameters
----------
**kwargs
Under Review.
Returns
-------
Series or DataFrame
Returned object type is determined by the caller of the %(name)s
calculation
See Also
--------
Series.%(name)s : Calling object with Series data.
DataFrame.%(name)s : Calling object with DataFrames.
Series.kurt : Equivalent method for Series.
DataFrame.kurt : Equivalent method for DataFrame.
scipy.stats.skew : Third moment of a probability density.
scipy.stats.kurtosis : Reference SciPy method.
Notes
-----
A minimum of 4 periods is required for the %(name)s calculation.
""")
def kurt(self, **kwargs):
    """
    Compute the windowed unbiased (Fisher) kurtosis via the 'roll_kurt'
    kernel; the fourth moment needs at least four observations per window.
    """
    minp_check = _require_min_periods(4)
    return self._apply('roll_kurt', 'kurt', check_minp=minp_check, **kwargs)
# Shared docstring template for the rolling/expanding ``quantile`` methods.
_shared_docs['quantile'] = dedent("""
Calculate the %(name)s quantile.
Parameters
----------
quantile : float
Quantile to compute. 0 <= quantile <= 1.
interpolation : {'linear', 'lower', 'higher', 'midpoint', 'nearest'}
.. versionadded:: 0.23.0
This optional parameter specifies the interpolation method to use,
when the desired quantile lies between two data points `i` and `j`:
* linear: `i + (j - i) * fraction`, where `fraction` is the
fractional part of the index surrounded by `i` and `j`.
* lower: `i`.
* higher: `j`.
* nearest: `i` or `j` whichever is nearest.
* midpoint: (`i` + `j`) / 2.
**kwargs:
For compatibility with other %(name)s methods. Has no effect on
the result.
Returns
-------
Series or DataFrame
Returned object type is determined by the caller of the %(name)s
calculation.
See Also
--------
pandas.Series.quantile : Computes value at the given quantile over all data
in Series.
pandas.DataFrame.quantile : Computes values at the given quantile over
requested axis in DataFrame.
Examples
--------
>>> s = pd.Series([1, 2, 3, 4])
>>> s.rolling(2).quantile(.4, interpolation='lower')
0 NaN
1 1.0
2 2.0
3 3.0
dtype: float64
>>> s.rolling(2).quantile(.4, interpolation='midpoint')
0 NaN
1 1.5
2 2.5
3 3.5
dtype: float64
""")
def quantile(self, quantile, interpolation='linear', **kwargs):
    """
    Compute the windowed quantile. The extreme quantiles 0.0 and 1.0 are
    dispatched to the cheaper min/max kernels; everything in between goes
    to 'roll_quantile' with the requested interpolation.
    """
    win = self._get_window()
    _, index_as_array = self._get_index()

    def _quantile_kernel(values, *fargs, **fkwargs):
        minp = _use_window(self.min_periods, win)
        if quantile == 0.0:
            return libwindow.roll_min(values, win, minp, index_as_array,
                                      self.closed)
        if quantile == 1.0:
            return libwindow.roll_max(values, win, minp, index_as_array,
                                      self.closed)
        return libwindow.roll_quantile(values, win, minp, index_as_array,
                                       self.closed, quantile,
                                       interpolation)

    return self._apply(_quantile_kernel, 'quantile', quantile=quantile,
                       **kwargs)
# Shared docstring template for the rolling/expanding/ewm ``cov`` methods.
_shared_docs['cov'] = """
Calculate the %(name)s sample covariance.
Parameters
----------
other : Series, DataFrame, or ndarray, optional
If not supplied then will default to self and produce pairwise
output.
pairwise : bool, default None
If False then only matching columns between self and other will be
used and the output will be a DataFrame.
If True then all pairwise combinations will be calculated and the
output will be a MultiIndexed DataFrame in the case of DataFrame
inputs. In the case of missing elements, only complete pairwise
observations will be used.
ddof : int, default 1
Delta Degrees of Freedom. The divisor used in calculations
is ``N - ddof``, where ``N`` represents the number of elements.
**kwargs
Keyword arguments to be passed into func.
"""
def cov(self, other=None, pairwise=None, ddof=1, **kwargs):
    # Windowed sample covariance; the full user-facing docstring comes from
    # _shared_docs['cov'] via the subclass decorators.
    if other is None:
        other = self._selected_obj
        # only default unset
        pairwise = True if pairwise is None else pairwise
    other = self._shallow_copy(other)

    # GH 16058: offset window
    if self.is_freq_type:
        window = self.win_freq
    else:
        window = self._get_window(other)

    def _get_cov(X, Y):
        # GH #12373 : rolling functions error on float32 data
        # to avoid potential overflow, cast the data to float64
        X = X.astype('float64')
        Y = Y.astype('float64')
        mean = lambda x: x.rolling(window, self.min_periods,
                                   center=self.center).mean(**kwargs)
        count = (X + Y).rolling(window=window,
                                center=self.center).count(**kwargs)
        # E[XY] - E[X]E[Y] is the biased covariance; rescale by
        # count / (count - ddof) to remove the bias.
        bias_adj = count / (count - ddof)
        return (mean(X * Y) - mean(X) * mean(Y)) * bias_adj

    return _flex_binary_moment(self._selected_obj, other._selected_obj,
                               _get_cov, pairwise=bool(pairwise))
_shared_docs['corr'] = dedent("""
Calculate %(name)s correlation.
Parameters
----------
other : Series, DataFrame, or ndarray, optional
If not supplied then will default to self.
pairwise : bool, default None
Calculate pairwise combinations of columns within a
DataFrame. If `other` is not specified, defaults to `True`,
otherwise defaults to `False`.
Not relevant for :class:`~pandas.Series`.
**kwargs
Unused.
Returns
-------
Series or DataFrame
Returned object type is determined by the caller of the
%(name)s calculation.
See Also
--------
Series.%(name)s : Calling object with Series data.
DataFrame.%(name)s : Calling object with DataFrames.
Series.corr : Equivalent method for Series.
DataFrame.corr : Equivalent method for DataFrame.
%(name)s.cov : Similar method to calculate covariance.
numpy.corrcoef : NumPy Pearson's correlation calculation.
Notes
-----
This function uses Pearson's definition of correlation
(https://en.wikipedia.org/wiki/Pearson_correlation_coefficient).
When `other` is not specified, the output will be self correlation (e.g.
all 1's), except for :class:`~pandas.DataFrame` inputs with `pairwise`
set to `True`.
Function will return ``NaN`` for correlations of equal valued sequences;
this is the result of a 0/0 division error.
When `pairwise` is set to `False`, only matching columns between `self` and
`other` will be used.
When `pairwise` is set to `True`, the output will be a MultiIndex DataFrame
with the original index on the first level, and the `other` DataFrame
columns on the second level.
In the case of missing elements, only complete pairwise observations
will be used.
Examples
--------
The below example shows a rolling calculation with a window size of
four matching the equivalent function call using :meth:`numpy.corrcoef`.
>>> v1 = [3, 3, 3, 5, 8]
>>> v2 = [3, 4, 4, 4, 8]
>>> fmt = "{0:.6f}" # limit the printed precision to 6 digits
>>> # numpy returns a 2X2 array, the correlation coefficient
>>> # is the number at entry [0][1]
>>> print(fmt.format(np.corrcoef(v1[:-1], v2[:-1])[0][1]))
0.333333
>>> print(fmt.format(np.corrcoef(v1[1:], v2[1:])[0][1]))
0.916949
>>> s1 = pd.Series(v1)
>>> s2 = pd.Series(v2)
>>> s1.rolling(4).corr(s2)
0 NaN
1 NaN
2 NaN
3 0.333333
4 0.916949
dtype: float64
The below example shows a similar rolling calculation on a
DataFrame using the pairwise option.
>>> matrix = np.array([[51., 35.], [49., 30.], [47., 32.],\
[46., 31.], [50., 36.]])
>>> print(np.corrcoef(matrix[:-1,0], matrix[:-1,1]).round(7))
[[1. 0.6263001]
[0.6263001 1. ]]
>>> print(np.corrcoef(matrix[1:,0], matrix[1:,1]).round(7))
[[1. 0.5553681]
[0.5553681 1. ]]
>>> df = pd.DataFrame(matrix, columns=['X','Y'])
>>> df
X Y
0 51.0 35.0
1 49.0 30.0
2 47.0 32.0
3 46.0 31.0
4 50.0 36.0
>>> df.rolling(4).corr(pairwise=True)
X Y
0 X NaN NaN
Y NaN NaN
1 X NaN NaN
Y NaN NaN
2 X NaN NaN
Y NaN NaN
3 X 1.000000 0.626300
Y 0.626300 1.000000
4 X 1.000000 0.555368
Y 0.555368 1.000000
""")
def corr(self, other=None, pairwise=None, **kwargs):
    # Windowed Pearson correlation, computed as cov(a, b) / (std(a) * std(b))
    # over identically-configured rolling windows of both inputs; the
    # user-facing docstring comes from _shared_docs['corr'].
    if other is None:
        other = self._selected_obj
        # only default unset
        pairwise = True if pairwise is None else pairwise
    other = self._shallow_copy(other)
    window = self._get_window(other)

    def _get_corr(a, b):
        a = a.rolling(window=window, min_periods=self.min_periods,
                      center=self.center)
        b = b.rolling(window=window, min_periods=self.min_periods,
                      center=self.center)

        return a.cov(b, **kwargs) / (a.std(**kwargs) * b.std(**kwargs))

    return _flex_binary_moment(self._selected_obj, other._selected_obj,
                               _get_corr, pairwise=bool(pairwise))
class Rolling(_Rolling_and_Expanding):
    """
    Concrete rolling-window object returned by ``obj.rolling(...)``.

    Adds validation for integer vs. frequency/offset windows on top of the
    shared window methods, and binds the 'rolling' name into the shared
    docstring templates.
    """

    @cache_readonly
    def is_datetimelike(self):
        # True when the axis rolled over is datetime-like, which is what
        # permits frequency/offset based windows in validate().
        return isinstance(self._on,
                          (ABCDatetimeIndex,
                           ABCTimedeltaIndex,
                           ABCPeriodIndex))

    @cache_readonly
    def _on(self):
        # Resolve the axis to roll on: the object's index by default, or a
        # named DataFrame column when ``on`` was supplied.
        if self.on is None:
            return self.obj.index
        elif (isinstance(self.obj, ABCDataFrame) and
              self.on in self.obj.columns):
            from pandas import Index
            return Index(self.obj[self.on])
        else:
            raise ValueError("invalid on specified as {0}, "
                             "must be a column (if DataFrame) "
                             "or None".format(self.on))

    def validate(self):
        """
        Validate the window/min_periods/closed combination, converting a
        frequency or offset window into its fixed nanosecond length.
        """
        super(Rolling, self).validate()

        # we allow rolling on a datetimelike index
        if ((self.obj.empty or self.is_datetimelike) and
                isinstance(self.window, (compat.string_types, ABCDateOffset,
                                         timedelta))):

            self._validate_monotonic()
            freq = self._validate_freq()

            # we don't allow center
            if self.center:
                raise NotImplementedError("center is not implemented "
                                          "for datetimelike and offset "
                                          "based windows")

            # this will raise ValueError on non-fixed freqs
            self.win_freq = self.window
            self.window = freq.nanos
            self.win_type = 'freq'

            # min_periods must be an integer
            if self.min_periods is None:
                self.min_periods = 1

        elif not is_integer(self.window):
            raise ValueError("window must be an integer")
        elif self.window < 0:
            raise ValueError("window must be non-negative")

        if not self.is_datetimelike and self.closed is not None:
            raise ValueError("closed only implemented for datetimelike "
                             "and offset based windows")

    def _validate_monotonic(self):
        """
        Validate on is_monotonic.
        """
        if not self._on.is_monotonic:
            formatted = self.on or 'index'
            raise ValueError("{0} must be "
                             "monotonic".format(formatted))

    def _validate_freq(self):
        """
        Validate & return window frequency.
        """
        from pandas.tseries.frequencies import to_offset
        try:
            return to_offset(self.window)
        except (TypeError, ValueError):
            raise ValueError("passed window {0} is not "
                             "compatible with a datetimelike "
                             "index".format(self.window))

    _agg_see_also_doc = dedent("""
See Also
--------
pandas.Series.rolling
pandas.DataFrame.rolling
""")

    _agg_examples_doc = dedent("""
Examples
--------
>>> df = pd.DataFrame(np.random.randn(10, 3), columns=['A', 'B', 'C'])
>>> df
A B C
0 -2.385977 -0.102758 0.438822
1 -1.004295 0.905829 -0.954544
2 0.735167 -0.165272 -1.619346
3 -0.702657 -1.340923 -0.706334
4 -0.246845 0.211596 -0.901819
5 2.463718 3.157577 -1.380906
6 -1.142255 2.340594 -0.039875
7 1.396598 -1.647453 1.677227
8 -0.543425 1.761277 -0.220481
9 -0.640505 0.289374 -1.550670
>>> df.rolling(3).sum()
A B C
0 NaN NaN NaN
1 NaN NaN NaN
2 -2.655105 0.637799 -2.135068
3 -0.971785 -0.600366 -3.280224
4 -0.214334 -1.294599 -3.227500
5 1.514216 2.028250 -2.989060
6 1.074618 5.709767 -2.322600
7 2.718061 3.850718 0.256446
8 -0.289082 2.454418 1.416871
9 0.212668 0.403198 -0.093924
>>> df.rolling(3).agg({'A':'sum', 'B':'min'})
A B
0 NaN NaN
1 NaN NaN
2 -2.655105 -0.165272
3 -0.971785 -1.340923
4 -0.214334 -1.340923
5 1.514216 -1.340923
6 1.074618 0.211596
7 2.718061 -1.647453
8 -0.289082 -1.647453
9 0.212668 -1.647453
""")

    @Substitution(see_also=_agg_see_also_doc,
                  examples=_agg_examples_doc,
                  versionadded='',
                  klass='Series/Dataframe',
                  axis='')
    @Appender(_shared_docs['aggregate'])
    def aggregate(self, arg, *args, **kwargs):
        return super(Rolling, self).aggregate(arg, *args, **kwargs)

    agg = aggregate

    @Substitution(name='rolling')
    @Appender(_shared_docs['count'])
    def count(self):

        # different impl for freq counting
        if self.is_freq_type:
            return self._apply('roll_count', 'count')

        return super(Rolling, self).count()

    @Substitution(name='rolling')
    @Appender(_shared_docs['apply'])
    def apply(self, func, raw=None, args=(), kwargs=None):
        # ``kwargs=None`` rather than the mutable default ``{}`` so that a
        # caller mutating the dict can never leak state between calls;
        # normalizing here keeps the behavior identical.
        if kwargs is None:
            kwargs = {}
        return super(Rolling, self).apply(
            func, raw=raw, args=args, kwargs=kwargs)

    @Substitution(name='rolling')
    @Appender(_shared_docs['sum'])
    def sum(self, *args, **kwargs):
        nv.validate_rolling_func('sum', args, kwargs)
        return super(Rolling, self).sum(*args, **kwargs)

    @Substitution(name='rolling')
    @Appender(_doc_template)
    @Appender(_shared_docs['max'])
    def max(self, *args, **kwargs):
        nv.validate_rolling_func('max', args, kwargs)
        return super(Rolling, self).max(*args, **kwargs)

    @Substitution(name='rolling')
    @Appender(_shared_docs['min'])
    def min(self, *args, **kwargs):
        nv.validate_rolling_func('min', args, kwargs)
        return super(Rolling, self).min(*args, **kwargs)

    @Substitution(name='rolling')
    @Appender(_shared_docs['mean'])
    def mean(self, *args, **kwargs):
        nv.validate_rolling_func('mean', args, kwargs)
        return super(Rolling, self).mean(*args, **kwargs)

    @Substitution(name='rolling')
    @Appender(_shared_docs['median'])
    def median(self, **kwargs):
        return super(Rolling, self).median(**kwargs)

    @Substitution(name='rolling')
    @Appender(_shared_docs['std'])
    def std(self, ddof=1, *args, **kwargs):
        nv.validate_rolling_func('std', args, kwargs)
        return super(Rolling, self).std(ddof=ddof, **kwargs)

    @Substitution(name='rolling')
    @Appender(_shared_docs['var'])
    def var(self, ddof=1, *args, **kwargs):
        nv.validate_rolling_func('var', args, kwargs)
        return super(Rolling, self).var(ddof=ddof, **kwargs)

    @Substitution(name='rolling')
    @Appender(_doc_template)
    @Appender(_shared_docs['skew'])
    def skew(self, **kwargs):
        return super(Rolling, self).skew(**kwargs)

    _agg_doc = dedent("""
Examples
--------
The example below will show a rolling calculation with a window size of
four matching the equivalent function call using `scipy.stats`.
>>> arr = [1, 2, 3, 4, 999]
>>> fmt = "{0:.6f}"  # limit the printed precision to 6 digits
>>> import scipy.stats
>>> print(fmt.format(scipy.stats.kurtosis(arr[:-1], bias=False)))
-1.200000
>>> print(fmt.format(scipy.stats.kurtosis(arr[1:], bias=False)))
3.999946
>>> s = pd.Series(arr)
>>> s.rolling(4).kurt()
0 NaN
1 NaN
2 NaN
3 -1.200000
4 3.999946
dtype: float64
""")

    @Appender(_agg_doc)
    @Substitution(name='rolling')
    @Appender(_shared_docs['kurt'])
    def kurt(self, **kwargs):
        return super(Rolling, self).kurt(**kwargs)

    @Substitution(name='rolling')
    @Appender(_shared_docs['quantile'])
    def quantile(self, quantile, interpolation='linear', **kwargs):
        return super(Rolling, self).quantile(quantile=quantile,
                                             interpolation=interpolation,
                                             **kwargs)

    @Substitution(name='rolling')
    @Appender(_doc_template)
    @Appender(_shared_docs['cov'])
    def cov(self, other=None, pairwise=None, ddof=1, **kwargs):
        return super(Rolling, self).cov(other=other, pairwise=pairwise,
                                        ddof=ddof, **kwargs)

    @Substitution(name='rolling')
    @Appender(_shared_docs['corr'])
    def corr(self, other=None, pairwise=None, **kwargs):
        return super(Rolling, self).corr(other=other, pairwise=pairwise,
                                         **kwargs)
class RollingGroupby(_GroupByMixin, Rolling):
    """
    Provides a rolling groupby implementation.

    .. versionadded:: 0.18.1
    """
    @property
    def _constructor(self):
        # per-group windows are plain Rolling objects
        return Rolling

    def _gotitem(self, key, ndim, subset=None):

        # we are setting the index on the actual object
        # here so our index is carried thru to the selected obj
        # when we do the splitting for the groupby
        if self.on is not None:
            self._groupby.obj = self._groupby.obj.set_index(self._on)
            self.on = None
        return super(RollingGroupby, self)._gotitem(key, ndim, subset=subset)

    def _validate_monotonic(self):
        """
        Validate that on is monotonic;
        we don't care for groupby.rolling
        because we have already validated at a higher
        level.
        """
        pass
class Expanding(_Rolling_and_Expanding):
    """
Provides expanding transformations.
.. versionadded:: 0.18.0
Parameters
----------
min_periods : int, default 1
Minimum number of observations in window required to have a value
(otherwise result is NA).
center : bool, default False
Set the labels at the center of the window.
axis : int or str, default 0
Returns
-------
a Window sub-classed for the particular operation
See Also
--------
rolling : Provides rolling window calculations.
ewm : Provides exponential weighted functions.
Notes
-----
By default, the result is set to the right edge of the window. This can be
changed to the center of the window by setting ``center=True``.
Examples
--------
>>> df = pd.DataFrame({'B': [0, 1, 2, np.nan, 4]})
B
0 0.0
1 1.0
2 2.0
3 NaN
4 4.0
>>> df.expanding(2).sum()
B
0 NaN
1 1.0
2 3.0
3 3.0
4 7.0
"""
    _attributes = ['min_periods', 'center', 'axis']

    def __init__(self, obj, min_periods=1, center=False, axis=0,
                 **kwargs):
        # NOTE(review): extra **kwargs are silently ignored here — verify
        # whether callers rely on that leniency before tightening.
        super(Expanding, self).__init__(obj=obj, min_periods=min_periods,
                                        center=center, axis=axis)

    @property
    def _constructor(self):
        return Expanding

    def _get_window(self, other=None):
        """
        Get the window length over which to perform some operation.

        Parameters
        ----------
        other : object, default None
            The other object that is involved in the operation.
            Such an object is involved for operations like covariance.

        Returns
        -------
        window : int
            The window length.
        """
        axis = self.obj._get_axis(self.axis)
        # an expanding window spans everything seen so far; doubled when a
        # second (binary) operand participates
        length = len(axis) + (other is not None) * len(axis)

        other = self.min_periods or -1
        return max(length, other)

    _agg_see_also_doc = dedent("""
See Also
--------
pandas.DataFrame.expanding.aggregate
pandas.DataFrame.rolling.aggregate
pandas.DataFrame.aggregate
""")

    # NOTE(review): the example below calls ``df.ewm(...)`` — this looks
    # like a copy-paste from the EWM docs; verify it should not demonstrate
    # ``expanding`` instead.
    _agg_examples_doc = dedent("""
Examples
--------
>>> df = pd.DataFrame(np.random.randn(10, 3), columns=['A', 'B', 'C'])
>>> df
A B C
0 -2.385977 -0.102758 0.438822
1 -1.004295 0.905829 -0.954544
2 0.735167 -0.165272 -1.619346
3 -0.702657 -1.340923 -0.706334
4 -0.246845 0.211596 -0.901819
5 2.463718 3.157577 -1.380906
6 -1.142255 2.340594 -0.039875
7 1.396598 -1.647453 1.677227
8 -0.543425 1.761277 -0.220481
9 -0.640505 0.289374 -1.550670
>>> df.ewm(alpha=0.5).mean()
A B C
0 -2.385977 -0.102758 0.438822
1 -1.464856 0.569633 -0.490089
2 -0.207700 0.149687 -1.135379
3 -0.471677 -0.645305 -0.906555
4 -0.355635 -0.203033 -0.904111
5 1.076417 1.503943 -1.146293
6 -0.041654 1.925562 -0.588728
7 0.680292 0.132049 0.548693
8 0.067236 0.948257 0.163353
9 -0.286980 0.618493 -0.694496
""")

    @Substitution(see_also=_agg_see_also_doc,
                  examples=_agg_examples_doc,
                  versionadded='',
                  klass='Series/Dataframe',
                  axis='')
    @Appender(_shared_docs['aggregate'])
    def aggregate(self, arg, *args, **kwargs):
        return super(Expanding, self).aggregate(arg, *args, **kwargs)

    agg = aggregate

    @Substitution(name='expanding')
    @Appender(_shared_docs['count'])
    def count(self, **kwargs):
        return super(Expanding, self).count(**kwargs)

    @Substitution(name='expanding')
    @Appender(_shared_docs['apply'])
    def apply(self, func, raw=None, args=(), kwargs=None):
        # ``kwargs=None`` rather than the mutable default ``{}`` so that a
        # caller mutating the dict can never leak state between calls;
        # normalizing here keeps the behavior identical.
        if kwargs is None:
            kwargs = {}
        return super(Expanding, self).apply(
            func, raw=raw, args=args, kwargs=kwargs)

    @Substitution(name='expanding')
    @Appender(_shared_docs['sum'])
    def sum(self, *args, **kwargs):
        nv.validate_expanding_func('sum', args, kwargs)
        return super(Expanding, self).sum(*args, **kwargs)

    @Substitution(name='expanding')
    @Appender(_doc_template)
    @Appender(_shared_docs['max'])
    def max(self, *args, **kwargs):
        nv.validate_expanding_func('max', args, kwargs)
        return super(Expanding, self).max(*args, **kwargs)

    @Substitution(name='expanding')
    @Appender(_shared_docs['min'])
    def min(self, *args, **kwargs):
        nv.validate_expanding_func('min', args, kwargs)
        return super(Expanding, self).min(*args, **kwargs)

    @Substitution(name='expanding')
    @Appender(_shared_docs['mean'])
    def mean(self, *args, **kwargs):
        nv.validate_expanding_func('mean', args, kwargs)
        return super(Expanding, self).mean(*args, **kwargs)

    @Substitution(name='expanding')
    @Appender(_shared_docs['median'])
    def median(self, **kwargs):
        return super(Expanding, self).median(**kwargs)

    @Substitution(name='expanding')
    @Appender(_shared_docs['std'])
    def std(self, ddof=1, *args, **kwargs):
        nv.validate_expanding_func('std', args, kwargs)
        return super(Expanding, self).std(ddof=ddof, **kwargs)

    @Substitution(name='expanding')
    @Appender(_shared_docs['var'])
    def var(self, ddof=1, *args, **kwargs):
        nv.validate_expanding_func('var', args, kwargs)
        return super(Expanding, self).var(ddof=ddof, **kwargs)

    @Substitution(name='expanding')
    @Appender(_doc_template)
    @Appender(_shared_docs['skew'])
    def skew(self, **kwargs):
        return super(Expanding, self).skew(**kwargs)

    _agg_doc = dedent("""
Examples
--------
The example below will show an expanding calculation with a window size of
four matching the equivalent function call using `scipy.stats`.
>>> arr = [1, 2, 3, 4, 999]
>>> import scipy.stats
>>> fmt = "{0:.6f}"  # limit the printed precision to 6 digits
>>> print(fmt.format(scipy.stats.kurtosis(arr[:-1], bias=False)))
-1.200000
>>> print(fmt.format(scipy.stats.kurtosis(arr, bias=False)))
4.999874
>>> s = pd.Series(arr)
>>> s.expanding(4).kurt()
0 NaN
1 NaN
2 NaN
3 -1.200000
4 4.999874
dtype: float64
""")

    @Appender(_agg_doc)
    @Substitution(name='expanding')
    @Appender(_shared_docs['kurt'])
    def kurt(self, **kwargs):
        return super(Expanding, self).kurt(**kwargs)

    @Substitution(name='expanding')
    @Appender(_shared_docs['quantile'])
    def quantile(self, quantile, interpolation='linear', **kwargs):
        return super(Expanding, self).quantile(quantile=quantile,
                                               interpolation=interpolation,
                                               **kwargs)

    @Substitution(name='expanding')
    @Appender(_doc_template)
    @Appender(_shared_docs['cov'])
    def cov(self, other=None, pairwise=None, ddof=1, **kwargs):
        return super(Expanding, self).cov(other=other, pairwise=pairwise,
                                          ddof=ddof, **kwargs)

    @Substitution(name='expanding')
    @Appender(_shared_docs['corr'])
    def corr(self, other=None, pairwise=None, **kwargs):
        return super(Expanding, self).corr(other=other, pairwise=pairwise,
                                           **kwargs)
class ExpandingGroupby(_GroupByMixin, Expanding):
    """
    Provides a expanding groupby implementation.

    .. versionadded:: 0.18.1
    """
    @property
    def _constructor(self):
        # per-group windows are plain Expanding objects
        return Expanding
# Docstring fragments appended (via @Appender) to the EWM methods below.
_bias_template = """
Parameters
----------
bias : bool, default False
Use a standard estimation bias correction.
*args, **kwargs
Arguments and keyword arguments to be passed into func.
"""

# Fragment shared by the binary (two-operand) EWM methods cov/corr.
_pairwise_template = """
Parameters
----------
other : Series, DataFrame, or ndarray, optional
If not supplied then will default to self and produce pairwise
output.
pairwise : bool, default None
If False then only matching columns between self and other will be
used and the output will be a DataFrame.
If True then all pairwise combinations will be calculated and the
output will be a MultiIndex DataFrame in the case of DataFrame
inputs. In the case of missing elements, only complete pairwise
observations will be used.
bias : bool, default False
Use a standard estimation bias correction.
**kwargs
Keyword arguments to be passed into func.
"""
class EWM(_Rolling):
    r"""
Provides exponential weighted functions.
.. versionadded:: 0.18.0
Parameters
----------
com : float, optional
Specify decay in terms of center of mass,
:math:`\alpha = 1 / (1 + com),\text{ for } com \geq 0`
span : float, optional
Specify decay in terms of span,
:math:`\alpha = 2 / (span + 1),\text{ for } span \geq 1`
halflife : float, optional
Specify decay in terms of half-life,
:math:`\alpha = 1 - exp(log(0.5) / halflife),\text{ for } halflife > 0`
alpha : float, optional
Specify smoothing factor :math:`\alpha` directly,
:math:`0 < \alpha \leq 1`
.. versionadded:: 0.18.0
min_periods : int, default 0
Minimum number of observations in window required to have a value
(otherwise result is NA).
adjust : bool, default True
Divide by decaying adjustment factor in beginning periods to account
for imbalance in relative weightings (viewing EWMA as a moving average)
ignore_na : bool, default False
Ignore missing values when calculating weights;
specify True to reproduce pre-0.15.0 behavior
Returns
-------
a Window sub-classed for the particular operation
See Also
--------
rolling : Provides rolling window calculations.
expanding : Provides expanding transformations.
Notes
-----
Exactly one of center of mass, span, half-life, and alpha must be provided.
Allowed values and relationship between the parameters are specified in the
parameter descriptions above; see the link at the end of this section for
a detailed explanation.
When adjust is True (default), weighted averages are calculated using
weights (1-alpha)**(n-1), (1-alpha)**(n-2), ..., 1-alpha, 1.
When adjust is False, weighted averages are calculated recursively as:
weighted_average[0] = arg[0];
weighted_average[i] = (1-alpha)*weighted_average[i-1] + alpha*arg[i].
When ignore_na is False (default), weights are based on absolute positions.
For example, the weights of x and y used in calculating the final weighted
average of [x, None, y] are (1-alpha)**2 and 1 (if adjust is True), and
(1-alpha)**2 and alpha (if adjust is False).
When ignore_na is True (reproducing pre-0.15.0 behavior), weights are based
on relative positions. For example, the weights of x and y used in
calculating the final weighted average of [x, None, y] are 1-alpha and 1
(if adjust is True), and 1-alpha and alpha (if adjust is False).
More details can be found at
http://pandas.pydata.org/pandas-docs/stable/computation.html#exponentially-weighted-windows
Examples
--------
>>> df = pd.DataFrame({'B': [0, 1, 2, np.nan, 4]})
B
0 0.0
1 1.0
2 2.0
3 NaN
4 4.0
>>> df.ewm(com=0.5).mean()
B
0 0.000000
1 0.750000
2 1.615385
3 1.615385
4 3.670213
"""
    _attributes = ['com', 'min_periods', 'adjust', 'ignore_na', 'axis']

    def __init__(self, obj, com=None, span=None, halflife=None, alpha=None,
                 min_periods=0, adjust=True, ignore_na=False,
                 axis=0):
        self.obj = obj
        # com/span/halflife/alpha are alternative ways to express the decay;
        # _get_center_of_mass normalizes whichever was supplied to ``com``.
        self.com = _get_center_of_mass(com, span, halflife, alpha)
        self.min_periods = min_periods
        self.adjust = adjust
        self.ignore_na = ignore_na
        self.axis = axis
        self.on = None

    @property
    def _constructor(self):
        return EWM

    _agg_see_also_doc = dedent("""
See Also
--------
pandas.DataFrame.rolling.aggregate
""")

    _agg_examples_doc = dedent("""
Examples
--------
>>> df = pd.DataFrame(np.random.randn(10, 3), columns=['A', 'B', 'C'])
>>> df
A B C
0 -2.385977 -0.102758 0.438822
1 -1.004295 0.905829 -0.954544
2 0.735167 -0.165272 -1.619346
3 -0.702657 -1.340923 -0.706334
4 -0.246845 0.211596 -0.901819
5 2.463718 3.157577 -1.380906
6 -1.142255 2.340594 -0.039875
7 1.396598 -1.647453 1.677227
8 -0.543425 1.761277 -0.220481
9 -0.640505 0.289374 -1.550670
>>> df.ewm(alpha=0.5).mean()
A B C
0 -2.385977 -0.102758 0.438822
1 -1.464856 0.569633 -0.490089
2 -0.207700 0.149687 -1.135379
3 -0.471677 -0.645305 -0.906555
4 -0.355635 -0.203033 -0.904111
5 1.076417 1.503943 -1.146293
6 -0.041654 1.925562 -0.588728
7 0.680292 0.132049 0.548693
8 0.067236 0.948257 0.163353
9 -0.286980 0.618493 -0.694496
""")

    @Substitution(see_also=_agg_see_also_doc,
                  examples=_agg_examples_doc,
                  versionadded='',
                  klass='Series/Dataframe',
                  axis='')
    @Appender(_shared_docs['aggregate'])
    def aggregate(self, arg, *args, **kwargs):
        return super(EWM, self).aggregate(arg, *args, **kwargs)

    agg = aggregate

    def _apply(self, func, **kwargs):
        """
        Rolling statistical measure using supplied function. Designed to be
        used with passed-in Cython array-based functions.

        Parameters
        ----------
        func : str/callable to apply

        Returns
        -------
        y : same type as input argument
        """
        blocks, obj, index = self._create_blocks()
        results = []
        for b in blocks:
            try:
                values = self._prep_values(b.values)
            except TypeError:
                # non-numeric blocks pass through untouched
                results.append(b.values.copy())
                continue

            if values.size == 0:
                results.append(values.copy())
                continue

            # if we have a string function name, wrap it
            if isinstance(func, compat.string_types):
                cfunc = getattr(libwindow, func, None)
                if cfunc is None:
                    raise ValueError("we do not support this function "
                                     "in libwindow.{func}".format(func=func))

                def func(arg):
                    # NOTE(review): this rebinds ``func``, so subsequent
                    # blocks in the loop reuse the wrapped kernel instead of
                    # re-resolving the string name.
                    return cfunc(arg, self.com, int(self.adjust),
                                 int(self.ignore_na), int(self.min_periods))

            results.append(np.apply_along_axis(func, self.axis, values))

        return self._wrap_results(results, blocks, obj)

    @Substitution(name='ewm')
    @Appender(_doc_template)
    def mean(self, *args, **kwargs):
        """
        Exponential weighted moving average.

        Parameters
        ----------
        *args, **kwargs
            Arguments and keyword arguments to be passed into func.
        """
        nv.validate_window_func('mean', args, kwargs)
        return self._apply('ewma', **kwargs)

    @Substitution(name='ewm')
    @Appender(_doc_template)
    @Appender(_bias_template)
    def std(self, bias=False, *args, **kwargs):
        """
        Exponential weighted moving stddev.
        """
        nv.validate_window_func('std', args, kwargs)
        return _zsqrt(self.var(bias=bias, **kwargs))

    vol = std

    @Substitution(name='ewm')
    @Appender(_doc_template)
    @Appender(_bias_template)
    def var(self, bias=False, *args, **kwargs):
        """
        Exponential weighted moving variance.
        """
        nv.validate_window_func('var', args, kwargs)

        def f(arg):
            return libwindow.ewmcov(arg, arg, self.com, int(self.adjust),
                                    int(self.ignore_na), int(self.min_periods),
                                    int(bias))

        return self._apply(f, **kwargs)

    @Substitution(name='ewm')
    @Appender(_doc_template)
    @Appender(_pairwise_template)
    def cov(self, other=None, pairwise=None, bias=False, **kwargs):
        """
        Exponential weighted sample covariance.
        """
        if other is None:
            other = self._selected_obj
            # only default unset
            pairwise = True if pairwise is None else pairwise
        other = self._shallow_copy(other)

        def _get_cov(X, Y):
            X = self._shallow_copy(X)
            Y = self._shallow_copy(Y)
            cov = libwindow.ewmcov(X._prep_values(), Y._prep_values(),
                                   self.com, int(self.adjust),
                                   int(self.ignore_na), int(self.min_periods),
                                   int(bias))
            return X._wrap_result(cov)

        return _flex_binary_moment(self._selected_obj, other._selected_obj,
                                   _get_cov, pairwise=bool(pairwise))

    @Substitution(name='ewm')
    @Appender(_doc_template)
    @Appender(_pairwise_template)
    def corr(self, other=None, pairwise=None, **kwargs):
        """
        Exponential weighted sample correlation.
        """
        if other is None:
            other = self._selected_obj
            # only default unset
            pairwise = True if pairwise is None else pairwise
        other = self._shallow_copy(other)

        def _get_corr(X, Y):
            X = self._shallow_copy(X)
            Y = self._shallow_copy(Y)

            def _cov(x, y):
                # biased (bias=1) covariance, so the x/y variance terms
                # cancel consistently in the correlation ratio below
                return libwindow.ewmcov(x, y, self.com, int(self.adjust),
                                        int(self.ignore_na),
                                        int(self.min_periods),
                                        1)

            x_values = X._prep_values()
            y_values = Y._prep_values()
            with np.errstate(all='ignore'):
                cov = _cov(x_values, y_values)
                x_var = _cov(x_values, x_values)
                y_var = _cov(y_values, y_values)
                corr = cov / _zsqrt(x_var * y_var)
            return X._wrap_result(corr)

        return _flex_binary_moment(self._selected_obj, other._selected_obj,
                                   _get_corr, pairwise=bool(pairwise))
# Helper Funcs


def _flex_binary_moment(arg1, arg2, f, pairwise=False):
    """
    Apply the binary moment function ``f`` to ``arg1``/``arg2``,
    dispatching on their types and the ``pairwise`` flag:

    * Series/ndarray vs Series/ndarray -> ``f`` on the aligned pair
    * DataFrame vs DataFrame, pairwise=False -> column-by-column result
    * DataFrame vs DataFrame, pairwise=True -> every column of arg1
      against every column of arg2, indexed by a MultiIndex
    * DataFrame vs Series/ndarray -> ``f`` of each column against arg2
    """
    if not (isinstance(arg1, (np.ndarray, ABCSeries, ABCDataFrame)) and
            isinstance(arg2, (np.ndarray, ABCSeries, ABCDataFrame))):
        raise TypeError("arguments to moment function must be of type "
                        "np.ndarray/Series/DataFrame")

    if (isinstance(arg1, (np.ndarray, ABCSeries)) and
            isinstance(arg2, (np.ndarray, ABCSeries))):
        X, Y = _prep_binary(arg1, arg2)
        return f(X, Y)

    elif isinstance(arg1, ABCDataFrame):
        from pandas import DataFrame

        def dataframe_from_int_dict(data, frame_template):
            # `data` is keyed by integer column positions; map the keys
            # back to the template's column labels.
            result = DataFrame(data, index=frame_template.index)
            if len(result.columns) > 0:
                result.columns = frame_template.columns[result.columns]
            return result

        results = {}
        if isinstance(arg2, ABCDataFrame):
            if pairwise is False:
                if arg1 is arg2:
                    # special case in order to handle duplicate column names
                    for i, col in enumerate(arg1.columns):
                        results[i] = f(arg1.iloc[:, i], arg2.iloc[:, i])
                    return dataframe_from_int_dict(results, arg1)
                else:
                    if not arg1.columns.is_unique:
                        raise ValueError("'arg1' columns are not unique")
                    if not arg2.columns.is_unique:
                        raise ValueError("'arg2' columns are not unique")
                    with warnings.catch_warnings(record=True):
                        warnings.simplefilter("ignore", RuntimeWarning)
                        X, Y = arg1.align(arg2, join='outer')
                    # adding 0 * other masks each frame onto the shared
                    # set of valid (non-NaN) entries
                    X = X + 0 * Y
                    Y = Y + 0 * X
                    with warnings.catch_warnings(record=True):
                        warnings.simplefilter("ignore", RuntimeWarning)
                        res_columns = arg1.columns.union(arg2.columns)
                    for col in res_columns:
                        if col in X and col in Y:
                            results[col] = f(X[col], Y[col])
                    return DataFrame(results, index=X.index,
                                     columns=res_columns)
            elif pairwise is True:
                results = defaultdict(dict)
                for i, k1 in enumerate(arg1.columns):
                    for j, k2 in enumerate(arg2.columns):
                        if j < i and arg2 is arg1:
                            # Symmetric case
                            results[i][j] = results[j][i]
                        else:
                            results[i][j] = f(*_prep_binary(arg1.iloc[:, i],
                                                            arg2.iloc[:, j]))

                from pandas import MultiIndex, concat

                result_index = arg1.index.union(arg2.index)
                if len(result_index):
                    # construct result frame
                    result = concat(
                        [concat([results[i][j]
                                 for j, c in enumerate(arg2.columns)],
                                ignore_index=True)
                         for i, c in enumerate(arg1.columns)],
                        ignore_index=True,
                        axis=1)
                    result.columns = arg1.columns

                    # set the index and reorder
                    if arg2.columns.nlevels > 1:
                        result.index = MultiIndex.from_product(
                            arg2.columns.levels + [result_index])
                        result = result.reorder_levels([2, 0, 1]).sort_index()
                    else:
                        result.index = MultiIndex.from_product(
                            [range(len(arg2.columns)),
                             range(len(result_index))])
                        result = result.swaplevel(1, 0).sort_index()
                        result.index = MultiIndex.from_product(
                            [result_index] + [arg2.columns])
                else:
                    # empty result
                    result = DataFrame(
                        index=MultiIndex(levels=[arg1.index, arg2.columns],
                                         codes=[[], []]),
                        columns=arg2.columns,
                        dtype='float64')

                # reset our index names to arg1 names
                # reset our column names to arg2 names
                # careful not to mutate the original names
                result.columns = result.columns.set_names(
                    arg1.columns.names)
                result.index = result.index.set_names(
                    result_index.names + arg2.columns.names)

                return result
            else:
                raise ValueError("'pairwise' is not True/False")
        else:
            # DataFrame vs Series/ndarray: apply f column-wise
            results = {i: f(*_prep_binary(arg1.iloc[:, i], arg2))
                       for i, col in enumerate(arg1.columns)}
            return dataframe_from_int_dict(results, arg1)

    else:
        # arg1 is Series/ndarray and arg2 is a DataFrame: swap and recurse
        return _flex_binary_moment(arg2, arg1, f)
def _get_center_of_mass(comass, span, halflife, alpha):
    """
    Convert one of the mutually exclusive EWM decay specifications
    (com, span, halflife or alpha) into a center-of-mass float.

    Exactly one of the four arguments must be non-None; each is
    validated against its legal domain before conversion.
    """
    if com.count_not_none(comass, span, halflife, alpha) > 1:
        raise ValueError("comass, span, halflife, and alpha "
                         "are mutually exclusive")

    # Convert to center of mass; domain checks ensure 0 < alpha <= 1
    if comass is not None:
        if comass < 0:
            raise ValueError("comass must satisfy: comass >= 0")
    elif span is not None:
        if span < 1:
            raise ValueError("span must satisfy: span >= 1")
        comass = (span - 1) / 2.
    elif halflife is not None:
        if halflife <= 0:
            raise ValueError("halflife must satisfy: halflife > 0")
        # com = 1/decay - 1 where decay = 1 - exp(ln(0.5)/halflife)
        comass = 1. / (1 - np.exp(np.log(0.5) / halflife)) - 1
    elif alpha is not None:
        if alpha <= 0 or alpha > 1:
            raise ValueError("alpha must satisfy: 0 < alpha <= 1")
        comass = (1.0 - alpha) / alpha
    else:
        raise ValueError("Must pass one of comass, span, halflife, or alpha")

    return float(comass)
def _offset(window, center):
    """
    Return the index offset for a window: half the window size when
    centered, otherwise 0. ``window`` may be an int or a sized weights
    object, in which case its length is used.
    """
    size = window if is_integer(window) else len(window)
    off = (size - 1) / 2. if center else 0
    try:
        return int(off)
    except TypeError:
        # e.g. a numpy scalar that does not coerce via int()
        return off.astype(int)
def _require_min_periods(p):
def _check_func(minp, window):
if minp is None:
return window
else:
return max(p, minp)
return _check_func
def _use_window(minp, window):
if minp is None:
return window
else:
return minp
def _zsqrt(x):
    """
    Element-wise square root that maps negative inputs to 0 instead of
    NaN (negative variances only arise from numerical noise).
    """
    mask = x < 0
    with np.errstate(all='ignore'):
        result = np.sqrt(x)

    if isinstance(x, ABCDataFrame):
        if mask.values.any():
            result[mask] = 0
    elif mask.any():
        result[mask] = 0

    return result
def _prep_binary(arg1, arg2):
if not isinstance(arg2, type(arg1)):
raise Exception('Input arrays must be of the same type!')
# mask out values, this also makes a common index...
X = arg1 + 0 * arg2
Y = arg2 + 0 * arg1
return X, Y
# Top-level exports


def rolling(obj, win_type=None, **kwds):
    """Create a rolling-window object over a Series or DataFrame."""
    if not isinstance(obj, (ABCSeries, ABCDataFrame)):
        raise TypeError('invalid type: %s' % type(obj))

    # a window type (e.g. 'gaussian') selects the weighted Window class
    if win_type is None:
        return Rolling(obj, **kwds)
    return Window(obj, win_type=win_type, **kwds)


rolling.__doc__ = Window.__doc__
def expanding(obj, **kwds):
    """Create an expanding-window object over a Series or DataFrame."""
    if isinstance(obj, (ABCSeries, ABCDataFrame)):
        return Expanding(obj, **kwds)
    raise TypeError('invalid type: %s' % type(obj))


expanding.__doc__ = Expanding.__doc__
def ewm(obj, **kwds):
    """Create an exponentially-weighted-moment object over a Series or DataFrame."""
    if isinstance(obj, (ABCSeries, ABCDataFrame)):
        return EWM(obj, **kwds)
    raise TypeError('invalid type: %s' % type(obj))


ewm.__doc__ = EWM.__doc__
|
GuessWhoSamFoo/pandas
|
pandas/core/window.py
|
Python
|
bsd-3-clause
| 83,431
|
[
"Gaussian"
] |
8ed11ecbe3fab1aa2d1fed1e6dccc6304bb59f752206f642e0318f066d0f34d8
|
#!/usr/bin/env python
import sys
import unittest
import vtk
from PyQt5 import QtWidgets
from peacock.ExodusViewer.ExodusViewer import main
from peacock.utils import Testing
class TestExodusViewer(Testing.PeacockImageTestCase):
    """
    Testing for ExodusViewer.

    TODO: There is a rendering artifact that shows up in these tests on linux,
    so the imagediffs are not performed.
    """
    #: QApplication: The main App for QT, this must be static to work correctly.
    qapp = QtWidgets.QApplication(sys.argv)

    #: str: The filename to load.
    _filename = Testing.get_chigger_input('mug_blocks_out.e')

    def setUp(self):
        """
        Loads an Exodus file in the VTKWindowWidget object using a structure similar to the ExodusViewer widget.
        """
        self._widget = main(size=[400,400])
        self._widget.initialize([self._filename])
        # Start with 'diffused' variable
        self._widget.currentWidget().VariablePlugin.VariableList.setCurrentIndex(2)
        self._widget.currentWidget().VariablePlugin.VariableList.currentIndexChanged.emit(2)

    def write(self, filename):
        """
        Overload the write method: route writes through the output plugin.
        """
        self._widget.currentWidget().OutputPlugin.write.emit(filename)

    def testInitial(self):
        """
        Test initial state: single non-closable 'Results' tab.
        """
        # image comparison is only performed on macOS (see class TODO)
        if sys.platform == 'darwin':
            self.assertImage('testInitial.png')
        self.assertFalse(self._widget.cornerWidget().CloseButton.isEnabled())
        self.assertEqual(self._widget.tabText(self._widget.currentIndex()), 'Results')

    def testCloneClose(self):
        """
        Test clone button works: cloning adds a 'Results (2)' tab, enables
        closing, and tabs keep independent camera state.
        """
        self._widget.cornerWidget().clone.emit()
        self._widget.currentWidget().VariablePlugin.VariableList.setCurrentIndex(2)
        self._widget.currentWidget().VariablePlugin.VariableList.currentIndexChanged.emit(2)
        self.assertEqual(self._widget.count(), 2)
        self.assertEqual(self._widget.tabText(self._widget.currentIndex()), 'Results (2)')
        self.assertTrue(self._widget.cornerWidget().CloseButton.isEnabled())
        if sys.platform == 'darwin':
            self.assertImage('testInitial.png')

        # Change camera on cloned tab
        camera = vtk.vtkCamera()
        camera.SetViewUp(-0.7786, 0.2277, 0.5847)
        camera.SetPosition(9.2960, -0.4218, 12.6685)
        camera.SetFocalPoint(0.0000, 0.0000, 0.1250)
        self._widget.currentWidget().VTKWindowPlugin.onCameraChanged(camera)
        if sys.platform == 'darwin':
            self.assertImage('testClone.png')

        # Switch to first tab; its view must be unchanged by the clone's camera
        self._widget.setCurrentIndex(0)
        self.assertEqual(self._widget.tabText(self._widget.currentIndex()), 'Results')
        if sys.platform == 'darwin':
            self.assertImage('testInitial.png')

        # Close the first tab; the clone remains and becomes non-closable
        self._widget.cornerWidget().close.emit()
        self.assertEqual(self._widget.count(), 1)
        self.assertEqual(self._widget.tabText(self._widget.currentIndex()), 'Results (2)')
        self.assertFalse(self._widget.cornerWidget().CloseButton.isEnabled())
if __name__ == '__main__':
    # verbosity=2 prints each test name as it runs
    unittest.main(module=__name__, verbosity=2)
|
backmari/moose
|
python/peacock/tests/exodus_tab/test_ExodusViewer.py
|
Python
|
lgpl-2.1
| 3,210
|
[
"VTK"
] |
63cd65934bcceee85bb21eca20cceee52afd1cb17c2e808ddf478ed3dcb3dd6a
|
##############################################################################
# adaptiveMD: A Python Framework to Run Adaptive Molecular Dynamics (MD)
# Simulations on HPC Resources
# Copyright 2017 FU Berlin and the Authors
#
# Authors: Jan-Hendrik Prinz
# Contributors:
#
# `adaptiveMD` is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation, either version 2.1
# of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with MDTraj. If not, see <http://www.gnu.org/licenses/>.
##############################################################################
from __future__ import absolute_import
import os
import ujson
# from adaptivemd.task import PythonTask
from adaptivemd.file import Location, File
from adaptivemd.engine import (Engine, Frame, Trajectory,
TrajectoryGenerationTask, TrajectoryExtensionTask)
exec_file = File('file://' + os.path.join(os.path.dirname(__file__), 'openmmrun.py')).load()
class OpenMMEngine(Engine):
    """
    OpenMM Engine to be used by Adaptive MD

    Attributes
    ----------
    system_file : `File`
        the system.xml file for OpenMM
    integrator_file : `File`
        the integrator.xml file for OpenMM
    pdb_file : `File`
        the .pdb file for the topology
    args : str
        a list of arguments passed to the `openmmrun.py` script
    """

    def __init__(self, system_file, integrator_file, pdb_file, args=None):
        super(OpenMMEngine, self).__init__()
        self._items = dict()
        self['pdb_file'] = pdb_file
        self['system_file'] = system_file
        self['integrator_file'] = integrator_file
        self['_executable_file'] = exec_file
        # stage every input file into the shared staging area once and
        # remember the staged location under the '<name>_stage' key
        for name, f in self.files.items():
            stage = f.transfer(Location('staging:///'))
            self[name + '_stage'] = stage.target
            self.initial_staging.append(stage)
        if args is None:
            args = '-p CPU'
        self.args = args

    @classmethod
    def from_dict(cls, dct):
        """Restore an engine from its dict representation (see to_dict)."""
        obj = super(OpenMMEngine, cls).from_dict(dct)
        obj.args = dct['args']
        return obj

    def to_dict(self):
        """Serialize the engine; extends the base dict with `args`."""
        dct = super(OpenMMEngine, self).to_dict()
        dct.update({
            'args': self.args})
        return dct

    @staticmethod
    def then_func_import(project, task, data, inputs):
        # register files produced by an import task with the project
        for f in data:
            # check if file with same location exists
            if f not in project.files:
                project.files.update(f)

    def _create_output_str(self):
        # build the --types="..." CLI option describing the output types;
        # double quotes are swapped for single quotes to survive the shell
        d = dict()
        for name, opt in self.types.items():
            d[name] = opt.to_dict()
        return '--types="%s"' % ujson.dumps(d).replace('"', "'")

    def run(self, target, resource_name=None, export_path=None,
            cpu_threads=1, gpu_contexts=0, mpi_rank=0):
        """
        Build a `TrajectoryGenerationTask` that produces the `target`
        trajectory with `openmmrun.py`, or return None when the initial
        frame cannot be used (no full-coordinate source available).
        """
        t = TrajectoryGenerationTask(self, target, cpu_threads=cpu_threads,
                                     gpu_contexts=gpu_contexts, mpi_rank=mpi_rank)
        if resource_name is None:
            resource_name = list()
        elif isinstance(resource_name, str):
            resource_name = [resource_name]
        assert isinstance(resource_name, list)
        t.resource_name = resource_name
        if export_path:
            t.append(export_path)

        # link the staged input files into the task working directory
        initial_pdb = t.pre_link(self['pdb_file_stage'], Location('initial.pdb'))
        t.pre_link(self['system_file_stage'])
        t.pre_link(self['integrator_file_stage'])
        t.pre_link(self['_executable_file_stage'])

        # pick the starting structure depending on what the target frame is
        if target.frame in [self['pdb_file'], self['pdb_file_stage']]:
            input_pdb = initial_pdb
        elif isinstance(target.frame, File):
            loc = Location('coordinates.%s' % target.frame.extension)
            input_pdb = t.get(target.frame, loc)
        elif isinstance(target.frame, Frame):
            input_traj = t.pre_link(target.frame.trajectory, 'source/')
            input_pdb = File('input.pdb')
            # frame index is in canonical stride = 1
            # we need to figure out which frame in the traj this actually is
            # also, we need a traj with full coordinates / selection = None
            ty, idx = target.frame.index_in_outputs
            if ty is None:
                # cannot use a trajectory where we do not have full coordinates
                return
            t.pre.append('mdconvert -o {target} -i {index} -t {pdb} {source}'.format(
                target=input_pdb,  # input.pdb is used as starting structure
                index=idx,  # the index from the source trajectory
                pdb=initial_pdb,  # use the main pdb
                source=input_traj.outputs(ty)))  # we pick output ty
        else:
            # for now we assume that if the initial frame is None or
            # not specific use the engines internal. That should be changed
            # todo: Raise exception here
            return

        # this represents our output trajectory
        output = Trajectory('traj/', target.frame, length=target.length, engine=self)

        # create the directory
        t.touch(output)

        # TODO option for retry
        # TODO use filenames from engine
        # shell wrapper: retry the run command until the trajectory file exists
        retry = '\nj=0\ntries=10\nsleep=1\n'
        retry += '\ntrajfile=traj/protein.dcd\n\n'
        retry += 'while [ $j -le $tries ]; do if ! [ -s $trajfile ]; then {0}; fi; sleep 1; j=$((j+1)); done'

        cmd = 'python openmmrun.py {args} {types} -s {system} -i {integrator} -t {pdb} --length {length} {output}'.format(
            pdb=input_pdb,
            types=self._create_output_str(),
            length=target.length,
            system=self['system_file'].basename,
            integrator=self['integrator_file'].basename,
            output=output,
            args=self.args,
        )
        cmd = retry.format(cmd)
        t.append(cmd)
        t.put(output, target)
        return t

    def extend(self, source, length, resource_name=None, export_path=None,
               cpu_threads=1, gpu_contexts=0, mpi_rank=0):
        """
        Build a `TrajectoryExtensionTask` that restarts from `source` and
        extends it by `length` frames; returns [] for non-positive length.
        """
        if length < 0:
            return []

        # create a new file, but with the same name, etc, just new length
        target = source.clone()
        target.length = len(source) + length

        t = TrajectoryExtensionTask(self, target, source, cpu_threads=cpu_threads,
                                    gpu_contexts=gpu_contexts, mpi_rank=mpi_rank,
                                    )#resource_name=resource_name, export_path=export_path)
        if resource_name is None:
            resource_name = list()
        elif isinstance(resource_name, str):
            resource_name = [resource_name]
        assert isinstance(resource_name, list)
        t.resource_name = resource_name
        if export_path:
            t.append(export_path)

        initial_pdb = t.link(self['pdb_file_stage'], Location('initial.pdb'))
        t.link(self['system_file_stage'])
        t.link(self['integrator_file_stage'])
        t.link(self['_executable_file_stage'])

        # this represents our output trajectory
        source_link = t.link(source, 'source/')
        extension = Trajectory(
            'extension/',
            target.frame,
            length=target.length,
            engine=self)
        t.touch(extension)

        # TODO option for retry
        # TODO use filenames from engine
        # shell wrapper: retry until the extension trajectory file exists
        retry = '\nj=0\ntries=10\nsleep=1\n'
        retry += '\ntrajfile=extension/protein.dcd\n\n'
        retry += 'while [ $j -le $tries ]; do if ! [ -s $trajfile ]; then {0}; fi; sleep 1; j=$((j+1)); done'

        cmd = ('python openmmrun.py {args} {types} -s {system} -i {integrator} --restart {restart} -t {pdb} '
               '--length {length} {output}').format(
            pdb=initial_pdb,
            restart=source.file('restart.npz'),  # todo: this is engine specific!
            length=target.length - source.length,
            system=self['system_file'].basename,
            integrator=self['integrator_file'].basename,
            output=extension,
            args=self.args,
            types=self._create_output_str()
        )
        cmd = retry.format(cmd)
        t.append(cmd)

        # join both trajectories for all outputs
        for ty, desc in self.types.items():
            # stride = desc['stride']
            outname = ty + '.temp.dcd'
            t.post.append('mdconvert -o {output} {source} {extension}'.format(
                output=extension.file(outname),
                source=source_link.outputs(ty),
                extension=extension.outputs(ty)
            ))
            # rename joined extended.dcd into output.dcd
            t.post.append(extension.file(outname).move(extension.outputs(ty)))

        # now extension/ should contain all files as expected
        # move extended trajectory to target place (replace old) files
        # this will also register the new trajectory folder as existent
        t.post_put(extension, target)
        return t
# def task_import_trajectory_folder(self, source):
# t = PythonTask(self)
#
# t.link(self['pdb_file_stage'], Location('initial.pdb'))
# t.call(scan_trajectories, source=source)
#
# # call `then_func_import` after success
# t.then('then_func_import')
#
# return t
#
#
# def scan_trajectories(source):
# import glob
# import mdtraj as md
#
# files = glob.glob(source)
#
# here = os.getcwd()
#
# reference_list = []
# for f in files:
#
# rel = os.path.relpath(f, here)
#
# if rel.startswith('../../../../'):
# p = 'worker://' + os.path.abspath(f)
# elif rel.startswith('../../../'):
# p = 'shared://' + rel[8:]
# elif rel.startswith('../../'):
# p = 'sandbox://' + rel[5:]
# else:
# p = 'worker://' + os.path.abspath(f)
#
# # print f, rel, p
#
# traj = md.load(f, top='initial.pdb')
# reference = Trajectory(p, None, len(traj))
# reference_list.append(reference)
#
# return reference_list
|
jrossyra/adaptivemd
|
adaptivemd/engine/openmm/openmm.py
|
Python
|
lgpl-2.1
| 10,478
|
[
"MDTraj",
"OpenMM"
] |
60f0051a9cb75f9d3a522703eb0a079a43c664aabd8fe6f85447148878a2823c
|
import re
import pytest
from .. import factories as f
pytestmark = pytest.mark.django_db
def test_signup_flow(base_url, browser, outbox):
    """
    End-to-end browser test of the signup flow: register, confirm the
    e-mail via the sent link, log in, complete the profile, and log out.

    Fixtures: ``base_url`` (site root URL), ``browser`` (splinter-style
    browser), ``outbox`` (captured outgoing e-mails).
    """
    f.create_usertype(slug='tutor', display_name='tutor')

    # Sign-Up option should be present there
    browser.visit(base_url)
    sign_up_link = browser.find_by_text('Sign Up')[0]
    assert sign_up_link

    # On Clicking it, it should open a Sign Up Page
    sign_up_link.click()

    # asserting if it's the signup page or not
    assert 'Signup' in browser.title

    # Now Fill the relevant information
    browser.fill('first_name', 'random')
    browser.fill('last_name', 'person')
    browser.fill('mobile', '0812739120')
    browser.fill('username', 'randomnessprevails')
    browser.fill('email', 'random@a.com')
    browser.fill('password1', 'secretpassword')
    browser.fill('password2', 'secretpassword')

    # Click on the Submit Button
    browser.find_by_css('[type=submit]')[0].click()

    # Check for the text shown in the browser when user hits submit button
    assert browser.is_text_present(
        'We have sent an e-mail to you for verification')

    # Check for the mailbox for the confirmation link
    assert len(outbox) == 1
    mail = outbox[0]
    activate_link = re.findall(r'http.*/accounts/confirm-email/.*/', mail.body)
    assert activate_link
    browser.visit(activate_link[0])
    assert "Confirm E-mail Address" in browser.title
    browser.find_by_css('[type=submit]')[0].click()

    # confirming the e-mail lands on the login page; log in with the new account
    assert "Login" in browser.title
    browser.fill('login', 'random@a.com')
    browser.fill('password', 'secretpassword')
    browser.find_by_css('[type=submit]')[0].click()
    assert browser.is_text_present("Dashboard")

    # fill in the profile edit form with freshly created section/location
    # poc_type = f.create_usertype(slug='dummy', display_name='College POC')
    section1 = f.create_workshop_section(name='section1')
    location1 = f.create_locaiton(name='location1')
    url = base_url + '/profile/randomnessprevails/edit'
    browser.visit(url)
    # browser.select('usertype', poc_type.id)
    browser.select('interested_sections', section1.id)
    browser.select('interested_locations', location1.id)
    browser.select('location', location1.id)
    browser.find_by_css('[type=submit]')[0].click()
    assert browser.is_text_present('My Profile')
    assert browser.is_text_present('Graph')

    # Logging Out
    url = base_url + '/accounts/logout/'
    browser.visit(url)
    assert 'Home | PythonExpress' in browser.title
|
shankisg/wye
|
tests/functional/test_user_flow.py
|
Python
|
mit
| 2,457
|
[
"VisIt"
] |
37a4fef3d7b811659691e2672e6410d1923ccfe4b200e7e65c6509fd0971d3de
|
#
# Copyright (c) 2015 nexB Inc. and others. All rights reserved.
# http://nexb.com and https://github.com/nexB/scancode-toolkit/
# The ScanCode software is licensed under the Apache License version 2.0.
# Data generated with ScanCode require an acknowledgment.
# ScanCode is a trademark of nexB Inc.
#
# You may not use this software except in compliance with the License.
# You may obtain a copy of the License at: http://apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software distributed
# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
#
# When you publish or redistribute any data created with ScanCode or any ScanCode
# derivative work, you must accompany this data with the following acknowledgment:
#
# Generated with ScanCode and provided on an "AS IS" BASIS, WITHOUT WARRANTIES
# OR CONDITIONS OF ANY KIND, either express or implied. No content created from
# ScanCode should be considered or used as legal advice. Consult an Attorney
# for any legal advice.
# ScanCode is a free software code scanning tool from nexB Inc. and others.
# Visit https://github.com/nexB/scancode-toolkit/ for support and download.
from __future__ import print_function, absolute_import
import click
from click._termui_impl import ProgressBar
from click.utils import echo
import posixpath
"""
Various CLI UI utilities, many related to Click and progress reporting.
"""
class BaseCommand(click.Command):
    """
    click Command subclass working around a couple of Click quirks.
    """

    # sub-classes override this with a command-specific pointer such as
    # "Try 'scancode --help' for help on options and arguments."
    short_usage_help = ''

    def get_usage(self, ctx):
        """
        Append an explicit pointer to --help to the usage text.
        Workaround click issue https://github.com/mitsuhiko/click/issues/393
        """
        usage = super(BaseCommand, self).get_usage(ctx)
        return usage + self.short_usage_help

    def main(self, args=None, prog_name=None, complete_var=None,
             standalone_mode=True, **extra):
        """
        Always run under this command's own name.
        Workaround click 4.0 bug https://github.com/mitsuhiko/click/issues/365
        """
        return click.Command.main(self, args=args, prog_name=self.name,
                                  complete_var=complete_var,
                                  standalone_mode=standalone_mode, **extra)
class EnhancedProgressBar(ProgressBar):
    """
    Enhanced Click progressbar adding custom first and last messages on enter
    and exit.
    """

    def __init__(self, iterable, length=None, fill_char='#', empty_char=' ',
                 bar_template='%(bar)s', info_sep=' ', show_eta=True,
                 show_percent=None, show_pos=False, item_show_func=None,
                 label=None, file=None, color=None, width=30,  # @ReservedAssignment
                 start_show_func=None, finish_show_func=None):
        """
        New parameters added on top of ProgressBar: start_show_func and
        finish_show_func to drive some display at the start and finish of a
        progression.
        """
        ProgressBar.__init__(self, iterable, length=length, fill_char=fill_char,
                             empty_char=empty_char, bar_template=bar_template,
                             info_sep=info_sep, show_eta=show_eta,
                             show_percent=show_percent, show_pos=show_pos,
                             item_show_func=item_show_func, label=label,
                             file=file, color=color, width=width)
        self.start_show_func = start_show_func
        self.finish_show_func = finish_show_func

    def __enter__(self):
        # emit the start message before the bar is first rendered
        self.render_start()
        return ProgressBar.__enter__(self)

    def render_start(self):
        # Display the optional introduction text, unless hidden.
        if self.is_hidden:
            return
        if self.start_show_func is not None:
            text = self.start_show_func()
            if text:
                echo(text, file=self.file, color=self.color)
                self.file.flush()

    def render_finish(self):
        if self.is_hidden:
            return
        super(EnhancedProgressBar, self).render_finish()
        self.show_finish()

    def show_finish(self):
        # Display the optional closing text.
        if self.finish_show_func is not None:
            text = self.finish_show_func()
            if text:
                echo(text, file=self.file, color=self.color)
                self.file.flush()

    def render_progress(self):
        if not self.is_hidden:
            return ProgressBar.render_progress(self)
class ProgressLogger(EnhancedProgressBar):
    """
    A subclass of Click ProgressBar providing a simpler and more verbose line-
    by-line progress reporting.

    In contrast with the progressbar the label, percent, ETA, pos, bar_template
    and other formatting options are ignored.

    Progress information are printed as-is and no LF is added. The caller must
    provide an item_show_func to display some content and this must be
    terminated with a line feed if needed.

    If no item_show_func is provided a simple dot is printed for each event.
    """

    def render_progress(self):
        # print one line (or one dot) per progress event
        if self.is_hidden:
            return
        line = self.format_progress_line()
        if line:
            # only add new lines if there is an item_show_func
            nl = bool(self.item_show_func)
            echo(line, file=self.file, nl=nl, color=self.color)
            self.file.flush()

    def format_progress_line(self):
        # return the item_show_func output, or a dot placeholder;
        # returns None (print nothing) when the show func yields empty
        if self.item_show_func:
            item_info = self.item_show_func(self.current_item)
        else:
            item_info = '.'
        if item_info:
            return item_info

    def render_finish(self):
        if self.is_hidden:
            return
        # display a new line after the 'dots' IFF we do not have a show func
        nl = not bool(self.item_show_func)
        echo(None, file=self.file, nl=nl, color=self.color)
        self.show_finish()
class NoOpProgressBar(EnhancedProgressBar):
    """
    Progress bar stand-in that displays no progress at all.
    """

    def __init__(self, *args, **kwargs):
        super(NoOpProgressBar, self).__init__(*args, **kwargs)
        # hidden bars skip every render_* call
        self.is_hidden = True
def progressmanager(iterable=None, length=None, label=None, show_eta=True,
                    show_percent=None, show_pos=False, item_show_func=None,
                    fill_char='#', empty_char='-', bar_template=None,
                    info_sep=' ', width=36, file=None, color=None,  # @ReservedAssignment
                    verbose=False, start_show_func=None, finish_show_func=None,
                    quiet=False):
    """This function creates an iterable context manager showing progress as a
    bar (default) or line-by-line log (if verbose is True) while iterating.

    Its arguments are similar to Click.termui.progressbar with
    these new arguments added at the end of the signature:

    :param verbose: if False, display a progress bar, otherwise a progress log
    :param start_show_func: a function called at the start of iteration that
                            can return a string to display as an
                            introduction text before the progress.
    :param finish_show_func: a function called at the end of iteration that
                             can return a string to display after the
                             progress.
    :param quiet: If True, do not display any progress message.
    """
    # pick the reporting flavor: silent, line-by-line, or a regular bar
    if quiet:
        progress_class = NoOpProgressBar
    elif verbose:
        progress_class = ProgressLogger
    else:
        progress_class = EnhancedProgressBar

    if bar_template is None:
        bar_template = '%(label)s [%(bar)s] %(info)s'

    return progress_class(iterable=iterable, length=length, show_eta=show_eta,
                          show_percent=show_percent, show_pos=show_pos,
                          item_show_func=item_show_func, fill_char=fill_char,
                          empty_char=empty_char, bar_template=bar_template,
                          info_sep=info_sep, file=file, label=label,
                          width=width, color=color,
                          start_show_func=start_show_func,
                          finish_show_func=finish_show_func)
def get_relative_path(base, base_resolved, path):
    """
    Compute a new posix path based on 'path' relative to the base in original
    format or a fully resolved posix format.
    """
    if base_resolved == path:
        # a single file or the top level directory itself
        return base
    tail = path[len(base_resolved):].lstrip('/')
    return posixpath.join(base, tail)
|
lach76/scancode-toolkit
|
src/scancode/utils.py
|
Python
|
apache-2.0
| 8,899
|
[
"VisIt"
] |
93b8e92ce0f8f5bda670677eb89ef4df8db426d6343f26602ef4bda0104c57a2
|
from math import pi, cos, sin, sqrt, acos
from ase.atoms import Atoms
from ase.parallel import paropen
def read_xyz(fileobj, index=-1):
    """
    Read one or more images from an XYZ file.

    Parameters
    ----------
    fileobj : str or file-like
        Filename or an already-open file handle. Fix: a file opened here
        is now closed before returning (the original leaked the handle).
    index : int or slice
        Which image(s) to return; defaults to the last one.

    Returns
    -------
    Atoms or list of Atoms, depending on ``index``.
    """
    opened_here = isinstance(fileobj, str)
    if opened_here:
        fileobj = open(fileobj)
    try:
        lines = fileobj.readlines()
    finally:
        if opened_here:
            fileobj.close()

    L1 = lines[0].split()
    if len(L1) == 1:
        # standard XYZ: first line is the atom count, second a comment
        del lines[:2]
        natoms = int(L1[0])
    else:
        # headerless data: treat every line as one atom record
        natoms = len(lines)

    images = []
    while len(lines) >= natoms:
        positions = []
        symbols = []
        for line in lines[:natoms]:
            symbol, x, y, z = line.split()[:4]
            symbols.append(symbol)
            positions.append([float(x), float(y), float(z)])
        images.append(Atoms(symbols=symbols, positions=positions))
        # drop this frame plus the next frame's count/comment header
        del lines[:natoms + 2]
    return images[index]
def write_xyz(fileobj, images):
    """
    Write one or more images to ``fileobj`` in XYZ format.

    Parameters
    ----------
    fileobj : str or file-like
        Filename (opened via paropen) or an open, writable handle.
    images : Atoms or sequence of Atoms
        A single image or a list/tuple of images.

    Fix: symbols and atom count are taken from each image instead of only
    from ``images[0]``, so trajectories whose composition changes between
    frames are written correctly. Output is unchanged for the usual
    fixed-composition case.
    """
    if isinstance(fileobj, str):
        fileobj = paropen(fileobj, 'w')
    if not isinstance(images, (list, tuple)):
        images = [images]
    for atoms in images:
        symbols = atoms.get_chemical_symbols()
        # count line, then an (empty) comment line, then one atom per line
        fileobj.write('%d\n\n' % len(symbols))
        for s, (x, y, z) in zip(symbols, atoms.get_positions()):
            fileobj.write('%-2s %22.15f %22.15f %22.15f\n' % (s, x, y, z))
|
freephys/python_ase
|
ase/io/xyz.py
|
Python
|
gpl-3.0
| 1,225
|
[
"ASE"
] |
ccc23eddeb4bbc317b486f6f82cccbd3eeda2b94b529b8c7e16c71bf924790d3
|
""" Author: Hongyang Cheng <chyalexcheng@gmail>
Test #1: 2D Membrane-wrapped granular material
"""
from esys.escript import *
from esys.weipa import saveVTK
from esys.finley import ReadGmsh
from esys.escript.pdetools import Projector
from esys.escript.linearPDEs import LinearPDE,SolverOptions
from msFEM2DExplicit import MultiScale
from saveGauss import saveGauss2D
import time
####################
##  key controls  ##
####################

# sample size, 1.2m by 1.2m
dim = 2; lx = 1.2; ly = 1.2
# name of mesh file
mshName = 'MshQuad8_0';
# Mesh with 8-node triangle elements; each element has 4 Gauss point
if mshName[3:7] == 'Quad': numOfElements = (int(mshName[7])*2)**2
# Mesh with 3-node triangle elements; each element has 1 Gauss point
else: numOfElements = 2*(int(mshName[7])*2)**2
# number of Gauss points
gp = 4; numg = gp*numOfElements;
packNo = range(0,numg)
# density and damping ratio
rho = 2254.; damp = .2
# number of processes in multiprocessing
nump = 32
# safety factor for timestep size and real-time duration of simulation
safe = 0.5; duration = 25/2.
# directory for exterior DE scenes and variables
sceneExt ='./DE_exts/Test1/'
# import node IDs of membrane in exterior DE domain
mIds = numpy.load(sceneExt+'mNodesIds'+mshName+'.npy')
# import FE-DE mapping of boundary node IDs
FEDENodeMap = numpy.load(sceneExt+'FEDENodeMap'+mshName+'.npy').item()
# state filename of initial membrane DE elements
DE_ext = './DE_exts/Test1/DE_ext_'+mshName+'.yade.gz'
# import FE-DE mapping of boundary element IDs (deprecated)
FEDEBoundMap = numpy.load(sceneExt+'FEDEBoundMap'+mshName+'.npy').item()
# file to write force on the bottom
graphDir = './result/graphs/msTest1_Explicit/gp'+str(gp)+'/'
fout=file(graphDir+'safe_%1.1f_'%safe+'t_%1.1f_'%duration+mshName+'_quasi.dat','w')

###################
##  model setup  ##
###################

# multiscale model description
dom = ReadGmsh(mshName[:8]+'.msh',numDim=dim,integrationOrder=2)
prob = MultiScale(domain=dom,dim=dim,ng=numg,np=nump,rho=rho,\
                  mIds=mIds,FEDENodeMap=FEDENodeMap,DE_ext=DE_ext)
# nodal coordinate
dom = prob.getDomain()
x = dom.getX()
bx = FunctionOnBoundary(dom).getX()
# Dirichlet BC positions: four corners fixed, bottom fixed along y
Dbc = whereZero(x[0])*whereZero(x[1])*[1,1] +\
      whereZero(x[0]-lx)*whereZero(x[1])*[1,1] +\
      whereZero(x[1])*[0,1]
# Dirichlet BC values (all prescribed displacements are zero)
Dbc_val = whereZero(x[0])*whereZero(x[1])*[0,0] +\
          whereZero(x[0]-lx)*whereZero(x[1])*[0,0] +\
          whereZero(x[1])*[0,0]

######################
##  Initialization  ##
######################

# compute appropriate timestep from eigenvalue
eigFreq = sqrt(prob.getMaxEigenvalue())
dt = safe*(2./eigFreq)
# compute appropriate timestep from PWave velocity (deprecated)
#~ T = prob.getCurrentTangent()
#~ maxM = max(T[0,0,0,0].toListOfTuples())
#~ PwaveVel = sqrt(maxM/rho)
#~ dt = safe*inf(prob.getDomain().getSize()/PwaveVel)
# initialize partial differential equation
prob.initialize(specified_u_mask=Dbc, specified_u_val=Dbc_val, dt=dt)

########################################
##  Run simulations for nt timesteps  ##
########################################

# start the simulation
time_start = time.time()
t = 1
nt = int(duration/dt)
# write output at ~100 evenly spaced steps
tWrite = nt/100
# kinetic-energy tolerance for the quasi-static check
rtol = 1e-3
# directory to export vtk data and packing scenes
Dir = 'msTest1/explicit/gp'+str(gp)+'/'+mshName+'_safe_1.0_quasi/'
vtkDir = './result/vtk/'+Dir
packDir = './result/packing/'+Dir
gaussDir = './result/gauss/'+Dir

while t <= nt:
    # update displacement and velocity at (n+1) timesteps
    u, u_t = prob.solve(damp=damp)
    # update maximum kinetic energy
    Ek = integrate(length(u_t)**2*rho)/2.
    # write data at selected timesteps
    if t%tWrite == 0:
        # check quasi-static state: relax dynamically until kinetic
        # energy falls below tolerance
        while Ek > rtol or Ek <0:
            u, u_t = prob.solve(damp=0.99,dynRelax=True)
            Ek = integrate(length(u_t)**2*rho)/2.
            print t, Ek
        # get stress at (n) timesteps
        stress = prob.getCurrentStress()
        dom = prob.getDomain()
        proj = Projector(dom)
        # project Gauss point value to nodal value
        sig = proj(stress)
        # interpolate to stress at the boundary
        sig_bounda = interpolate(sig,FunctionOnBoundary(dom))
        # compute boundary traction by s_ij*n_j
        traction = matrix_mult(sig_bounda,dom.getNormal())
        # get mask for boundary nodes on the bottom
        botSurf = whereZero(bx[1])
        # traction at the bottom
        tractBot = traction*botSurf
        # resultant force at the bottom
        forceBot = integrate(tractBot,where=FunctionOnBoundary(dom))
        # length of the bottom surface
        lengthBot = integrate(botSurf,where=FunctionOnBoundary(dom))
        # force magnitude
        magforceBot = sqrt(forceBot.dot(forceBot))
        # write stress at the bottom surface
        fout.write(str(t*dt)+' '+str(magforceBot)+' '+str(lengthBot)+'\n')
        # get local void ratio
        vR = prob.getLocalVoidRatio(); vR = proj(vR)
        # get local fabric intensity
        fab = prob.getLocalFabric()
        dev_fab = 4.*(fab-trace(fab)/dim*kronecker(prob.getDomain()))
        anis = sqrt(.5*inner(dev_fab,dev_fab))
        # set anis to -1 if no contact
        for i in range(numg):
            if math.isnan(anis.getTupleForDataPoint(i)[0]): anis.setValueOfDataPoint(i,-1)
        # get local rotation
        rot = prob.getLocalAvgRotation(); rot = proj(rot)
        # get local shear strain
        strain = prob.getCurrentStrain()
        volume_strain = trace(strain)
        dev_strain = symmetric(strain) - volume_strain*kronecker(prob.getDomain())/dim
        shear = sqrt(2*inner(dev_strain,dev_strain)); shear = proj(shear)
        # export FE scene
        saveVTK(vtkDir+"/ms"+mshName+"FE_%d.vtu"%t,u=u,sig=sig,shear=shear,e=vR,rot=rot,anis=anis)
        # export DE scenes
        prob.VTKExporter(vtkDir=vtkDir+"/ms"+mshName+"DE",t=t)
        # export local responses at Gauss points
        saveGauss2D(gaussDir+"/time_"+str(t)+".dat",strain=strain,stress=stress,fab=fab)
        print "force at the bottom: %e"%magforceBot
    # next iteration
    print "Step NO.%d finished, current kinetic energy: %2.1e"%(t,Ek)
    t += 1

# final packing export, timing summary and clean shutdown
prob.getCurrentPacking(pos=(),time=t,prefix=packDir)
time_elapse = time.time() - time_start
fout.write("#Elapsed time in hours: "+str(time_elapse/3600.)+'\n')
fout.close()
prob.exitSimulation()
|
chyalexcheng/multiscale
|
msTest1_explicit.py
|
Python
|
gpl-3.0
| 6,147
|
[
"VTK"
] |
18f2611a46dfbc92f740f0a634212c5a24010a10aeccc4e27d646cbc042d2644
|
# -*- coding: utf-8 -*-
#
# Copyright (c) 2017 nexB Inc. and others. All rights reserved.
# http://nexb.com and https://github.com/nexB/scancode-toolkit/
# The ScanCode software is licensed under the Apache License version 2.0.
# Data generated with ScanCode require an acknowledgment.
# ScanCode is a trademark of nexB Inc.
#
# You may not use this software except in compliance with the License.
# You may obtain a copy of the License at: http://apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software distributed
# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
#
# When you publish or redistribute any data created with ScanCode or any ScanCode
# derivative work, you must accompany this data with the following acknowledgment:
#
# Generated with ScanCode and provided on an "AS IS" BASIS, WITHOUT WARRANTIES
# OR CONDITIONS OF ANY KIND, either express or implied. No content created from
# ScanCode should be considered or used as legal advice. Consult an Attorney
# for any legal advice.
# ScanCode is a free software code scanning tool from nexB Inc. and others.
# Visit https://github.com/nexB/scancode-toolkit/ for support and download.
from __future__ import absolute_import, print_function
import os.path
from unittest.case import expectedFailure
from commoncode.testcase import FileBasedTesting
from cluecode_assert_utils import check_detection
"""
This test suite is based a rather large subset of Android ICS, providing a
rather diversified sample of a typical Linux-based user space environment.
"""
class TestCopyright(FileBasedTesting):
test_data_dir = os.path.join(os.path.dirname(__file__), 'data')
def test_ics_android_mock_android_mk(self):
test_file = self.get_test_loc('ics/android-mock/Android.mk')
expected = [
u'Copyright (c) 2010 The Android Open Source Project',
]
check_detection(expected, test_file)
def test_ics_android_mock_notice(self):
test_file = self.get_test_loc('ics/android-mock/NOTICE')
expected = [
u'Copyright (c) 2005-2008, The Android Open Source Project',
]
check_detection(expected, test_file)
def test_ics_android_mock_regenerate_from_source_sh(self):
test_file = self.get_test_loc('ics/android-mock/regenerate_from_source.sh')
expected = [
u'Copyright (c) 2011 The Android Open Source Project.',
]
check_detection(expected, test_file)
def test_ics_android_mock_livetests_com_google_android_testing_mocking_test_androidmanifest_xml(self):
test_file = self.get_test_loc('ics/android-mock-livetests-com-google-android-testing-mocking-test/AndroidManifest.xml')
expected = [
u'Copyright 2010 Google Inc.',
]
check_detection(expected, test_file)
def test_ics_android_mock_src_com_google_android_testing_mocking_androidmock_java(self):
test_file = self.get_test_loc('ics/android-mock-src-com-google-android-testing-mocking/AndroidMock.java')
expected = [
u'Copyright 2010 Google Inc.',
]
check_detection(expected, test_file)
def test_ics_android_mock_src_com_google_android_testing_mocking_generatedmockjar_readme(self):
test_file = self.get_test_loc('ics/android-mock-src-com-google-android-testing-mocking/GeneratedMockJar.readme')
expected = [
u'Copyright 2010 Google Inc.',
]
check_detection(expected, test_file)
def test_ics_antlr_android_mk(self):
test_file = self.get_test_loc('ics/antlr/Android.mk')
expected = [
u'Copyright (c) 2011 The Android Open Source Project',
]
check_detection(expected, test_file)
def test_ics_antlr_src_org_antlr_runtime_antlrfilestream_java(self):
test_file = self.get_test_loc('ics/antlr-src-org-antlr-runtime/ANTLRFileStream.java')
expected = [
u'Copyright (c) 2005-2009 Terence Parr',
]
check_detection(expected, test_file)
def test_ics_apache_harmony_notice(self):
test_file = self.get_test_loc('ics/apache-harmony/NOTICE')
expected = [
u'Copyright 2001-2004 The Apache Software Foundation.',
u'Copyright 2001-2006 The Apache Software Foundation.',
u'Copyright 2003-2004 The Apache Software Foundation.',
u'Copyright 2004 The Apache Software Foundation.',
]
check_detection(expected, test_file)
def test_ics_apache_http_cleanspec_mk(self):
test_file = self.get_test_loc('ics/apache-http/CleanSpec.mk')
expected = [
u'Copyright (c) 2007 The Android Open Source Project',
]
check_detection(expected, test_file)
def test_ics_apache_http_thirdpartyproject_prop(self):
test_file = self.get_test_loc('ics/apache-http/ThirdPartyProject.prop')
expected = [
u'Copyright 2010 Google Inc.',
]
check_detection(expected, test_file)
def test_ics_apache_http_src_org_apache_commons_codec_binarydecoder_java(self):
test_file = self.get_test_loc('ics/apache-http-src-org-apache-commons-codec/BinaryDecoder.java')
expected = [
u'Copyright 2001-2004 The Apache Software Foundation.',
]
check_detection(expected, test_file)
def test_ics_apache_http_src_org_apache_commons_codec_overview_html(self):
test_file = self.get_test_loc('ics/apache-http-src-org-apache-commons-codec/overview.html')
expected = [
u'Copyright 2003-2004 The Apache Software Foundation.',
]
check_detection(expected, test_file)
def test_ics_apache_http_src_org_apache_commons_logging_logfactory_java(self):
test_file = self.get_test_loc('ics/apache-http-src-org-apache-commons-logging/LogFactory.java')
expected = [
u'Copyright 2001-2006 The Apache Software Foundation.',
]
check_detection(expected, test_file)
def test_ics_apache_http_src_org_apache_commons_logging_package_html(self):
test_file = self.get_test_loc('ics/apache-http-src-org-apache-commons-logging/package.html')
expected = [
u'Copyright 2001-2004 The Apache Software Foundation.',
]
check_detection(expected, test_file)
def test_ics_apache_http_src_org_apache_commons_logging_impl_weakhashtable_java(self):
test_file = self.get_test_loc('ics/apache-http-src-org-apache-commons-logging-impl/WeakHashtable.java')
expected = [
u'Copyright 2004 The Apache Software Foundation.',
]
check_detection(expected, test_file)
def test_ics_apache_xml_notice(self):
test_file = self.get_test_loc('ics/apache-xml/NOTICE')
expected = [
u'Copyright 1999-2006 The Apache Software Foundation',
u'Copyright 1999-2006 The Apache Software Foundation',
u'copyright (c) 1999-2002, Lotus Development Corporation., http://www.lotus.com.',
u'copyright (c) 2001-2002, Sun Microsystems., http://www.sun.com.',
u'copyright (c) 2003, IBM Corporation., http://www.ibm.com.',
u'Copyright 1999-2006 The Apache Software Foundation',
u'copyright (c) 1999, IBM Corporation., http://www.ibm.com.',
u'copyright (c) 1999, Sun Microsystems., http://www.sun.com.',
u'at iClick, Inc., software copyright (c) 1999.',
u'Copyright 2001-2003,2006 The Apache Software Foundation.',
u'copyright (c) 1999, IBM Corporation., http://www.ibm.com.',
u'copyright (c) 1999, Sun Microsystems., http://www.sun.com.',
u'copyright (c) 2000 World Wide Web Consortium, http://www.w3.org',
]
check_detection(expected, test_file)
def test_ics_apache_xml_src_main_java_org_apache_xpath_domapi_xpathstylesheetdom3exception_java(self):
test_file = self.get_test_loc('ics/apache-xml-src-main-java-org-apache-xpath-domapi/XPathStylesheetDOM3Exception.java')
expected = [
u'Copyright (c) 2002 World Wide Web Consortium, Massachusetts Institute of Technology, Institut National de Recherche en Informatique',
]
check_detection(expected, test_file)
@expectedFailure
def test_ics_apache_xml_src_main_java_org_apache_xpath_domapi_xpathstylesheetdom3exception_java_trail_name(self):
test_file = self.get_test_loc('ics/apache-xml-src-main-java-org-apache-xpath-domapi/XPathStylesheetDOM3Exception.java')
expected = [
u'Copyright (c) 2002 World Wide Web Consortium, '
u'(Massachusetts Institute of Technology, '
u'Institut National de Recherche en Informatique et en Automatique, '
u'Keio University).',
]
check_detection(expected, test_file)
def test_ics_astl_android_mk(self):
test_file = self.get_test_loc('ics/astl/Android.mk')
expected = [
u'Copyright (c) 2009 The Android Open Source Project',
]
check_detection(expected, test_file)
def test_ics_astl_notice(self):
test_file = self.get_test_loc('ics/astl/NOTICE')
expected = [
u'Copyright (c) 2009 The Android Open Source Project',
]
check_detection(expected, test_file)
def test_ics_astl_include_algorithm(self):
test_file = self.get_test_loc('ics/astl-include/algorithm')
expected = [
u'Copyright (c) 2009 The Android Open Source Project',
]
check_detection(expected, test_file)
def test_ics_astl_include_basic_ios_h(self):
test_file = self.get_test_loc('ics/astl-include/basic_ios.h')
expected = [
u'Copyright (c) 2010 The Android Open Source Project',
]
check_detection(expected, test_file)
def test_ics_astl_include_streambuf(self):
test_file = self.get_test_loc('ics/astl-include/streambuf')
expected = [
u'Copyright (c) 2010 The Android Open Source Project',
]
check_detection(expected, test_file)
def test_ics_astl_include_string(self):
test_file = self.get_test_loc('ics/astl-include/string')
expected = [
u'Copyright (c) 2009 The Android Open Source Project',
]
check_detection(expected, test_file)
def test_ics_astl_src_ostream_cpp(self):
test_file = self.get_test_loc('ics/astl-src/ostream.cpp')
expected = [
u'Copyright (c) 2010 The Android Open Source Project',
]
check_detection(expected, test_file)
def test_ics_astl_tests_test_vector_cpp(self):
test_file = self.get_test_loc('ics/astl-tests/test_vector.cpp')
expected = [
u'Copyright (c) 2009 The Android Open Source Project',
]
check_detection(expected, test_file)
def test_ics_bison_aclocal_m4(self):
test_file = self.get_test_loc('ics/bison/aclocal.m4')
expected = [
u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 2002, 2003, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 2001, 2003, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 1996, 1997, 1999, 2000, 2001, 2002, 2003, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 1997, 2000, 2001, 2003, 2004, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 2001, 2003, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 2003, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 2001, 2002, 2003, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 1997, 1999, 2000, 2001, 2003, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 2003, 2004, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 2001, 2002, 2003, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 1996, 1997, 2000, 2001, 2003, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 2001, 2003, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 2004, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_android_mk(self):
test_file = self.get_test_loc('ics/bison/Android.mk')
expected = [
u'Copyright 2006 The Android Open Source Project',
]
check_detection(expected, test_file)
def test_ics_bison_changelog(self):
test_file = self.get_test_loc('ics/bison/ChangeLog')
expected = [
u'Copyright (c) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_config_log(self):
test_file = self.get_test_loc('ics/bison/config.log')
expected = [
u'Copyright (c) 2006 Free Software Foundation, Inc.',
u'Copyright (c) 2006 Free Software Foundation, Inc.',
u'Copyright (c) 2006 Free Software Foundation, Inc.',
u'Copyright (c) 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_config_status(self):
test_file = self.get_test_loc('ics/bison/config.status')
expected = [
u'Copyright (c) 2003 Free Software Foundation, Inc.',
u'Copyright (c) 2000, 2001, 2003 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_configure(self):
test_file = self.get_test_loc('ics/bison/configure')
expected = [
u'Copyright (c) 2003 Free Software Foundation, Inc.',
u'Copyright (c) 2003 Free Software Foundation, Inc.',
u'Copyright (c) 2003 Free Software Foundation, Inc.',
u'Copyright (c) 2000, 2001, 2003 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_configure_ac(self):
test_file = self.get_test_loc('ics/bison/configure.ac')
expected = [
u'Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_copying(self):
test_file = self.get_test_loc('ics/bison/COPYING')
expected = [
u'Copyright (c) 1989, 1991 Free Software Foundation, Inc.',
u'copyrighted by the Free Software Foundation',
]
check_detection(expected, test_file)
def test_ics_bison_gnumakefile(self):
test_file = self.get_test_loc('ics/bison/GNUmakefile')
expected = [
u'Copyright (c) 2001, 2003 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_install(self):
test_file = self.get_test_loc('ics/bison/INSTALL')
expected = [
u'Copyright (c) 1994, 1995, 1996, 1999, 2000, 2001, 2002, 2004, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_makefile(self):
test_file = self.get_test_loc('ics/bison/Makefile')
expected = [
u'Copyright (c) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_makefile_am(self):
test_file = self.get_test_loc('ics/bison/Makefile.am')
expected = [
u'Copyright (c) 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_makefile_cfg(self):
test_file = self.get_test_loc('ics/bison/Makefile.cfg')
expected = [
u'Copyright (c) 2003, 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_makefile_maint(self):
test_file = self.get_test_loc('ics/bison/Makefile.maint')
expected = [
u'Copyright (c) 2001-2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_news(self):
test_file = self.get_test_loc('ics/bison/NEWS')
expected = [
u'Copyright (c) 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_notice(self):
test_file = self.get_test_loc('ics/bison/NOTICE')
expected = [
u'Copyright (c) 1992-2006 Free Software Foundation, Inc.',
u'Copyright (c) 1989, 1991 Free Software Foundation, Inc.',
u'copyrighted by the Free Software Foundation',
]
check_detection(expected, test_file)
def test_ics_bison_packaging(self):
test_file = self.get_test_loc('ics/bison/PACKAGING')
expected = [
u'Copyright (c) 2002, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_readme(self):
test_file = self.get_test_loc('ics/bison/README')
expected = [
u'Copyright (c) 1992, 1998, 1999, 2003, 2004, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_todo(self):
test_file = self.get_test_loc('ics/bison/TODO')
expected = [
u'Copyright (c) 2001, 2002, 2003, 2004, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_build_aux_config_guess(self):
test_file = self.get_test_loc('ics/bison-build-aux/config.guess')
expected = [
u'Copyright (c) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_build_aux_config_rpath(self):
test_file = self.get_test_loc('ics/bison-build-aux/config.rpath')
expected = [
u'Copyright 1996-2006 Free Software Foundation, Inc.',
u'Gordon Matzigkeit <gord@gnu.ai.mit.edu>, 1996',
]
check_detection(expected, test_file)
def test_ics_bison_build_aux_depcomp(self):
test_file = self.get_test_loc('ics/bison-build-aux/depcomp')
expected = [
u'Copyright (c) 1999, 2000, 2003, 2004, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_build_aux_install_sh(self):
test_file = self.get_test_loc('ics/bison-build-aux/install-sh')
expected = [
u'Copyright (c) 1994 X Consortium',
]
check_detection(expected, test_file)
def test_ics_bison_build_aux_mdate_sh(self):
test_file = self.get_test_loc('ics/bison-build-aux/mdate-sh')
expected = [
u'Copyright (c) 1995, 1996, 1997, 2003, 2004, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_build_aux_missing(self):
test_file = self.get_test_loc('ics/bison-build-aux/missing')
expected = [
u'Copyright (c) 1996, 1997, 1999, 2000, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_build_aux_texinfo_tex(self):
test_file = self.get_test_loc('ics/bison-build-aux/texinfo.tex')
expected = [
u'Copyright (c) 1985, 1986, 1988, 1990, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_build_aux_ylwrap(self):
test_file = self.get_test_loc('ics/bison-build-aux/ylwrap')
expected = [
u'Copyright (c) 1996, 1997, 1998, 1999, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_data_c_m4(self):
test_file = self.get_test_loc('ics/bison-data/c.m4')
expected = [
u'Copyright (c) 2002, 2004, 2005, 2006 Free Software Foundation, Inc.',
u'Copyright (c) $2 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_data_c_m4_2(self):
test_file = self.get_test_loc('ics/bison-data/c++.m4')
expected = [
u'Copyright (c) 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_data_makefile_am(self):
test_file = self.get_test_loc('ics/bison-data/Makefile.am')
expected = [
u'Copyright (c) 2002, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_data_readme(self):
test_file = self.get_test_loc('ics/bison-data/README')
expected = [
u'Copyright (c) 2002 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_data_m4sugar_m4sugar_m4(self):
test_file = self.get_test_loc('ics/bison-data-m4sugar/m4sugar.m4')
expected = [
u'Copyright (c) 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_djgpp_config_bat(self):
test_file = self.get_test_loc('ics/bison-djgpp/config.bat')
expected = [
u'Copyright (c) 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_djgpp_config_sed(self):
test_file = self.get_test_loc('ics/bison-djgpp/config.sed')
expected = [
u'Copyright (c) 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_djgpp_makefile_maint(self):
test_file = self.get_test_loc('ics/bison-djgpp/Makefile.maint')
expected = [
u'Copyright (c) 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_djgpp_readme_in(self):
test_file = self.get_test_loc('ics/bison-djgpp/README.in')
expected = [
u'Copyright (c) 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_djgpp_subpipe_h(self):
test_file = self.get_test_loc('ics/bison-djgpp/subpipe.h')
expected = [
u'Copyright (c) 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_doc_bison_texinfo(self):
test_file = self.get_test_loc('ics/bison-doc/bison.texinfo')
expected = [
u'Copyright 1988, 1989, 1990, 1991, 1992, 1993, 1995, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_doc_fdl_texi(self):
test_file = self.get_test_loc('ics/bison-doc/fdl.texi')
expected = [
u'Copyright 2000,2001,2002 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_doc_gpl_texi(self):
test_file = self.get_test_loc('ics/bison-doc/gpl.texi')
expected = [
u'Copyright 1989, 1991 Free Software Foundation, Inc.',
u'copyrighted by the Free Software Foundation',
]
check_detection(expected, test_file)
def test_ics_bison_doc_makefile_am(self):
test_file = self.get_test_loc('ics/bison-doc/Makefile.am')
expected = [
u'Copyright (c) 2001, 2002, 2003, 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_doc_refcard_tex(self):
test_file = self.get_test_loc('ics/bison-doc/refcard.tex')
expected = [
u'Copyright (c) 1998, 2001 Free Software Foundation, Inc.',
u'Copyright \\copyright\\ \\year\\ Free Software Foundation, Inc.',
u'Copyright \\copyright\\ \\year\\ Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_examples_extexi(self):
test_file = self.get_test_loc('ics/bison-examples/extexi')
expected = [
u'Copyright 1992, 2000, 2001, 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_examples_makefile_am(self):
test_file = self.get_test_loc('ics/bison-examples/Makefile.am')
expected = [
u'Copyright (c) 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_abitset_c(self):
test_file = self.get_test_loc('ics/bison-lib/abitset.c')
expected = [
u'Copyright (c) 2002, 2003, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_abitset_h(self):
test_file = self.get_test_loc('ics/bison-lib/abitset.h')
expected = [
u'Copyright (c) 2002, 2004 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_argmatch_c(self):
test_file = self.get_test_loc('ics/bison-lib/argmatch.c')
expected = [
u'Copyright (c) 1990, 1998, 1999, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_argmatch_h(self):
test_file = self.get_test_loc('ics/bison-lib/argmatch.h')
expected = [
u'Copyright (c) 1990, 1998, 1999, 2001, 2002, 2004, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_basename_c(self):
test_file = self.get_test_loc('ics/bison-lib/basename.c')
expected = [
u'Copyright (c) 1990, 1998, 1999, 2000, 2001, 2003, 2004, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_bbitset_h(self):
test_file = self.get_test_loc('ics/bison-lib/bbitset.h')
expected = [
u'Copyright (c) 2002, 2003, 2004, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_bitset_c(self):
test_file = self.get_test_loc('ics/bison-lib/bitset.c')
expected = [
u'Copyright (c) 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_bitset_h(self):
test_file = self.get_test_loc('ics/bison-lib/bitset.h')
expected = [
u'Copyright (c) 2002, 2003, 2004 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_bitsetv_c(self):
test_file = self.get_test_loc('ics/bison-lib/bitsetv.c')
expected = [
u'Copyright (c) 2001, 2002, 2004, 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_bitsetv_print_c(self):
test_file = self.get_test_loc('ics/bison-lib/bitsetv-print.c')
expected = [
u'Copyright (c) 2001, 2002, 2004, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_dirname_c(self):
test_file = self.get_test_loc('ics/bison-lib/dirname.c')
expected = [
u'Copyright (c) 1990, 1998, 2000, 2001, 2003, 2004, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_dirname_h(self):
test_file = self.get_test_loc('ics/bison-lib/dirname.h')
expected = [
u'Copyright (c) 1998, 2001, 2003, 2004, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_dup_safer_c(self):
test_file = self.get_test_loc('ics/bison-lib/dup-safer.c')
expected = [
u'Copyright (c) 2001, 2004, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_error_c(self):
test_file = self.get_test_loc('ics/bison-lib/error.c')
expected = [
u'Copyright (c) 1990-1998, 2000-2003, 2004 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_error_h(self):
test_file = self.get_test_loc('ics/bison-lib/error.h')
expected = [
u'Copyright (c) 1995, 1996, 1997, 2003 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_exit_h(self):
test_file = self.get_test_loc('ics/bison-lib/exit.h')
expected = [
u'Copyright (c) 1995, 2001 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_exitfail_c(self):
test_file = self.get_test_loc('ics/bison-lib/exitfail.c')
expected = [
u'Copyright (c) 2002, 2003 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_get_errno_c(self):
test_file = self.get_test_loc('ics/bison-lib/get-errno.c')
expected = [
u'Copyright (c) 2002, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_getopt_c(self):
test_file = self.get_test_loc('ics/bison-lib/getopt.c')
expected = [
u'Copyright (c) 1987,88,89,90,91,92,93,94,95,96,98,99,2000,2001,2002,2003,2004,2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_getopt_h(self):
test_file = self.get_test_loc('ics/bison-lib/getopt_.h')
expected = [
u'Copyright (c) 1989-1994,1996-1999,2001,2003,2004,2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_getopt_int_h(self):
test_file = self.get_test_loc('ics/bison-lib/getopt_int.h')
expected = [
u'Copyright (c) 1989-1994,1996-1999,2001,2003,2004 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_getopt1_c(self):
test_file = self.get_test_loc('ics/bison-lib/getopt1.c')
expected = [
u'Copyright (c) 1987,88,89,90,91,92,93,94,96,97,98,2004 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_gettext_h(self):
test_file = self.get_test_loc('ics/bison-lib/gettext.h')
expected = [
u'Copyright (c) 1995-1998, 2000-2002, 2004 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_hard_locale_c(self):
test_file = self.get_test_loc('ics/bison-lib/hard-locale.c')
expected = [
u'Copyright (c) 1997, 1998, 1999, 2002, 2003, 2004 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_hard_locale_h(self):
test_file = self.get_test_loc('ics/bison-lib/hard-locale.h')
expected = [
u'Copyright (c) 1999, 2003, 2004 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_hash_c(self):
test_file = self.get_test_loc('ics/bison-lib/hash.c')
expected = [
u'Copyright (c) 1998, 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_hash_h(self):
test_file = self.get_test_loc('ics/bison-lib/hash.h')
expected = [
u'Copyright (c) 1998, 1999, 2001, 2003 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_makefile_am(self):
test_file = self.get_test_loc('ics/bison-lib/Makefile.am')
expected = [
u'Copyright (c) 2001, 2002, 2003, 2004 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_malloc_c(self):
test_file = self.get_test_loc('ics/bison-lib/malloc.c')
expected = [
u'Copyright (c) 1997, 1998 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_mbswidth_c(self):
test_file = self.get_test_loc('ics/bison-lib/mbswidth.c')
expected = [
u'Copyright (c) 2000-2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_mbswidth_h(self):
test_file = self.get_test_loc('ics/bison-lib/mbswidth.h')
expected = [
u'Copyright (c) 2000-2004 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_obstack_c(self):
test_file = self.get_test_loc('ics/bison-lib/obstack.c')
expected = [
u'Copyright (c) 1988, 1989, 1990, 1991, 1992, 1993, 1994, 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_obstack_h(self):
test_file = self.get_test_loc('ics/bison-lib/obstack.h')
expected = [
u'Copyright (c) 1988-1994,1996-1999,2003,2004,2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_quote_c(self):
test_file = self.get_test_loc('ics/bison-lib/quote.c')
expected = [
u'Copyright (c) 1998, 1999, 2000, 2001, 2003 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_quote_h(self):
test_file = self.get_test_loc('ics/bison-lib/quote.h')
expected = [
u'Copyright (c) 1998, 1999, 2000, 2001, 2003 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_quotearg_c(self):
test_file = self.get_test_loc('ics/bison-lib/quotearg.c')
expected = [
u'Copyright (c) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_quotearg_h(self):
test_file = self.get_test_loc('ics/bison-lib/quotearg.h')
expected = [
u'Copyright (c) 1998, 1999, 2000, 2001, 2002, 2004 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_stdbool_h(self):
test_file = self.get_test_loc('ics/bison-lib/stdbool_.h')
expected = [
u'Copyright (c) 2001, 2002, 2003, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_stdio_safer_h(self):
test_file = self.get_test_loc('ics/bison-lib/stdio-safer.h')
expected = [
u'Copyright (c) 2001, 2003 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_stpcpy_c(self):
test_file = self.get_test_loc('ics/bison-lib/stpcpy.c')
expected = [
u'Copyright (c) 1992, 1995, 1997, 1998 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_stpcpy_h(self):
test_file = self.get_test_loc('ics/bison-lib/stpcpy.h')
expected = [
u'Copyright (c) 1995, 2001, 2003 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_strdup_c(self):
test_file = self.get_test_loc('ics/bison-lib/strdup.c')
expected = [
u'Copyright (c) 1991, 1996, 1997, 1998, 2002, 2003, 2004 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_strdup_h(self):
test_file = self.get_test_loc('ics/bison-lib/strdup.h')
expected = [
u'Copyright (c) 2004 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_strerror_c(self):
test_file = self.get_test_loc('ics/bison-lib/strerror.c')
expected = [
u'Copyright (c) 1986, 1988, 1989, 1991, 2002, 2003 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_stripslash_c(self):
test_file = self.get_test_loc('ics/bison-lib/stripslash.c')
expected = [
u'Copyright (c) 1990, 2001, 2003, 2004, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_strndup_c(self):
test_file = self.get_test_loc('ics/bison-lib/strndup.c')
expected = [
u'Copyright (c) 1996, 1997, 1998, 2001, 2002, 2003, 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_strndup_h(self):
test_file = self.get_test_loc('ics/bison-lib/strndup.h')
expected = [
u'Copyright (c) 2003 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_strtol_c(self):
test_file = self.get_test_loc('ics/bison-lib/strtol.c')
expected = [
u'Copyright (c) 1991, 1992, 1994, 1995, 1996, 1997, 1998, 1999, 2003, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_strtoul_c(self):
test_file = self.get_test_loc('ics/bison-lib/strtoul.c')
expected = [
u'Copyright (c) 1991, 1997 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_strverscmp_c(self):
test_file = self.get_test_loc('ics/bison-lib/strverscmp.c')
expected = [
u'Copyright (c) 1997, 2000, 2002, 2004 Free Software Foundation, Inc.',
u'Jean-Francois Bignolles <bignolle@ecoledoc.ibp.fr>, 1997.'
]
check_detection(expected, test_file)
def test_ics_bison_lib_strverscmp_h(self):
test_file = self.get_test_loc('ics/bison-lib/strverscmp.h')
expected = [
u'Copyright (c) 1997, 2003 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_subpipe_c(self):
test_file = self.get_test_loc('ics/bison-lib/subpipe.c')
expected = [
u'Copyright (c) 2002, 2004, 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_subpipe_h(self):
test_file = self.get_test_loc('ics/bison-lib/subpipe.h')
expected = [
u'Copyright (c) 2002, 2004, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_timevar_c(self):
test_file = self.get_test_loc('ics/bison-lib/timevar.c')
expected = [
u'Copyright (c) 2000, 2002, 2004, 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_timevar_h(self):
test_file = self.get_test_loc('ics/bison-lib/timevar.h')
expected = [
u'Copyright (c) 2000, 2002, 2004 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_unistd_safer_h(self):
test_file = self.get_test_loc('ics/bison-lib/unistd-safer.h')
expected = [
u'Copyright (c) 2001, 2003, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_unlocked_io_h(self):
test_file = self.get_test_loc('ics/bison-lib/unlocked-io.h')
expected = [
u'Copyright (c) 2001, 2002, 2003, 2004 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_xalloc_h(self):
test_file = self.get_test_loc('ics/bison-lib/xalloc.h')
expected = [
u'Copyright (c) 1990, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2003, 2004 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_lib_xalloc_die_c(self):
test_file = self.get_test_loc('ics/bison-lib/xalloc-die.c')
expected = [
u'Copyright (c) 1997, 1998, 1999, 2000, 2002, 2003, 2004, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_m4_bison_i18n_m4(self):
test_file = self.get_test_loc('ics/bison-m4/bison-i18n.m4')
expected = [
u'Copyright (c) 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_m4_c_working_m4(self):
test_file = self.get_test_loc('ics/bison-m4/c-working.m4')
expected = [
u'Copyright (c) 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_m4_cxx_m4(self):
test_file = self.get_test_loc('ics/bison-m4/cxx.m4')
expected = [
u'Copyright (c) 2004, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_m4_dirname_m4(self):
test_file = self.get_test_loc('ics/bison-m4/dirname.m4')
expected = [
u'Copyright (c) 2002, 2003, 2004, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_m4_dos_m4(self):
test_file = self.get_test_loc('ics/bison-m4/dos.m4')
expected = [
u'Copyright (c) 2000, 2001, 2004 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_m4_error_m4(self):
test_file = self.get_test_loc('ics/bison-m4/error.m4')
expected = [
u'Copyright (c) 1996, 1997, 1998, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_m4_exitfail_m4(self):
test_file = self.get_test_loc('ics/bison-m4/exitfail.m4')
expected = [
u'Copyright (c) 2002, 2003, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_m4_extensions_m4(self):
test_file = self.get_test_loc('ics/bison-m4/extensions.m4')
expected = [
u'Copyright (c) 2003, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_m4_gettext_gl_m4(self):
test_file = self.get_test_loc('ics/bison-m4/gettext_gl.m4')
expected = [
u'Copyright (c) 1995-2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_m4_iconv_m4(self):
test_file = self.get_test_loc('ics/bison-m4/iconv.m4')
expected = [
u'Copyright (c) 2000-2002 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_m4_inttypes_h_gl_m4(self):
test_file = self.get_test_loc('ics/bison-m4/inttypes_h_gl.m4')
expected = [
u'Copyright (c) 1997-2004 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_m4_lib_ld_gl_m4(self):
test_file = self.get_test_loc('ics/bison-m4/lib-ld_gl.m4')
expected = [
u'Copyright (c) 1996-2003 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_m4_lib_link_m4(self):
test_file = self.get_test_loc('ics/bison-m4/lib-link.m4')
expected = [
u'Copyright (c) 2001-2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_m4_m4_m4(self):
test_file = self.get_test_loc('ics/bison-m4/m4.m4')
expected = [
u'Copyright 2000 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_m4_mbrtowc_m4(self):
test_file = self.get_test_loc('ics/bison-m4/mbrtowc.m4')
expected = [
u'Copyright (c) 2001-2002, 2004-2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_m4_mbstate_t_m4(self):
test_file = self.get_test_loc('ics/bison-m4/mbstate_t.m4')
expected = [
u'Copyright (c) 2000, 2001, 2002 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_m4_mbswidth_m4(self):
test_file = self.get_test_loc('ics/bison-m4/mbswidth.m4')
expected = [
u'Copyright (c) 2000-2002, 2004 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_m4_nls_m4(self):
test_file = self.get_test_loc('ics/bison-m4/nls.m4')
expected = [
u'Copyright (c) 1995-2003, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_m4_obstack_m4(self):
test_file = self.get_test_loc('ics/bison-m4/obstack.m4')
expected = [
u'Copyright (c) 2002, 2003, 2004 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_m4_onceonly_m4(self):
test_file = self.get_test_loc('ics/bison-m4/onceonly.m4')
expected = [
u'Copyright (c) 2002-2003, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_m4_progtest_m4(self):
test_file = self.get_test_loc('ics/bison-m4/progtest.m4')
expected = [
u'Copyright (c) 1996-2003, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_m4_quotearg_m4(self):
test_file = self.get_test_loc('ics/bison-m4/quotearg.m4')
expected = [
u'Copyright (c) 2002, 2004, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_m4_stdbool_m4(self):
test_file = self.get_test_loc('ics/bison-m4/stdbool.m4')
expected = [
u'Copyright (c) 2002-2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_m4_stdio_safer_m4(self):
test_file = self.get_test_loc('ics/bison-m4/stdio-safer.m4')
expected = [
u'Copyright (c) 2002, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_m4_stpcpy_m4(self):
test_file = self.get_test_loc('ics/bison-m4/stpcpy.m4')
expected = [
u'Copyright (c) 2002 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_m4_strndup_m4(self):
test_file = self.get_test_loc('ics/bison-m4/strndup.m4')
expected = [
u'Copyright (c) 2002-2003, 2005-2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_m4_strtol_m4(self):
test_file = self.get_test_loc('ics/bison-m4/strtol.m4')
expected = [
u'Copyright (c) 2002, 2003 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_m4_ulonglong_gl_m4(self):
test_file = self.get_test_loc('ics/bison-m4/ulonglong_gl.m4')
expected = [
u'Copyright (c) 1999-2004 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_m4_unlocked_io_m4(self):
test_file = self.get_test_loc('ics/bison-m4/unlocked-io.m4')
expected = [
u'Copyright (c) 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_m4_warning_m4(self):
test_file = self.get_test_loc('ics/bison-m4/warning.m4')
expected = [
u'Copyright (c) 2001, 2002 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_m4_xstrndup_m4(self):
test_file = self.get_test_loc('ics/bison-m4/xstrndup.m4')
expected = [
u'Copyright (c) 2003 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_src_assoc_c(self):
test_file = self.get_test_loc('ics/bison-src/assoc.c')
expected = [
u'Copyright (c) 2002, 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_src_closure_c(self):
test_file = self.get_test_loc('ics/bison-src/closure.c')
expected = [
u'Copyright (c) 1984, 1989, 2000, 2001, 2002, 2004, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_src_closure_h(self):
test_file = self.get_test_loc('ics/bison-src/closure.h')
expected = [
u'Copyright (c) 1984, 1989, 2000, 2001, 2002 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_src_complain_c(self):
test_file = self.get_test_loc('ics/bison-src/complain.c')
expected = [
u'Copyright (c) 2000, 2001, 2002, 2004, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_src_complain_h(self):
test_file = self.get_test_loc('ics/bison-src/complain.h')
expected = [
u'Copyright (c) 2000, 2001, 2002 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_src_conflicts_c(self):
test_file = self.get_test_loc('ics/bison-src/conflicts.c')
expected = [
u'Copyright (c) 1984, 1989, 1992, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_src_conflicts_h(self):
test_file = self.get_test_loc('ics/bison-src/conflicts.h')
expected = [
u'Copyright (c) 2000, 2001, 2002, 2004 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_src_derives_c(self):
test_file = self.get_test_loc('ics/bison-src/derives.c')
expected = [
u'Copyright (c) 1984, 1989, 2000, 2001, 2002, 2003, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_src_files_c(self):
test_file = self.get_test_loc('ics/bison-src/files.c')
expected = [
u'Copyright (c) 1984, 1986, 1989, 1992, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_src_getargs_c(self):
test_file = self.get_test_loc('ics/bison-src/getargs.c')
expected = [
u'Copyright (c) 1984, 1986, 1989, 1992, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
u'(c) d Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
@expectedFailure
def test_ics_bison_src_getargs_c_lead_copy(self):
test_file = self.get_test_loc('ics/bison-src/getargs.c')
expected = [
u'Copyright (c) 1984, 1986, 1989, 1992, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
u'Copyright (c) d Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_src_getargs_h(self):
test_file = self.get_test_loc('ics/bison-src/getargs.h')
expected = [
u'Copyright (c) 1984, 1986, 1989, 1992, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_src_gram_c(self):
test_file = self.get_test_loc('ics/bison-src/gram.c')
expected = [
u'Copyright (c) 1984, 1986, 1989, 2001, 2002, 2003, 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_src_gram_h(self):
test_file = self.get_test_loc('ics/bison-src/gram.h')
expected = [
u'Copyright (c) 1984, 1986, 1989, 1992, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_src_lalr_c(self):
test_file = self.get_test_loc('ics/bison-src/lalr.c')
expected = [
u'Copyright (c) 1984, 1986, 1989, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_src_lalr_h(self):
test_file = self.get_test_loc('ics/bison-src/lalr.h')
expected = [
u'Copyright (c) 1984, 1986, 1989, 2000, 2002, 2004 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_src_lr0_c(self):
test_file = self.get_test_loc('ics/bison-src/LR0.c')
expected = [
u'Copyright (c) 1984, 1986, 1989, 2000, 2001, 2002, 2004, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_src_lr0_h(self):
test_file = self.get_test_loc('ics/bison-src/LR0.h')
expected = [
u'Copyright 1984, 1986, 1989, 2000, 2001, 2002 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_src_main_c(self):
test_file = self.get_test_loc('ics/bison-src/main.c')
expected = [
u'Copyright (c) 1984, 1986, 1989, 1992, 1995, 2000, 2001, 2002, 2004, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_src_muscle_tab_c(self):
test_file = self.get_test_loc('ics/bison-src/muscle_tab.c')
expected = [
u'Copyright (c) 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_src_muscle_tab_h(self):
test_file = self.get_test_loc('ics/bison-src/muscle_tab.h')
expected = [
u'Copyright (c) 2001, 2002, 2003 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_src_nullable_c(self):
test_file = self.get_test_loc('ics/bison-src/nullable.c')
expected = [
u'Copyright (c) 1984, 1989, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_src_nullable_h(self):
test_file = self.get_test_loc('ics/bison-src/nullable.h')
expected = [
u'Copyright (c) 2000, 2002 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_src_output_c(self):
test_file = self.get_test_loc('ics/bison-src/output.c')
expected = [
u'Copyright (c) 1984, 1986, 1989, 1992, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_src_output_h(self):
test_file = self.get_test_loc('ics/bison-src/output.h')
expected = [
u'Copyright (c) 2000, 2001, 2002, 2003, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_src_parse_gram_c(self):
test_file = self.get_test_loc('ics/bison-src/parse-gram.c')
expected = [
u'Copyright (c) 1984, 1989, 1990, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
u'Copyright (c) 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_src_parse_gram_h(self):
test_file = self.get_test_loc('ics/bison-src/parse-gram.h')
expected = [
u'Copyright (c) 1984, 1989, 1990, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_src_print_c(self):
test_file = self.get_test_loc('ics/bison-src/print.c')
expected = [
u'Copyright (c) 1984, 1986, 1989, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_src_print_h(self):
test_file = self.get_test_loc('ics/bison-src/print.h')
expected = [
u'Copyright 2000 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_src_reader_c(self):
test_file = self.get_test_loc('ics/bison-src/reader.c')
expected = [
u'Copyright (c) 1984, 1986, 1989, 1992, 1998, 2000, 2001, 2002, 2003, 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_src_reader_h(self):
test_file = self.get_test_loc('ics/bison-src/reader.h')
expected = [
u'Copyright (c) 2000, 2001, 2002, 2003, 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_src_reduce_c(self):
test_file = self.get_test_loc('ics/bison-src/reduce.c')
expected = [
u'Copyright (c) 1988, 1989, 2000, 2001, 2002, 2003, 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_src_scan_skel_c(self):
test_file = self.get_test_loc('ics/bison-src/scan-skel.c')
expected = [
u'Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_src_scan_skel_l(self):
test_file = self.get_test_loc('ics/bison-src/scan-skel.l')
expected = [
u'Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_src_state_h(self):
test_file = self.get_test_loc('ics/bison-src/state.h')
expected = [
u'Copyright (c) 1984, 1989, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_src_symtab_c(self):
test_file = self.get_test_loc('ics/bison-src/symtab.c')
expected = [
u'Copyright (c) 1984, 1989, 2000, 2001, 2002, 2004, 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_src_symtab_h(self):
test_file = self.get_test_loc('ics/bison-src/symtab.h')
expected = [
u'Copyright (c) 1984, 1989, 1992, 2000, 2001, 2002, 2004, 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_src_system_h(self):
test_file = self.get_test_loc('ics/bison-src/system.h')
expected = [
u'Copyright (c) 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_src_uniqstr_c(self):
test_file = self.get_test_loc('ics/bison-src/uniqstr.c')
expected = [
u'Copyright (c) 2002, 2003, 2004, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_src_vcg_h(self):
test_file = self.get_test_loc('ics/bison-src/vcg.h')
expected = [
u'Copyright (c) 2001, 2002, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_tests_actions_at(self):
test_file = self.get_test_loc('ics/bison-tests/actions.at')
expected = [
u'Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_tests_atconfig(self):
test_file = self.get_test_loc('ics/bison-tests/atconfig')
expected = [
u'Copyright (c) 2000, 2001, 2003 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_tests_atlocal(self):
test_file = self.get_test_loc('ics/bison-tests/atlocal')
expected = [
u'Copyright (c) 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_tests_c_at(self):
test_file = self.get_test_loc('ics/bison-tests/c++.at')
expected = [
u'Copyright (c) 2004, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_tests_calc_at(self):
test_file = self.get_test_loc('ics/bison-tests/calc.at')
expected = [
u'Copyright (c) 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_tests_conflicts_at(self):
test_file = self.get_test_loc('ics/bison-tests/conflicts.at')
expected = [
u'Copyright (c) 2002, 2003, 2004, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_tests_cxx_type_at(self):
test_file = self.get_test_loc('ics/bison-tests/cxx-type.at')
expected = [
u'Copyright (c) 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_tests_existing_at(self):
test_file = self.get_test_loc('ics/bison-tests/existing.at')
expected = [
u'Copyright (c) 1989, 1990, 1991, 1992, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_tests_glr_regression_at(self):
test_file = self.get_test_loc('ics/bison-tests/glr-regression.at')
expected = [
u'Copyright (c) 2002, 2003, 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_tests_headers_at(self):
test_file = self.get_test_loc('ics/bison-tests/headers.at')
expected = [
u'Copyright (c) 2001, 2002, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_tests_local_at(self):
test_file = self.get_test_loc('ics/bison-tests/local.at')
expected = [
u'Copyright (c) 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_tests_makefile_am(self):
test_file = self.get_test_loc('ics/bison-tests/Makefile.am')
expected = [
u'Copyright (c) 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_tests_output_at(self):
test_file = self.get_test_loc('ics/bison-tests/output.at')
expected = [
u'Copyright (c) 2000, 2001, 2002, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_tests_sets_at(self):
test_file = self.get_test_loc('ics/bison-tests/sets.at')
expected = [
u'Copyright (c) 2001, 2002, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_tests_synclines_at(self):
test_file = self.get_test_loc('ics/bison-tests/synclines.at')
expected = [
u'Copyright (c) 2002, 2004, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_tests_testsuite_at(self):
test_file = self.get_test_loc('ics/bison-tests/testsuite.at')
expected = [
u'Copyright (c) 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bison_tests_torture_at(self):
test_file = self.get_test_loc('ics/bison-tests/torture.at')
expected = [
u'Copyright (c) 2001, 2002, 2004, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_blktrace_blkiomon_c(self):
test_file = self.get_test_loc('ics/blktrace/blkiomon.c')
expected = [
u'Copyright IBM Corp. 2008',
]
check_detection(expected, test_file)
def test_ics_blktrace_blkiomon_h(self):
test_file = self.get_test_loc('ics/blktrace/blkiomon.h')
expected = [
u'Copyright IBM Corp. 2008',
]
check_detection(expected, test_file)
def test_ics_blktrace_blkparse_c(self):
test_file = self.get_test_loc('ics/blktrace/blkparse.c')
expected = [
u'Copyright (c) 2005 Jens Axboe <axboe@suse.de>',
u'Copyright (c) 2006 Jens Axboe <axboe@kernel.dk>',
]
check_detection(expected, test_file)
def test_ics_blktrace_blkrawverify_c(self):
test_file = self.get_test_loc('ics/blktrace/blkrawverify.c')
expected = [
u'Copyright (c) 2006 Alan D. Brunelle <Alan.Brunelle@hp.com>',
]
check_detection(expected, test_file)
def test_ics_blktrace_btrace(self):
test_file = self.get_test_loc('ics/blktrace/btrace')
expected = [
u'Copyright (c) 2005 Silicon Graphics, Inc.',
]
check_detection(expected, test_file)
def test_ics_blktrace_btrace_spec(self):
test_file = self.get_test_loc('ics/blktrace/btrace.spec')
expected = [
u'Copyright (c) 2005 SUSE LINUX Products GmbH, Nuernberg, Germany.',
]
check_detection(expected, test_file)
def test_ics_blktrace_jhash_h(self):
test_file = self.get_test_loc('ics/blktrace/jhash.h')
expected = [
u'Copyright (c) 2006. Bob Jenkins (bob_jenkins@burtleburtle.net)',
u'Copyright (c) 2009 Jozsef Kadlecsik (kadlec@blackhole.kfki.hu)',
]
check_detection(expected, test_file)
def test_ics_blktrace_notice(self):
test_file = self.get_test_loc('ics/blktrace/NOTICE')
expected = [
u'Copyright (c) 1997, 2002, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 2005 Jens Axboe <axboe@suse.de>',
u'Copyright (c) 2006 Alan D. Brunelle <Alan.Brunelle@hp.com>',
u'Copyright (c) 2006 Jens Axboe <axboe@kernel.dk>',
u'Copyright (c) 2006. Bob Jenkins (bob_jenkins@burtleburtle.net)',
u'Copyright (c) 2009 Jozsef Kadlecsik (kadlec@blackhole.kfki.hu)',
u'Copyright IBM Corp. 2008',
u'Copyright (c) 2005 SUSE LINUX Products GmbH, Nuernberg, Germany.',
u'Copyright (c) 2005 Silicon Graphics, Inc.',
u'Copyright (c) 1989, 1991 Free Software Foundation, Inc.',
u'copyrighted by the Free Software Foundation',
]
check_detection(expected, test_file)
def test_ics_blktrace_rbtree_c(self):
test_file = self.get_test_loc('ics/blktrace/rbtree.c')
expected = [
u'(c) 1999 Andrea Arcangeli <andrea@suse.de>',
u'(c) 2002 David Woodhouse <dwmw2@infradead.org>',
]
check_detection(expected, test_file)
def test_ics_blktrace_rbtree_h(self):
test_file = self.get_test_loc('ics/blktrace/rbtree.h')
expected = [
u'(c) 1999 Andrea Arcangeli <andrea@suse.de>',
]
check_detection(expected, test_file)
def test_ics_blktrace_strverscmp_c(self):
test_file = self.get_test_loc('ics/blktrace/strverscmp.c')
expected = [
u'Copyright (c) 1997, 2002, 2005 Free Software Foundation, Inc.',
u'Jean-Francois Bignolles <bignolle@ecoledoc.ibp.fr>, 1997.'
]
check_detection(expected, test_file)
def test_ics_blktrace_btreplay_btrecord_c(self):
test_file = self.get_test_loc('ics/blktrace-btreplay/btrecord.c')
expected = [
u'Copyright (c) 2007 Alan D. Brunelle <Alan.Brunelle@hp.com>',
]
check_detection(expected, test_file)
def test_ics_blktrace_btreplay_btrecord_h(self):
test_file = self.get_test_loc('ics/blktrace-btreplay/btrecord.h')
expected = [
u'Copyright (c) 2007 Alan D. Brunelle <Alan.Brunelle@hp.com>',
]
check_detection(expected, test_file)
def test_ics_blktrace_btreplay_doc_abstract_tex(self):
test_file = self.get_test_loc('ics/blktrace-btreplay-doc/abstract.tex')
expected = [
u'Copyright (c) 2007 Alan D. Brunelle <Alan.Brunelle@hp.com>',
]
check_detection(expected, test_file)
def test_ics_blktrace_btt_bno_plot_py(self):
test_file = self.get_test_loc('ics/blktrace-btt/bno_plot.py')
expected = [
u'(c) Copyright 2008 Hewlett-Packard Development Company, L.P.',
]
check_detection(expected, test_file)
def test_ics_blktrace_btt_btt_plot_py(self):
test_file = self.get_test_loc('ics/blktrace-btt/btt_plot.py')
expected = [
u'(c) Copyright 2009 Hewlett-Packard Development Company, L.P.',
]
check_detection(expected, test_file)
def test_ics_blktrace_btt_notice(self):
test_file = self.get_test_loc('ics/blktrace-btt/NOTICE')
expected = [
u'(c) Copyright 2007 Hewlett-Packard Development Company, L.P.',
u'(c) Copyright 2008 Hewlett-Packard Development Company, L.P.',
u'Copyright (c) 2006 Alan D. Brunelle <Alan.Brunelle@hp.com>',
u'Copyright (c) 2007 Alan D. Brunelle <Alan.Brunelle@hp.com>',
u'(c) Copyright 2008 Hewlett-Packard Development Company, L.P.',
u'(c) Copyright 2009 Hewlett-Packard Development Company, L.P.',
u'Copyright (c) 1989, 1991 Free Software Foundation, Inc.',
u'copyrighted by the Free Software Foundation',
]
check_detection(expected, test_file)
def test_ics_blktrace_btt_plat_c(self):
test_file = self.get_test_loc('ics/blktrace-btt/plat.c')
expected = [
u'(c) Copyright 2008 Hewlett-Packard Development Company, L.P. Alan D. Brunelle <alan.brunelle@hp.com>',
]
check_detection(expected, test_file)
def test_ics_blktrace_btt_q2d_c(self):
test_file = self.get_test_loc('ics/blktrace-btt/q2d.c')
expected = [
u'(c) Copyright 2007 Hewlett-Packard Development Company, L.P.',
]
check_detection(expected, test_file)
def test_ics_blktrace_doc_blktrace_tex(self):
test_file = self.get_test_loc('ics/blktrace-doc/blktrace.tex')
expected = [
u'Copyright (c) 2005, 2006 Alan D. Brunelle <Alan.Brunelle@hp.com>',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_android_mk(self):
test_file = self.get_test_loc('ics/bluetooth-bluez/Android.mk')
expected = [
u'Copyright (c) 2008 The Android Open Source Project',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_copying_lib(self):
test_file = self.get_test_loc('ics/bluetooth-bluez/COPYING.LIB')
expected = [
u'Copyright (c) 1991, 1999 Free Software Foundation, Inc.',
u'copyrighted by the Free Software Foundation',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_notice(self):
test_file = self.get_test_loc('ics/bluetooth-bluez/NOTICE')
expected = [
u'Copyright (c) 2004-2008 Marcel Holtmann <marcel@holtmann.org>',
u'Copyright (c) 2004-2009 Marcel Holtmann <marcel@holtmann.org>',
u'Copyright (c) 2006-2007 Nokia Corporation',
u'Copyright (c) 2006-2009 Nokia Corporation',
u'Copyright (c) 2008 Joao Paulo Rechi Vita',
u'Copyright (c) 2008-2009 Leonid Movshovich <event.riga@gmail.org>',
u'Copyright (c) 2008-2009 Nokia Corporation',
u'Copyright (c) 2009 Lennart Poettering',
u'Copyright (c) 2009 Intel Corporation',
u'Copyright (c) 2009 Joao Paulo Rechi Vita',
u'Copyright (c) 2009-2010 Motorola Inc.',
u'Copyright (c) 2004-2005 Henryk Ploetz <henryk@ploetzli.ch>',
u'Copyright (c) 2004-2008 Marcel Holtmann <marcel@holtmann.org>',
u'Copyright (c) 2004-2009 Marcel Holtmann <marcel@holtmann.org>',
u'Copyright (c) 2005-2006 Brad Midgley <bmidgley@xmission.com>',
u'Copyright (c) 2005-2008 Brad Midgley <bmidgley@xmission.com>',
u'Copyright (c) 2006-2007 Nokia Corporation',
u'Copyright (c) 1989, 1991 Free Software Foundation, Inc.',
u'copyrighted by the Free Software Foundation',
u'Copyright (c) 1991, 1999 Free Software Foundation, Inc.',
u'copyrighted by the Free Software Foundation',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_readme(self):
test_file = self.get_test_loc('ics/bluetooth-bluez/README')
expected = [
u'Copyright (c) 2000-2001 Qualcomm Incorporated',
u'Copyright (c) 2002-2003 Maxim Krasnyansky <maxk@qualcomm.com>',
u'Copyright (c) 2002-2010 Marcel Holtmann <marcel@holtmann.org>',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_attrib_att_c(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-attrib/att.c')
expected = [
u'Copyright (c) 2010 Nokia Corporation',
u'Copyright (c) 2010 Marcel Holtmann <marcel@holtmann.org>',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_attrib_gatttool_h(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-attrib/gatttool.h')
expected = [
u'Copyright (c) 2011 Nokia Corporation',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_audio_a2dp_codecs_h(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-audio/a2dp-codecs.h')
expected = [
u'Copyright (c) 2006-2010 Nokia Corporation',
u'Copyright (c) 2004-2010 Marcel Holtmann <marcel@holtmann.org>',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_audio_android_audio_hw_c(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-audio/android_audio_hw.c')
expected = [
u'Copyright (c) 2008-2011 The Android Open Source Project',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_audio_ctl_bluetooth_c(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-audio/ctl_bluetooth.c')
expected = [
u'Copyright (c) 2004-2010 Marcel Holtmann <marcel@holtmann.org>',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_audio_gateway_c(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-audio/gateway.c')
expected = [
u'Copyright (c) 2006-2010 Nokia Corporation',
u'Copyright (c) 2004-2010 Marcel Holtmann <marcel@holtmann.org>',
u'Copyright (c) 2008-2009 Leonid Movshovich <event.riga@gmail.org>',
u'Copyright (c) 2010 ProFUSION',
]
check_detection(expected, test_file)
@expectedFailure
def test_ics_bluetooth_bluez_audio_gateway_c_trail_name(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-audio/gateway.c')
expected = [
u'Copyright (c) 2006-2010 Nokia Corporation',
u'Copyright (c) 2004-2010 Marcel Holtmann <marcel@holtmann.org>',
u'Copyright (c) 2008-2009 Leonid Movshovich <event.riga@gmail.org>',
u'Copyright (c) 2010 ProFUSION embedded systems',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_audio_liba2dp_c(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-audio/liba2dp.c')
expected = [
u'Copyright (c) 2006-2007 Nokia Corporation',
u'Copyright (c) 2004-2008 Marcel Holtmann <marcel@holtmann.org>',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_audio_media_c(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-audio/media.c')
expected = [
u'Copyright (c) 2006-2007 Nokia Corporation',
u'Copyright (c) 2004-2009 Marcel Holtmann <marcel@holtmann.org>',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_audio_sink_c(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-audio/sink.c')
expected = [
u'Copyright (c) 2006-2007 Nokia Corporation',
u'Copyright (c) 2004-2009 Marcel Holtmann <marcel@holtmann.org>',
u'Copyright (c) 2009-2010 Motorola Inc.',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_audio_source_c(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-audio/source.c')
expected = [
u'Copyright (c) 2006-2010 Nokia Corporation',
u'Copyright (c) 2004-2010 Marcel Holtmann <marcel@holtmann.org>',
u'Copyright (c) 2009 Joao Paulo Rechi Vita',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_audio_telephony_maemo5_c(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-audio/telephony-maemo5.c')
expected = [
u'Copyright (c) 2008-2010 Nokia Corporation',
u'Copyright (c) 2004-2010 Marcel Holtmann <marcel@holtmann.org>',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_audio_telephony_ofono_c(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-audio/telephony-ofono.c')
expected = [
u'Copyright (c) 2009-2010 Intel Corporation',
u'Copyright (c) 2006-2009 Nokia Corporation',
u'Copyright (c) 2004-2010 Marcel Holtmann <marcel@holtmann.org>',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_btio_btio_c(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-btio/btio.c')
expected = [
u'Copyright (c) 2009-2010 Marcel Holtmann <marcel@holtmann.org>',
u'Copyright (c) 2009-2010 Nokia Corporation',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_common_android_bluez_c(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-common/android_bluez.c')
expected = [
u'Copyright (c) 2004-2009 Marcel Holtmann <marcel@holtmann.org>',
u'Copyright (c) 2009 The Android Open Source Project',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_compat_bnep_c(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-compat/bnep.c')
expected = [
u'Copyright (c) 2002-2003 Maxim Krasnyansky <maxk@qualcomm.com>',
u'Copyright (c) 2002-2010 Marcel Holtmann <marcel@holtmann.org>',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_compat_fakehid_c(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-compat/fakehid.c')
expected = [
u'Copyright (c) 2003-2010 Marcel Holtmann <marcel@holtmann.org>',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_doc_adapter_api_txt(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-doc/adapter-api.txt')
expected = [
u'Copyright (c) 2004-2010 Marcel Holtmann <marcel@holtmann.org>',
u'Copyright (c) 2005-2006 Johan Hedberg <johan.hedberg@nokia.com>',
u'Copyright (c) 2005-2006 Claudio Takahasi <claudio.takahasi@indt.org.br>',
u'Copyright (c) 2006-2007 Luiz von Dentz',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_doc_agent_api_txt(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-doc/agent-api.txt')
expected = [
u'Copyright (c) 2004-2010 Marcel Holtmann <marcel@holtmann.org>',
u'Copyright (c) 2005-2006 Johan Hedberg <johan.hedberg@nokia.com>',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_doc_attribute_api_txt(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-doc/attribute-api.txt')
expected = [
u'Copyright (c) 2004-2010 Marcel Holtmann <marcel@holtmann.org>',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_doc_audio_api_txt(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-doc/audio-api.txt')
expected = [
u'Copyright (c) 2004-2010 Marcel Holtmann <marcel@holtmann.org>',
u'Copyright (c) 2005-2007 Johan Hedberg <johan.hedberg@nokia.com>',
u'Copyright (c) 2005-2006 Brad Midgley <bmidgley@xmission.com>',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_doc_control_api_txt(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-doc/control-api.txt')
expected = [
u'Copyright (c) 2004-2010 Marcel Holtmann <marcel@holtmann.org>',
u'Copyright (c) 2007-2008 David Stockwell <dstockwell@frequency-one.com>',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_doc_mgmt_api_txt(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-doc/mgmt-api.txt')
expected = [
u'Copyright (c) 2008-2009 Marcel Holtmann <marcel@holtmann.org>',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_doc_oob_api_txt(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-doc/oob-api.txt')
expected = [
u'Copyright (c) 2011 Szymon Janc <szymon.janc@tieto.com>',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_doc_sap_api_txt(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-doc/sap-api.txt')
expected = [
u'Copyright (c) 2010 ST-Ericsson SA',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_gdbus_gdbus_h(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-gdbus/gdbus.h')
expected = [
u'Copyright (c) 2004-2011 Marcel Holtmann <marcel@holtmann.org>',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_gdbus_notice(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-gdbus/NOTICE')
expected = [
u'Copyright (c) 2004-2009 Marcel Holtmann <marcel@holtmann.org>',
u'Copyright (c) 1989, 1991 Free Software Foundation, Inc.',
u'copyrighted by the Free Software Foundation',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_health_hdp_c(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-health/hdp.c')
expected = [
u'Copyright (c) 2010 GSyC/LibreSoft, Universidad Rey Juan Carlos. Authors Santiago Carot Nemesio',
]
check_detection(expected, test_file)
@expectedFailure
def test_ics_bluetooth_bluez_health_hdp_c_extra_author(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-health/hdp.c')
expected = [
u'Copyright (c) 2010 GSyC/LibreSoft, Universidad Rey Juan Carlos.',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_health_mcap_c(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-health/mcap.c')
expected = [
u'Copyright (c) 2010 GSyC/LibreSoft, Universidad Rey Juan Carlos.',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_health_mcap_h(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-health/mcap.h')
expected = [
u'Copyright (c) 2010 GSyC/LibreSoft, Universidad Rey Juan Carlos.',
u'Copyright (c) 2010 Signove',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_lib_bluetooth_c(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-lib/bluetooth.c')
expected = [
u'Copyright (c) 2000-2001 Qualcomm Incorporated',
u'Copyright (c) 2002-2003 Maxim Krasnyansky <maxk@qualcomm.com>',
u'Copyright (c) 2002-2010 Marcel Holtmann <marcel@holtmann.org>',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_lib_notice(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-lib/NOTICE')
expected = [
u'Copyright (c) 2000-2001 Qualcomm Incorporated',
u'Copyright (c) 2001-2002 Nokia Corporation',
u'Copyright (c) 2002-2003 Maxim Krasnyansky <maxk@qualcomm.com>',
u'Copyright (c) 2002-2003 Stephen Crane <steve.crane@rococosoft.com>',
u'Copyright (c) 2002-2009 Marcel Holtmann <marcel@holtmann.org>',
u'Copyright (c) 2001-2002 Nokia Corporation',
u'Copyright (c) 2002-2003 Maxim Krasnyansky <maxk@qualcomm.com>',
u'Copyright (c) 2002-2009 Marcel Holtmann <marcel@holtmann.org>',
u'Copyright (c) 2002-2003 Stephen Crane <steve.crane@rococosoft.com>',
u'Copyright (c) 1989, 1991 Free Software Foundation, Inc.',
u'copyrighted by the Free Software Foundation',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_lib_sdp_c(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-lib/sdp.c')
expected = [
u'Copyright (c) 2001-2002 Nokia Corporation',
u'Copyright (c) 2002-2003 Maxim Krasnyansky <maxk@qualcomm.com>',
u'Copyright (c) 2002-2010 Marcel Holtmann <marcel@holtmann.org>',
u'Copyright (c) 2002-2003 Stephen Crane <steve.crane@rococosoft.com>',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_lib_uuid_c(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-lib/uuid.c')
expected = [
u'Copyright (c) 2011 Nokia Corporation',
u'Copyright (c) 2011 Marcel Holtmann <marcel@holtmann.org>',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_lib_bluetooth_cmtp_h(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-lib-bluetooth/cmtp.h')
expected = [
u'Copyright (c) 2002-2010 Marcel Holtmann <marcel@holtmann.org>',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_plugins_builtin_h(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-plugins/builtin.h')
expected = [
u'Copyright (c) 2004-2009 Marcel Holtmann <marcel@holtmann.org>',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_plugins_dbusoob_c(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-plugins/dbusoob.c')
expected = [
u'Copyright (c) 2011 ST-Ericsson SA',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_sap_main_c_trail_institut(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-sap/main.c')
expected = [
u'Copyright (c) 2010 Instituto Nokia de Tecnologia - INdT',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_sap_sap_h_trail_institut(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-sap/sap.h')
expected = [
u'Copyright (c) 2010 Instituto Nokia de Tecnologia - INdT',
u'Copyright (c) 2010 ST-Ericsson SA',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_sap_sap_dummy_c(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-sap/sap-dummy.c')
expected = [
u'Copyright (c) 2010 ST-Ericsson SA',
u'Copyright (c) 2011 Tieto Poland',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_sap_server_c_trail_institut(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-sap/server.c')
expected = [
u'Copyright (c) 2010 Instituto Nokia de Tecnologia - INdT',
u'Copyright (c) 2010 ST-Ericsson SA',
u'Copyright (c) 2011 Tieto Poland',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_sap_server_h(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-sap/server.h')
expected = [
u'Copyright (c) 2010 ST-Ericsson SA',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_sbc_formats_h(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-sbc/formats.h')
expected = [
u'Copyright (c) 2004-2010 Marcel Holtmann <marcel@holtmann.org>',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_sbc_sbc_c(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-sbc/sbc.c')
expected = [
u'Copyright (c) 2008-2010 Nokia Corporation',
u'Copyright (c) 2004-2010 Marcel Holtmann <marcel@holtmann.org>',
u'Copyright (c) 2004-2005 Henryk Ploetz <henryk@ploetzli.ch>',
u'Copyright (c) 2005-2008 Brad Midgley <bmidgley@xmission.com>',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_sbc_sbc_h(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-sbc/sbc.h')
expected = [
u'Copyright (c) 2008-2010 Nokia Corporation',
u'Copyright (c) 2004-2010 Marcel Holtmann <marcel@holtmann.org>',
u'Copyright (c) 2004-2005 Henryk Ploetz <henryk@ploetzli.ch>',
u'Copyright (c) 2005-2006 Brad Midgley <bmidgley@xmission.com>',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_sbc_sbc_primitives_iwmmxt_c(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-sbc/sbc_primitives_iwmmxt.c')
expected = [
u'Copyright (c) 2010 Keith Mok <ek9852@gmail.com>',
u'Copyright (c) 2008-2010 Nokia Corporation',
u'Copyright (c) 2004-2010 Marcel Holtmann <marcel@holtmann.org>',
u'Copyright (c) 2004-2005 Henryk Ploetz <henryk@ploetzli.ch>',
u'Copyright (c) 2005-2006 Brad Midgley <bmidgley@xmission.com>',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_sbc_sbcdec_c(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-sbc/sbcdec.c')
expected = [
u'Copyright (c) 2008-2010 Nokia Corporation',
u'Copyright (c) 2004-2010 Marcel Holtmann <marcel@holtmann.org>',
u'(c) 2004-2010 Marcel Holtmann',
]
check_detection(expected, test_file)
@expectedFailure
def test_ics_bluetooth_bluez_sbc_sbcdec_c_lead_copy(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-sbc/sbcdec.c')
expected = [
u'Copyright (c) 2008-2010 Nokia Corporation',
u'Copyright (c) 2004-2010 Marcel Holtmann <marcel@holtmann.org>',
u'Copyright (c) 2004-2010 Marcel Holtmann',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_sbc_sbctester_c(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-sbc/sbctester.c')
expected = [
u'Copyright (c) 2008-2010 Nokia Corporation',
u'Copyright (c) 2007-2010 Marcel Holtmann <marcel@holtmann.org>',
u'Copyright (c) 2007-2008 Frederic Dalleau <fdalleau@free.fr>',
u'(c) 2007-2010 Marcel Holtmann',
u'(c) 2007-2008 Frederic Dalleau',
]
check_detection(expected, test_file)
@expectedFailure
def test_ics_bluetooth_bluez_sbc_sbctester_c_lead_copy_lead_copy(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-sbc/sbctester.c')
expected = [
u'Copyright (c) 2008-2010 Nokia Corporation',
u'Copyright (c) 2007-2010 Marcel Holtmann <marcel@holtmann.org>',
u'Copyright (c) 2007-2008 Frederic Dalleau <fdalleau@free.fr>',
u'Copyright (c) 2007-2010 Marcel Holtmann',
u'Copyright (c) 2007-2008 Frederic Dalleau',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_src_dbus_common_c(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-src/dbus-common.c')
expected = [
u'Copyright (c) 2006-2010 Nokia Corporation',
u'Copyright (c) 2004-2010 Marcel Holtmann <marcel@holtmann.org>',
u'Copyright (c) 2005-2007 Johan Hedberg <johan.hedberg@nokia.com>',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_src_error_c(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-src/error.c')
expected = [
u'Copyright (c) 2006-2010 Nokia Corporation',
u'Copyright (c) 2004-2010 Marcel Holtmann <marcel@holtmann.org>',
u'Copyright (c) 2007-2008 Fabien Chevalier <fabchevalier@free.fr>',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_src_notice(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-src/NOTICE')
expected = [
u'Copyright (c) 2000-2001 Qualcomm Incorporated',
u'Copyright (c) 2002-2003 Maxim Krasnyansky <maxk@qualcomm.com>',
u'Copyright (c) 2002-2009 Marcel Holtmann <marcel@holtmann.org>',
u'Copyright (c) 1989, 1991 Free Software Foundation, Inc.',
u'copyrighted by the Free Software Foundation',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_src_sdp_xml_c(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-src/sdp-xml.c')
expected = [
u'Copyright (c) 2005-2010 Marcel Holtmann <marcel@holtmann.org>',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_test_attest_c(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-test/attest.c')
expected = [
u'Copyright (c) 2001-2010 Marcel Holtmann <marcel@holtmann.org>',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_test_avtest_c(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-test/avtest.c')
expected = [
u'Copyright (c) 2007-2010 Marcel Holtmann <marcel@holtmann.org>',
u'Copyright (c) 2009-2010 Nokia Corporation',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_test_gaptest_c(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-test/gaptest.c')
expected = [
u'Copyright (c) 2007-2010 Marcel Holtmann <marcel@holtmann.org>',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_test_hciemu_c(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-test/hciemu.c')
expected = [
u'Copyright (c) 2000-2002 Maxim Krasnyansky <maxk@qualcomm.com>',
u'Copyright (c) 2003-2010 Marcel Holtmann <marcel@holtmann.org>',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_test_ipctest_c(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-test/ipctest.c')
expected = [
u'Copyright (c) 2006-2010 Nokia Corporation',
u'Copyright (c) 2004-2010 Marcel Holtmann <marcel@holtmann.org>',
u'Copyright (c) 2009 Lennart Poettering',
u'Copyright (c) 2008 Joao Paulo Rechi Vita',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_test_notice(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-test/NOTICE')
expected = [
u'Copyright (c) 2000-2001 Qualcomm Incorporated',
u'Copyright (c) 2000-2002 Maxim Krasnyansky <maxk@qualcomm.com>',
u'Copyright (c) 2001-2009 Marcel Holtmann <marcel@holtmann.org>',
u'Copyright (c) 2002-2003 Maxim Krasnyansky <maxk@qualcomm.com>',
u'Copyright (c) 2002-2009 Marcel Holtmann <marcel@holtmann.org>',
u'Copyright (c) 2003-2009 Marcel Holtmann <marcel@holtmann.org>',
u'Copyright (c) 2004-2009 Marcel Holtmann <marcel@holtmann.org>',
u'Copyright (c) 2005-2009 Marcel Holtmann <marcel@holtmann.org>',
u'Copyright (c) 2007-2009 Marcel Holtmann <marcel@holtmann.org>',
u'Copyright (c) 2009 Marcel Holtmann <marcel@holtmann.org>',
u'Copyright (c) 2009 Nokia Corporation',
u'Copyright (c) 1989, 1991 Free Software Foundation, Inc.',
u'copyrighted by the Free Software Foundation',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_tools_hciattach_ath3k_c(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-tools/hciattach_ath3k.c')
expected = [
u'Copyright (c) 2009-2010 Atheros Communications Inc.',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_tools_hciattach_qualcomm_c(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-tools/hciattach_qualcomm.c')
expected = [
u'Copyright (c) 2005-2010 Marcel Holtmann <marcel@holtmann.org>',
u'Copyright (c) 2010, Code Aurora Forum.',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_tools_hciattach_ti_c(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-tools/hciattach_ti.c')
expected = [
u'Copyright (c) 2007-2008 Texas Instruments, Inc.',
u'Copyright (c) 2005-2010 Marcel Holtmann <marcel@holtmann.org>',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_tools_hid2hci_c(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-tools/hid2hci.c')
expected = [
u'Copyright (c) 2003-2010 Marcel Holtmann <marcel@holtmann.org>',
u'Copyright (c) 2008-2009 Mario Limonciello <mario_limonciello@dell.com>',
u'Copyright (c) 2009-2011 Kay Sievers <kay.sievers@vrfy.org>',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_tools_lexer_c(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-tools/lexer.c')
expected = [
u'Copyright (c) 2002-2008 Marcel Holtmann <marcel@holtmann.org>',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_tools_notice(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-tools/NOTICE')
expected = [
u'Copyright (c) 2000-2001 Qualcomm Incorporated',
u'Copyright (c) 2001-2002 Nokia Corporation',
u'Copyright (c) 2002-2003 Jean Tourrilhes <jt@hpl.hp.com>',
u'Copyright (c) 2002-2003 Maxim Krasnyansky <maxk@qualcomm.com>',
u'Copyright (c) 2002-2003 Stephen Crane <steve.crane@rococosoft.com>',
u'Copyright (c) 2002-2009 Marcel Holtmann <marcel@holtmann.org>',
u'Copyright (c) 2003-2009 Marcel Holtmann <marcel@holtmann.org>',
u'Copyright (c) 2004-2009 Marcel Holtmann <marcel@holtmann.org>',
u'Copyright (c) 2005-2009 Marcel Holtmann <marcel@holtmann.org>',
u'Copyright (c) 2006-2007 Nokia Corporation',
u'Copyright (c) 2007-2008 Texas Instruments, Inc.',
u'Copyright (c) 1989, 1991 Free Software Foundation, Inc.',
u'copyrighted by the Free Software Foundation',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_tools_sdptool_c(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-tools/sdptool.c')
expected = [
u'Copyright (c) 2001-2002 Nokia Corporation',
u'Copyright (c) 2002-2003 Maxim Krasnyansky <maxk@qualcomm.com>',
u'Copyright (c) 2002-2010 Marcel Holtmann <marcel@holtmann.org>',
u'Copyright (c) 2002-2003 Stephen Crane <steve.crane@rococosoft.com>',
u'Copyright (c) 2002-2003 Jean Tourrilhes <jt@hpl.hp.com>',
]
check_detection(expected, test_file)
def test_ics_bluetooth_bluez_tools_ubcsp_c(self):
test_file = self.get_test_loc('ics/bluetooth-bluez-tools/ubcsp.c')
expected = [
u'Copyright (c) 2000-2005 CSR Ltd.',
]
check_detection(expected, test_file)
def test_ics_bluetooth_glib_acinclude_m4(self):
test_file = self.get_test_loc('ics/bluetooth-glib/acinclude.m4')
expected = [
u'Copyright (c) 2001-2002 Free Software Foundation, Inc.',
u'Copyright (c) 1999-2003 Free Software Foundation, Inc.',
u'Copyright (c) 2002 Free Software Foundation, Inc.',
u'Copyright (c) 2002 Free Software Foundation, Inc.',
u'Copyright (c) 2003 Free Software Foundation, Inc.',
u'Copyright (c) 1997-2002 Free Software Foundation, Inc.',
u'Copyright (c) 1997-2002 Free Software Foundation, Inc.',
u'Copyright (c) 1997-2002 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bluetooth_glib_copying(self):
test_file = self.get_test_loc('ics/bluetooth-glib/COPYING')
expected = [
u'Copyright (c) 1991 Free Software Foundation, Inc.',
u'copyrighted by the Free Software Foundation',
]
check_detection(expected, test_file)
def test_ics_bluetooth_glib_glib_h(self):
test_file = self.get_test_loc('ics/bluetooth-glib/glib.h')
expected = [
u'Copyright (c) 1995-1997 Peter Mattis, Spencer Kimball and Josh MacDonald',
]
check_detection(expected, test_file)
def test_ics_bluetooth_glib_glib_gettextize_in(self):
test_file = self.get_test_loc('ics/bluetooth-glib/glib-gettextize.in')
expected = [
u'Copyright (c) 1995-1998, 2000, 2001 Free Software Foundation, Inc.',
u'Copyright (c) 1995-1998, 2000, 2001 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_bluetooth_glib_docs_reference_glib_regex_syntax_sgml(self):
test_file = self.get_test_loc('ics/bluetooth-glib-docs-reference-glib/regex-syntax.sgml')
expected = [
u'Copyright (c) 1997-2006 University of Cambridge.',
]
check_detection(expected, test_file)
def test_ics_bluetooth_glib_gio_gappinfo_c(self):
test_file = self.get_test_loc('ics/bluetooth-glib-gio/gappinfo.c')
expected = [
u'Copyright (c) 2006-2007 Red Hat, Inc.',
]
check_detection(expected, test_file)
def test_ics_bluetooth_glib_gio_gbufferedinputstream_c(self):
test_file = self.get_test_loc('ics/bluetooth-glib-gio/gbufferedinputstream.c')
expected = [
u'Copyright (c) 2006-2007 Red Hat, Inc.',
u'Copyright (c) 2007 Jurg Billeter',
]
check_detection(expected, test_file)
def test_ics_bluetooth_glib_gio_gdatainputstream_c(self):
test_file = self.get_test_loc('ics/bluetooth-glib-gio/gdatainputstream.c')
expected = [
u'Copyright (c) 2006-2007 Red Hat, Inc.',
u'Copyright (c) 2007 Jurg Billeter',
u'Copyright (c) 2009 Codethink Limited',
]
check_detection(expected, test_file)
def test_ics_bluetooth_glib_gio_gdesktopappinfo_c(self):
test_file = self.get_test_loc('ics/bluetooth-glib-gio/gdesktopappinfo.c')
expected = [
u'Copyright (c) 2006-2007 Red Hat, Inc.',
u'Copyright (c) 2007 Ryan Lortie',
]
check_detection(expected, test_file)
def test_ics_bluetooth_glib_gio_gemblem_c(self):
test_file = self.get_test_loc('ics/bluetooth-glib-gio/gemblem.c')
expected = [
u'Copyright (c) 2008 Clemens N. Buss <cebuzz@gmail.com>',
]
check_detection(expected, test_file)
def test_ics_bluetooth_glib_gio_gmount_c(self):
test_file = self.get_test_loc('ics/bluetooth-glib-gio/gmount.c')
expected = [
u'Copyright (c) 2006-2008 Red Hat, Inc.',
]
check_detection(expected, test_file)
def test_ics_bluetooth_glib_gio_gwin32mount_c(self):
test_file = self.get_test_loc('ics/bluetooth-glib-gio/gwin32mount.c')
expected = [
u'Copyright (c) 2006-2007 Red Hat, Inc.',
u'Copyright (c) 2008 Hans Breuer',
]
check_detection(expected, test_file)
def test_ics_bluetooth_glib_gio_fam_fam_module_c(self):
test_file = self.get_test_loc('ics/bluetooth-glib-gio-fam/fam-module.c')
expected = [
u'Copyright (c) 2006-2007 Red Hat, Inc.',
u'Copyright (c) 2007 Sebastian Droge.',
]
check_detection(expected, test_file)
def test_ics_bluetooth_glib_gio_fen_fen_data_c(self):
test_file = self.get_test_loc('ics/bluetooth-glib-gio-fen/fen-data.c')
expected = [
u'Copyright (c) 2008 Sun Microsystems, Inc.',
]
check_detection(expected, test_file)
def test_ics_bluetooth_glib_gio_fen_gfendirectorymonitor_c(self):
test_file = self.get_test_loc('ics/bluetooth-glib-gio-fen/gfendirectorymonitor.c')
expected = [
u'Copyright (c) 2006-2007 Red Hat, Inc.',
u'Copyright (c) 2007 Sebastian Droge.',
u'Copyright (c) 2008 Sun Microsystems, Inc.',
]
check_detection(expected, test_file)
def test_ics_bluetooth_glib_gio_inotify_inotify_diag_c(self):
test_file = self.get_test_loc('ics/bluetooth-glib-gio-inotify/inotify-diag.c')
expected = [
u'Copyright (c) 2005 John McCutchan',
]
check_detection(expected, test_file)
def test_ics_bluetooth_glib_gio_inotify_inotify_diag_h(self):
test_file = self.get_test_loc('ics/bluetooth-glib-gio-inotify/inotify-diag.h')
expected = [
u'Copyright (c) 2006 John McCutchan <john@johnmccutchan.com>',
]
check_detection(expected, test_file)
def test_ics_bluetooth_glib_gio_inotify_inotify_helper_c(self):
test_file = self.get_test_loc('ics/bluetooth-glib-gio-inotify/inotify-helper.c')
expected = [
u'Copyright (c) 2007 John McCutchan',
]
check_detection(expected, test_file)
def test_ics_bluetooth_glib_gio_inotify_inotify_path_c(self):
test_file = self.get_test_loc('ics/bluetooth-glib-gio-inotify/inotify-path.c')
expected = [
u'Copyright (c) 2006 John McCutchan',
]
check_detection(expected, test_file)
def test_ics_bluetooth_glib_gio_tests_buffered_input_stream_c(self):
test_file = self.get_test_loc('ics/bluetooth-glib-gio-tests/buffered-input-stream.c')
expected = [
u'Copyright (c) 2008 Red Hat, Inc. Authors',
]
check_detection(expected, test_file)
@expectedFailure
def test_ics_bluetooth_glib_gio_tests_buffered_input_stream_c_extra_author(self):
test_file = self.get_test_loc('ics/bluetooth-glib-gio-tests/buffered-input-stream.c')
expected = [
u'Copyright (c) 2008 Red Hat, Inc.',
]
check_detection(expected, test_file)
def test_ics_bluetooth_glib_gio_tests_desktop_app_info_c(self):
test_file = self.get_test_loc('ics/bluetooth-glib-gio-tests/desktop-app-info.c')
expected = [
u'Copyright (c) 2008 Red Hat, Inc',
]
check_detection(expected, test_file)
def test_ics_bluetooth_glib_gio_tests_filter_streams_c(self):
test_file = self.get_test_loc('ics/bluetooth-glib-gio-tests/filter-streams.c')
expected = [
u'Copyright (c) 2009 Codethink Limited',
]
check_detection(expected, test_file)
def test_ics_bluetooth_glib_gio_tests_memory_input_stream_c(self):
test_file = self.get_test_loc('ics/bluetooth-glib-gio-tests/memory-input-stream.c')
expected = [
u'Copyright (c) 2007 Imendio AB Authors Tim Janik',
]
check_detection(expected, test_file)
@expectedFailure
def test_ics_bluetooth_glib_gio_tests_memory_input_stream_c_extra_author(self):
test_file = self.get_test_loc('ics/bluetooth-glib-gio-tests/memory-input-stream.c')
expected = [
u'Copyright (c) 2007 Imendio AB',
]
check_detection(expected, test_file)
def test_ics_bluetooth_glib_gio_tests_simple_async_result_c(self):
test_file = self.get_test_loc('ics/bluetooth-glib-gio-tests/simple-async-result.c')
expected = [
u'Copyright (c) 2009 Ryan Lortie',
]
check_detection(expected, test_file)
def test_ics_bluetooth_glib_gio_win32_gwinhttpfile_c(self):
test_file = self.get_test_loc('ics/bluetooth-glib-gio-win32/gwinhttpfile.c')
expected = [
u'Copyright (c) 2006-2007 Red Hat, Inc.',
u'Copyright (c) 2008 Novell, Inc.',
]
check_detection(expected, test_file)
def test_ics_bluetooth_glib_gio_win32_winhttp_h(self):
test_file = self.get_test_loc('ics/bluetooth-glib-gio-win32/winhttp.h')
expected = [
u'Copyright (c) 2007 Francois Gouget',
]
check_detection(expected, test_file)
def test_ics_bluetooth_glib_gio_xdgmime_test_mime_c(self):
test_file = self.get_test_loc('ics/bluetooth-glib-gio-xdgmime/test-mime.c')
expected = [
u'Copyright (c) 2003,2004 Red Hat, Inc.',
u'Copyright (c) 2003,2004 Jonathan Blandford <jrb@alum.mit.edu>',
]
check_detection(expected, test_file)
def test_ics_bluetooth_glib_gio_xdgmime_xdgmime_h(self):
test_file = self.get_test_loc('ics/bluetooth-glib-gio-xdgmime/xdgmime.h')
expected = [
u'Copyright (c) 2003 Red Hat, Inc.',
u'Copyright (c) 2003 Jonathan Blandford <jrb@alum.mit.edu>',
]
check_detection(expected, test_file)
def test_ics_bluetooth_glib_gio_xdgmime_xdgmimealias_c(self):
test_file = self.get_test_loc('ics/bluetooth-glib-gio-xdgmime/xdgmimealias.c')
expected = [
u'Copyright (c) 2004 Red Hat, Inc.',
u'Copyright (c) 2004 Matthias Clasen <mclasen@redhat.com>',
]
check_detection(expected, test_file)
def test_ics_bluetooth_glib_gio_xdgmime_xdgmimealias_h(self):
test_file = self.get_test_loc('ics/bluetooth-glib-gio-xdgmime/xdgmimealias.h')
expected = [
u'Copyright (c) 2004 Red Hat, Inc.',
]
check_detection(expected, test_file)
def test_ics_bluetooth_glib_gio_xdgmime_xdgmimecache_c(self):
test_file = self.get_test_loc('ics/bluetooth-glib-gio-xdgmime/xdgmimecache.c')
expected = [
u'Copyright (c) 2005 Matthias Clasen <mclasen@redhat.com>',
]
check_detection(expected, test_file)
def test_ics_bluetooth_glib_gio_xdgmime_xdgmimeicon_c(self):
test_file = self.get_test_loc('ics/bluetooth-glib-gio-xdgmime/xdgmimeicon.c')
expected = [
u'Copyright (c) 2008 Red Hat, Inc.',
]
check_detection(expected, test_file)
def test_ics_bluetooth_glib_gio_xdgmime_xdgmimemagic_c(self):
test_file = self.get_test_loc('ics/bluetooth-glib-gio-xdgmime/xdgmimemagic.c')
expected = [
u'Copyright (c) 2003 Red Hat, Inc.',
u'Copyright (c) 2003 Jonathan Blandford <jrb@alum.mit.edu>',
]
check_detection(expected, test_file)
def test_ics_bluetooth_glib_glib_gatomic_c(self):
test_file = self.get_test_loc('ics/bluetooth-glib-glib/gatomic.c')
expected = [
u'Copyright (c) 1995-1997 Peter Mattis, Spencer Kimball and Josh MacDonald',
u'Copyright (c) 2003 Sebastian Wilhelmi',
u'Copyright (c) 2007 Nokia Corporation',
]
check_detection(expected, test_file)
# --- ICS corpus: glib core (bluetooth-glib-glib/) copyright detection ---
# Each test runs detection on one corpus file and checks the exact
# copyright statements found, in order.

def test_ics_bluetooth_glib_glib_gatomic_h(self):
    check_detection(
        [
            u'Copyright (c) 1995-1997 Peter Mattis, Spencer Kimball and Josh MacDonald',
            u'Copyright (c) 2003 Sebastian Wilhelmi',
        ],
        self.get_test_loc('ics/bluetooth-glib-glib/gatomic.h'),
    )

def test_ics_bluetooth_glib_glib_gbase64_h(self):
    check_detection(
        [u'Copyright (c) 2005 Alexander Larsson <alexl@redhat.com>'],
        self.get_test_loc('ics/bluetooth-glib-glib/gbase64.h'),
    )

def test_ics_bluetooth_glib_glib_gbookmarkfile_h(self):
    check_detection(
        [u'Copyright (c) 2005-2006 Emmanuele Bassi'],
        self.get_test_loc('ics/bluetooth-glib-glib/gbookmarkfile.h'),
    )

def test_ics_bluetooth_glib_glib_gbsearcharray_h(self):
    check_detection(
        [u'Copyright (c) 2000-2003 Tim Janik'],
        self.get_test_loc('ics/bluetooth-glib-glib/gbsearcharray.h'),
    )

def test_ics_bluetooth_glib_glib_gchecksum_h(self):
    check_detection(
        [u'Copyright (c) 2007 Emmanuele Bassi <ebassi@gnome.org>'],
        self.get_test_loc('ics/bluetooth-glib-glib/gchecksum.h'),
    )

def test_ics_bluetooth_glib_glib_gconvert_c(self):
    check_detection(
        [u'Copyright Red Hat Inc., 2000'],
        self.get_test_loc('ics/bluetooth-glib-glib/gconvert.c'),
    )

def test_ics_bluetooth_glib_glib_gdataset_c(self):
    check_detection(
        [
            u'Copyright (c) 1995-1997 Peter Mattis, Spencer Kimball and Josh MacDonald',
            u'Copyright (c) 1998 Tim Janik',
        ],
        self.get_test_loc('ics/bluetooth-glib-glib/gdataset.c'),
    )

def test_ics_bluetooth_glib_glib_gdatasetprivate_h(self):
    check_detection(
        [u'Copyright (c) 2005 Red Hat'],
        self.get_test_loc('ics/bluetooth-glib-glib/gdatasetprivate.h'),
    )

def test_ics_bluetooth_glib_glib_gdir_c(self):
    check_detection(
        [
            u'Copyright (c) 1995-1997 Peter Mattis, Spencer Kimball and Josh MacDonald',
            u'Copyright 2001 Hans Breuer',
            u'Copyright 2004 Tor Lillqvist',
        ],
        self.get_test_loc('ics/bluetooth-glib-glib/gdir.c'),
    )

def test_ics_bluetooth_glib_glib_gdir_h(self):
    check_detection(
        [
            u'Copyright (c) 1995-1997 Peter Mattis, Spencer Kimball and Josh MacDonald',
            u'Copyright 2001 Hans Breuer',
        ],
        self.get_test_loc('ics/bluetooth-glib-glib/gdir.h'),
    )

def test_ics_bluetooth_glib_glib_gerror_h(self):
    check_detection(
        [u'Copyright 2000 Red Hat, Inc.'],
        self.get_test_loc('ics/bluetooth-glib-glib/gerror.h'),
    )

def test_ics_bluetooth_glib_glib_gfileutils_c(self):
    check_detection(
        [
            u'Copyright 2000 Red Hat, Inc.',
            u'Copyright (c) 1991,92,93,94,95,96,97,98,99 Free Software Foundation, Inc.',
        ],
        self.get_test_loc('ics/bluetooth-glib-glib/gfileutils.c'),
    )

def test_ics_bluetooth_glib_glib_gi18n_lib_h(self):
    check_detection(
        [u'Copyright (c) 1995-1997, 2002 Peter Mattis, Red Hat, Inc.'],
        self.get_test_loc('ics/bluetooth-glib-glib/gi18n-lib.h'),
    )

def test_ics_bluetooth_glib_glib_giochannel_c(self):
    check_detection(
        [
            u'Copyright (c) 1995-1997 Peter Mattis, Spencer Kimball and Josh MacDonald',
            u'Copyright 1998 Owen Taylor',
        ],
        self.get_test_loc('ics/bluetooth-glib-glib/giochannel.c'),
    )

def test_ics_bluetooth_glib_glib_gkeyfile_c(self):
    check_detection(
        [u'Copyright 2004 Red Hat, Inc.'],
        self.get_test_loc('ics/bluetooth-glib-glib/gkeyfile.c'),
    )

def test_ics_bluetooth_glib_glib_gkeyfile_h(self):
    check_detection(
        [u'Copyright 2004 Red Hat, Inc.'],
        self.get_test_loc('ics/bluetooth-glib-glib/gkeyfile.h'),
    )

def test_ics_bluetooth_glib_glib_glib_object_h(self):
    check_detection(
        [u'Copyright (c) 1998, 1999, 2000 Tim Janik and Red Hat, Inc.'],
        self.get_test_loc('ics/bluetooth-glib-glib/glib-object.h'),
    )

def test_ics_bluetooth_glib_glib_gmain_h(self):
    check_detection(
        [u'Copyright (c) 1998-2000 Red Hat, Inc.'],
        self.get_test_loc('ics/bluetooth-glib-glib/gmain.h'),
    )

def test_ics_bluetooth_glib_glib_gmappedfile_h(self):
    check_detection(
        [u'Copyright 2005 Matthias Clasen'],
        self.get_test_loc('ics/bluetooth-glib-glib/gmappedfile.h'),
    )

def test_ics_bluetooth_glib_glib_goption_c(self):
    check_detection(
        [
            u'Copyright (c) 1999, 2003 Red Hat Software',
            u'Copyright (c) 2004 Anders Carlsson <andersca@gnome.org>',
        ],
        self.get_test_loc('ics/bluetooth-glib-glib/goption.c'),
    )

def test_ics_bluetooth_glib_glib_goption_h(self):
    check_detection(
        [u'Copyright (c) 2004 Anders Carlsson <andersca@gnome.org>'],
        self.get_test_loc('ics/bluetooth-glib-glib/goption.h'),
    )

def test_ics_bluetooth_glib_glib_gpattern_c(self):
    check_detection(
        [u'Copyright (c) 1995-1997, 1999 Peter Mattis, Red Hat, Inc.'],
        self.get_test_loc('ics/bluetooth-glib-glib/gpattern.c'),
    )

def test_ics_bluetooth_glib_glib_gpoll_c(self):
    check_detection(
        [
            u'Copyright (c) 1995-1997 Peter Mattis, Spencer Kimball and Josh MacDonald',
            u'Copyright 1998 Owen Taylor',
            u'Copyright 2008 Red Hat, Inc.',
            u'Copyright (c) 1994, 1996, 1997 Free Software Foundation, Inc.',
        ],
        self.get_test_loc('ics/bluetooth-glib-glib/gpoll.c'),
    )

def test_ics_bluetooth_glib_glib_gqsort_c(self):
    check_detection(
        [
            u'Copyright (c) 1991, 1992, 1996, 1997,1999,2004 Free Software Foundation, Inc.',
            u'Copyright (c) 2000 Eazel, Inc.',
            u'Copyright (c) 1995-1997 Peter Mattis, Spencer Kimball and Josh MacDonald',
        ],
        self.get_test_loc('ics/bluetooth-glib-glib/gqsort.c'),
    )

def test_ics_bluetooth_glib_glib_gregex_h(self):
    check_detection(
        [
            u'Copyright (c) 1999, 2000 Scott Wimer',
            u'Copyright (c) 2004, Matthias Clasen <mclasen@redhat.com>',
            u'Copyright (c) 2005 - 2007, Marco Barisione <marco@barisione.org>',
        ],
        self.get_test_loc('ics/bluetooth-glib-glib/gregex.h'),
    )

def test_ics_bluetooth_glib_glib_gsequence_h(self):
    check_detection(
        [u'Copyright (c) 2002, 2003, 2004, 2005, 2006, 2007 Soeren Sandmann (sandmann@daimi.au.dk)'],
        self.get_test_loc('ics/bluetooth-glib-glib/gsequence.h'),
    )

def test_ics_bluetooth_glib_glib_gslice_c(self):
    check_detection(
        [u'Copyright (c) 2005 Tim Janik'],
        self.get_test_loc('ics/bluetooth-glib-glib/gslice.c'),
    )

def test_ics_bluetooth_glib_glib_gstdio_c(self):
    check_detection(
        [u'Copyright 2004 Tor Lillqvist'],
        self.get_test_loc('ics/bluetooth-glib-glib/gstdio.c'),
    )

def test_ics_bluetooth_glib_glib_gstrfuncs_c(self):
    check_detection(
        [
            u'Copyright (c) 1995-1997 Peter Mattis, Spencer Kimball and Josh MacDonald',
            u'Copyright (c) 1991,92,94,95,96,97,98,99,2000,01,02 Free Software Foundation, Inc.',
        ],
        self.get_test_loc('ics/bluetooth-glib-glib/gstrfuncs.c'),
    )

def test_ics_bluetooth_glib_glib_gstring_c(self):
    check_detection(
        [u'Copyright (c) 1995-1997 Peter Mattis, Spencer Kimball and Josh MacDonald'],
        self.get_test_loc('ics/bluetooth-glib-glib/gstring.c'),
    )

def test_ics_bluetooth_glib_glib_gtestutils_c(self):
    check_detection(
        [u'Copyright (c) 2007 Imendio AB Authors Tim Janik, Sven Herzberg'],
        self.get_test_loc('ics/bluetooth-glib-glib/gtestutils.c'),
    )

# Known limitation: the trailing "Authors ..." text is not yet split off
# from the copyright statement, hence the expected failure.
@expectedFailure
def test_ics_bluetooth_glib_glib_gtestutils_c_extra_author(self):
    check_detection(
        [u'Copyright (c) 2007 Imendio AB'],
        self.get_test_loc('ics/bluetooth-glib-glib/gtestutils.c'),
    )

def test_ics_bluetooth_glib_glib_gthread_c(self):
    check_detection(
        [
            u'Copyright (c) 1995-1997 Peter Mattis, Spencer Kimball and Josh MacDonald',
            u'Copyright 1998 Sebastian Wilhelmi University of Karlsruhe Owen Taylor',
        ],
        self.get_test_loc('ics/bluetooth-glib-glib/gthread.c'),
    )

def test_ics_bluetooth_glib_glib_gthreadprivate_h(self):
    check_detection(
        [u'Copyright (c) 2003 Sebastian Wilhelmi'],
        self.get_test_loc('ics/bluetooth-glib-glib/gthreadprivate.h'),
    )

def test_ics_bluetooth_glib_glib_gunicode_h(self):
    check_detection(
        [
            u'Copyright (c) 1999, 2000 Tom Tromey',
            u'Copyright 2000, 2005 Red Hat, Inc.',
        ],
        self.get_test_loc('ics/bluetooth-glib-glib/gunicode.h'),
    )

def test_ics_bluetooth_glib_glib_gunicodeprivate_h(self):
    check_detection(
        [u'Copyright (c) 2003 Noah Levitt'],
        self.get_test_loc('ics/bluetooth-glib-glib/gunicodeprivate.h'),
    )

def test_ics_bluetooth_glib_glib_gunidecomp_c(self):
    check_detection(
        [
            u'Copyright (c) 1999, 2000 Tom Tromey',
            u'Copyright 2000 Red Hat, Inc.',
        ],
        self.get_test_loc('ics/bluetooth-glib-glib/gunidecomp.c'),
    )

def test_ics_bluetooth_glib_glib_guniprop_c(self):
    check_detection(
        [
            u'Copyright (c) 1999 Tom Tromey',
            u'Copyright (c) 2000 Red Hat, Inc.',
        ],
        self.get_test_loc('ics/bluetooth-glib-glib/guniprop.c'),
    )

def test_ics_bluetooth_glib_glib_gutils_c(self):
    check_detection(
        [
            u'Copyright (c) 1995-1998 Peter Mattis, Spencer Kimball and Josh MacDonald',
            u'Copyright (c) 2007 Red Hat Inc.',
            u'Copyright (c) 1995, 1996, 1997, 1998 Free Software Foundation, Inc.',
        ],
        self.get_test_loc('ics/bluetooth-glib-glib/gutils.c'),
    )
# --- ICS corpus: glib bundled gnulib (bluetooth-glib-glib-gnulib/) ---

def test_ics_bluetooth_glib_glib_gnulib_asnprintf_c(self):
    check_detection(
        [u'Copyright (c) 1999, 2002 Free Software Foundation, Inc.'],
        self.get_test_loc('ics/bluetooth-glib-glib-gnulib/asnprintf.c'),
    )

def test_ics_bluetooth_glib_glib_gnulib_g_gnulib_h(self):
    check_detection(
        [u'Copyright (c) 2003 Matthias Clasen'],
        self.get_test_loc('ics/bluetooth-glib-glib-gnulib/g-gnulib.h'),
    )

def test_ics_bluetooth_glib_glib_gnulib_printf_c(self):
    check_detection(
        [u'Copyright (c) 2003 Matthias Clasen'],
        self.get_test_loc('ics/bluetooth-glib-glib-gnulib/printf.c'),
    )

def test_ics_bluetooth_glib_glib_gnulib_printf_args_c(self):
    check_detection(
        [u'Copyright (c) 1999, 2002-2003 Free Software Foundation, Inc.'],
        self.get_test_loc('ics/bluetooth-glib-glib-gnulib/printf-args.c'),
    )

def test_ics_bluetooth_glib_glib_gnulib_printf_parse_c(self):
    check_detection(
        [u'Copyright (c) 1999-2000, 2002-2003 Free Software Foundation, Inc.'],
        self.get_test_loc('ics/bluetooth-glib-glib-gnulib/printf-parse.c'),
    )

def test_ics_bluetooth_glib_glib_gnulib_vasnprintf_h(self):
    check_detection(
        [u'Copyright (c) 2002-2003 Free Software Foundation, Inc.'],
        self.get_test_loc('ics/bluetooth-glib-glib-gnulib/vasnprintf.h'),
    )
# --- ICS corpus: glib gmodule (bluetooth-glib-gmodule/) ---

def test_ics_bluetooth_glib_gmodule_gmodule_c(self):
    check_detection(
        [u'Copyright (c) 1998 Tim Janik'],
        self.get_test_loc('ics/bluetooth-glib-gmodule/gmodule.c'),
    )

def test_ics_bluetooth_glib_gmodule_gmodule_rc_in(self):
    check_detection(
        [u'Copyright (c) 1998-2000 Tim Janik.'],
        self.get_test_loc('ics/bluetooth-glib-gmodule/gmodule.rc.in'),
    )

def test_ics_bluetooth_glib_gmodule_gmodule_ar_c(self):
    check_detection(
        [u'Copyright (c) 1998, 2000 Tim Janik'],
        self.get_test_loc('ics/bluetooth-glib-gmodule/gmodule-ar.c'),
    )

def test_ics_bluetooth_glib_gmodule_gmodule_beos_c(self):
    check_detection(
        [
            u'Copyright (c) 1998, 2000 Tim Janik',
            u'Copyright (c) 1999 Richard',
        ],
        self.get_test_loc('ics/bluetooth-glib-gmodule/gmodule-beos.c'),
    )

# Known limitation: detection truncates the holder name after "Richard",
# hence the expected failure on the full statement.
@expectedFailure
def test_ics_bluetooth_glib_gmodule_gmodule_beos_c_trail_name(self):
    check_detection(
        [
            u'Copyright (c) 1998, 2000 Tim Janik',
            u'Copyright (C) 1999 Richard Offer and Shawn T. Amundson (amundson@gtk.org)',
        ],
        self.get_test_loc('ics/bluetooth-glib-gmodule/gmodule-beos.c'),
    )

def test_ics_bluetooth_glib_gmodule_gmodule_dyld_c(self):
    check_detection(
        [
            u'Copyright (c) 1998, 2000 Tim Janik',
            u'Copyright (c) 2001 Dan Winship',
        ],
        self.get_test_loc('ics/bluetooth-glib-gmodule/gmodule-dyld.c'),
    )

def test_ics_bluetooth_glib_gmodule_gmodule_win32_c(self):
    check_detection(
        [
            u'Copyright (c) 1998, 2000 Tim Janik',
            u'Copyright (c) 1998 Tor Lillqvist',
        ],
        self.get_test_loc('ics/bluetooth-glib-gmodule/gmodule-win32.c'),
    )
# --- ICS corpus: glib gobject (bluetooth-glib-gobject/) ---

def test_ics_bluetooth_glib_gobject_gboxed_c(self):
    check_detection(
        [u'Copyright (c) 2000-2001 Red Hat, Inc.'],
        self.get_test_loc('ics/bluetooth-glib-gobject/gboxed.c'),
    )

def test_ics_bluetooth_glib_gobject_gclosure_c(self):
    check_detection(
        [
            u'Copyright (c) 2000-2001 Red Hat, Inc.',
            u'Copyright (c) 2005 Imendio AB',
        ],
        self.get_test_loc('ics/bluetooth-glib-gobject/gclosure.c'),
    )

def test_ics_bluetooth_glib_gobject_genums_c(self):
    check_detection(
        [u'Copyright (c) 1998-1999, 2000-2001 Tim Janik and Red Hat, Inc.'],
        self.get_test_loc('ics/bluetooth-glib-gobject/genums.c'),
    )

def test_ics_bluetooth_glib_gobject_gobject_rc_in(self):
    check_detection(
        [u'Copyright (c) 1998-2004 Tim Janik and Red Hat, Inc.'],
        self.get_test_loc('ics/bluetooth-glib-gobject/gobject.rc.in'),
    )

def test_ics_bluetooth_glib_gobject_gparam_c(self):
    check_detection(
        [u'Copyright (c) 1997-1999, 2000-2001 Tim Janik and Red Hat, Inc.'],
        self.get_test_loc('ics/bluetooth-glib-gobject/gparam.c'),
    )

def test_ics_bluetooth_glib_gobject_gsourceclosure_c(self):
    check_detection(
        [u'Copyright (c) 2001 Red Hat, Inc.'],
        self.get_test_loc('ics/bluetooth-glib-gobject/gsourceclosure.c'),
    )

def test_ics_bluetooth_glib_gobject_gtypemodule_c(self):
    check_detection(
        [u'Copyright (c) 2000 Red Hat, Inc.'],
        self.get_test_loc('ics/bluetooth-glib-gobject/gtypemodule.c'),
    )

def test_ics_bluetooth_glib_gobject_makefile_am(self):
    check_detection(
        [u'Copyright (c) 1997,98,99,2000 Tim Janik and Red Hat, Inc.'],
        self.get_test_loc('ics/bluetooth-glib-gobject/Makefile.am'),
    )

def test_ics_bluetooth_glib_gobject_tests_threadtests_c(self):
    check_detection(
        [u'Copyright (c) 2008 Imendio AB Authors Tim Janik'],
        self.get_test_loc('ics/bluetooth-glib-gobject-tests/threadtests.c'),
    )

# Known limitation: the trailing "Authors ..." text is not yet split off
# from the copyright statement, hence the expected failure.
@expectedFailure
def test_ics_bluetooth_glib_gobject_tests_threadtests_c_extra_author(self):
    check_detection(
        [u'Copyright (c) 2008 Imendio AB'],
        self.get_test_loc('ics/bluetooth-glib-gobject-tests/threadtests.c'),
    )
# --- ICS corpus: glib gthread, m4macros and po (bluetooth-glib-*) ---

def test_ics_bluetooth_glib_gthread_gthread_rc_in(self):
    check_detection(
        [
            u'Copyright (c) 1995-1997 Peter Mattis, Spencer Kimball',
            u'Copyright (c) 1998 Sebastian Wilhelmi.',
        ],
        self.get_test_loc('ics/bluetooth-glib-gthread/gthread.rc.in'),
    )

# Known limitation: the trailing "and Josh MacDonald." holder is dropped,
# hence the expected failure on the full statement.
@expectedFailure
def test_ics_bluetooth_glib_gthread_gthread_rc_in_trail_name(self):
    check_detection(
        [
            u'Copyright (c) 1995-1997 Peter Mattis, Spencer Kimball and Josh MacDonald.',
            u'Copyright (c) 1998 Sebastian Wilhelmi.',
        ],
        self.get_test_loc('ics/bluetooth-glib-gthread/gthread.rc.in'),
    )

def test_ics_bluetooth_glib_gthread_gthread_win32_c(self):
    check_detection(
        [
            u'Copyright (c) 1995-1997 Peter Mattis, Spencer Kimball and Josh MacDonald',
            u'Copyright 1998-2001 Sebastian Wilhelmi University of Karlsruhe',
            u'Copyright 2001 Hans Breuer',
        ],
        self.get_test_loc('ics/bluetooth-glib-gthread/gthread-win32.c'),
    )

def test_ics_bluetooth_glib_m4macros_glib_gettext_m4(self):
    check_detection(
        [
            u'Copyright (c) 1995-2002 Free Software Foundation, Inc.',
            u'Copyright (c) 2001-2003,2004 Red Hat, Inc.',
        ],
        self.get_test_loc('ics/bluetooth-glib-m4macros/glib-gettext.m4'),
    )

def test_ics_bluetooth_glib_po_makefile_in_in(self):
    check_detection(
        [u'Copyright (c) 1995, 1996, 1997 by Ulrich Drepper <drepper@gnu.ai.mit.edu>'],
        self.get_test_loc('ics/bluetooth-glib-po/Makefile.in.in'),
    )

@expectedFailure
def test_ics_bluetooth_glib_po_po2tbl_sed_in(self):
    check_detection(
        [u'Copyright (c) 1995 Free Software Foundation, Inc. Ulrich Drepper <drepper@gnu.ai.mit.edu>, 1995.'],
        self.get_test_loc('ics/bluetooth-glib-po/po2tbl.sed.in'),
    )
# --- ICS corpus: glib tests/ directory (bluetooth-glib-tests*) ---

def test_ics_bluetooth_glib_tests_gio_test_c(self):
    check_detection(
        [u'Copyright (c) 2000 Tor Lillqvist'],
        self.get_test_loc('ics/bluetooth-glib-tests/gio-test.c'),
    )

def test_ics_bluetooth_glib_tests_hash_test_c(self):
    check_detection(
        [
            u'Copyright (c) 1995-1997 Peter Mattis, Spencer Kimball and Josh MacDonald',
            u'Copyright (c) 1999 The Free Software Foundation',
        ],
        self.get_test_loc('ics/bluetooth-glib-tests/hash-test.c'),
    )

def test_ics_bluetooth_glib_tests_mapping_test_c(self):
    check_detection(
        [u'Copyright (c) 2005 Matthias Clasen'],
        self.get_test_loc('ics/bluetooth-glib-tests/mapping-test.c'),
    )

def test_ics_bluetooth_glib_tests_markup_collect_c(self):
    check_detection(
        [u'Copyright (c) 2007 Ryan Lortie'],
        self.get_test_loc('ics/bluetooth-glib-tests/markup-collect.c'),
    )

def test_ics_bluetooth_glib_tests_onceinit_c(self):
    check_detection(
        [u'Copyright (c) 2007 Tim Janik'],
        self.get_test_loc('ics/bluetooth-glib-tests/onceinit.c'),
    )

def test_ics_bluetooth_glib_tests_patterntest_c(self):
    check_detection(
        [u'Copyright (c) 2001 Matthias Clasen <matthiasc@poet.de>'],
        self.get_test_loc('ics/bluetooth-glib-tests/patterntest.c'),
    )

def test_ics_bluetooth_glib_tests_regex_test_c(self):
    check_detection(
        [u'Copyright (c) 2005 - 2006, Marco Barisione <marco@barisione.org>'],
        self.get_test_loc('ics/bluetooth-glib-tests/regex-test.c'),
    )

def test_ics_bluetooth_glib_tests_scannerapi_c(self):
    check_detection(
        [
            u'Copyright (c) 2007 Patrick Hulin',
            u'Copyright (c) 2007 Imendio AB Authors Tim Janik',
        ],
        self.get_test_loc('ics/bluetooth-glib-tests/scannerapi.c'),
    )

# Known limitation: the trailing "Authors ..." text is not yet split off
# from the copyright statement, hence the expected failure.
@expectedFailure
def test_ics_bluetooth_glib_tests_scannerapi_c_extra_author(self):
    check_detection(
        [
            u'Copyright (c) 2007 Patrick Hulin',
            u'Copyright (c) 2007 Imendio AB',
        ],
        self.get_test_loc('ics/bluetooth-glib-tests/scannerapi.c'),
    )

def test_ics_bluetooth_glib_tests_slice_concurrent_c(self):
    check_detection(
        [
            u'Copyright (c) 2006 Stefan Westerfeld',
            u'Copyright (c) 2007 Tim Janik',
        ],
        self.get_test_loc('ics/bluetooth-glib-tests/slice-concurrent.c'),
    )

def test_ics_bluetooth_glib_tests_testingbase64_c(self):
    check_detection(
        [u'Copyright (c) 2008 Asbjoern Pettersen'],
        self.get_test_loc('ics/bluetooth-glib-tests/testingbase64.c'),
    )

def test_ics_bluetooth_glib_tests_gobject_accumulator_c(self):
    check_detection(
        [u'Copyright (c) 2001, 2003 Red Hat, Inc.'],
        self.get_test_loc('ics/bluetooth-glib-tests-gobject/accumulator.c'),
    )

def test_ics_bluetooth_glib_tests_gobject_deftype_c(self):
    check_detection(
        [u'Copyright (c) 2006 Behdad Esfahbod'],
        self.get_test_loc('ics/bluetooth-glib-tests-gobject/deftype.c'),
    )

def test_ics_bluetooth_glib_tests_gobject_override_c(self):
    check_detection(
        [
            u'Copyright (c) 2001, James Henstridge',
            u'Copyright (c) 2003, Red Hat, Inc.',
        ],
        self.get_test_loc('ics/bluetooth-glib-tests-gobject/override.c'),
    )

def test_ics_bluetooth_glib_tests_gobject_references_c(self):
    check_detection(
        [u'Copyright (c) 2005 Red Hat, Inc.'],
        self.get_test_loc('ics/bluetooth-glib-tests-gobject/references.c'),
    )

def test_ics_bluetooth_glib_tests_gobject_singleton_c(self):
    check_detection(
        [u'Copyright (c) 2006 Imendio AB'],
        self.get_test_loc('ics/bluetooth-glib-tests-gobject/singleton.c'),
    )

def test_ics_bluetooth_glib_tests_gobject_testcommon_h(self):
    check_detection(
        [u'Copyright (c) 2003 Red Hat, Inc.'],
        self.get_test_loc('ics/bluetooth-glib-tests-gobject/testcommon.h'),
    )

def test_ics_bluetooth_glib_tests_refcount_closures_c(self):
    check_detection(
        [u'Copyright (c) 2005 Imendio AB'],
        self.get_test_loc('ics/bluetooth-glib-tests-refcount/closures.c'),
    )
# --- ICS corpus: bluez hcidump (bluetooth-hcidump*) ---

def test_ics_bluetooth_hcidump_readme(self):
    check_detection(
        [
            u'Copyright (c) 2000-2002 Maxim Krasnyansky <maxk@qualcomm.com>',
            u'Copyright (c) 2003-2011 Marcel Holtmann <marcel@holtmann.org>',
        ],
        self.get_test_loc('ics/bluetooth-hcidump/README'),
    )

def test_ics_bluetooth_hcidump_parser_att_c(self):
    check_detection(
        [u'Copyright (c) 2011 Andre Dieb Martins <andre.dieb@gmail.com>'],
        self.get_test_loc('ics/bluetooth-hcidump-parser/att.c'),
    )

def test_ics_bluetooth_hcidump_parser_bnep_c(self):
    check_detection(
        [
            u'Copyright (c) 2002-2003 Takashi Sasai <sasai@sm.sony.co.jp>',
            u'Copyright (c) 2003-2011 Marcel Holtmann <marcel@holtmann.org>',
        ],
        self.get_test_loc('ics/bluetooth-hcidump-parser/bnep.c'),
    )

def test_ics_bluetooth_hcidump_parser_cmtp_c(self):
    check_detection(
        [u'Copyright (c) 2002-2011 Marcel Holtmann <marcel@holtmann.org>'],
        self.get_test_loc('ics/bluetooth-hcidump-parser/cmtp.c'),
    )

def test_ics_bluetooth_hcidump_parser_hci_c(self):
    check_detection(
        [
            u'Copyright (c) 2000-2002 Maxim Krasnyansky <maxk@qualcomm.com>',
            u'Copyright (c) 2003-2011 Marcel Holtmann <marcel@holtmann.org>',
        ],
        self.get_test_loc('ics/bluetooth-hcidump-parser/hci.c'),
    )

def test_ics_bluetooth_hcidump_parser_hidp_c(self):
    check_detection(
        [u'Copyright (c) 2003-2011 Marcel Holtmann <marcel@holtmann.org>'],
        self.get_test_loc('ics/bluetooth-hcidump-parser/hidp.c'),
    )

def test_ics_bluetooth_hcidump_parser_rfcomm_c(self):
    check_detection(
        [
            u'Copyright (c) 2001-2002 Wayne Lee <waynelee@qualcomm.com>',
            u'Copyright (c) 2003-2011 Marcel Holtmann <marcel@holtmann.org>',
        ],
        self.get_test_loc('ics/bluetooth-hcidump-parser/rfcomm.c'),
    )

def test_ics_bluetooth_hcidump_parser_sdp_c(self):
    check_detection(
        [
            u'Copyright (c) 2001-2002 Ricky Yuen <ryuen@qualcomm.com>',
            u'Copyright (c) 2003-2011 Marcel Holtmann <marcel@holtmann.org>',
        ],
        self.get_test_loc('ics/bluetooth-hcidump-parser/sdp.c'),
    )
# --- ICS corpus: bouncycastle, bsdiff and bzip2 ---

def test_ics_bouncycastle_notice(self):
    check_detection(
        [u'Copyright (c) 2000-2010 The Legion Of The Bouncy Castle'],
        self.get_test_loc('ics/bouncycastle/NOTICE'),
    )

def test_ics_bouncycastle_src_main_java_org_bouncycastle_crypto_digests_openssldigest_java(self):
    check_detection(
        [u'Copyright (c) 2008 The Android Open Source Project'],
        self.get_test_loc('ics/bouncycastle-src-main-java-org-bouncycastle-crypto-digests/OpenSSLDigest.java'),
    )

def test_ics_bsdiff_bsdiff_1(self):
    check_detection(
        [u'Copyright 2003-2005 Colin Percival'],
        self.get_test_loc('ics/bsdiff/bsdiff.1'),
    )

def test_ics_bsdiff_bsdiff_c(self):
    check_detection(
        [u'Copyright 2003-2005 Colin Percival'],
        self.get_test_loc('ics/bsdiff/bsdiff.c'),
    )

def test_ics_bzip2_blocksort_c(self):
    check_detection(
        [u'Copyright (c) 1996-2010 Julian Seward <jseward@bzip.org>'],
        self.get_test_loc('ics/bzip2/blocksort.c'),
    )

def test_ics_bzip2_bzip2_c(self):
    check_detection(
        [
            u'Copyright (c) 1996-2010 Julian Seward <jseward@bzip.org>',
            u'Copyright (c) 1996-2010 by Julian Seward.',
        ],
        self.get_test_loc('ics/bzip2/bzip2.c'),
    )

def test_ics_bzip2_license(self):
    check_detection(
        [u'copyright (c) 1996-2010 Julian R Seward.'],
        self.get_test_loc('ics/bzip2/LICENSE'),
    )

def test_ics_bzip2_makefile(self):
    check_detection(
        [u'Copyright (c) 1996-2010 Julian Seward <jseward@bzip.org>'],
        self.get_test_loc('ics/bzip2/Makefile'),
    )

def test_ics_bzip2_manual_html(self):
    check_detection(
        [
            u'Copyright (c) 1996-2010 Julian Seward',
            u'copyright (c) 1996-2010 Julian Seward.',
        ],
        self.get_test_loc('ics/bzip2/manual.html'),
    )

def test_ics_bzip2_xmlproc_sh(self):
    check_detection(
        [u'Copyright (c) 1996-2010 Julian Seward <jseward@bzip.org>'],
        self.get_test_loc('ics/bzip2/xmlproc.sh'),
    )
# --- ICS corpus: chromium ---

def test_ics_chromium_license(self):
    check_detection(
        [u'Copyright (c) 2010 The Chromium Authors.'],
        self.get_test_loc('ics/chromium/LICENSE'),
    )

def test_ics_chromium_android_execinfo_cc(self):
    check_detection(
        [u'Copyright (c) 2011 The Chromium Authors.'],
        self.get_test_loc('ics/chromium-android/execinfo.cc'),
    )

def test_ics_chromium_android_prefix_h(self):
    check_detection(
        [u'Copyright 2010, The Android Open Source Project'],
        self.get_test_loc('ics/chromium-android/prefix.h'),
    )

def test_ics_chromium_android_jni_jni_utils_cc(self):
    check_detection(
        [u'Copyright (c) 2010 The Chromium Authors.'],
        self.get_test_loc('ics/chromium-android-jni/jni_utils.cc'),
    )

def test_ics_chromium_android_ui_base_l10n_l10n_util_cc(self):
    check_detection(
        [u'Copyright 2010, The Android Open Source Project'],
        self.get_test_loc('ics/chromium-android-ui-base-l10n/l10n_util.cc'),
    )

def test_ics_chromium_app_sql_init_status_h(self):
    check_detection(
        [u'Copyright (c) 2009 The Chromium Authors.'],
        self.get_test_loc('ics/chromium-app-sql/init_status.h'),
    )

def test_ics_chromium_base_atomicops_internals_x86_gcc_cc(self):
    check_detection(
        [u'Copyright (c) 2006-2008 The Chromium Authors.'],
        self.get_test_loc('ics/chromium-base/atomicops_internals_x86_gcc.cc'),
    )

def test_ics_chromium_base_atomicops_internals_x86_gcc_h(self):
    check_detection(
        [u'Copyright (c) 2006-2008 The Chromium Authors.'],
        self.get_test_loc('ics/chromium-base/atomicops_internals_x86_gcc.h'),
    )

def test_ics_chromium_base_base_gyp(self):
    check_detection(
        [u'Copyright (c) 2011 The Chromium Authors.'],
        self.get_test_loc('ics/chromium-base/base.gyp'),
    )

def test_ics_chromium_base_compat_execinfo_h(self):
    check_detection(
        [u'Copyright (c) 2006-2009 The Chromium Authors.'],
        self.get_test_loc('ics/chromium-base/compat_execinfo.h'),
    )

def test_ics_chromium_base_file_version_info_h(self):
    check_detection(
        [u'Copyright (c) 2011 The Chromium Authors.'],
        self.get_test_loc('ics/chromium-base/file_version_info.h'),
    )

def test_ics_chromium_base_file_version_info_mac_mm(self):
    check_detection(
        [u'Copyright (c) 2011 The Chromium Authors.'],
        self.get_test_loc('ics/chromium-base/file_version_info_mac.mm'),
    )

def test_ics_chromium_base_foundation_utils_mac_h(self):
    check_detection(
        [u'Copyright (c) 2008 The Chromium Authors.'],
        self.get_test_loc('ics/chromium-base/foundation_utils_mac.h'),
    )

def test_ics_chromium_base_md5_cc(self):
    check_detection(
        [u'Copyright 2006 Google Inc.'],
        self.get_test_loc('ics/chromium-base/md5.cc'),
    )

def test_ics_chromium_base_string_tokenizer_h(self):
    check_detection(
        [u'Copyright (c) 2010 The Chromium Authors.'],
        self.get_test_loc('ics/chromium-base/string_tokenizer.h'),
    )

def test_ics_chromium_base_allocator_allocator_gyp(self):
    check_detection(
        [u'Copyright (c) 2009 The Chromium Authors.'],
        self.get_test_loc('ics/chromium-base-allocator/allocator.gyp'),
    )

def test_ics_chromium_base_i18n_icu_string_conversions_cc(self):
    check_detection(
        [
            u'Copyright (c) 2009 The Chromium Authors.',
            u'Copyright (c) 1995-2006 International Business Machines Corporation and others',
        ],
        self.get_test_loc('ics/chromium-base-i18n/icu_string_conversions.cc'),
    )

def test_ics_chromium_base_third_party_dmg_fp_dtoa_cc(self):
    check_detection(
        [u'Copyright (c) 1991, 2000, 2001 by Lucent Technologies.'],
        self.get_test_loc('ics/chromium-base-third_party-dmg_fp/dtoa.cc'),
    )

def test_ics_chromium_base_third_party_dmg_fp_g_fmt_cc(self):
    check_detection(
        [u'Copyright (c) 1991, 1996 by Lucent Technologies.'],
        self.get_test_loc('ics/chromium-base-third_party-dmg_fp/g_fmt.cc'),
    )

def test_ics_chromium_base_third_party_dmg_fp_license(self):
    check_detection(
        [u'Copyright (c) 1991, 2000, 2001 by Lucent Technologies.'],
        self.get_test_loc('ics/chromium-base-third_party-dmg_fp/LICENSE'),
    )

def test_ics_chromium_base_third_party_dmg_fp_thirdpartyproject_prop(self):
    check_detection(
        [u'Copyright 2011 Google Inc.'],
        self.get_test_loc('ics/chromium-base-third_party-dmg_fp/ThirdPartyProject.prop'),
    )

def test_ics_chromium_base_third_party_dynamic_annotations_dynamic_annotations_c(self):
    check_detection(
        [u'Copyright (c) 2008-2009, Google Inc.'],
        self.get_test_loc('ics/chromium-base-third_party-dynamic_annotations/dynamic_annotations.c'),
    )

def test_ics_chromium_base_third_party_dynamic_annotations_dynamic_annotations_gyp(self):
    check_detection(
        [u'Copyright (c) 2010 The Chromium Authors.'],
        self.get_test_loc('ics/chromium-base-third_party-dynamic_annotations/dynamic_annotations.gyp'),
    )

def test_ics_chromium_base_third_party_icu_icu_utf_cc_trail_other(self):
    check_detection(
        [u'Copyright (c) 1999-2006, International Business Machines Corporation and others.'],
        self.get_test_loc('ics/chromium-base-third_party-icu/icu_utf.cc'),
    )

def test_ics_chromium_base_third_party_icu_icu_utf_h(self):
    check_detection(
        [u'Copyright (c) 1999-2004, International Business Machines Corporation and others.'],
        self.get_test_loc('ics/chromium-base-third_party-icu/icu_utf.h'),
    )

def test_ics_chromium_base_third_party_icu_license_trail_other(self):
    check_detection(
        [u'Copyright (c) 1995-2009 International Business Machines Corporation and others'],
        self.get_test_loc('ics/chromium-base-third_party-icu/LICENSE'),
    )

def test_ics_chromium_base_third_party_nspr_license(self):
    check_detection(
        [u'Copyright (c) 1998-2000 the Initial Developer.'],
        self.get_test_loc('ics/chromium-base-third_party-nspr/LICENSE'),
    )

def test_ics_chromium_base_third_party_nspr_prcpucfg_h(self):
    check_detection(
        [u'Copyright 2008, Google Inc.'],
        self.get_test_loc('ics/chromium-base-third_party-nspr/prcpucfg.h'),
    )

def test_ics_chromium_base_third_party_nspr_prtime_cc(self):
    check_detection(
        [
            u'Copyright (c) 2011 Google Inc',
            u'Copyright (c) 1998-2000 the Initial Developer.',
        ],
        self.get_test_loc('ics/chromium-base-third_party-nspr/prtime.cc'),
    )

def test_ics_chromium_build_branding_value_sh(self):
    check_detection(
        [u'Copyright (c) 2008 The Chromium Authors.'],
        self.get_test_loc('ics/chromium-build/branding_value.sh'),
    )

def test_ics_chromium_build_install_build_deps_sh(self):
    # The FSF statements legitimately repeat in this file; the duplicates
    # are part of the expected output.
    check_detection(
        [
            u'Copyright (c) 2011 The Chromium Authors.',
            u'Copyright 2006, 2007, 2008, 2009, 2010 Free Software Foundation, Inc.',
            u'Copyright 2006, 2007, 2008, 2009, 2010, 2011 Free Software Foundation, Inc.',
            u'Copyright 2006, 2007, 2008, 2009, 2010 Free Software Foundation, Inc.',
            u'Copyright 2006, 2007, 2008, 2009, 2010, 2011 Free Software Foundation, Inc.',
        ],
        self.get_test_loc('ics/chromium-build/install-build-deps.sh'),
    )

def test_ics_chromium_build_whitespace_file_txt(self):
    check_detection(
        [u'Copyright (c) 2011 The Chromium Authors.'],
        self.get_test_loc('ics/chromium-build/whitespace_file.txt'),
    )

def test_ics_chromium_build_mac_strip_from_xcode(self):
    check_detection(
        [u'Copyright (c) 2008 The Chromium Authors.'],
        self.get_test_loc('ics/chromium-build-mac/strip_from_xcode'),
    )

def test_ics_chromium_chrome_browser_nacl_loader_sb(self):
    check_detection(
        [u'Copyright (c) 2011 The Chromium Authors.'],
        self.get_test_loc('ics/chromium-chrome-browser/nacl_loader.sb'),
    )

def test_ics_chromium_chrome_browser_chromeos_panels_panel_scroller_container_cc(self):
    check_detection(
        [u'Copyright (c) 2009 The Chromium Authors.'],
        self.get_test_loc('ics/chromium-chrome-browser-chromeos-panels/panel_scroller_container.cc'),
    )

def test_ics_chromium_chrome_browser_cocoa_authorization_util_mm(self):
    check_detection(
        [u'Copyright (c) 2009 The Chromium Authors.'],
        self.get_test_loc('ics/chromium-chrome-browser-cocoa/authorization_util.mm'),
    )

def test_ics_chromium_chrome_browser_download_download_extensions_cc(self):
    check_detection(
        [
            u'Copyright (c) 2010 The Chromium Authors.',
            u'Copyright (c) 1998-1999 the Initial Developer.',
        ],
        self.get_test_loc('ics/chromium-chrome-browser-download/download_extensions.cc'),
    )

def test_ics_chromium_chrome_browser_importer_firefox_profile_lock_cc(self):
    check_detection(
        [
            u'Copyright (c) 2010 The Chromium Authors.',
            u'Copyright (c) 2002 the Initial Developer.',
        ],
        self.get_test_loc('ics/chromium-chrome-browser-importer/firefox_profile_lock.cc'),
    )
def test_ics_chromium_chrome_browser_importer_firefox_profile_lock_posix_cc(self):
test_file = self.get_test_loc('ics/chromium-chrome-browser-importer/firefox_profile_lock_posix.cc')
expected = [
u'Copyright (c) 2009 The Chromium Authors.',
u'Copyright (c) 2002 the Initial Developer.',
]
check_detection(expected, test_file)
def test_ics_chromium_chrome_browser_importer_firefox_profile_lock_win_cc(self):
test_file = self.get_test_loc('ics/chromium-chrome-browser-importer/firefox_profile_lock_win.cc')
expected = [
u'Copyright (c) 2008 The Chromium Authors.',
u'Copyright (c) 2002 the Initial Developer.',
]
check_detection(expected, test_file)
def test_ics_chromium_chrome_browser_importer_mork_reader_cc(self):
test_file = self.get_test_loc('ics/chromium-chrome-browser-importer/mork_reader.cc')
expected = [
u'Copyright (c) 2006 the Initial Developer.',
]
check_detection(expected, test_file)
def test_ics_chromium_chrome_browser_importer_nss_decryptor_cc(self):
test_file = self.get_test_loc('ics/chromium-chrome-browser-importer/nss_decryptor.cc')
expected = [
u'Copyright (c) 2011 The Chromium Authors.',
u'Copyright (c) 1994-2000 the Initial Developer.',
]
check_detection(expected, test_file)
def test_ics_chromium_chrome_browser_importer_nss_decryptor_mac_h(self):
test_file = self.get_test_loc('ics/chromium-chrome-browser-importer/nss_decryptor_mac.h')
expected = [
u'Copyright (c) 2010 The Chromium Authors.',
u'Copyright (c) 1994-2000 the Initial Developer.',
]
check_detection(expected, test_file)
def test_ics_chromium_chrome_browser_importer_nss_decryptor_win_h(self):
test_file = self.get_test_loc('ics/chromium-chrome-browser-importer/nss_decryptor_win.h')
expected = [
u'Copyright (c) 2009 The Chromium Authors.',
u'Copyright (c) 1994-2000 the Initial Developer.',
]
check_detection(expected, test_file)
def test_ics_chromium_chrome_browser_metrics_system_metrics_proto(self):
test_file = self.get_test_loc('ics/chromium-chrome-browser-metrics/system_metrics.proto')
expected = [
u'Copyright (c) 2009 The Chromium Authors.',
]
check_detection(expected, test_file)
def test_ics_chromium_chrome_browser_renderer_host_render_widget_host_view_mac_mm(self):
test_file = self.get_test_loc('ics/chromium-chrome-browser-renderer_host/render_widget_host_view_mac.mm')
expected = [
u'Copyright (c) 2011 The Chromium Authors.',
u'Copyright (c) 2005, 2006, 2007, 2008, 2009 Apple Inc.',
u'(c) 2006, 2007 Graham Dennis (graham.dennis@gmail.com)',
]
check_detection(expected, test_file)
def test_ics_chromium_chrome_browser_resources_about_credits_html(self):
test_file = self.get_test_loc('ics/chromium-chrome-browser-resources/about_credits.html')
expected = [
u'Copyright (c) 1991, 2000, 2001 by Lucent Technologies.',
u'Copyright (c) 2008-2009, Google Inc.',
u'Copyright (c) 1998-2000 the Initial Developer.',
u'Copyright (c) 1994-2000 the Initial Developer.',
u'(c) Copyright IBM Corporation. 2006, 2006.',
u'Copyright (c) 2006, Google Inc.',
u'Copyright (c) 2000-2008 Julian Seward.',
u'Copyright (c) 2007 Red Hat, inc',
u'Copyright 2003-2005 Colin Percival',
u'Copyright (c) 2000 the Initial Developer.',
u'Copyright 1993 by OpenVision Technologies, Inc.',
u'Copyright 2007 Google Inc.',
u'Copyright (c) 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007 Alexander Kellett, Alexey Proskuryakov, Alex Mathews, Allan Sandfeld Jensen, Alp Toker, Anders Carlsson, Andrew Wellington, Antti',
u'Copyright (c) 1991 Free Software Foundation, Inc.',
u'copyrighted by the Free Software Foundation',
u'Copyright (c) 1991, 1999 Free Software Foundation, Inc.',
u'copyrighted by the Free Software Foundation',
u'Copyright (c) 2002-2010 The ANGLE Project',
u'Copyright (c) 2009 Apple Inc.',
u'Portions Copyright (c) 1999-2007 Apple Inc.',
u'copyright (c) 1996-2010 Julian R Seward.',
u'Copyright (c) 2010 The Chromium Authors.',
u'Copyright (c) 1998-1999 Netscape Communications Corporation.',
u'Copyright (c) 1998, 1999, 2000 Thai Open Source Software Center Ltd and Clark Cooper',
u'Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006 Expat maintainers.',
u'Copyright (c) 2008 The Khronos Group Inc.',
u'Copyright (c) 1997, 1998, 1999, 2000, 2001, 2002 by Remco Treffkorn, and others',
u'Copyright (c) 2005 by Eric S. Raymond.',
u'Copyright (c) 2007, 2010 Linux Foundation',
u'Copyright (c) 2006 IBM Corporation',
u'Copyright (c) 2000, 2006 Sun Microsystems, Inc.',
u'copyright (c) 1991-1998, Thomas G. Lane.',
u'Copyright (c) 1995-2009 International Business Machines Corporation and others',
u'(c) 1999 TaBE Project.',
u'Copyright (c) 1999 Pai-Hsiang Hsiao.',
u'Copyright (c) 1999 Computer Systems and Communication Lab, Institute of Information Science, Academia Sinica.',
u'Copyright 1996 Chih-Hao Tsai Beckman Institute, University of Illinois',
u'Copyright 2000, 2001, 2002, 2003 Nara Institute of Science and Technology.',
u'Copyright (c) 2002 the Initial Developer.',
u'Copyright (c) 2006-2008 Jason Evans',
u'Copyright (c) International Business Machines Corp., 2002,2007',
u'Copyright 2000-2007 Niels Provos',
u'Copyright 2007-2009 Niels Provos and Nick Mathewson',
u'Copyright (c) 2004 2005, Google Inc.',
u'copyright (c) 1991-1998, Thomas G. Lane.',
u'copyright by the Free Software Foundation',
u'Copyright (c) 1998-2005 Julian Smart, Robert Roebling',
u'Copyright (c) 2004, 2006-2009 Glenn Randers-Pehrson',
u'Copyright (c) 2000-2002 Glenn Randers-Pehrson',
u'Copyright (c) 1998, 1999 Glenn Randers-Pehrson',
u'Copyright (c) 1996, 1997 Andreas Dilger',
u'Copyright (c) 1995, 1996 Guy Eric Schalnat, Group 42, Inc.',
u'Copyright (c) 2001-2006 Cisco Systems, Inc.',
u'Copyright (c) 2010, Google Inc.',
u'Copyright (c) 2010, Google Inc.',
u'Copyright (c) 1998-2003 Daniel Veillard.',
u'Copyright (c) 2001-2002 Daniel Veillard.',
u'Copyright (c) 2001-2002 Thomas Broyer, Charlie Bozeman and Daniel Veillard.',
u'Copyright (c) 1991 Free Software Foundation, Inc.',
u'copyrighted by the Free Software Foundation',
u'Copyright (c) 2005, 2006 Nick Galbreath',
u'Copyright 2008 MolokoCacao',
u'Copyright (c) 2004-2009 Sergey Lyubka',
u'Portions Copyright (c) 2009 Gilbert Wellisch',
u'Copyright (c) 2002 the Initial Developer.',
u'Copyright (c) 1998 the Initial Developer.',
u'Copyright (c) 2004-2009 by Mulle Kybernetik.',
u'Copyright (c) 2008 The Khronos Group Inc.',
u'Copyright (c) 1998-2008 The OpenSSL Project.',
u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)',
u'Copyright (c) 2009 The Chromium Authors.',
u'Copyright 2007 Google Inc.',
u'Copyright (c) 2010 The Chromium Authors.',
u'Copyright 2008, Google Inc.',
u'Copyright (c) 2007 Giampaolo Rodola',
u'Copyright 2009, Google Inc.',
u'Copyright (c) 2009 Mozilla Corporation',
u'Copyright (c) 1998-2007 Marti Maria',
u'Copyright (c) 1994-1996 SunSoft, Inc.',
u'Copyright 2009 Google Inc.',
u'Copyright (c) 2006 Bob Ippolito',
u'Copyright 2002-2008 Xiph.org Foundation',
u'Copyright 2002-2008 Jean-Marc Valin',
u'Copyright 2005-2007 Analog Devices Inc.',
u'Copyright 2005-2008 Commonwealth Scientific and Industrial Research Organisation (CSIRO)',
u'Copyright 1993, 2002, 2006 David Rowe',
u'Copyright 2003 EpicGames',
u'Copyright 1992-1994 Jutta Degener, Carsten Bormann',
u'Copyright (c) 1995-1998 The University of Utah and the Regents of the University of California',
u'Copyright (c) 1998-2005 University of Chicago.',
u'Copyright (c) 2005-2006 Arizona Board of Regents (University of Arizona).',
u'Copyright (c) Andrew Tridgell 2004-2005',
u'Copyright (c) Stefan Metzmacher 2006',
u'Copyright (c) 2005, Google Inc.',
u'Copyright (c) 2007 Free Software Foundation, Inc.',
u'Copyright (c) 1998-1999 Netscape Communications Corporation.',
u'Copyright (c) 2001-2010 Peter Johnson and other Yasm developers.',
u'Copyright (c) 1995-2010 Jean-loup Gailly and Mark Adler',
u'Copyright (c) 1994-2006 Sun Microsystems Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_chrome_browser_resources_gpu_internals_html(self):
test_file = self.get_test_loc('ics/chromium-chrome-browser-resources/gpu_internals.html')
expected = [
u'Copyright (c) 2010 The Chromium Authors.',
]
check_detection(expected, test_file)
def test_ics_chromium_chrome_browser_resources_keyboard_overlay_js(self):
test_file = self.get_test_loc('ics/chromium-chrome-browser-resources/keyboard_overlay.js')
expected = [
u'Copyright (c) 2011 The Chromium Authors.',
]
check_detection(expected, test_file)
def test_ics_chromium_chrome_browser_resources_file_manager_harness_html(self):
test_file = self.get_test_loc('ics/chromium-chrome-browser-resources-file_manager/harness.html')
expected = [
u'Copyright (c) 2011 The Chromium Authors.',
]
check_detection(expected, test_file)
def test_ics_chromium_chrome_browser_resources_file_manager_css_file_manager_css(self):
test_file = self.get_test_loc('ics/chromium-chrome-browser-resources-file_manager-css/file_manager.css')
expected = [
u'Copyright (c) 2011 The Chromium Authors.',
]
check_detection(expected, test_file)
def test_ics_chromium_chrome_browser_sync_engine_change_reorder_buffer_cc(self):
test_file = self.get_test_loc('ics/chromium-chrome-browser-sync-engine/change_reorder_buffer.cc')
expected = [
u'Copyright (c) 2006-2009 The Chromium Authors.',
]
check_detection(expected, test_file)
def test_ics_chromium_chrome_browser_sync_engine_clear_data_command_h(self):
test_file = self.get_test_loc('ics/chromium-chrome-browser-sync-engine/clear_data_command.h')
expected = [
u'Copyright (c) 2006-2010 The Chromium Authors.',
]
check_detection(expected, test_file)
def test_ics_chromium_chrome_browser_ui_cocoa_applescript_examples_advanced_tab_manipulation_applescript(self):
test_file = self.get_test_loc('ics/chromium-chrome-browser-ui-cocoa-applescript-examples/advanced_tab_manipulation.applescript')
expected = [
u'Copyright (c) 2010 The Chromium Authors.',
]
check_detection(expected, test_file)
def test_ics_chromium_chrome_browser_userfeedback_proto_annotations_proto(self):
test_file = self.get_test_loc('ics/chromium-chrome-browser-userfeedback-proto/annotations.proto')
expected = [
u'Copyright 2009 Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_chrome_browser_userfeedback_proto_chrome_proto(self):
test_file = self.get_test_loc('ics/chromium-chrome-browser-userfeedback-proto/chrome.proto')
expected = [
u'Copyright 2010 Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_chrome_common_extensions_docs_examples_api_i18n_cld_background_html(self):
test_file = self.get_test_loc('ics/chromium-chrome-common-extensions-docs-examples-api-i18n-cld/background.html')
expected = [
u'Copyright (c) 2009 The Chromium Authors.',
]
check_detection(expected, test_file)
def test_ics_chromium_chrome_common_extensions_docs_examples_api_notifications_background_html(self):
test_file = self.get_test_loc('ics/chromium-chrome-common-extensions-docs-examples-api-notifications/background.html')
expected = [
u'Copyright 2010 the Chromium Authors',
]
check_detection(expected, test_file)
def test_ics_chromium_chrome_common_extensions_docs_examples_apps_hello_java_hellolicenseservlet_java(self):
test_file = self.get_test_loc('ics/chromium-chrome-common-extensions-docs-examples-apps-hello-java/HelloLicenseServlet.java')
expected = [
u'Copyright 2010 the Chromium Authors',
]
check_detection(expected, test_file)
def test_ics_chromium_chrome_common_extensions_docs_examples_apps_hello_php_notice(self):
test_file = self.get_test_loc('ics/chromium-chrome-common-extensions-docs-examples-apps-hello-php/NOTICE')
expected = [
u'Copyright 2009 Google Inc.',
u'Copyright (c) 2010 John Resig',
u'Copyright (c) 2007 Andy Smith',
u'Copyright (c) 2010, Mewp',
]
check_detection(expected, test_file)
def test_ics_chromium_chrome_common_extensions_docs_examples_apps_hello_php_popuplib_js(self):
test_file = self.get_test_loc('ics/chromium-chrome-common-extensions-docs-examples-apps-hello-php/popuplib.js')
expected = [
u'Copyright 2009 Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_chrome_common_extensions_docs_examples_apps_hello_php_lib_oauth_license_txt(self):
test_file = self.get_test_loc('ics/chromium-chrome-common-extensions-docs-examples-apps-hello-php-lib-oauth/LICENSE.txt')
expected = [
u'Copyright (c) 2007 Andy Smith',
]
check_detection(expected, test_file)
def test_ics_chromium_chrome_common_extensions_docs_examples_apps_hello_python_notice(self):
test_file = self.get_test_loc('ics/chromium-chrome-common-extensions-docs-examples-apps-hello-python/NOTICE')
expected = [
u'Copyright (c) 2007 Leah Culver',
]
check_detection(expected, test_file)
def test_ics_chromium_chrome_common_extensions_docs_examples_apps_hello_python_httplib2_init_py(self):
test_file = self.get_test_loc('ics/chromium-chrome-common-extensions-docs-examples-apps-hello-python-httplib2/__init__.py')
expected = [
u'Copyright 2006, Joe Gregorio contributors',
]
check_detection(expected, test_file)
@expectedFailure
def test_ics_chromium_chrome_common_extensions_docs_examples_apps_hello_python_httplib2_init_py_extra_contributors(self):
test_file = self.get_test_loc('ics/chromium-chrome-common-extensions-docs-examples-apps-hello-python-httplib2/__init__.py')
expected = [
u'Copyright 2006, Joe Gregorio',
]
check_detection(expected, test_file)
def test_ics_chromium_chrome_common_extensions_docs_examples_apps_hello_python_oauth2_init_py(self):
test_file = self.get_test_loc('ics/chromium-chrome-common-extensions-docs-examples-apps-hello-python-oauth2/__init__.py')
expected = [
u'Copyright (c) 2007-2010 Leah Culver, Joe Stump, Mark Paschal, Vic Fryzel',
]
check_detection(expected, test_file)
def test_ics_chromium_chrome_common_extensions_docs_examples_extensions_benchmark_jquery_jquery_1_4_2_min_js(self):
test_file = self.get_test_loc('ics/chromium-chrome-common-extensions-docs-examples-extensions-benchmark-jquery/jquery-1.4.2.min.js')
expected = [
u'Copyright 2010, John Resig',
u'Copyright 2010, The Dojo Foundation',
]
check_detection(expected, test_file)
def test_ics_chromium_chrome_common_extensions_docs_examples_extensions_benchmark_jst_jsevalcontext_js(self):
test_file = self.get_test_loc('ics/chromium-chrome-common-extensions-docs-examples-extensions-benchmark-jst/jsevalcontext.js')
expected = [
u'Copyright 2006 Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_chrome_common_extensions_docs_examples_extensions_benchmark_util_sorttable_js(self):
test_file = self.get_test_loc('ics/chromium-chrome-common-extensions-docs-examples-extensions-benchmark-util/sorttable.js')
expected = [
u'Copyright 2006, Dean Edwards',
]
check_detection(expected, test_file)
def test_ics_chromium_chrome_common_extensions_docs_examples_extensions_gdocs_chrome_ex_oauthsimple_js(self):
test_file = self.get_test_loc('ics/chromium-chrome-common-extensions-docs-examples-extensions-gdocs/chrome_ex_oauthsimple.js')
expected = [
u'copyright unitedHeroes.net',
u'Copyright (c) 2009, unitedHeroes.net',
]
check_detection(expected, test_file)
def test_ics_chromium_chrome_common_extensions_docs_examples_extensions_imageinfo_notice(self):
test_file = self.get_test_loc('ics/chromium-chrome-common-extensions-docs-examples-extensions-imageinfo/NOTICE')
expected = [
u'Copyright (c) 2008 Jacob Seidelin, jseidelin@nihilogic.dk, http://blog.nihilogic.dk',
]
check_detection(expected, test_file)
def test_ics_chromium_chrome_common_extensions_docs_examples_extensions_imageinfo_imageinfo_binaryajax_js(self):
test_file = self.get_test_loc('ics/chromium-chrome-common-extensions-docs-examples-extensions-imageinfo-imageinfo/binaryajax.js')
expected = [
u'Copyright (c) 2008 Jacob Seidelin, cupboy@gmail.com, http://blog.nihilogic.dk',
]
check_detection(expected, test_file)
def test_ics_chromium_chrome_common_extensions_docs_examples_extensions_imageinfo_imageinfo_imageinfo_js(self):
test_file = self.get_test_loc('ics/chromium-chrome-common-extensions-docs-examples-extensions-imageinfo-imageinfo/imageinfo.js')
expected = [
u'Copyright (c) 2008 Jacob Seidelin, jseidelin@nihilogic.dk, http://blog.nihilogic.dk',
]
check_detection(expected, test_file)
def test_ics_chromium_chrome_common_extensions_docs_examples_extensions_oauth_contacts_notice(self):
test_file = self.get_test_loc('ics/chromium-chrome-common-extensions-docs-examples-extensions-oauth_contacts/NOTICE')
expected = [
u'copyright unitedHeroes.net',
u'Copyright (c) 2009, unitedHeroes.net',
u'Copyright Paul Johnston 2000 - 2002.',
]
check_detection(expected, test_file)
def test_ics_chromium_chrome_common_extensions_docs_examples_extensions_proxy_configuration_test_jsunittest_js(self):
test_file = self.get_test_loc('ics/chromium-chrome-common-extensions-docs-examples-extensions-proxy_configuration-test/jsunittest.js')
expected = [
u'(c) 2008 Dr Nic Williams',
]
check_detection(expected, test_file)
def test_ics_chromium_chrome_common_extensions_docs_examples_extensions_wave_background_html(self):
test_file = self.get_test_loc('ics/chromium-chrome-common-extensions-docs-examples-extensions-wave/background.html')
expected = [
u'Copyright 2010 Google',
]
check_detection(expected, test_file)
def test_ics_chromium_crypto_third_party_nss_blapi_h(self):
test_file = self.get_test_loc('ics/chromium-crypto-third_party-nss/blapi.h')
expected = [
u'Copyright (c) 1994-2000 the Initial Developer.',
]
check_detection(expected, test_file)
def test_ics_chromium_crypto_third_party_nss_sha256_h(self):
test_file = self.get_test_loc('ics/chromium-crypto-third_party-nss/sha256.h')
expected = [
u'Copyright (c) 2002 the Initial Developer.',
]
check_detection(expected, test_file)
def test_ics_chromium_googleurl_license_txt(self):
test_file = self.get_test_loc('ics/chromium-googleurl/LICENSE.txt')
expected = [
u'Copyright 2007, Google Inc.',
u'Copyright (c) 1998 the Initial Developer.',
]
check_detection(expected, test_file)
def test_ics_chromium_googleurl_base_basictypes_h(self):
test_file = self.get_test_loc('ics/chromium-googleurl-base/basictypes.h')
expected = [
u'Copyright 2001 - 2003 Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_googleurl_base_logging_cc(self):
test_file = self.get_test_loc('ics/chromium-googleurl-base/logging.cc')
expected = [
u'Copyright 2007, Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_googleurl_base_logging_h(self):
test_file = self.get_test_loc('ics/chromium-googleurl-base/logging.h')
expected = [
u'Copyright 2006 Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_googleurl_base_scoped_ptr_h(self):
test_file = self.get_test_loc('ics/chromium-googleurl-base/scoped_ptr.h')
expected = [
u'(c) Copyright Greg Colvin and Beman Dawes 1998, 1999.',
u'Copyright (c) 2001, 2002 Peter Dimov',
]
check_detection(expected, test_file)
def test_ics_chromium_googleurl_src_gurl_unittest_cc(self):
test_file = self.get_test_loc('ics/chromium-googleurl-src/gurl_unittest.cc')
expected = [
u'Copyright 2007 Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_googleurl_src_url_canon_ip_cc(self):
test_file = self.get_test_loc('ics/chromium-googleurl-src/url_canon_ip.cc')
expected = [
u'Copyright 2009, Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_googleurl_src_url_common_h(self):
test_file = self.get_test_loc('ics/chromium-googleurl-src/url_common.h')
expected = [
u'Copyright 2010, Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_googleurl_src_url_parse_cc(self):
test_file = self.get_test_loc('ics/chromium-googleurl-src/url_parse.cc')
expected = [
u'Copyright (c) 1998 the Initial Developer.',
]
check_detection(expected, test_file)
def test_ics_chromium_googleurl_src_url_test_utils_h(self):
test_file = self.get_test_loc('ics/chromium-googleurl-src/url_test_utils.h')
expected = [
u'Copyright 2007 Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_net_base_cookie_monster_cc(self):
test_file = self.get_test_loc('ics/chromium-net-base/cookie_monster.cc')
expected = [
u'Copyright (c) 2011 The Chromium Authors.',
u'Copyright (c) 2003 the Initial Developer.',
]
check_detection(expected, test_file)
def test_ics_chromium_net_base_effective_tld_names_dat(self):
test_file = self.get_test_loc('ics/chromium-net-base/effective_tld_names.dat')
expected = [
u'Copyright (c) 2007 the Initial Developer.',
]
check_detection(expected, test_file)
def test_ics_chromium_net_base_ssl_false_start_blacklist_process_cc(self):
test_file = self.get_test_loc('ics/chromium-net-base/ssl_false_start_blacklist_process.cc')
expected = [
u'Copyright (c) 2010 The Chromium Authors.',
u'Copyright (c) 2010 The Chromium Authors.',
]
check_detection(expected, test_file)
def test_ics_chromium_net_base_x509_cert_types_mac_unittest_cc(self):
test_file = self.get_test_loc('ics/chromium-net-base/x509_cert_types_mac_unittest.cc')
expected = [
u'Copyright (c) 2010 The Chromium Authors.',
u'(c) Kasm 2005',
u'(c) 1999 Entrust.net Limited',
u'(c) Kasm 2005',
u"(c) 1999 Entrust.net Limited",
]
check_detection(expected, test_file)
def test_ics_chromium_net_base_x509_certificate_unittest_cc(self):
test_file = self.get_test_loc('ics/chromium-net-base/x509_certificate_unittest.cc')
expected = [
u'Copyright (c) 2011 The Chromium Authors.',
]
check_detection(expected, test_file)
def test_ics_chromium_net_data_proxy_resolver_perftest_no_ads_pac(self):
test_file = self.get_test_loc('ics/chromium-net-data-proxy_resolver_perftest/no-ads.pac')
expected = [
u'Copyright 1996-2004, John',
]
check_detection(expected, test_file)
@expectedFailure
def test_ics_chromium_net_data_proxy_resolver_perftest_no_ads_pac_trail_name(self):
test_file = self.get_test_loc('ics/chromium-net-data-proxy_resolver_perftest/no-ads.pac')
expected = [
u'Copyright 1996-2004, John LoVerso.',
]
check_detection(expected, test_file)
def test_ics_chromium_net_disk_cache_sparse_control_cc(self):
test_file = self.get_test_loc('ics/chromium-net-disk_cache/sparse_control.cc')
expected = [
u'Copyright (c) 2009-2010 The Chromium Authors.',
]
check_detection(expected, test_file)
def test_ics_chromium_net_ftp_ftp_network_layer_cc(self):
test_file = self.get_test_loc('ics/chromium-net-ftp/ftp_network_layer.cc')
expected = [
u'Copyright (c) 2008 The Chromium Authors.',
]
check_detection(expected, test_file)
def test_ics_chromium_net_http_des_cc(self):
test_file = self.get_test_loc('ics/chromium-net-http/des.cc')
expected = [
u'Copyright (c) 2011 The Chromium Authors.',
u'Copyright (c) 2003 IBM Corporation.',
]
check_detection(expected, test_file)
def test_ics_chromium_net_http_http_auth_handler_ntlm_portable_cc(self):
test_file = self.get_test_loc('ics/chromium-net-http/http_auth_handler_ntlm_portable.cc')
expected = [
u'Copyright (c) 2010 The Chromium Authors.',
u'Copyright (c) 2003 IBM Corporation.',
]
check_detection(expected, test_file)
def test_ics_chromium_net_http_http_chunked_decoder_cc(self):
test_file = self.get_test_loc('ics/chromium-net-http/http_chunked_decoder.cc')
expected = [
u'Copyright (c) 2010 The Chromium Authors.',
u'Copyright (c) 2001 the Initial Developer.',
]
check_detection(expected, test_file)
def test_ics_chromium_net_http_md4_cc(self):
test_file = self.get_test_loc('ics/chromium-net-http/md4.cc')
expected = [
u'Copyright (c) 2003 IBM Corporation.',
]
check_detection(expected, test_file)
def test_ics_chromium_net_socket_ssl_client_socket_nss_cc(self):
test_file = self.get_test_loc('ics/chromium-net-socket/ssl_client_socket_nss.cc')
expected = [
u'Copyright (c) 2011 The Chromium Authors.',
u'Copyright (c) 2000 the Initial Developer.',
]
check_detection(expected, test_file)
def test_ics_chromium_net_third_party_gssapi_gssapi_h(self):
test_file = self.get_test_loc('ics/chromium-net-third_party-gssapi/gssapi.h')
expected = [
u'Copyright 1993 by OpenVision Technologies, Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_net_third_party_gssapi_license(self):
test_file = self.get_test_loc('ics/chromium-net-third_party-gssapi/LICENSE')
expected = [
u'Copyright 1993 by OpenVision Technologies, Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_net_tools_spdyshark_makefile_am(self):
test_file = self.get_test_loc('ics/chromium-net-tools-spdyshark/Makefile.am')
expected = [
u'Copyright 1998 Gerald Combs',
]
check_detection(expected, test_file)
def test_ics_chromium_net_tools_spdyshark_packet_spdy_c(self):
test_file = self.get_test_loc('ics/chromium-net-tools-spdyshark/packet-spdy.c')
expected = [
u'Copyright 2010, Google Inc. Eric Shienbrood <ers@google.com>',
u'Copyright 1998 Gerald Combs',
]
check_detection(expected, test_file)
def test_ics_chromium_net_tools_spdyshark_plugin_rc_in(self):
test_file = self.get_test_loc('ics/chromium-net-tools-spdyshark/plugin.rc.in')
expected = [
u'Copyright (c) 1998 Gerald Combs <gerald@wireshark.org>, Gilbert Ramirez <gram@alumni.rice.edu> and others',
]
check_detection(expected, test_file)
def test_ics_chromium_net_tools_testserver_chromiumsync_py(self):
test_file = self.get_test_loc('ics/chromium-net-tools-testserver/chromiumsync.py')
expected = [
u'Copyright (c) 2010 The Chromium Authors.',
]
check_detection(expected, test_file)
def test_ics_chromium_net_tools_tld_cleanup_tld_cleanup_cc(self):
test_file = self.get_test_loc('ics/chromium-net-tools-tld_cleanup/tld_cleanup.cc')
expected = [
u'Copyright (c) 2006-2008 The Chromium Authors.',
u'Copyright (c) 2009 The Chromium Authors.',
]
check_detection(expected, test_file)
def test_ics_chromium_sdch_open_vcdiff_aclocal_m4(self):
test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff/aclocal.m4')
expected = [
u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc.',
u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc.',
u'Copyright (c) 2002, 2003, 2005, 2006, 2007 Free Software Foundation, Inc.',
u'Copyright (c) 2001, 2003, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 1997, 2000, 2001, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
u'Copyright (c) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
u'Copyright (c) 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 1996, 1997, 2000, 2001, 2003, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2008 Free Software Foundation, Inc.',
u'Copyright (c) 2001, 2003, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 2003, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 2001, 2002, 2003, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 1997, 1999, 2000, 2001, 2003, 2004, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
u'Copyright (c) 2001, 2002, 2003, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 1996, 1997, 2000, 2001, 2003, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 2001, 2003, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 2006 Free Software Foundation, Inc.',
u'Copyright (c) 2004, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_sdch_open_vcdiff_compile(self):
test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff/compile')
expected = [
u'Copyright 1999, 2000 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_sdch_open_vcdiff_configure(self):
test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff/configure')
expected = [
u'Copyright (c) 1992, 1993, 1994, 1995, 1996, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
u'Copyright (c) 1992, 1993, 1994, 1995, 1996, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc.',
u'Copyright (c) 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_sdch_open_vcdiff_copying(self):
test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff/COPYING')
expected = [
u'Copyright (c) 2008, Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_sdch_open_vcdiff_depcomp(self):
test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff/depcomp')
expected = [
u'Copyright (c) 1999, 2000, 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_sdch_open_vcdiff_install(self):
test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff/INSTALL')
expected = [
u'Copyright (c) 1994, 1995, 1996, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_sdch_open_vcdiff_ltmain_sh(self):
test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff/ltmain.sh')
expected = [
u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc.',
u'Gordon Matzigkeit <gord@gnu.ai.mit.edu>, 1996',
u'Copyright (c) 2008 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_sdch_open_vcdiff_missing(self):
test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff/missing')
expected = [
u'Copyright (c) 1996, 1997, 1999, 2000, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_sdch_open_vcdiff_man_vcdiff_1(self):
test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff-man/vcdiff.1')
expected = [
u'Copyright (c) 2008, Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_sdch_open_vcdiff_src_addrcache_cc(self):
test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff-src/addrcache.cc')
expected = [
u'Copyright 2007 Google Inc. Author Lincoln Smith',
]
check_detection(expected, test_file)
@expectedFailure
def test_ics_chromium_sdch_open_vcdiff_src_addrcache_cc_extra_author(self):
test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff-src/addrcache.cc')
expected = [
u'Copyright 2007 Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_sdch_open_vcdiff_src_adler32_c(self):
test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff-src/adler32.c')
expected = [
u'Copyright (c) 1995-2004 Mark Adler',
]
check_detection(expected, test_file)
def test_ics_chromium_sdch_open_vcdiff_src_blockhash_cc(self):
test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff-src/blockhash.cc')
expected = [
u'Copyright 2006, 2008 Google Inc. Authors Chandra Chereddi, Lincoln Smith',
]
check_detection(expected, test_file)
@expectedFailure
def test_ics_chromium_sdch_open_vcdiff_src_blockhash_cc_extra_author(self):
test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff-src/blockhash.cc')
expected = [
u'Copyright 2006, 2008 Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_sdch_open_vcdiff_src_blockhash_test_cc(self):
test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff-src/blockhash_test.cc')
expected = [
u'Copyright 2008 Google Inc. Author Lincoln Smith',
]
check_detection(expected, test_file)
@expectedFailure
def test_ics_chromium_sdch_open_vcdiff_src_blockhash_test_cc_extra_author(self):
test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff-src/blockhash_test.cc')
expected = [
u'Copyright 2008 Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_sdch_open_vcdiff_src_codetablewriter_interface_h(self):
test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff-src/codetablewriter_interface.h')
expected = [
u'Copyright 2008 Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_sdch_open_vcdiff_src_gflags_cc(self):
test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff-src/gflags.cc')
expected = [
u'Copyright (c) 2006, Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_sdch_open_vcdiff_src_mutex_h(self):
test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff-src/mutex.h')
expected = [
u'Copyright (c) 2007, Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_sdch_open_vcdiff_src_rolling_hash_h(self):
test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff-src/rolling_hash.h')
expected = [
u'Copyright 2007, 2008 Google Inc. Authors Jeff Dean, Sanjay Ghemawat, Lincoln Smith',
]
check_detection(expected, test_file)
@expectedFailure
def test_ics_chromium_sdch_open_vcdiff_src_rolling_hash_h_extra_author(self):
test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff-src/rolling_hash.h')
expected = [
u'Copyright 2007, 2008 Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_sdch_open_vcdiff_src_vcdiff_test_sh(self):
test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff-src/vcdiff_test.sh')
expected = [
u'Copyright 2008 Google Inc. Author Lincoln Smith',
]
check_detection(expected, test_file)
@expectedFailure
def test_ics_chromium_sdch_open_vcdiff_src_vcdiff_test_sh_extra_author(self):
test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff-src/vcdiff_test.sh')
expected = [
u'Copyright 2008 Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_sdch_open_vcdiff_src_zconf_h(self):
test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff-src/zconf.h')
expected = [
u'Copyright (c) 1995-2005 Jean-loup Gailly.',
]
check_detection(expected, test_file)
def test_ics_chromium_sdch_open_vcdiff_src_zlib_h(self):
test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff-src/zlib.h')
expected = [
u'Copyright (c) 1995-2005 Jean-loup Gailly and Mark Adler',
]
check_detection(expected, test_file)
def test_ics_chromium_sdch_open_vcdiff_src_google_output_string_h(self):
test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff-src-google/output_string.h')
expected = [
u'Copyright 2008 Google Inc. Author Lincoln Smith',
]
check_detection(expected, test_file)
@expectedFailure
def test_ics_chromium_sdch_open_vcdiff_src_google_output_string_h_extra_author(self):
test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff-src-google/output_string.h')
expected = [
u'Copyright 2008 Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_sdch_open_vcdiff_src_gtest_gtest_cc(self):
test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff-src-gtest/gtest.cc')
expected = [
u'Copyright 2005, Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_sdch_open_vcdiff_src_gtest_gtest_main_cc(self):
test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff-src-gtest/gtest_main.cc')
expected = [
u'Copyright 2006, Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_sdch_open_vcdiff_vsprojects_vcdiff_test_bat(self):
test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff-vsprojects/vcdiff_test.bat')
expected = [
u'Copyright 2008 Google Inc. Author Lincoln Smith',
]
check_detection(expected, test_file)
@expectedFailure
def test_ics_chromium_sdch_open_vcdiff_vsprojects_vcdiff_test_bat_extra_author(self):
test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff-vsprojects/vcdiff_test.bat')
expected = [
u'Copyright 2008 Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_testing_generate_gmock_mutant_py(self):
test_file = self.get_test_loc('ics/chromium-testing/generate_gmock_mutant.py')
expected = [
u'Copyright (c) 2009 The Chromium Authors.',
u'Copyright (c) 2009 The Chromium Authors.',
]
check_detection(expected, test_file)
def test_ics_chromium_testing_gmock_copying(self):
test_file = self.get_test_loc('ics/chromium-testing-gmock/COPYING')
expected = [
u'Copyright 2008, Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_testing_gmock_include_gmock_gmock_cardinalities_h(self):
test_file = self.get_test_loc('ics/chromium-testing-gmock-include-gmock/gmock-cardinalities.h')
expected = [
u'Copyright 2007, Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_testing_gmock_scripts_fuse_gmock_files_py(self):
test_file = self.get_test_loc('ics/chromium-testing-gmock-scripts/fuse_gmock_files.py')
expected = [
u'Copyright 2009, Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_testing_gmock_scripts_gmock_doctor_py(self):
test_file = self.get_test_loc('ics/chromium-testing-gmock-scripts/gmock_doctor.py')
expected = [
u'Copyright 2008, Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_testing_gmock_scripts_upload_py(self):
test_file = self.get_test_loc('ics/chromium-testing-gmock-scripts/upload.py')
expected = [
u'Copyright 2007 Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_testing_gmock_scripts_generator_gmock_gen_py(self):
test_file = self.get_test_loc('ics/chromium-testing-gmock-scripts-generator/gmock_gen.py')
expected = [
u'Copyright 2008 Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_testing_gmock_scripts_generator_cpp_ast_py(self):
test_file = self.get_test_loc('ics/chromium-testing-gmock-scripts-generator-cpp/ast.py')
expected = [
u'Copyright 2007 Neal Norwitz',
u'Portions Copyright 2007 Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_testing_gmock_scripts_generator_cpp_gmock_class_test_py(self):
test_file = self.get_test_loc('ics/chromium-testing-gmock-scripts-generator-cpp/gmock_class_test.py')
expected = [
u'Copyright 2009 Neal Norwitz',
u'Portions Copyright 2009 Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_testing_gmock_test_gmock_test_utils_py(self):
test_file = self.get_test_loc('ics/chromium-testing-gmock-test/gmock_test_utils.py')
expected = [
u'Copyright 2006, Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_testing_gtest_include_gtest_internal_gtest_linked_ptr_h(self):
test_file = self.get_test_loc('ics/chromium-testing-gtest-include-gtest-internal/gtest-linked_ptr.h')
expected = [
u'Copyright 2003 Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_testing_gtest_include_gtest_internal_gtest_tuple_h(self):
test_file = self.get_test_loc('ics/chromium-testing-gtest-include-gtest-internal/gtest-tuple.h')
expected = [
u'Copyright 2009 Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_testing_gtest_samples_sample10_unittest_cc(self):
test_file = self.get_test_loc('ics/chromium-testing-gtest-samples/sample10_unittest.cc')
expected = [
u'Copyright 2009 Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_testing_gtest_scripts_gen_gtest_pred_impl_py(self):
test_file = self.get_test_loc('ics/chromium-testing-gtest-scripts/gen_gtest_pred_impl.py')
expected = [
u'Copyright 2006, Google Inc.',
u'Copyright 2006, Google Inc.',
u'Copyright 2006, Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_testing_gtest_src_gtest_port_cc(self):
test_file = self.get_test_loc('ics/chromium-testing-gtest-src/gtest-port.cc')
expected = [
u'Copyright 2008, Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_testing_gtest_test_gtest_catch_exceptions_test_py(self):
test_file = self.get_test_loc('ics/chromium-testing-gtest-test/gtest_catch_exceptions_test.py')
expected = [
u'Copyright 2010 Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_testing_gtest_test_gtest_filter_unittest_py(self):
test_file = self.get_test_loc('ics/chromium-testing-gtest-test/gtest_filter_unittest.py')
expected = [
u'Copyright 2005 Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_testing_gtest_test_gtest_shuffle_test_py(self):
test_file = self.get_test_loc('ics/chromium-testing-gtest-test/gtest_shuffle_test.py')
expected = [
u'Copyright 2009 Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_testing_gtest_test_gtest_linked_ptr_test_cc(self):
test_file = self.get_test_loc('ics/chromium-testing-gtest-test/gtest-linked_ptr_test.cc')
expected = [
u'Copyright 2003, Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libevent_buffer_c(self):
test_file = self.get_test_loc('ics/chromium-third_party-libevent/buffer.c')
expected = [
u'Copyright (c) 2002, 2003 Niels Provos <provos@citi.umich.edu>',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libevent_config_guess(self):
test_file = self.get_test_loc('ics/chromium-third_party-libevent/config.guess')
expected = [
u'Copyright (c) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc.',
u'Copyright (c) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libevent_configure(self):
test_file = self.get_test_loc('ics/chromium-third_party-libevent/configure')
expected = [
u'Copyright (c) 1992, 1993, 1994, 1995, 1996, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc.',
u'Copyright (c) 1992, 1993, 1994, 1995, 1996, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc.',
u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc.',
u'Copyright (c) 2008 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libevent_devpoll_c(self):
test_file = self.get_test_loc('ics/chromium-third_party-libevent/devpoll.c')
expected = [
u'Copyright 2000-2004 Niels Provos <provos@citi.umich.edu>',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libevent_epoll_c(self):
test_file = self.get_test_loc('ics/chromium-third_party-libevent/epoll.c')
expected = [
u'Copyright 2000-2003 Niels Provos <provos@citi.umich.edu>',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libevent_epoll_sub_c(self):
test_file = self.get_test_loc('ics/chromium-third_party-libevent/epoll_sub.c')
expected = [
u'Copyright 2003 Niels Provos <provos@citi.umich.edu>',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libevent_evbuffer_c(self):
test_file = self.get_test_loc('ics/chromium-third_party-libevent/evbuffer.c')
expected = [
u'Copyright (c) 2002-2004 Niels Provos <provos@citi.umich.edu>',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libevent_evdns_3(self):
test_file = self.get_test_loc('ics/chromium-third_party-libevent/evdns.3')
expected = [
u'Copyright (c) 2006 Niels Provos <provos@citi.umich.edu>',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libevent_evdns_h(self):
test_file = self.get_test_loc('ics/chromium-third_party-libevent/evdns.h')
expected = [
u'Copyright (c) 2006 Niels Provos <provos@citi.umich.edu>',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libevent_event_3(self):
test_file = self.get_test_loc('ics/chromium-third_party-libevent/event.3')
expected = [
u'Copyright (c) 2000 Artur Grabowski <art@openbsd.org>',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libevent_event_h(self):
test_file = self.get_test_loc('ics/chromium-third_party-libevent/event.h')
expected = [
u'Copyright (c) 2000-2007 Niels Provos <provos@citi.umich.edu>',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libevent_event_rpcgen_py(self):
test_file = self.get_test_loc('ics/chromium-third_party-libevent/event_rpcgen.py')
expected = [
u'Copyright (c) 2005 Niels Provos <provos@citi.umich.edu>',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libevent_event_tagging_c(self):
test_file = self.get_test_loc('ics/chromium-third_party-libevent/event_tagging.c')
expected = [
u'Copyright (c) 2003, 2004 Niels Provos <provos@citi.umich.edu>',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libevent_event_internal_h(self):
test_file = self.get_test_loc('ics/chromium-third_party-libevent/event-internal.h')
expected = [
u'Copyright (c) 2000-2004 Niels Provos <provos@citi.umich.edu>',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libevent_evport_c(self):
test_file = self.get_test_loc('ics/chromium-third_party-libevent/evport.c')
expected = [
u'Copyright (c) 2007 Sun Microsystems.',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libevent_evsignal_h(self):
test_file = self.get_test_loc('ics/chromium-third_party-libevent/evsignal.h')
expected = [
u'Copyright 2000-2002 Niels Provos <provos@citi.umich.edu>',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libevent_evutil_c(self):
test_file = self.get_test_loc('ics/chromium-third_party-libevent/evutil.c')
expected = [
u'Copyright (c) 2007 Niels Provos <provos@citi.umich.edu>',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libevent_http_c(self):
test_file = self.get_test_loc('ics/chromium-third_party-libevent/http.c')
expected = [
u'Copyright (c) 2002-2006 Niels Provos <provos@citi.umich.edu>',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libevent_http_internal_h(self):
test_file = self.get_test_loc('ics/chromium-third_party-libevent/http-internal.h')
expected = [
u'Copyright 2001 Niels Provos <provos@citi.umich.edu>',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libevent_license(self):
test_file = self.get_test_loc('ics/chromium-third_party-libevent/LICENSE')
expected = [
u'Copyright 2000-2007 Niels Provos <provos@citi.umich.edu>',
u'Copyright 2007-2009 Niels Provos and Nick Mathewson',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libevent_log_c(self):
test_file = self.get_test_loc('ics/chromium-third_party-libevent/log.c')
expected = [
u'Copyright (c) 2005 Nick Mathewson <nickm@freehaven.net>',
u'Copyright (c) 2000 Dug Song <dugsong@monkey.org>',
u'Copyright (c) 1993 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libevent_min_heap_h(self):
test_file = self.get_test_loc('ics/chromium-third_party-libevent/min_heap.h')
expected = [
u'Copyright (c) 2006 Maxim Yegorushkin <maxim.yegorushkin@gmail.com>',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libevent_missing(self):
test_file = self.get_test_loc('ics/chromium-third_party-libevent/missing')
expected = [
u'Copyright (c) 1996, 1997, 1999, 2000, 2002, 2003 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libevent_strlcpy_c(self):
test_file = self.get_test_loc('ics/chromium-third_party-libevent/strlcpy.c')
expected = [
u'Copyright (c) 1998 Todd C. Miller <Todd.Miller@courtesan.com>',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libevent_compat_sys_libevent_time_h(self):
test_file = self.get_test_loc('ics/chromium-third_party-libevent-compat-sys/_libevent_time.h')
expected = [
u'Copyright (c) 1982, 1986, 1993 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libevent_compat_sys_queue_h(self):
test_file = self.get_test_loc('ics/chromium-third_party-libevent-compat-sys/queue.h')
expected = [
u'Copyright (c) 1991, 1993 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libevent_test_regress_dns_c(self):
test_file = self.get_test_loc('ics/chromium-third_party-libevent-test/regress_dns.c')
expected = [
u'Copyright (c) 2003-2006 Niels Provos <provos@citi.umich.edu>',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libjingle_overrides_talk_base_logging_h(self):
test_file = self.get_test_loc('ics/chromium-third_party-libjingle-overrides-talk-base/logging.h')
expected = [
u'Copyright 2004 2005, Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libjingle_source_copying(self):
test_file = self.get_test_loc('ics/chromium-third_party-libjingle-source/COPYING')
expected = [
u'Copyright (c) 2004 2005, Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libjingle_source_talk_base_asyncfile_cc(self):
test_file = self.get_test_loc('ics/chromium-third_party-libjingle-source-talk-base/asyncfile.cc')
expected = [
u'Copyright 2010, Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libjingle_source_talk_base_asyncfile_h(self):
test_file = self.get_test_loc('ics/chromium-third_party-libjingle-source-talk-base/asyncfile.h')
expected = [
u'Copyright 2004 2010, Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libjingle_source_talk_base_base64_cc(self):
test_file = self.get_test_loc('ics/chromium-third_party-libjingle-source-talk-base/base64.cc')
expected = [
u'Copyright (c) 1999, Bob Withers',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libjingle_source_talk_base_base64_h(self):
test_file = self.get_test_loc('ics/chromium-third_party-libjingle-source-talk-base/base64.h')
expected = [
u'Copyright (c) 1999, Bob Withers',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libjingle_source_talk_base_basicpacketsocketfactory_cc(self):
test_file = self.get_test_loc('ics/chromium-third_party-libjingle-source-talk-base/basicpacketsocketfactory.cc')
expected = [
u'Copyright 2011, Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libjingle_source_talk_base_buffer_h(self):
test_file = self.get_test_loc('ics/chromium-third_party-libjingle-source-talk-base/buffer.h')
expected = [
u'Copyright 2004-2010, Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libjingle_source_talk_base_event_cc(self):
test_file = self.get_test_loc('ics/chromium-third_party-libjingle-source-talk-base/event.cc')
expected = [
u'Copyright 2004 2008, Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libjingle_source_talk_base_fileutils_cc(self):
test_file = self.get_test_loc('ics/chromium-third_party-libjingle-source-talk-base/fileutils.cc')
expected = [
u'Copyright 2004 2006, Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libjingle_source_talk_base_httpbase_cc(self):
test_file = self.get_test_loc('ics/chromium-third_party-libjingle-source-talk-base/httpbase.cc')
expected = [
u'Copyright 2004 2005, Google Inc.',
u'Copyright 2005 Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libjingle_source_talk_base_macconversion_cc(self):
test_file = self.get_test_loc('ics/chromium-third_party-libjingle-source-talk-base/macconversion.cc')
expected = [
u'Copyright 2004 2009, Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libjingle_source_talk_base_macutils_cc(self):
test_file = self.get_test_loc('ics/chromium-third_party-libjingle-source-talk-base/macutils.cc')
expected = [
u'Copyright 2007 2009, Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libjingle_source_talk_base_socketstream_h(self):
test_file = self.get_test_loc('ics/chromium-third_party-libjingle-source-talk-base/socketstream.h')
expected = [
u'Copyright 2005 2010, Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libjingle_source_talk_base_stringutils_cc(self):
test_file = self.get_test_loc('ics/chromium-third_party-libjingle-source-talk-base/stringutils.cc')
expected = [
u'Copyright 2004 2005, Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libjingle_source_talk_session_phone_call_cc(self):
test_file = self.get_test_loc('ics/chromium-third_party-libjingle-source-talk-session-phone/call.cc')
expected = [
u'Copyright 2004 2007, Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libjingle_source_talk_session_phone_codec_h(self):
test_file = self.get_test_loc('ics/chromium-third_party-libjingle-source-talk-session-phone/codec.h')
expected = [
u'Copyright 2004 2007, Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libjingle_source_talk_session_phone_mediamonitor_cc(self):
test_file = self.get_test_loc('ics/chromium-third_party-libjingle-source-talk-session-phone/mediamonitor.cc')
expected = [
u'Copyright 2005 2007, Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libjingle_source_talk_session_phone_mediamonitor_h(self):
test_file = self.get_test_loc('ics/chromium-third_party-libjingle-source-talk-session-phone/mediamonitor.h')
expected = [
u'Copyright 2005 2008, Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libjingle_source_talk_session_phone_srtpfilter_h(self):
test_file = self.get_test_loc('ics/chromium-third_party-libjingle-source-talk-session-phone/srtpfilter.h')
expected = [
u'Copyright 2009, Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libjingle_source_talk_session_phone_v4llookup_cc(self):
test_file = self.get_test_loc('ics/chromium-third_party-libjingle-source-talk-session-phone/v4llookup.cc')
expected = [
u'Copyright 2009, Google Inc. Author lexnikitin@google.com',
]
check_detection(expected, test_file)
@expectedFailure
def test_ics_chromium_third_party_libjingle_source_talk_session_phone_v4llookup_cc_extra_author(self):
test_file = self.get_test_loc('ics/chromium-third_party-libjingle-source-talk-session-phone/v4llookup.cc')
expected = [
u'Copyright 2009, Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libjingle_source_talk_session_phone_videocommon_h(self):
test_file = self.get_test_loc('ics/chromium-third_party-libjingle-source-talk-session-phone/videocommon.h')
expected = [
u'Copyright 2011, Google Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_libjingle_source_talk_third_party_libudev_libudev_h(self):
test_file = self.get_test_loc('ics/chromium-third_party-libjingle-source-talk-third_party-libudev/libudev.h')
expected = [
u'Copyright (c) 2008-2010 Kay Sievers <kay.sievers@vrfy.org>',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_modp_b64_license(self):
test_file = self.get_test_loc('ics/chromium-third_party-modp_b64/LICENSE')
expected = [
u'Copyright (c) 2005, 2006 Nick Galbreath',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_modp_b64_modp_b64_cc(self):
test_file = self.get_test_loc('ics/chromium-third_party-modp_b64/modp_b64.cc')
expected = [
u'Copyright (c) 2005, 2006 Nick Galbreath',
]
check_detection(expected, test_file)
def test_ics_chromium_third_party_modp_b64_modp_b64_h(self):
test_file = self.get_test_loc('ics/chromium-third_party-modp_b64/modp_b64.h')
expected = [
u'Copyright (c) 2005, 2006, Nick Galbreath',
]
check_detection(expected, test_file)
def test_ics_chromium_webkit_glue_inspector_strings_grd(self):
test_file = self.get_test_loc('ics/chromium-webkit-glue/inspector_strings.grd')
expected = [
u'Copyright (c) 2007, 2008 Apple Inc.',
]
check_detection(expected, test_file)
def test_ics_chromium_webkit_glue_multipart_response_delegate_h(self):
test_file = self.get_test_loc('ics/chromium-webkit-glue/multipart_response_delegate.h')
expected = [
u'Copyright (c) 2006-2009 The Chromium Authors.',
u'Copyright (c) 1998 the Initial Developer.',
]
check_detection(expected, test_file)
def test_ics_chromium_webkit_glue_webcursor_gtk_data_h(self):
test_file = self.get_test_loc('ics/chromium-webkit-glue/webcursor_gtk_data.h')
expected = [
u'Copyright (c) 2001 Tim Copperfield <timecop@network.email.ne.jp>',
u'Copyright (c) 2007 Christian Dywan <christian@twotoasts.de>',
]
check_detection(expected, test_file)
def test_ics_chromium_webkit_glue_webkit_strings_grd(self):
test_file = self.get_test_loc('ics/chromium-webkit-glue/webkit_strings.grd')
expected = [
u'Copyright (c) 2007 Apple Inc.',
u'Copyright (c) 2001 the Initial Developer.',
]
check_detection(expected, test_file)
def test_ics_chromium_webkit_glue_resources_readme_txt(self):
test_file = self.get_test_loc('ics/chromium-webkit-glue-resources/README.txt')
expected = [
u'Copyright (c) 1998 the Initial Developer.',
u'Copyright (c) 2005 Apple Computer, Inc.',
]
check_detection(expected, test_file)
def test_ics_clang_notice_trail_place(self):
test_file = self.get_test_loc('ics/clang/NOTICE')
expected = [
u'Copyright (c) 2007-2011 University of Illinois at Urbana-Champaign.',
]
check_detection(expected, test_file)
def test_ics_clang_docs_block_abi_apple_txt(self):
test_file = self.get_test_loc('ics/clang-docs/Block-ABI-Apple.txt')
expected = [
u'Copyright 2008-2010 Apple, Inc.',
]
check_detection(expected, test_file)
def test_ics_clang_docs_blocklanguagespec_txt(self):
test_file = self.get_test_loc('ics/clang-docs/BlockLanguageSpec.txt')
expected = [
u'Copyright 2008-2009 Apple, Inc.',
]
check_detection(expected, test_file)
def test_ics_clang_include_clang_basic_convertutf_h(self):
test_file = self.get_test_loc('ics/clang-include-clang-Basic/ConvertUTF.h')
expected = [
u'Copyright 2001-2004 Unicode, Inc.',
]
check_detection(expected, test_file)
def test_ics_clang_lib_headers_iso646_h(self):
test_file = self.get_test_loc('ics/clang-lib-Headers/iso646.h')
expected = [
u'Copyright (c) 2008 Eli Friedman',
]
check_detection(expected, test_file)
def test_ics_clang_lib_headers_limits_h(self):
test_file = self.get_test_loc('ics/clang-lib-Headers/limits.h')
expected = [
u'Copyright (c) 2009 Chris Lattner',
]
check_detection(expected, test_file)
def test_ics_clang_lib_headers_tgmath_h(self):
test_file = self.get_test_loc('ics/clang-lib-Headers/tgmath.h')
expected = [
u'Copyright (c) 2009 Howard Hinnant',
]
check_detection(expected, test_file)
def test_ics_collada_license_txt(self):
test_file = self.get_test_loc('ics/collada/license.txt')
expected = [
u'Copyright 2006 Sony Computer Entertainment Inc.',
]
check_detection(expected, test_file)
def test_ics_collada_include_dae_h(self):
test_file = self.get_test_loc('ics/collada-include/dae.h')
expected = [
u'Copyright 2006 Sony Computer Entertainment Inc.',
]
check_detection(expected, test_file)
def test_ics_collada_include_dae_daezaeuncompresshandler_h(self):
test_file = self.get_test_loc('ics/collada-include-dae/daeZAEUncompressHandler.h')
expected = [
u'Copyright 2008 Netallied Systems GmbH.',
]
check_detection(expected, test_file)
def test_ics_collada_src_1_4_dom_domasset_cpp(self):
test_file = self.get_test_loc('ics/collada-src-1.4-dom/domAsset.cpp')
expected = [
u'Copyright 2006 Sony Computer Entertainment Inc.',
]
check_detection(expected, test_file)
def test_ics_dbus_acinclude_m4(self):
test_file = self.get_test_loc('ics/dbus/acinclude.m4')
expected = [
u'Copyright (c) 2004 Scott James Remnant <scott@netsplit.com>.',
u'(c) 2003, 2004, 2005 Thomas Vander Stichele',
]
check_detection(expected, test_file)
def test_ics_dbus_configure_in(self):
test_file = self.get_test_loc('ics/dbus/configure.in')
expected = [
u'Copyright (c) 2000-2002, 2004 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_dbus_copying(self):
test_file = self.get_test_loc('ics/dbus/COPYING')
expected = [
u'Copyright (c) 2003-2004 Lawrence E. Rosen.',
u'Copyright (c) 1989, 1991 Free Software Foundation, Inc.',
u'copyrighted by the Free Software Foundation',
]
check_detection(expected, test_file)
def test_ics_dbus_bus_activation_c(self):
test_file = self.get_test_loc('ics/dbus-bus/activation.c')
expected = [
u'Copyright (c) 2003 CodeFactory AB',
u'Copyright (c) 2003 Red Hat, Inc.',
u'Copyright (c) 2004 Imendio HB',
]
check_detection(expected, test_file)
def test_ics_dbus_bus_activation_h(self):
test_file = self.get_test_loc('ics/dbus-bus/activation.h')
expected = [
u'Copyright (c) 2003 CodeFactory AB',
]
check_detection(expected, test_file)
    # --- ICS dbus-bus sources ---
    # Each test loads one file from the test-data tree via get_test_loc()
    # and asserts the ordered list of copyright statements that
    # check_detection() should report for it.
    def test_ics_dbus_bus_activation_exit_codes_h(self):
        test_file = self.get_test_loc('ics/dbus-bus/activation-exit-codes.h')
        expected = [
            u'Copyright (c) 2007 Red Hat, Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_dbus_bus_bus_c(self):
        test_file = self.get_test_loc('ics/dbus-bus/bus.c')
        expected = [
            u'Copyright (c) 2003, 2004 Red Hat, Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_dbus_bus_config_parser_trivial_c(self):
        test_file = self.get_test_loc('ics/dbus-bus/config-parser-trivial.c')
        expected = [
            u'Copyright (c) 2003, 2004, 2007 Red Hat, Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_dbus_bus_connection_c(self):
        test_file = self.get_test_loc('ics/dbus-bus/connection.c')
        expected = [
            u'Copyright (c) 2003 Red Hat, Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_dbus_bus_connection_h(self):
        test_file = self.get_test_loc('ics/dbus-bus/connection.h')
        expected = [
            u'Copyright (c) 2003, 2004 Red Hat, Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_dbus_bus_dbus_daemon_1_in(self):
        test_file = self.get_test_loc('ics/dbus-bus/dbus-daemon.1.in')
        expected = [
            u'Copyright (c) 2003,2008 Red Hat, Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_dbus_bus_desktop_file_c(self):
        test_file = self.get_test_loc('ics/dbus-bus/desktop-file.c')
        expected = [
            u'Copyright (c) 2003 CodeFactory AB',
            u'Copyright (c) 2003 Red Hat Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_dbus_bus_dir_watch_inotify_c(self):
        test_file = self.get_test_loc('ics/dbus-bus/dir-watch-inotify.c')
        expected = [
            u'Copyright (c) 2003 Red Hat, Inc.',
            u'(c) 2006 Mandriva',
        ]
        check_detection(expected, test_file)
    def test_ics_dbus_bus_dispatch_c(self):
        test_file = self.get_test_loc('ics/dbus-bus/dispatch.c')
        expected = [
            u'Copyright (c) 2003 CodeFactory AB',
            u'Copyright (c) 2003, 2004, 2005 Red Hat, Inc.',
            u'Copyright (c) 2004 Imendio HB',
        ]
        check_detection(expected, test_file)
    def test_ics_dbus_bus_driver_c(self):
        test_file = self.get_test_loc('ics/dbus-bus/driver.c')
        expected = [
            u'Copyright (c) 2003 CodeFactory AB',
            u'Copyright (c) 2003, 2004, 2005 Red Hat, Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_dbus_bus_main_c(self):
        test_file = self.get_test_loc('ics/dbus-bus/main.c')
        expected = [
            u'Copyright (c) 2003 Red Hat, Inc.',
            u'Copyright (c) 2002, 2003 Red Hat, Inc., CodeFactory AB, and others',
        ]
        check_detection(expected, test_file)
    def test_ics_dbus_bus_messagebus_config_in(self):
        test_file = self.get_test_loc('ics/dbus-bus/messagebus-config.in')
        expected = [
            u'Copyright 2009 Yaakov Selkowitz',
        ]
        check_detection(expected, test_file)
    def test_ics_dbus_bus_services_c(self):
        test_file = self.get_test_loc('ics/dbus-bus/services.c')
        expected = [
            u'Copyright (c) 2003 Red Hat, Inc.',
            u'Copyright (c) 2003 CodeFactory AB',
        ]
        check_detection(expected, test_file)
    def test_ics_dbus_bus_signals_c(self):
        test_file = self.get_test_loc('ics/dbus-bus/signals.c')
        expected = [
            u'Copyright (c) 2003, 2005 Red Hat, Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_dbus_bus_utils_c(self):
        test_file = self.get_test_loc('ics/dbus-bus/utils.c')
        expected = [
            u'Copyright (c) 2003 CodeFactory AB',
            u'Copyright (c) 2003 Red Hat, Inc.',
        ]
        check_detection(expected, test_file)
    # --- ICS dbus cmake files and dbus-dbus core sources (part 1) ---
    # Same pattern throughout: expected is the ordered copyright list
    # check_detection() should produce for the named test file.
    def test_ics_dbus_cmake_bus_dbus_daemon_xml(self):
        test_file = self.get_test_loc('ics/dbus-cmake-bus/dbus-daemon.xml')
        expected = [
            u'Copyright (c) 2003 Red Hat, Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_dbus_cmake_modules_win32macros_cmake(self):
        test_file = self.get_test_loc('ics/dbus-cmake-modules/Win32Macros.cmake')
        expected = [
            u'Copyright (c) 2006-2007, Ralf Habacker',
        ]
        check_detection(expected, test_file)
    def test_ics_dbus_dbus_dbus_address_c(self):
        test_file = self.get_test_loc('ics/dbus-dbus/dbus-address.c')
        expected = [
            u'Copyright (c) 2003 CodeFactory AB',
            u'Copyright (c) 2004,2005 Red Hat, Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_dbus_dbus_dbus_auth_h(self):
        test_file = self.get_test_loc('ics/dbus-dbus/dbus-auth.h')
        expected = [
            u'Copyright (c) 2002 Red Hat Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_dbus_dbus_dbus_auth_util_c(self):
        test_file = self.get_test_loc('ics/dbus-dbus/dbus-auth-util.c')
        expected = [
            u'Copyright (c) 2002, 2003, 2004 Red Hat Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_dbus_dbus_dbus_connection_c(self):
        test_file = self.get_test_loc('ics/dbus-dbus/dbus-connection.c')
        expected = [
            u'Copyright (c) 2002-2006 Red Hat Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_dbus_dbus_dbus_connection_h(self):
        test_file = self.get_test_loc('ics/dbus-dbus/dbus-connection.h')
        expected = [
            u'Copyright (c) 2002, 2003 Red Hat Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_dbus_dbus_dbus_credentials_util_c(self):
        test_file = self.get_test_loc('ics/dbus-dbus/dbus-credentials-util.c')
        expected = [
            u'Copyright (c) 2007 Red Hat Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_dbus_dbus_dbus_errors_c(self):
        test_file = self.get_test_loc('ics/dbus-dbus/dbus-errors.c')
        expected = [
            u'Copyright (c) 2002, 2004 Red Hat Inc.',
            u'Copyright (c) 2003 CodeFactory AB',
        ]
        check_detection(expected, test_file)
    def test_ics_dbus_dbus_dbus_errors_h(self):
        test_file = self.get_test_loc('ics/dbus-dbus/dbus-errors.h')
        expected = [
            u'Copyright (c) 2002 Red Hat Inc.',
            u'Copyright (c) 2003 CodeFactory AB',
        ]
        check_detection(expected, test_file)
    def test_ics_dbus_dbus_dbus_file_h(self):
        test_file = self.get_test_loc('ics/dbus-dbus/dbus-file.h')
        expected = [
            u'Copyright (c) 2002, 2003 Red Hat, Inc.',
            u'Copyright (c) 2003 CodeFactory AB',
        ]
        check_detection(expected, test_file)
    def test_ics_dbus_dbus_dbus_file_unix_c(self):
        test_file = self.get_test_loc('ics/dbus-dbus/dbus-file-unix.c')
        expected = [
            u'Copyright (c) 2002, 2003, 2006 Red Hat, Inc.',
            u'Copyright (c) 2003 CodeFactory AB',
        ]
        check_detection(expected, test_file)
    # NOTE: repeated entries below are deliberate — the same statement is
    # expected to be reported once per occurrence, in order.
    def test_ics_dbus_dbus_dbus_hash_c(self):
        test_file = self.get_test_loc('ics/dbus-dbus/dbus-hash.c')
        expected = [
            u'Copyright (c) 2002 Red Hat, Inc.',
            u'Copyright (c) 1991-1993 The Regents of the University of California.',
            u'Copyright (c) 1994 Sun Microsystems, Inc.',
            u'Copyright (c) 1991-1993 The Regents of the University of California.',
            u'Copyright (c) 1994 Sun Microsystems, Inc.',
            u'copyrighted by the Regents of the University of California, Sun Microsystems, Inc., Scriptics Corporation'
        ]
        check_detection(expected, test_file)
    def test_ics_dbus_dbus_dbus_hash_h(self):
        test_file = self.get_test_loc('ics/dbus-dbus/dbus-hash.h')
        expected = [
            u'Copyright (c) 2002 Red Hat, Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_dbus_dbus_dbus_internals_c(self):
        test_file = self.get_test_loc('ics/dbus-dbus/dbus-internals.c')
        expected = [
            u'Copyright (c) 2002, 2003 Red Hat, Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_dbus_dbus_dbus_internals_h(self):
        test_file = self.get_test_loc('ics/dbus-dbus/dbus-internals.h')
        expected = [
            u'Copyright (c) 2002, 2003 Red Hat, Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_dbus_dbus_dbus_keyring_c(self):
        test_file = self.get_test_loc('ics/dbus-dbus/dbus-keyring.c')
        expected = [
            u'Copyright (c) 2003, 2004 Red Hat Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_dbus_dbus_dbus_keyring_h(self):
        test_file = self.get_test_loc('ics/dbus-dbus/dbus-keyring.h')
        expected = [
            u'Copyright (c) 2003 Red Hat Inc.',
        ]
        check_detection(expected, test_file)
    # --- ICS dbus-dbus core sources (part 2): marshal/md5/message/server/sha ---
    def test_ics_dbus_dbus_dbus_marshal_basic_c(self):
        test_file = self.get_test_loc('ics/dbus-dbus/dbus-marshal-basic.c')
        expected = [
            u'Copyright (c) 2002 CodeFactory AB',
            u'Copyright (c) 2003, 2004, 2005 Red Hat, Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_dbus_dbus_dbus_marshal_basic_h(self):
        test_file = self.get_test_loc('ics/dbus-dbus/dbus-marshal-basic.h')
        expected = [
            u'Copyright (c) 2002 CodeFactory AB',
            u'Copyright (c) 2004, 2005 Red Hat, Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_dbus_dbus_dbus_marshal_recursive_util_c(self):
        test_file = self.get_test_loc('ics/dbus-dbus/dbus-marshal-recursive-util.c')
        expected = [
            u'Copyright (c) 2004, 2005 Red Hat, Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_dbus_dbus_dbus_md5_c(self):
        test_file = self.get_test_loc('ics/dbus-dbus/dbus-md5.c')
        expected = [
            u'Copyright (c) 2003 Red Hat Inc.',
            u'Copyright (c) 1999, 2000 Aladdin Enterprises.',
        ]
        check_detection(expected, test_file)
    def test_ics_dbus_dbus_dbus_memory_c(self):
        test_file = self.get_test_loc('ics/dbus-dbus/dbus-memory.c')
        expected = [
            u'Copyright (c) 2002, 2003 Red Hat Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_dbus_dbus_dbus_message_h(self):
        test_file = self.get_test_loc('ics/dbus-dbus/dbus-message.h')
        expected = [
            u'Copyright (c) 2002, 2003, 2005 Red Hat Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_dbus_dbus_dbus_message_factory_c(self):
        test_file = self.get_test_loc('ics/dbus-dbus/dbus-message-factory.c')
        expected = [
            u'Copyright (c) 2005 Red Hat Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_dbus_dbus_dbus_message_private_h(self):
        test_file = self.get_test_loc('ics/dbus-dbus/dbus-message-private.h')
        expected = [
            u'Copyright (c) 2005 Red Hat Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_dbus_dbus_dbus_message_util_c(self):
        test_file = self.get_test_loc('ics/dbus-dbus/dbus-message-util.c')
        expected = [
            u'Copyright (c) 2002, 2003, 2004, 2005 Red Hat Inc.',
            u'Copyright (c) 2002, 2003 CodeFactory AB',
        ]
        check_detection(expected, test_file)
    def test_ics_dbus_dbus_dbus_misc_c(self):
        test_file = self.get_test_loc('ics/dbus-dbus/dbus-misc.c')
        expected = [
            u'Copyright (c) 2006 Red Hat, Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_dbus_dbus_dbus_nonce_c(self):
        test_file = self.get_test_loc('ics/dbus-dbus/dbus-nonce.c')
        expected = [
            u'Copyright (c) 2009 Klaralvdalens Datakonsult AB, a KDAB Group company, info@kdab.net',
        ]
        check_detection(expected, test_file)
    def test_ics_dbus_dbus_dbus_nonce_h(self):
        test_file = self.get_test_loc('ics/dbus-dbus/dbus-nonce.h')
        expected = [
            u'Copyright (c) 2009 Klaralvdalens Datakonsult AB, a KDAB Group company, info@kdab.net',
        ]
        check_detection(expected, test_file)
    def test_ics_dbus_dbus_dbus_object_tree_c(self):
        test_file = self.get_test_loc('ics/dbus-dbus/dbus-object-tree.c')
        expected = [
            u'Copyright (c) 2003, 2005 Red Hat Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_dbus_dbus_dbus_protocol_h(self):
        test_file = self.get_test_loc('ics/dbus-dbus/dbus-protocol.h')
        expected = [
            u'Copyright (c) 2002, 2003 CodeFactory AB',
            u'Copyright (c) 2004, 2005 Red Hat, Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_dbus_dbus_dbus_server_c(self):
        test_file = self.get_test_loc('ics/dbus-dbus/dbus-server.c')
        expected = [
            u'Copyright (c) 2002, 2003, 2004, 2005 Red Hat Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_dbus_dbus_dbus_server_debug_pipe_c(self):
        test_file = self.get_test_loc('ics/dbus-dbus/dbus-server-debug-pipe.c')
        expected = [
            u'Copyright (c) 2003 CodeFactory AB',
            u'Copyright (c) 2003, 2004 Red Hat, Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_dbus_dbus_dbus_server_socket_c(self):
        test_file = self.get_test_loc('ics/dbus-dbus/dbus-server-socket.c')
        expected = [
            u'Copyright (c) 2002, 2003, 2004, 2006 Red Hat Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_dbus_dbus_dbus_server_socket_h(self):
        test_file = self.get_test_loc('ics/dbus-dbus/dbus-server-socket.h')
        expected = [
            u'Copyright (c) 2002, 2006 Red Hat Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_dbus_dbus_dbus_server_win_c(self):
        test_file = self.get_test_loc('ics/dbus-dbus/dbus-server-win.c')
        expected = [
            u'Copyright (c) 2002, 2003, 2004 Red Hat Inc.',
            u'Copyright (c) 2007 Ralf Habacker <ralf.habacker@freenet.de>',
        ]
        check_detection(expected, test_file)
    def test_ics_dbus_dbus_dbus_server_win_h(self):
        test_file = self.get_test_loc('ics/dbus-dbus/dbus-server-win.h')
        expected = [
            u'Copyright (c) 2002 Red Hat Inc.',
            u'Copyright (c) 2007 Ralf Habacker <ralf.habacker@freenet.de>',
        ]
        check_detection(expected, test_file)
    # NOTE: the third entry reflects current (truncated) detection output;
    # see the expectedFailure companion test below for the desired form.
    def test_ics_dbus_dbus_dbus_sha_c(self):
        test_file = self.get_test_loc('ics/dbus-dbus/dbus-sha.c')
        expected = [
            u'Copyright (c) 2003 Red Hat Inc.',
            u'Copyright (c) 1995 A. M. Kuchling',
            u'Copyright (c) 1995, A.M.',
        ]
        check_detection(expected, test_file)
    # Known-failure companion of test_ics_dbus_dbus_dbus_sha_c: for the same
    # file that test expects the truncated third entry
    # u'Copyright (c) 1995, A.M.', while this one pins the desired full
    # trailing author name — hence @expectedFailure until detection keeps
    # the complete name.
    @expectedFailure
    def test_ics_dbus_dbus_dbus_sha_c_trail_name(self):
        test_file = self.get_test_loc('ics/dbus-dbus/dbus-sha.c')
        expected = [
            u'Copyright (c) 2003 Red Hat Inc.',
            u'Copyright (c) 1995 A. M. Kuchling',
            u'Copyright (c) 1995 A. M. Kuchling',
        ]
        check_detection(expected, test_file)
    # --- ICS dbus-dbus sources (part 3: sockets/spawn/sysdeps/threads),
    # dbus doc files and the decode-gcov test helper ---
    def test_ics_dbus_dbus_dbus_sockets_win_h(self):
        test_file = self.get_test_loc('ics/dbus-dbus/dbus-sockets-win.h')
        expected = [
            u'Copyright (c) 2005 Novell, Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_dbus_dbus_dbus_spawn_c(self):
        test_file = self.get_test_loc('ics/dbus-dbus/dbus-spawn.c')
        expected = [
            u'Copyright (c) 2002, 2003, 2004 Red Hat, Inc.',
            u'Copyright (c) 2003 CodeFactory AB',
        ]
        check_detection(expected, test_file)
    def test_ics_dbus_dbus_dbus_spawn_win_c(self):
        test_file = self.get_test_loc('ics/dbus-dbus/dbus-spawn-win.c')
        expected = [
            u'Copyright (c) 2002, 2003, 2004 Red Hat, Inc.',
            u'Copyright (c) 2003 CodeFactory AB',
            u'Copyright (c) 2005 Novell, Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_dbus_dbus_dbus_string_h(self):
        test_file = self.get_test_loc('ics/dbus-dbus/dbus-string.h')
        expected = [
            u'Copyright (c) 2002, 2003 Red Hat, Inc.',
            u'Copyright (c) 2006 Ralf Habacker <ralf.habacker@freenet.de>',
        ]
        check_detection(expected, test_file)
    def test_ics_dbus_dbus_dbus_string_util_c(self):
        test_file = self.get_test_loc('ics/dbus-dbus/dbus-string-util.c')
        expected = [
            u'Copyright (c) 2002, 2003, 2004, 2005 Red Hat, Inc.',
            u'Copyright (c) 2006 Ralf Habacker <ralf.habacker@freenet.de>',
        ]
        check_detection(expected, test_file)
    def test_ics_dbus_dbus_dbus_sysdeps_pthread_c(self):
        test_file = self.get_test_loc('ics/dbus-dbus/dbus-sysdeps-pthread.c')
        expected = [
            u'Copyright (c) 2002, 2003, 2006 Red Hat, Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_dbus_dbus_dbus_sysdeps_util_unix_c(self):
        test_file = self.get_test_loc('ics/dbus-dbus/dbus-sysdeps-util-unix.c')
        expected = [
            u'Copyright (c) 2002, 2003, 2004, 2005 Red Hat, Inc.',
            u'Copyright (c) 2003 CodeFactory AB',
        ]
        check_detection(expected, test_file)
    def test_ics_dbus_dbus_dbus_sysdeps_util_win_c(self):
        test_file = self.get_test_loc('ics/dbus-dbus/dbus-sysdeps-util-win.c')
        expected = [
            u'Copyright (c) 2002, 2003, 2004, 2005 Red Hat, Inc.',
            u'Copyright (c) 2003 CodeFactory AB',
            u'Copyright (c) 2000 Werner Almesberger',
        ]
        check_detection(expected, test_file)
    # NOTE: repeated entries are deliberate — one per occurrence, in order.
    def test_ics_dbus_dbus_dbus_sysdeps_win_c(self):
        test_file = self.get_test_loc('ics/dbus-dbus/dbus-sysdeps-win.c')
        expected = [
            u'Copyright (c) 2002, 2003 Red Hat, Inc.',
            u'Copyright (c) 2003 CodeFactory AB',
            u'Copyright (c) 2005 Novell, Inc.',
            u'Copyright (c) 2006 Ralf Habacker <ralf.habacker@freenet.de>',
            u'Copyright (c) 2006 Peter Kummel <syntheticpp@gmx.net>',
            u'Copyright (c) 2006 Christian Ehrlicher <ch.ehrlicher@gmx.de>',
            u'Copyright (c) 2002, 2003 Red Hat, Inc.',
            u'Copyright (c) 2003 CodeFactory AB',
            u'Copyright (c) 2005 Novell, Inc.',
            u'Copyright 2004 Eric Poech',
            u'Copyright 2004 Robert Shearman',
        ]
        check_detection(expected, test_file)
    def test_ics_dbus_dbus_dbus_sysdeps_win_h(self):
        test_file = self.get_test_loc('ics/dbus-dbus/dbus-sysdeps-win.h')
        expected = [
            u'Copyright (c) 2002, 2003 Red Hat, Inc.',
            u'Copyright (c) 2003 CodeFactory AB',
            u'Copyright (c) 2005 Novell, Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_dbus_dbus_dbus_sysdeps_wince_glue_c(self):
        test_file = self.get_test_loc('ics/dbus-dbus/dbus-sysdeps-wince-glue.c')
        expected = [
            u'Copyright (c) 2002, 2003 Red Hat, Inc.',
            u'Copyright (c) 2003 CodeFactory AB',
            u'Copyright (c) 2005 Novell, Inc.',
            u'Copyright (c) 2006 Ralf Habacker <ralf.habacker@freenet.de>',
            u'Copyright (c) 2006 Peter Kummel <syntheticpp@gmx.net>',
            u'Copyright (c) 2006 Christian Ehrlicher <ch.ehrlicher@gmx.de>',
        ]
        check_detection(expected, test_file)
    def test_ics_dbus_dbus_dbus_threads_c(self):
        test_file = self.get_test_loc('ics/dbus-dbus/dbus-threads.c')
        expected = [
            u'Copyright (c) 2002, 2003, 2006 Red Hat Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_dbus_dbus_dbus_threads_internal_h(self):
        test_file = self.get_test_loc('ics/dbus-dbus/dbus-threads-internal.h')
        expected = [
            u'Copyright (c) 2002, 2005 Red Hat Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_dbus_dbus_dbus_transport_protected_h(self):
        test_file = self.get_test_loc('ics/dbus-dbus/dbus-transport-protected.h')
        expected = [
            u'Copyright (c) 2002, 2004 Red Hat Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_dbus_dbus_dbus_userdb_util_c(self):
        test_file = self.get_test_loc('ics/dbus-dbus/dbus-userdb-util.c')
        expected = [
            u'Copyright (c) 2003, 2004, 2005 Red Hat, Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_dbus_dbus_sd_daemon_c(self):
        test_file = self.get_test_loc('ics/dbus-dbus/sd-daemon.c')
        expected = [
            u'Copyright 2010 Lennart Poettering',
        ]
        check_detection(expected, test_file)
    def test_ics_dbus_dbus_versioninfo_rc_in(self):
        test_file = self.get_test_loc('ics/dbus-dbus/versioninfo.rc.in')
        expected = [
            u'Copyright (c) 2005 g10 Code GmbH',
            u'Copyright (c) 2009 FreeDesktop.org',
        ]
        check_detection(expected, test_file)
    def test_ics_dbus_doc_introspect_dtd(self):
        test_file = self.get_test_loc('ics/dbus-doc/introspect.dtd')
        expected = [
            u'(c) 2005-02-02 David A. Wheeler',
        ]
        check_detection(expected, test_file)
    def test_ics_dbus_doc_introspect_xsl(self):
        test_file = self.get_test_loc('ics/dbus-doc/introspect.xsl')
        expected = [
            u'Copyright (c) 2005 Lennart Poettering.',
        ]
        check_detection(expected, test_file)
    def test_ics_dbus_test_decode_gcov_c(self):
        test_file = self.get_test_loc('ics/dbus-test/decode-gcov.c')
        expected = [
            u'Copyright (c) 2003 Red Hat Inc.',
            u'Copyright (c) 1990, 1991, 1992, 1993, 1994, 1996, 1997, 1998, 1999, 2000, 2001, 2002 Free Software Foundation, Inc.',
        ]
        check_detection(expected, test_file)
    # --- ICS dbus-tools sources ---
    def test_ics_dbus_tools_dbus_cleanup_sockets_1(self):
        test_file = self.get_test_loc('ics/dbus-tools/dbus-cleanup-sockets.1')
        expected = [
            u'Copyright (c) 2003 Red Hat, Inc.',
        ]
        check_detection(expected, test_file)
    # NOTE: repeated entries are deliberate — one per occurrence, in order.
    def test_ics_dbus_tools_dbus_cleanup_sockets_c(self):
        test_file = self.get_test_loc('ics/dbus-tools/dbus-cleanup-sockets.c')
        expected = [
            u'Copyright (c) 2003 Red Hat, Inc.',
            u'Copyright (c) 2002 Michael Meeks',
            u'Copyright (c) 2003 Red Hat, Inc.',
            u'Copyright (c) 2002 Michael Meeks',
        ]
        check_detection(expected, test_file)
    def test_ics_dbus_tools_dbus_launch_c(self):
        test_file = self.get_test_loc('ics/dbus-tools/dbus-launch.c')
        expected = [
            u'Copyright (c) 2003, 2006 Red Hat, Inc.',
            u'Copyright (c) 2006 Thiago Macieira <thiago@kde.org>',
            u'Copyright (c) 2003 Red Hat, Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_dbus_tools_dbus_launch_win_c(self):
        test_file = self.get_test_loc('ics/dbus-tools/dbus-launch-win.c')
        expected = [
            u'Copyright (c) 2007 Ralf Habacker <ralf.habacker@freenet.de>',
        ]
        check_detection(expected, test_file)
    def test_ics_dbus_tools_dbus_launch_x11_c(self):
        test_file = self.get_test_loc('ics/dbus-tools/dbus-launch-x11.c')
        expected = [
            u'Copyright (c) 2006 Thiago Macieira <thiago@kde.org>',
        ]
        check_detection(expected, test_file)
    def test_ics_dbus_tools_dbus_monitor_c(self):
        test_file = self.get_test_loc('ics/dbus-tools/dbus-monitor.c')
        expected = [
            u'Copyright (c) 2003 Philip Blundell <philb@gnu.org>',
        ]
        check_detection(expected, test_file)
    def test_ics_dbus_tools_dbus_print_message_c(self):
        test_file = self.get_test_loc('ics/dbus-tools/dbus-print-message.c')
        expected = [
            u'Copyright (c) 2003 Philip Blundell <philb@gnu.org>',
            u'Copyright (c) 2003 Red Hat, Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_dbus_tools_dbus_uuidgen_1(self):
        test_file = self.get_test_loc('ics/dbus-tools/dbus-uuidgen.1')
        expected = [
            u'Copyright (c) 2006 Red Hat, Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_dbus_tools_dbus_uuidgen_c(self):
        test_file = self.get_test_loc('ics/dbus-tools/dbus-uuidgen.c')
        expected = [
            u'Copyright (c) 2006 Red Hat, Inc.',
            u'Copyright (c) 2006 Red Hat, Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_dbus_tools_dbus_viewer_c(self):
        test_file = self.get_test_loc('ics/dbus-tools/dbus-viewer.c')
        expected = [
            u'Copyright (c) 2003 Red Hat, Inc.',
            u'Copyright (c) 2003 Red Hat, Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_dbus_tools_strtoll_c(self):
        test_file = self.get_test_loc('ics/dbus-tools/strtoll.c')
        expected = [
            u'Copyright (c) 1992, 1993 The Regents of the University of California.',
        ]
        check_detection(expected, test_file)
    # --- ICS dhcpcd sources (incl. compat and mk subdirectories) ---
    def test_ics_dhcpcd_arp_c(self):
        test_file = self.get_test_loc('ics/dhcpcd/arp.c')
        expected = [
            u'Copyright (c) 2006-2008 Roy Marples <roy@marples.name>',
        ]
        check_detection(expected, test_file)
    def test_ics_dhcpcd_bind_c(self):
        test_file = self.get_test_loc('ics/dhcpcd/bind.c')
        expected = [
            u'Copyright (c) 2006-2010 Roy Marples <roy@marples.name>',
        ]
        check_detection(expected, test_file)
    def test_ics_dhcpcd_bpf_filter_h(self):
        test_file = self.get_test_loc('ics/dhcpcd/bpf-filter.h')
        expected = [
            u'Copyright (c) 2006-2008 Roy Marples <roy@marples.name>',
            u'Copyright (c) 2004,2007 by Internet Systems Consortium, Inc.',
            u'Copyright (c) 1996-2003 by Internet Software Consortium',
        ]
        check_detection(expected, test_file)
    def test_ics_dhcpcd_client_c(self):
        test_file = self.get_test_loc('ics/dhcpcd/client.c')
        expected = [
            u'Copyright 2006-2008 Roy Marples <roy@marples.name>',
        ]
        check_detection(expected, test_file)
    def test_ics_dhcpcd_common_c(self):
        test_file = self.get_test_loc('ics/dhcpcd/common.c')
        expected = [
            u'Copyright (c) 2006-2009 Roy Marples <roy@marples.name>',
        ]
        check_detection(expected, test_file)
    def test_ics_dhcpcd_dhcpcd_8(self):
        test_file = self.get_test_loc('ics/dhcpcd/dhcpcd.8')
        expected = [
            u'Copyright (c) 2006-2010 Roy Marples',
        ]
        check_detection(expected, test_file)
    def test_ics_dhcpcd_dhcpcd_c(self):
        test_file = self.get_test_loc('ics/dhcpcd/dhcpcd.c')
        expected = [
            u'Copyright (c) 2006-2010 Roy Marples <roy@marples.name>',
            u'Copyright (c) 2006-2010 Roy Marples',
        ]
        check_detection(expected, test_file)
    def test_ics_dhcpcd_ifaddrs_c(self):
        test_file = self.get_test_loc('ics/dhcpcd/ifaddrs.c')
        expected = [
            u'Copyright 2011, The Android Open Source Project',
        ]
        check_detection(expected, test_file)
    def test_ics_dhcpcd_if_linux_wireless_c(self):
        test_file = self.get_test_loc('ics/dhcpcd/if-linux-wireless.c')
        expected = [
            u'Copyright (c) 2009-2010 Roy Marples <roy@marples.name>',
        ]
        check_detection(expected, test_file)
    def test_ics_dhcpcd_notice(self):
        test_file = self.get_test_loc('ics/dhcpcd/NOTICE')
        expected = [
            u'Copyright 2006-2008 Roy Marples <roy@marples.name>',
            u'Copyright (c) 2004,2007 by Internet Systems Consortium, Inc.',
            u'Copyright (c) 1996-2003 by Internet Software Consortium',
        ]
        check_detection(expected, test_file)
    def test_ics_dhcpcd_readme(self):
        test_file = self.get_test_loc('ics/dhcpcd/README')
        expected = [
            u'Copyright (c) 2006-2010 Roy Marples <roy@marples.name>',
        ]
        check_detection(expected, test_file)
    def test_ics_dhcpcd_compat_arc4random_c(self):
        test_file = self.get_test_loc('ics/dhcpcd-compat/arc4random.c')
        expected = [
            u'Copyright 1996 David Mazieres <dm@lcs.mit.edu>.',
        ]
        check_detection(expected, test_file)
    def test_ics_dhcpcd_compat_linkaddr_c(self):
        test_file = self.get_test_loc('ics/dhcpcd-compat/linkaddr.c')
        expected = [
            u'Copyright (c) 1990, 1993 The Regents of the University of California.',
        ]
        check_detection(expected, test_file)
    def test_ics_dhcpcd_mk_cc_mk(self):
        test_file = self.get_test_loc('ics/dhcpcd-mk/cc.mk')
        expected = [
            u'Copyright 2008 Roy Marples <roy@marples.name>',
        ]
        check_detection(expected, test_file)
    def test_ics_dhcpcd_mk_dist_mk(self):
        test_file = self.get_test_loc('ics/dhcpcd-mk/dist.mk')
        expected = [
            u'Copyright 2008-2009 Roy Marples <roy@marples.name>',
        ]
        check_detection(expected, test_file)
    # --- ICS dnsmasq sources (incl. contrib and src subdirectories) ---
    def test_ics_dnsmasq_copying_v3(self):
        test_file = self.get_test_loc('ics/dnsmasq/COPYING-v3')
        expected = [
            u'Copyright (c) 2007 Free Software Foundation, Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_dnsmasq_makefile(self):
        test_file = self.get_test_loc('ics/dnsmasq/Makefile')
        expected = [
            u'Copyright (c) 2000-2009 Simon Kelley',
        ]
        check_detection(expected, test_file)
    def test_ics_dnsmasq_contrib_suse_dnsmasq_suse_spec(self):
        test_file = self.get_test_loc('ics/dnsmasq-contrib-Suse/dnsmasq-suse.spec')
        expected = [
            u'Copyright GPL Group',
        ]
        check_detection(expected, test_file)
    def test_ics_dnsmasq_contrib_wrt_dhcp_lease_time_c(self):
        test_file = self.get_test_loc('ics/dnsmasq-contrib-wrt/dhcp_lease_time.c')
        expected = [
            u'Copyright (c) 2007 Simon Kelley',
        ]
        check_detection(expected, test_file)
    def test_ics_dnsmasq_contrib_wrt_dhcp_release_c(self):
        test_file = self.get_test_loc('ics/dnsmasq-contrib-wrt/dhcp_release.c')
        expected = [
            u'Copyright (c) 2006 Simon Kelley',
        ]
        check_detection(expected, test_file)
    def test_ics_dnsmasq_contrib_wrt_lease_update_sh(self):
        test_file = self.get_test_loc('ics/dnsmasq-contrib-wrt/lease_update.sh')
        expected = [
            u'Copyright (c) 2006 Simon Kelley',
        ]
        check_detection(expected, test_file)
    def test_ics_dnsmasq_src_bpf_c(self):
        test_file = self.get_test_loc('ics/dnsmasq-src/bpf.c')
        expected = [
            u'Copyright (c) 2000-2009 Simon Kelley',
        ]
        check_detection(expected, test_file)
    def test_ics_dnsmasq_src_dnsmasq_h(self):
        test_file = self.get_test_loc('ics/dnsmasq-src/dnsmasq.h')
        expected = [
            u'Copyright (c) 2000-2009 Simon Kelley',
            u'Copyright (c) 2000-2009 Simon Kelley',
        ]
        check_detection(expected, test_file)
    def test_ics_dnsmasq_src_nameser_h(self):
        test_file = self.get_test_loc('ics/dnsmasq-src/nameser.h')
        expected = [
            u'Copyright (c) 1983, 1989, 1993 The Regents of the University of California.',
            u'Portions Copyright (c) 1993 by Digital Equipment Corporation.',
            u'Portions Copyright (c) 1995 by International Business Machines, Inc.',
        ]
        check_detection(expected, test_file)
    # --- ICS doclava sources (notices, bundled jQuery assets, Java sources) ---
    def test_ics_doclava_notice(self):
        test_file = self.get_test_loc('ics/doclava/NOTICE')
        expected = [
            u'Copyright (c) 2010 Google Inc.',
            u'Copyright (c) 2008 John Resig (jquery.com)',
            u'Copyright (c) 2009 John Resig, http://jquery.com',
        ]
        check_detection(expected, test_file)
    def test_ics_doclava_res_assets_templates_assets_jquery_history_js(self):
        test_file = self.get_test_loc('ics/doclava-res-assets-templates-assets/jquery-history.js')
        expected = [
            u'Copyright (c) 2008 Tom Rodenberg',
        ]
        check_detection(expected, test_file)
    def test_ics_doclava_res_assets_templates_assets_jquery_resizable_min_js(self):
        test_file = self.get_test_loc('ics/doclava-res-assets-templates-assets/jquery-resizable.min.js')
        expected = [
            u'Copyright (c) 2009 John Resig',
            u'Copyright 2009, The Dojo Foundation',
            u'Copyright (c) 2009 AUTHORS.txt (http://jqueryui.com/about)',
            u'Copyright (c) 2009 AUTHORS.txt (http://jqueryui.com/about)',
        ]
        check_detection(expected, test_file)
    def test_ics_doclava_src_com_google_doclava_annotationinstanceinfo_java(self):
        test_file = self.get_test_loc('ics/doclava-src-com-google-doclava/AnnotationInstanceInfo.java')
        expected = [
            u'Copyright (c) 2010 Google Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_doclava_src_com_google_doclava_doclava2_java(self):
        test_file = self.get_test_loc('ics/doclava-src-com-google-doclava/Doclava2.java')
        expected = [
            u'Copyright (c) 2011 Google Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_doclava_src_com_google_doclava_parser_java_g(self):
        test_file = self.get_test_loc('ics/doclava-src-com-google-doclava-parser/Java.g')
        expected = [
            u'Copyright (c) 2007-2008 Terence Parr',
        ]
        check_detection(expected, test_file)
    # --- ICS dropbear sources (part 1) ---
    def test_ics_dropbear_agentfwd_h(self):
        test_file = self.get_test_loc('ics/dropbear/agentfwd.h')
        expected = [
            u'Copyright (c) 2002,2003 Matt Johnston',
        ]
        check_detection(expected, test_file)
    def test_ics_dropbear_atomicio_c(self):
        test_file = self.get_test_loc('ics/dropbear/atomicio.c')
        expected = [
            u'Copyright (c) 1995,1999 Theo de Raadt.',
        ]
        check_detection(expected, test_file)
    def test_ics_dropbear_circbuffer_c(self):
        test_file = self.get_test_loc('ics/dropbear/circbuffer.c')
        expected = [
            u'Copyright (c) 2002-2004 Matt Johnston',
        ]
        check_detection(expected, test_file)
    def test_ics_dropbear_cli_algo_c(self):
        test_file = self.get_test_loc('ics/dropbear/cli-algo.c')
        expected = [
            u'Copyright (c) 2002,2003 Matt Johnston',
            u'Copyright (c) 2004 by Mihnea Stoenescu',
        ]
        check_detection(expected, test_file)
    def test_ics_dropbear_cli_authinteract_c(self):
        test_file = self.get_test_loc('ics/dropbear/cli-authinteract.c')
        expected = [
            u'Copyright (c) 2005 Matt Johnston',
        ]
        check_detection(expected, test_file)
    def test_ics_dropbear_cli_kex_c(self):
        test_file = self.get_test_loc('ics/dropbear/cli-kex.c')
        expected = [
            u'Copyright (c) 2002-2004 Matt Johnston',
            u'Copyright (c) 2004 by Mihnea Stoenescu',
        ]
        check_detection(expected, test_file)
    def test_ics_dropbear_common_kex_c(self):
        test_file = self.get_test_loc('ics/dropbear/common-kex.c')
        expected = [
            u'Copyright (c) 2002-2004 Matt Johnston',
            u'Portions Copyright (c) 2004 by Mihnea Stoenescu',
        ]
        check_detection(expected, test_file)
    def test_ics_dropbear_compat_c(self):
        test_file = self.get_test_loc('ics/dropbear/compat.c')
        expected = [
            u'Copyright (c) 2002,2003 Matt Johnston',
            u'Copyright (c) 1998 Todd C. Miller <Todd.Miller@courtesan.com>',
            u'Copyright (c) 1990, 1993 The Regents of the University of California.',
        ]
        check_detection(expected, test_file)
    # NOTE: repeated entries are deliberate — one per occurrence, in order.
    def test_ics_dropbear_configure(self):
        test_file = self.get_test_loc('ics/dropbear/configure')
        expected = [
            u'Copyright (c) 2003 Free Software Foundation, Inc.',
            u'Copyright (c) 2003 Free Software Foundation, Inc.',
            u'Copyright (c) 2003 Free Software Foundation, Inc.',
            u'Copyright (c) 2003 Free Software Foundation, Inc.',
            u'Copyright (c) 2003 Free Software Foundation, Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_dropbear_dbutil_c(self):
        test_file = self.get_test_loc('ics/dropbear/dbutil.c')
        expected = [
            u'Copyright (c) 2002,2003 Matt Johnston',
            u'Copyright (c) 1998 Todd C. Miller <Todd.Miller@courtesan.com>',
        ]
        check_detection(expected, test_file)
    def test_ics_dropbear_fake_rfc2553_c(self):
        test_file = self.get_test_loc('ics/dropbear/fake-rfc2553.c')
        expected = [
            u'Copyright (c) 2000-2003 Damien Miller.',
            u'Copyright (c) 1999 WIDE Project.',
        ]
        check_detection(expected, test_file)
    def test_ics_dropbear_install_sh(self):
        test_file = self.get_test_loc('ics/dropbear/install-sh')
        expected = [
            u'Copyright 1991 by the Massachusetts Institute of Technology',
        ]
        check_detection(expected, test_file)
    def test_ics_dropbear_keyimport_c_trail_name(self):
        test_file = self.get_test_loc('ics/dropbear/keyimport.c')
        expected = [
            u'copyright 2003 Matt Johnston',
            u'copyright 1997-2003 Simon Tatham.',
            u'Portions copyright Robert de Bath, Joris van Rantwijk, Delian Delchev, Andreas Schultz, Jeroen Massar, Wez Furlong, Nicolas Barry, Justin Bradford, and CORE SDI S.A.',
        ]
        check_detection(expected, test_file)
    # --- ICS dropbear sources (part 2) and libtommath mtest files ---
    def test_ics_dropbear_license_extra_portion_trail_name(self):
        test_file = self.get_test_loc('ics/dropbear/LICENSE')
        expected = [
            u'(c) 2004 Mihnea Stoenescu',
            u'Copyright (c) 2002-2006 Matt Johnston',
            u'Portions copyright (c) 2004 Mihnea Stoenescu',
            u'Copyright (c) 1995 Tatu Ylonen <ylo@cs.hut.fi>, Espoo, Finland',
            u'(c) Todd C. Miller',
            u'copyright 1997-2003 Simon Tatham.',
            u'Portions copyright Robert de Bath, Joris van Rantwijk, Delian Delchev, Andreas Schultz, Jeroen Massar, Wez Furlong, Nicolas Barry, Justin Bradford, and CORE SDI S.A.',
        ]
        check_detection(expected, test_file)
    def test_ics_dropbear_loginrec_c_extra_portion_extra_portion(self):
        test_file = self.get_test_loc('ics/dropbear/loginrec.c')
        expected = [
            u'Copyright (c) 2000 Andre Lucas.',
            u'Portions copyright (c) 1998 Todd C. Miller',
            u'Portions copyright (c) 1996 Jason Downs',
            u'Portions copyright (c) 1996 Theo de Raadt',
        ]
        check_detection(expected, test_file)
    def test_ics_dropbear_loginrec_h(self):
        test_file = self.get_test_loc('ics/dropbear/loginrec.h')
        expected = [
            u'Copyright (c) 2000 Andre Lucas.',
        ]
        check_detection(expected, test_file)
    def test_ics_dropbear_netbsd_getpass_c(self):
        test_file = self.get_test_loc('ics/dropbear/netbsd_getpass.c')
        expected = [
            u'Copyright (c) 1988, 1993 The Regents of the University of California.',
        ]
        check_detection(expected, test_file)
    def test_ics_dropbear_progressmeter_c(self):
        test_file = self.get_test_loc('ics/dropbear/progressmeter.c')
        expected = [
            u'Copyright (c) 2003 Nils Nordman.',
        ]
        check_detection(expected, test_file)
    def test_ics_dropbear_progressmeter_h(self):
        test_file = self.get_test_loc('ics/dropbear/progressmeter.h')
        expected = [
            u'Copyright (c) 2002 Nils Nordman.',
        ]
        check_detection(expected, test_file)
    def test_ics_dropbear_scp_c(self):
        test_file = self.get_test_loc('ics/dropbear/scp.c')
        expected = [
            u'Copyright (c) 1999 Theo de Raadt.',
            u'Copyright (c) 1999 Aaron Campbell.',
            u'Copyright (c) 1983, 1990, 1992, 1993, 1995 The Regents of the University of California.',
        ]
        check_detection(expected, test_file)
    def test_ics_dropbear_scpmisc_c(self):
        test_file = self.get_test_loc('ics/dropbear/scpmisc.c')
        expected = [
            u'Copyright (c) 2000 Markus Friedl.',
            u'Copyright (c) 1995 Tatu Ylonen <ylo@cs.hut.fi>, Espoo, Finland',
        ]
        check_detection(expected, test_file)
    def test_ics_dropbear_scpmisc_h(self):
        test_file = self.get_test_loc('ics/dropbear/scpmisc.h')
        expected = [
            u'Copyright (c) 1995 Tatu Ylonen <ylo@cs.hut.fi>, Espoo, Finland',
        ]
        check_detection(expected, test_file)
    def test_ics_dropbear_svr_authpam_c(self):
        test_file = self.get_test_loc('ics/dropbear/svr-authpam.c')
        expected = [
            u'Copyright (c) 2004 Martin Carlsson',
            u'Portions (c) 2004 Matt Johnston',
        ]
        check_detection(expected, test_file)
    def test_ics_dropbear_svr_main_c(self):
        test_file = self.get_test_loc('ics/dropbear/svr-main.c')
        expected = [
            u'Copyright (c) 2002-2006 Matt Johnston',
        ]
        check_detection(expected, test_file)
    def test_ics_dropbear_libtommath_mtest_mpi_c(self):
        test_file = self.get_test_loc('ics/dropbear-libtommath-mtest/mpi.c')
        expected = [
            u'Copyright (c) 1998 Michael J. Fromberger',
        ]
        check_detection(expected, test_file)
    def test_ics_dropbear_libtommath_mtest_mpi_h(self):
        test_file = self.get_test_loc('ics/dropbear-libtommath-mtest/mpi.h')
        expected = [
            u'Copyright (c) 1998 Michael J. Fromberger',
        ]
        check_detection(expected, test_file)
def test_ics_easymock_src_org_easymock_abstractmatcher_java_trail_name(self):
    """Detect copyrights in easymock AbstractMatcher.java."""
    check_detection(
        [u'Copyright 2001-2009 OFFIS, Tammo Freese'],
        self.get_test_loc('ics/easymock-src-org-easymock/AbstractMatcher.java'))

def test_ics_easymock_src_org_easymock_capture_java_trail_name(self):
    """Detect copyrights in easymock Capture.java."""
    check_detection(
        [u'Copyright 2003-2009 OFFIS, Henri Tremblay'],
        self.get_test_loc('ics/easymock-src-org-easymock/Capture.java'))

def test_ics_easymock_src_org_easymock_iargumentmatcher_java_trail_name(self):
    """Detect copyrights in easymock IArgumentMatcher.java."""
    check_detection(
        [u'Copyright 2001-2006 OFFIS, Tammo Freese'],
        self.get_test_loc('ics/easymock-src-org-easymock/IArgumentMatcher.java'))
def test_ics_embunit_inc_assertimpl_h(self):
    """Detect copyrights in embunit AssertImpl.h."""
    check_detection(
        [u'Copyright (c) 2003 Embedded Unit Project'],
        self.get_test_loc('ics/embunit-inc/AssertImpl.h'))

def test_ics_embunit_src_stdimpl_c(self):
    """Detect copyrights in embunit stdImpl.c."""
    check_detection(
        [u'Copyright (c) 2003 Embedded Unit Project'],
        self.get_test_loc('ics/embunit-src/stdImpl.c'))
def test_ics_emma_android_mk(self):
    """Detect copyrights in emma Android.mk."""
    check_detection(
        [u'Copyright 2008 The Android Open Source Project'],
        self.get_test_loc('ics/emma/Android.mk'))

def test_ics_emma_build_txt(self):
    """Detect copyrights in emma BUILD.txt."""
    check_detection(
        [u'Copyright (c) 2003-2004 Vlad Roubtsov.'],
        self.get_test_loc('ics/emma/BUILD.txt'))

def test_ics_emma_test_sh(self):
    """Detect copyrights in emma test.sh."""
    check_detection(
        [u'Copyright 2009 Google Inc.'],
        self.get_test_loc('ics/emma/test.sh'))

def test_ics_emma_ant_ant14_com_vladium_emma_antmain_java(self):
    """Detect copyrights in emma ANTMain.java."""
    check_detection(
        [
            u'Copyright (c) 2003 Vladimir Roubtsov.',
            u'Vlad Roubtsov, (c) 2004',
        ],
        self.get_test_loc('ics/emma-ant-ant14-com-vladium-emma/ANTMain.java'))

def test_ics_emma_ant_ant14_com_vladium_emma_emmajavatask_java(self):
    """Detect copyrights in emma emmajavaTask.java."""
    check_detection(
        [
            u'Copyright (c) 2003 Vladimir Roubtsov.',
            u'Vlad Roubtsov, (c) 2003',
        ],
        self.get_test_loc('ics/emma-ant-ant14-com-vladium-emma/emmajavaTask.java'))

def test_ics_emma_core_data_manifest_mf_extra_implementation(self):
    """Detect copyrights in emma core-data MANIFEST.MF."""
    check_detection(
        [u'(c) Vladimir Roubtsov'],
        self.get_test_loc('ics/emma-core-data/MANIFEST.MF'))

def test_ics_emma_core_java12_com_vladium_emma_iappconstants_java_extra_string(self):
    """Detect copyrights in emma IAppConstants.java."""
    check_detection(
        [
            u'Copyright (c) 2003 Vladimir Roubtsov.',
            u'Vlad Roubtsov, (c) 2003',
            u'(c) Vladimir Roubtsov',
        ],
        self.get_test_loc('ics/emma-core-java12-com-vladium-emma/IAppConstants.java'))

def test_ics_emma_core_java12_com_vladium_emma_processor_java(self):
    """Detect copyrights in emma Processor.java."""
    check_detection(
        [
            u'Copyright (c) 2004 Vladimir Roubtsov.',
            u'Vlad Roubtsov, (c) 2004',
        ],
        self.get_test_loc('ics/emma-core-java12-com-vladium-emma/Processor.java'))

def test_ics_emma_core_java12_com_vladium_emma_data_imetadataconstants_java(self):
    """Detect copyrights in emma IMetadataConstants.java."""
    check_detection(
        [u'Copyright (c) 2003 Vladimir Roubtsov.'],
        self.get_test_loc('ics/emma-core-java12-com-vladium-emma-data/IMetadataConstants.java'))

def test_ics_emma_core_java12_com_vladium_emma_report_lcov_reportgenerator_java(self):
    """Detect copyrights in emma lcov ReportGenerator.java."""
    check_detection(
        [
            u'Copyright 2009 Google Inc.',
            u'Copyright (c) 2003 Vladimir Roubtsov.',
            u'Vlad Roubtsov, (c) 2003',
            u'Tim Baverstock, (c) 2009',
        ],
        self.get_test_loc('ics/emma-core-java12-com-vladium-emma-report-lcov/ReportGenerator.java'))

def test_ics_emma_core_java12_com_vladium_jcd_cls_abstractclassdefvisitor_java(self):
    """Detect copyrights in emma AbstractClassDefVisitor.java."""
    check_detection(
        [
            u'Copyright (c) 2003 Vladimir Roubtsov.',
            u'(c) 2001, Vlad Roubtsov',
        ],
        self.get_test_loc('ics/emma-core-java12-com-vladium-jcd-cls/AbstractClassDefVisitor.java'))

def test_ics_emma_core_java12_com_vladium_jcd_cls_constantcollection_java(self):
    """Detect copyrights in emma ConstantCollection.java."""
    check_detection(
        [
            u'Copyright (c) 2003 Vladimir Roubtsov.',
            u'(c) 2001, Vladimir Roubtsov',
        ],
        self.get_test_loc('ics/emma-core-java12-com-vladium-jcd-cls/ConstantCollection.java'))

def test_ics_emma_core_java12_com_vladium_logging_iloglevels_java(self):
    """Detect copyrights in emma ILogLevels.java."""
    check_detection(
        [
            u'Copyright (c) 2003 Vladimir Roubtsov.',
            u'Vlad Roubtsov, (c) 2001',
        ],
        self.get_test_loc('ics/emma-core-java12-com-vladium-logging/ILogLevels.java'))

def test_ics_emma_core_java12_com_vladium_util_softvaluemap_java(self):
    """Detect copyrights in emma SoftValueMap.java."""
    check_detection(
        [
            u'Copyright (c) 2003 Vladimir Roubtsov.',
            u'(c) 2002, Vlad Roubtsov',
        ],
        self.get_test_loc('ics/emma-core-java12-com-vladium-util/SoftValueMap.java'))

def test_ics_emma_core_java12_com_vladium_util_wcmatcher_java(self):
    """Detect copyrights in emma WCMatcher.java."""
    check_detection(
        [
            u'Copyright (c) 2003 Vladimir Roubtsov.',
            u'Vlad Roubtsov, (c) 2002',
        ],
        self.get_test_loc('ics/emma-core-java12-com-vladium-util/WCMatcher.java'))
def test_ics_esd_include_audiofile_h(self):
    """Detect copyrights in esd audiofile.h."""
    check_detection(
        [u'Copyright (c) 1998-2000, Michael Pruett <michael@68k.org>'],
        self.get_test_loc('ics/esd-include/audiofile.h'))

def test_ics_expat_configure(self):
    """Detect copyrights in expat configure (repeated FSF notices)."""
    check_detection(
        [
            u'Copyright (c) 2003 Free Software Foundation, Inc.',
            u'Copyright (c) 2003 Free Software Foundation, Inc.',
            u'Copyright 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005 Free Software Foundation, Inc.',
            u'Gordon Matzigkeit <gord@gnu.ai.mit.edu>, 1996',
            u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001 Free Software Foundation, Inc.',
            u'Copyright (c) 2003 Free Software Foundation, Inc.',
        ],
        self.get_test_loc('ics/expat/configure'))

def test_ics_expat_configure_in(self):
    """Detect copyrights in expat configure.in."""
    check_detection(
        [u'Copyright 2000 Clark Cooper'],
        self.get_test_loc('ics/expat/configure.in'))

def test_ics_expat_notice(self):
    """Detect copyrights in expat NOTICE."""
    check_detection(
        [
            u'Copyright (c) 1998, 1999, 2000 Thai Open Source Software Center Ltd and Clark Cooper',
            u'Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006 Expat maintainers.',
        ],
        self.get_test_loc('ics/expat/NOTICE'))

def test_ics_expat_amiga_expat_lib_c_trail_maint(self):
    """Detect copyrights in expat-amiga expat_lib.c."""
    check_detection(
        [u'Copyright (c) 2001-2007 Expat maintainers.'],
        self.get_test_loc('ics/expat-amiga/expat_lib.c'))

def test_ics_expat_conftools_libtool_m4(self):
    """Detect copyrights in expat-conftools libtool.m4."""
    check_detection(
        [
            u'Copyright 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005 Free Software Foundation, Inc.',
            u'Gordon Matzigkeit <gord@gnu.ai.mit.edu>, 1996',
            u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001 Free Software Foundation, Inc.',
        ],
        self.get_test_loc('ics/expat-conftools/libtool.m4'))

def test_ics_expat_conftools_ltmain_sh(self):
    """Detect copyrights in expat-conftools ltmain.sh."""
    check_detection(
        [
            u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005 Free Software Foundation, Inc.',
            u'Gordon Matzigkeit <gord@gnu.ai.mit.edu>, 1996',
            u'Copyright (c) 2005 Free Software Foundation, Inc.',
        ],
        self.get_test_loc('ics/expat-conftools/ltmain.sh'))

def test_ics_expat_doc_reference_html(self):
    """Detect copyrights in expat-doc reference.html."""
    check_detection(
        [u'Copyright 1999,2000 Clark Cooper <coopercc@netheaven.com>'],
        self.get_test_loc('ics/expat-doc/reference.html'))

def test_ics_expat_examples_outline_c(self):
    """Detect copyrights in expat-examples outline.c."""
    check_detection(
        [u'Copyright 1999, Clark Cooper'],
        self.get_test_loc('ics/expat-examples/outline.c'))

def test_ics_expat_lib_ascii_h(self):
    """Detect copyrights in expat-lib ascii.h."""
    check_detection(
        [u'Copyright (c) 1998, 1999 Thai Open Source Software Center Ltd'],
        self.get_test_loc('ics/expat-lib/ascii.h'))

def test_ics_expat_lib_expat_h(self):
    """Detect copyrights in expat-lib expat.h."""
    check_detection(
        [u'Copyright (c) 1998, 1999, 2000 Thai Open Source Software Center Ltd'],
        self.get_test_loc('ics/expat-lib/expat.h'))

def test_ics_expat_lib_macconfig_h(self):
    """Detect copyrights in expat-lib macconfig.h."""
    check_detection(
        [u'Copyright 2000, Clark Cooper'],
        self.get_test_loc('ics/expat-lib/macconfig.h'))

def test_ics_expat_lib_makefile_mpw_extra_portion(self):
    """Detect copyrights in expat-lib Makefile.MPW."""
    check_detection(
        [
            u'Copyright (c) 2002 Daryle Walker',
            u'Portions Copyright (c) 2002 Thomas Wegner',
        ],
        self.get_test_loc('ics/expat-lib/Makefile.MPW'))

def test_ics_expat_lib_xmlparse_c(self):
    """Detect copyrights in expat-lib xmlparse.c."""
    check_detection(
        [u'Copyright (c) 1998, 1999, 2000 Thai Open Source Software Center Ltd'],
        self.get_test_loc('ics/expat-lib/xmlparse.c'))

def test_ics_expat_lib_xmltok_c(self):
    """Detect copyrights in expat-lib xmltok.c."""
    check_detection(
        [u'Copyright (c) 1998, 1999 Thai Open Source Software Center Ltd'],
        self.get_test_loc('ics/expat-lib/xmltok.c'))

def test_ics_expat_tests_chardata_c(self):
    """Detect copyrights in expat-tests chardata.c."""
    check_detection(
        [u'Copyright (c) 1998-2003 Thai Open Source Software Center Ltd'],
        self.get_test_loc('ics/expat-tests/chardata.c'))

def test_ics_expat_win32_expat_iss(self):
    """Detect copyrights in expat-win32 expat.iss."""
    check_detection(
        [u'Copyright (c) 1998-2006 Thai Open Source Software Center, Clark Cooper, and the Expat maintainers'],
        self.get_test_loc('ics/expat-win32/expat.iss'))
def test_ics_eyes_free_notice(self):
    """Detect copyrights in eyes-free NOTICE."""
    check_detection(
        [u'Copyright 2010 Google Inc.'],
        self.get_test_loc('ics/eyes-free/NOTICE'))

def test_ics_fdlibm_configure(self):
    """Detect copyrights in fdlibm configure (three FSF notices)."""
    check_detection(
        [
            u'Copyright (c) 2003 Free Software Foundation, Inc.',
            u'Copyright (c) 2003 Free Software Foundation, Inc.',
            u'Copyright (c) 2003 Free Software Foundation, Inc.',
        ],
        self.get_test_loc('ics/fdlibm/configure'))

def test_ics_fdlibm_e_acos_c(self):
    """Detect copyrights in fdlibm e_acos.c."""
    check_detection(
        [u'Copyright (c) 1993 by Sun Microsystems, Inc.'],
        self.get_test_loc('ics/fdlibm/e_acos.c'))

def test_ics_fdlibm_e_exp_c(self):
    """Detect copyrights in fdlibm e_exp.c."""
    check_detection(
        [u'Copyright (c) 2004 by Sun Microsystems, Inc.'],
        self.get_test_loc('ics/fdlibm/e_exp.c'))

def test_ics_fdlibm_k_tan_c(self):
    """Detect copyrights in fdlibm k_tan.c."""
    check_detection(
        [u'Copyright 2004 Sun Microsystems, Inc.'],
        self.get_test_loc('ics/fdlibm/k_tan.c'))

def test_ics_fdlibm_makefile_in(self):
    """Detect copyrights in fdlibm makefile.in."""
    check_detection(
        [u'Copyright (c) 1993 by Sun Microsystems, Inc.'],
        self.get_test_loc('ics/fdlibm/makefile.in'))

def test_ics_fdlibm_notice(self):
    """Detect copyrights in fdlibm NOTICE."""
    check_detection(
        [u'Copyright (c) 1993 by Sun Microsystems, Inc.'],
        self.get_test_loc('ics/fdlibm/NOTICE'))
def test_ics_flac_notice(self):
    """Detect copyrights in flac NOTICE."""
    check_detection(
        [u'Copyright (c) 2000,2001,2002,2003,2004,2005,2006,2007 Josh Coalson'],
        self.get_test_loc('ics/flac/NOTICE'))

def test_ics_flac_include_flac_all_h(self):
    """Detect copyrights in flac include/FLAC/all.h."""
    check_detection(
        [u'Copyright (c) 2000,2001,2002,2003,2004,2005,2006,2007 Josh Coalson'],
        self.get_test_loc('ics/flac-include-FLAC/all.h'))

def test_ics_flac_include_flac_assert_h(self):
    """Detect copyrights in flac include/FLAC/assert.h."""
    check_detection(
        [u'Copyright (c) 2001,2002,2003,2004,2005,2006,2007 Josh Coalson'],
        self.get_test_loc('ics/flac-include-FLAC/assert.h'))

def test_ics_flac_include_flac_callback_h(self):
    """Detect copyrights in flac include/FLAC/callback.h."""
    check_detection(
        [u'Copyright (c) 2004,2005,2006,2007 Josh Coalson'],
        self.get_test_loc('ics/flac-include-FLAC/callback.h'))

def test_ics_flac_include_share_alloc_h(self):
    """Detect copyrights in flac include/share/alloc.h."""
    check_detection(
        [u'Copyright (c) 2011 The Android Open Source Project'],
        self.get_test_loc('ics/flac-include-share/alloc.h'))

def test_ics_flac_libflac_makefile_am(self):
    """Detect copyrights in libFLAC Makefile.am."""
    check_detection(
        [u'Copyright (c) 2001,2002,2003,2004,2005,2006,2007 Josh Coalson'],
        self.get_test_loc('ics/flac-libFLAC/Makefile.am'))

def test_ics_flac_libflac_makefile_in(self):
    """Detect copyrights in libFLAC Makefile.in."""
    check_detection(
        [
            u'Copyright 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.',
            u'Copyright (c) 2001,2002,2003,2004,2005,2006,2007 Josh Coalson',
        ],
        self.get_test_loc('ics/flac-libFLAC/Makefile.in'))

def test_ics_flac_libflac_ogg_decoder_aspect_c(self):
    """Detect copyrights in libFLAC ogg_decoder_aspect.c."""
    check_detection(
        [u'Copyright (c) 2002,2003,2004,2005,2006,2007 Josh Coalson'],
        self.get_test_loc('ics/flac-libFLAC/ogg_decoder_aspect.c'))

def test_ics_flac_libflac_window_c(self):
    """Detect copyrights in libFLAC window.c."""
    check_detection(
        [u'Copyright (c) 2006,2007 Josh Coalson'],
        self.get_test_loc('ics/flac-libFLAC/window.c'))

def test_ics_flac_libflac_ia32_bitreader_asm_nasm(self):
    """Detect copyrights in libFLAC ia32 bitreader_asm.nasm."""
    check_detection(
        [u'Copyright (c) 2001,2002,2003,2004,2005,2006,2007 Josh Coalson'],
        self.get_test_loc('ics/flac-libFLAC-ia32/bitreader_asm.nasm'))

def test_ics_flac_libflac_ppc_makefile_am(self):
    """Detect copyrights in libFLAC ppc Makefile.am."""
    check_detection(
        [u'Copyright (c) 2004,2005,2006,2007 Josh Coalson'],
        self.get_test_loc('ics/flac-libFLAC-ppc/Makefile.am'))

def test_ics_flac_libflac_ppc_makefile_in(self):
    """Detect copyrights in libFLAC ppc Makefile.in."""
    check_detection(
        [
            u'Copyright 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.',
            u'Copyright (c) 2004,2005,2006,2007 Josh Coalson',
        ],
        self.get_test_loc('ics/flac-libFLAC-ppc/Makefile.in'))
def test_ics_freetype_notice(self):
    """Detect copyrights in freetype NOTICE."""
    check_detection(
        [
            u'Copyright 1996-2002, 2006 by David Turner, Robert Wilhelm, and Werner Lemberg',
            u'copyright (c) The FreeType Project (www.freetype.org).',
            u'copyright (c) 1996-2000 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ],
        self.get_test_loc('ics/freetype/NOTICE'))

def test_ics_freetype_builds_ft2unix_h(self):
    """Detect copyrights in freetype builds/ft2unix.h."""
    check_detection(
        [u'Copyright 1996-2001, 2003, 2006 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-builds/ft2unix.h'))

def test_ics_freetype_include_ft2build_h(self):
    """Detect copyrights in freetype include/ft2build.h."""
    check_detection(
        [u'Copyright 1996-2001, 2006 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-include/ft2build.h'))

def test_ics_freetype_include_freetype_freetype_h(self):
    """Detect copyrights in freetype freetype.h."""
    check_detection(
        [u'Copyright 1996-2011 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-include-freetype/freetype.h'))

def test_ics_freetype_include_freetype_ftadvanc_h(self):
    """Detect copyrights in freetype ftadvanc.h."""
    check_detection(
        [u'Copyright 2008 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-include-freetype/ftadvanc.h'))

def test_ics_freetype_include_freetype_ftbbox_h(self):
    """Detect copyrights in freetype ftbbox.h."""
    check_detection(
        [u'Copyright 1996-2001, 2003, 2007, 2011 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-include-freetype/ftbbox.h'))

def test_ics_freetype_include_freetype_ftbdf_h(self):
    """Detect copyrights in freetype ftbdf.h."""
    check_detection(
        [u'Copyright 2002, 2003, 2004, 2006, 2009 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-include-freetype/ftbdf.h'))

def test_ics_freetype_include_freetype_ftbitmap_h(self):
    """Detect copyrights in freetype ftbitmap.h."""
    check_detection(
        [u'Copyright 2004, 2005, 2006, 2008 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-include-freetype/ftbitmap.h'))

def test_ics_freetype_include_freetype_ftcache_h(self):
    """Detect copyrights in freetype ftcache.h."""
    check_detection(
        [u'Copyright 1996-2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-include-freetype/ftcache.h'))

def test_ics_freetype_include_freetype_ftcid_h(self):
    """Detect copyrights in freetype ftcid.h."""
    check_detection(
        [u'Copyright 2007, 2009 by Dereg Clegg, Michael Toftdal.'],
        self.get_test_loc('ics/freetype-include-freetype/ftcid.h'))

def test_ics_freetype_include_freetype_fterrdef_h(self):
    """Detect copyrights in freetype fterrdef.h."""
    check_detection(
        [u'Copyright 2002, 2004, 2006, 2007, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-include-freetype/fterrdef.h'))

def test_ics_freetype_include_freetype_fterrors_h(self):
    """Detect copyrights in freetype fterrors.h."""
    check_detection(
        [u'Copyright 1996-2001, 2002, 2004, 2007 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-include-freetype/fterrors.h'))

def test_ics_freetype_include_freetype_ftgasp_h(self):
    """Detect copyrights in freetype ftgasp.h."""
    check_detection(
        [u'Copyright 2007, 2008, 2011 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-include-freetype/ftgasp.h'))

def test_ics_freetype_include_freetype_ftglyph_h(self):
    """Detect copyrights in freetype ftglyph.h."""
    check_detection(
        [u'Copyright 1996-2003, 2006, 2008, 2009, 2011 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-include-freetype/ftglyph.h'))

def test_ics_freetype_include_freetype_ftgxval_h_trail_name(self):
    """Detect copyrights in freetype ftgxval.h."""
    check_detection(
        [u'Copyright 2004, 2005, 2006 by Masatake YAMATO, Redhat K.K, David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-include-freetype/ftgxval.h'))

def test_ics_freetype_include_freetype_ftgzip_h(self):
    """Detect copyrights in freetype ftgzip.h."""
    check_detection(
        [u'Copyright 2002, 2003, 2004, 2006 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-include-freetype/ftgzip.h'))

def test_ics_freetype_include_freetype_ftimage_h(self):
    """Detect copyrights in freetype ftimage.h."""
    check_detection(
        [u'Copyright 1996-2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-include-freetype/ftimage.h'))

def test_ics_freetype_include_freetype_ftincrem_h(self):
    """Detect copyrights in freetype ftincrem.h."""
    check_detection(
        [u'Copyright 2002, 2003, 2006, 2007, 2008, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-include-freetype/ftincrem.h'))

def test_ics_freetype_include_freetype_ftlcdfil_h(self):
    """Detect copyrights in freetype ftlcdfil.h."""
    check_detection(
        [u'Copyright 2006, 2007, 2008, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-include-freetype/ftlcdfil.h'))

def test_ics_freetype_include_freetype_ftlist_h(self):
    """Detect copyrights in freetype ftlist.h."""
    check_detection(
        [u'Copyright 1996-2001, 2003, 2007, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-include-freetype/ftlist.h'))

def test_ics_freetype_include_freetype_ftlzw_h(self):
    """Detect copyrights in freetype ftlzw.h."""
    check_detection(
        [u'Copyright 2004, 2006 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-include-freetype/ftlzw.h'))

def test_ics_freetype_include_freetype_ftmac_h(self):
    """Detect copyrights in freetype ftmac.h."""
    check_detection(
        [u'Copyright 1996-2001, 2004, 2006, 2007 by Just van Rossum, David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-include-freetype/ftmac.h'))

def test_ics_freetype_include_freetype_ftmm_h(self):
    """Detect copyrights in freetype ftmm.h."""
    check_detection(
        [u'Copyright 1996-2001, 2003, 2004, 2006, 2009 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-include-freetype/ftmm.h'))
def test_ics_freetype_include_freetype_ftmodapi_h(self):
    """Detect copyrights in freetype ftmodapi.h."""
    check_detection(
        [u'Copyright 1996-2001, 2002, 2003, 2006, 2008, 2009, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-include-freetype/ftmodapi.h'))

def test_ics_freetype_include_freetype_ftmoderr_h(self):
    """Detect copyrights in freetype ftmoderr.h."""
    check_detection(
        [u'Copyright 2001, 2002, 2003, 2004, 2005, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-include-freetype/ftmoderr.h'))

def test_ics_freetype_include_freetype_ftotval_h(self):
    """Detect copyrights in freetype ftotval.h."""
    check_detection(
        [u'Copyright 2004, 2005, 2006, 2007 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-include-freetype/ftotval.h'))

def test_ics_freetype_include_freetype_ftoutln_h(self):
    """Detect copyrights in freetype ftoutln.h."""
    check_detection(
        [u'Copyright 1996-2003, 2005-2011 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-include-freetype/ftoutln.h'))

def test_ics_freetype_include_freetype_ftpfr_h(self):
    """Detect copyrights in freetype ftpfr.h."""
    check_detection(
        [u'Copyright 2002, 2003, 2004, 2006, 2008, 2009 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-include-freetype/ftpfr.h'))

def test_ics_freetype_include_freetype_ftrender_h(self):
    """Detect copyrights in freetype ftrender.h."""
    check_detection(
        [u'Copyright 1996-2001, 2005, 2006, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-include-freetype/ftrender.h'))

def test_ics_freetype_include_freetype_ftsnames_h(self):
    """Detect copyrights in freetype ftsnames.h."""
    check_detection(
        [u'Copyright 1996-2001, 2002, 2003, 2006, 2009, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-include-freetype/ftsnames.h'))

def test_ics_freetype_include_freetype_ftstroke_h(self):
    """Detect copyrights in freetype ftstroke.h."""
    check_detection(
        [u'Copyright 2002-2006, 2008, 2009, 2011 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-include-freetype/ftstroke.h'))

def test_ics_freetype_include_freetype_ftsynth_h(self):
    """Detect copyrights in freetype ftsynth.h."""
    check_detection(
        [u'Copyright 2000-2001, 2003, 2006, 2008 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-include-freetype/ftsynth.h'))

def test_ics_freetype_include_freetype_ftsystem_h(self):
    """Detect copyrights in freetype ftsystem.h."""
    check_detection(
        [u'Copyright 1996-2001, 2002, 2005, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-include-freetype/ftsystem.h'))

def test_ics_freetype_include_freetype_fttrigon_h(self):
    """Detect copyrights in freetype fttrigon.h."""
    check_detection(
        [u'Copyright 2001, 2003, 2005, 2007 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-include-freetype/fttrigon.h'))

def test_ics_freetype_include_freetype_fttypes_h(self):
    """Detect copyrights in freetype fttypes.h."""
    check_detection(
        [u'Copyright 1996-2001, 2002, 2004, 2006, 2007, 2008 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-include-freetype/fttypes.h'))

def test_ics_freetype_include_freetype_ftwinfnt_h(self):
    """Detect copyrights in freetype ftwinfnt.h."""
    check_detection(
        [u'Copyright 2003, 2004, 2008 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-include-freetype/ftwinfnt.h'))

def test_ics_freetype_include_freetype_ftxf86_h(self):
    """Detect copyrights in freetype ftxf86.h."""
    check_detection(
        [u'Copyright 2002, 2003, 2004, 2006, 2007 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-include-freetype/ftxf86.h'))

def test_ics_freetype_include_freetype_t1tables_h(self):
    """Detect copyrights in freetype t1tables.h."""
    check_detection(
        [u'Copyright 1996-2001, 2002, 2003, 2004, 2006, 2008, 2009 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-include-freetype/t1tables.h'))

def test_ics_freetype_include_freetype_ttnameid_h(self):
    """Detect copyrights in freetype ttnameid.h."""
    check_detection(
        [u'Copyright 1996-2002, 2003, 2004, 2006, 2007, 2008 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-include-freetype/ttnameid.h'))

def test_ics_freetype_include_freetype_tttables_h(self):
    """Detect copyrights in freetype tttables.h."""
    check_detection(
        [u'Copyright 1996-2005, 2008-2011 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-include-freetype/tttables.h'))

def test_ics_freetype_include_freetype_tttags_h(self):
    """Detect copyrights in freetype tttags.h."""
    check_detection(
        [u'Copyright 1996-2001, 2004, 2005, 2007, 2008 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-include-freetype/tttags.h'))

def test_ics_freetype_include_freetype_ttunpat_h(self):
    """Detect copyrights in freetype ttunpat.h."""
    check_detection(
        [u'Copyright 2003, 2006 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-include-freetype/ttunpat.h'))

def test_ics_freetype_include_freetype_config_ftconfig_h(self):
    """Detect copyrights in freetype config/ftconfig.h."""
    check_detection(
        [u'Copyright 1996-2004, 2006-2008, 2010-2011 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-include-freetype-config/ftconfig.h'))

def test_ics_freetype_include_freetype_config_ftstdlib_h(self):
    """Detect copyrights in freetype config/ftstdlib.h."""
    check_detection(
        [u'Copyright 2002, 2003, 2004, 2005, 2006, 2007, 2009 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-include-freetype-config/ftstdlib.h'))
def test_ics_freetype_include_freetype_internal_autohint_h(self):
test_file = self.get_test_loc('ics/freetype-include-freetype-internal/autohint.h')
expected = [
u'Copyright 1996-2001, 2002, 2007 by David Turner, Robert Wilhelm, and Werner Lemberg.',
]
check_detection(expected, test_file)
def test_ics_freetype_include_freetype_internal_ftcalc_h(self):
test_file = self.get_test_loc('ics/freetype-include-freetype-internal/ftcalc.h')
expected = [
u'Copyright 1996-2001, 2002, 2003, 2004, 2005, 2006, 2008, 2009 by David Turner, Robert Wilhelm, and Werner Lemberg.',
]
check_detection(expected, test_file)
def test_ics_freetype_include_freetype_internal_ftdebug_h(self):
test_file = self.get_test_loc('ics/freetype-include-freetype-internal/ftdebug.h')
expected = [
u'Copyright 1996-2001, 2002, 2004, 2006, 2007, 2008, 2009 by David Turner, Robert Wilhelm, and Werner Lemberg.',
]
check_detection(expected, test_file)
def test_ics_freetype_include_freetype_internal_ftdriver_h(self):
test_file = self.get_test_loc('ics/freetype-include-freetype-internal/ftdriver.h')
expected = [
u'Copyright 1996-2001, 2002, 2003, 2006, 2008 by David Turner, Robert Wilhelm, and Werner Lemberg.',
]
check_detection(expected, test_file)
def test_ics_freetype_include_freetype_internal_ftgloadr_h(self):
test_file = self.get_test_loc('ics/freetype-include-freetype-internal/ftgloadr.h')
expected = [
u'Copyright 2002, 2003, 2005, 2006 by David Turner, Robert Wilhelm, and Werner Lemberg',
]
check_detection(expected, test_file)
def test_ics_freetype_include_freetype_internal_ftmemory_h(self):
test_file = self.get_test_loc('ics/freetype-include-freetype-internal/ftmemory.h')
expected = [
u'Copyright 1996-2001, 2002, 2004, 2005, 2006, 2007, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg',
]
check_detection(expected, test_file)
def test_ics_freetype_include_freetype_internal_ftobjs_h(self):
test_file = self.get_test_loc('ics/freetype-include-freetype-internal/ftobjs.h')
expected = [
u'Copyright 1996-2001, 2002, 2003, 2004, 2005, 2006, 2008, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg.',
]
check_detection(expected, test_file)
def test_ics_freetype_include_freetype_internal_ftpic_h(self):
test_file = self.get_test_loc('ics/freetype-include-freetype-internal/ftpic.h')
expected = [
u'Copyright 2009 by Oran Agra and Mickey Gabel.',
]
check_detection(expected, test_file)
def test_ics_freetype_include_freetype_internal_ftrfork_h_trail_name(self):
test_file = self.get_test_loc('ics/freetype-include-freetype-internal/ftrfork.h')
expected = [
u'Copyright 2004, 2006, 2007 by Masatake YAMATO and Redhat K.K.',
]
check_detection(expected, test_file)
def test_ics_freetype_include_freetype_internal_ftserv_h(self):
test_file = self.get_test_loc('ics/freetype-include-freetype-internal/ftserv.h')
expected = [
u'Copyright 2003, 2004, 2005, 2006, 2007 by David Turner, Robert Wilhelm, and Werner Lemberg.',
]
check_detection(expected, test_file)
def test_ics_freetype_include_freetype_internal_ftstream_h(self):
test_file = self.get_test_loc('ics/freetype-include-freetype-internal/ftstream.h')
expected = [
u'Copyright 1996-2002, 2004-2006, 2011 by David Turner, Robert Wilhelm, and Werner Lemberg.',
]
check_detection(expected, test_file)
def test_ics_freetype_include_freetype_internal_fttrace_h(self):
test_file = self.get_test_loc('ics/freetype-include-freetype-internal/fttrace.h')
expected = [
u'Copyright 2002, 2004-2007, 2009, 2011 by David Turner, Robert Wilhelm, and Werner Lemberg.',
]
check_detection(expected, test_file)
def test_ics_freetype_include_freetype_internal_ftvalid_h(self):
test_file = self.get_test_loc('ics/freetype-include-freetype-internal/ftvalid.h')
expected = [
u'Copyright 2004 by David Turner, Robert Wilhelm, and Werner Lemberg.',
]
check_detection(expected, test_file)
def test_ics_freetype_include_freetype_internal_internal_h(self):
test_file = self.get_test_loc('ics/freetype-include-freetype-internal/internal.h')
expected = [
u'Copyright 1996-2001, 2002, 2003, 2004 by David Turner, Robert Wilhelm, and Werner Lemberg.',
]
check_detection(expected, test_file)
def test_ics_freetype_include_freetype_internal_pcftypes_h(self):
test_file = self.get_test_loc('ics/freetype-include-freetype-internal/pcftypes.h')
expected = [
u'Copyright (c) 2000, 2001, 2002 by Francesco Zappa Nardelli',
]
check_detection(expected, test_file)
def test_ics_freetype_include_freetype_internal_pshints_h(self):
test_file = self.get_test_loc('ics/freetype-include-freetype-internal/pshints.h')
expected = [
u'Copyright 2001, 2002, 2003, 2005, 2006, 2007, 2009 by David Turner, Robert Wilhelm, and Werner Lemberg.',
]
check_detection(expected, test_file)
def test_ics_freetype_include_freetype_internal_sfnt_h(self):
test_file = self.get_test_loc('ics/freetype-include-freetype-internal/sfnt.h')
expected = [
u'Copyright 1996-2001, 2002, 2003, 2004, 2005, 2006 by David Turner, Robert Wilhelm, and Werner Lemberg.',
]
check_detection(expected, test_file)
def test_ics_freetype_include_freetype_internal_tttypes_h(self):
test_file = self.get_test_loc('ics/freetype-include-freetype-internal/tttypes.h')
expected = [
u'Copyright 1996-2001, 2002, 2004, 2005, 2006, 2007, 2008 by David Turner, Robert Wilhelm, and Werner Lemberg.',
]
check_detection(expected, test_file)
# Copyright detection on FreeType internal service headers.
def test_ics_freetype_include_freetype_internal_services_svbdf_h(self):
    check_detection(
        [u'Copyright 2003 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-include-freetype-internal-services/svbdf.h'),
    )

def test_ics_freetype_include_freetype_internal_services_svcid_h(self):
    check_detection(
        [u'Copyright 2007, 2009 by Derek Clegg, Michael Toftdal.'],
        self.get_test_loc('ics/freetype-include-freetype-internal-services/svcid.h'),
    )

def test_ics_freetype_include_freetype_internal_services_svgxval_h(self):
    check_detection(
        [u'Copyright 2004, 2005 by Masatake YAMATO, Red Hat K.K., David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-include-freetype-internal-services/svgxval.h'),
    )

def test_ics_freetype_include_freetype_internal_services_svkern_h(self):
    check_detection(
        [u'Copyright 2006 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-include-freetype-internal-services/svkern.h'),
    )

def test_ics_freetype_include_freetype_internal_services_svmm_h(self):
    check_detection(
        [u'Copyright 2003, 2004 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-include-freetype-internal-services/svmm.h'),
    )

def test_ics_freetype_include_freetype_internal_services_svpostnm_h(self):
    check_detection(
        [u'Copyright 2003, 2007 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-include-freetype-internal-services/svpostnm.h'),
    )

def test_ics_freetype_include_freetype_internal_services_svpsinfo_h(self):
    check_detection(
        [u'Copyright 2003, 2004, 2009 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-include-freetype-internal-services/svpsinfo.h'),
    )

def test_ics_freetype_include_freetype_internal_services_svttcmap_h_trail_name(self):
    check_detection(
        [
            u'Copyright 2003 by Masatake YAMATO, Redhat K.K.',
            u'Copyright 2003, 2008 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ],
        self.get_test_loc('ics/freetype-include-freetype-internal-services/svttcmap.h'),
    )

def test_ics_freetype_include_freetype_internal_services_svttglyf_h(self):
    check_detection(
        [u'Copyright 2007 by David Turner.'],
        self.get_test_loc('ics/freetype-include-freetype-internal-services/svttglyf.h'),
    )
# Copyright detection on FreeType src/autofit sources.
def test_ics_freetype_src_autofit_afangles_c(self):
    check_detection(
        [u'Copyright 2003-2006, 2011 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-autofit/afangles.c'),
    )

def test_ics_freetype_src_autofit_afcjk_c(self):
    check_detection(
        [u'Copyright 2006-2011 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-autofit/afcjk.c'),
    )

def test_ics_freetype_src_autofit_afcjk_h(self):
    check_detection(
        [u'Copyright 2006, 2007, 2011 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-autofit/afcjk.h'),
    )

def test_ics_freetype_src_autofit_afdummy_c(self):
    check_detection(
        [u'Copyright 2003-2005, 2011 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-autofit/afdummy.c'),
    )

def test_ics_freetype_src_autofit_aferrors_h(self):
    check_detection(
        [u'Copyright 2005 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-autofit/aferrors.h'),
    )

def test_ics_freetype_src_autofit_afglobal_c(self):
    check_detection(
        [u'Copyright 2003-2011 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-autofit/afglobal.c'),
    )

def test_ics_freetype_src_autofit_afglobal_h(self):
    check_detection(
        [u'Copyright 2003-2005, 2007, 2009, 2011 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-autofit/afglobal.h'),
    )

def test_ics_freetype_src_autofit_afhints_c(self):
    check_detection(
        [u'Copyright 2003-2007, 2009-2011 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-autofit/afhints.c'),
    )

def test_ics_freetype_src_autofit_afhints_h(self):
    check_detection(
        [u'Copyright 2003-2008, 2010-2011 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-autofit/afhints.h'),
    )

def test_ics_freetype_src_autofit_afindic_c(self):
    check_detection(
        [u'Copyright 2007, 2011 by Rahul Bhalerao <rahul.bhalerao@redhat.com>, <b.rahul.pm@gmail.com>.'],
        self.get_test_loc('ics/freetype-src-autofit/afindic.c'),
    )

def test_ics_freetype_src_autofit_afindic_h(self):
    check_detection(
        [u'Copyright 2007 by Rahul Bhalerao <rahul.bhalerao@redhat.com>, <b.rahul.pm@gmail.com>.'],
        self.get_test_loc('ics/freetype-src-autofit/afindic.h'),
    )

def test_ics_freetype_src_autofit_aflatin_h(self):
    check_detection(
        [u'Copyright 2003-2007, 2009, 2011 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-autofit/aflatin.h'),
    )

def test_ics_freetype_src_autofit_afloader_c(self):
    check_detection(
        [u'Copyright 2003-2009, 2011 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-autofit/afloader.c'),
    )

def test_ics_freetype_src_autofit_afmodule_c(self):
    check_detection(
        [u'Copyright 2003-2006, 2009, 2011 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-autofit/afmodule.c'),
    )

def test_ics_freetype_src_autofit_afmodule_h(self):
    check_detection(
        [u'Copyright 2003, 2004, 2005 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-autofit/afmodule.h'),
    )

def test_ics_freetype_src_autofit_afpic_c(self):
    check_detection(
        [u'Copyright 2009, 2010, 2011 by Oran Agra and Mickey Gabel.'],
        self.get_test_loc('ics/freetype-src-autofit/afpic.c'),
    )

def test_ics_freetype_src_autofit_afpic_h(self):
    check_detection(
        [u'Copyright 2009, 2011 by Oran Agra and Mickey Gabel.'],
        self.get_test_loc('ics/freetype-src-autofit/afpic.h'),
    )

def test_ics_freetype_src_autofit_afwarp_h(self):
    check_detection(
        [u'Copyright 2006, 2007 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-autofit/afwarp.h'),
    )

def test_ics_freetype_src_autofit_autofit_c(self):
    check_detection(
        [u'Copyright 2003-2007, 2011 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-autofit/autofit.c'),
    )
# Copyright detection on FreeType src/base sources.
def test_ics_freetype_src_base_ftadvanc_c(self):
    check_detection(
        [u'Copyright 2008, 2009 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-base/ftadvanc.c'),
    )

def test_ics_freetype_src_base_ftapi_c(self):
    check_detection(
        [u'Copyright 2002 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-base/ftapi.c'),
    )

def test_ics_freetype_src_base_ftbase_c(self):
    check_detection(
        [u'Copyright 1996-2001, 2002, 2003, 2004, 2006, 2007, 2008, 2009 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-base/ftbase.c'),
    )

def test_ics_freetype_src_base_ftbase_h(self):
    check_detection(
        [u'Copyright 2008, 2010 by David Turner, Robert Wilhelm, Werner Lemberg'],
        self.get_test_loc('ics/freetype-src-base/ftbase.h'),
    )

# Known failure: the trailing author name "suzuki toshiya" is not yet detected.
@expectedFailure
def test_ics_freetype_src_base_ftbase_h_trail_name(self):
    check_detection(
        [u'Copyright 2008, 2010 by David Turner, Robert Wilhelm, Werner Lemberg and suzuki toshiya.'],
        self.get_test_loc('ics/freetype-src-base/ftbase.h'),
    )

def test_ics_freetype_src_base_ftbbox_c(self):
    check_detection(
        [u'Copyright 1996-2001, 2002, 2004, 2006, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-base/ftbbox.c'),
    )

def test_ics_freetype_src_base_ftbitmap_c(self):
    check_detection(
        [u'Copyright 2004, 2005, 2006, 2007, 2008, 2009 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-base/ftbitmap.c'),
    )

def test_ics_freetype_src_base_ftcalc_c(self):
    check_detection(
        [u'Copyright 1996-2001, 2002, 2003, 2004, 2005, 2006, 2008 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-base/ftcalc.c'),
    )

def test_ics_freetype_src_base_ftdbgmem_c(self):
    check_detection(
        [u'Copyright 2001, 2002, 2003, 2004, 2005, 2006, 2009 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-base/ftdbgmem.c'),
    )

def test_ics_freetype_src_base_ftdebug_c(self):
    check_detection(
        [u'Copyright 1996-2001, 2002, 2004, 2008 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-base/ftdebug.c'),
    )

def test_ics_freetype_src_base_ftgloadr_c(self):
    check_detection(
        [u'Copyright 2002, 2003, 2004, 2005, 2006, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg'],
        self.get_test_loc('ics/freetype-src-base/ftgloadr.c'),
    )

def test_ics_freetype_src_base_ftglyph_c(self):
    check_detection(
        [u'Copyright 1996-2001, 2002, 2003, 2004, 2005, 2007, 2008, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-base/ftglyph.c'),
    )

def test_ics_freetype_src_base_ftinit_c(self):
    check_detection(
        [u'Copyright 1996-2001, 2002, 2005, 2007, 2009 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-base/ftinit.c'),
    )

def test_ics_freetype_src_base_ftlcdfil_c(self):
    check_detection(
        [u'Copyright 2006, 2008, 2009, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-base/ftlcdfil.c'),
    )

def test_ics_freetype_src_base_ftmm_c(self):
    check_detection(
        [u'Copyright 1996-2001, 2003, 2004, 2009 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-base/ftmm.c'),
    )

def test_ics_freetype_src_base_ftobjs_c(self):
    check_detection(
        [u'Copyright 1996-2011 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-base/ftobjs.c'),
    )

def test_ics_freetype_src_base_ftpatent_c(self):
    check_detection(
        [u'Copyright 2007, 2008, 2010 by David Turner.'],
        self.get_test_loc('ics/freetype-src-base/ftpatent.c'),
    )

def test_ics_freetype_src_base_ftrfork_c_trail_name(self):
    check_detection(
        [u'Copyright 2004, 2005, 2006, 2007, 2008, 2009, 2010 by Masatake YAMATO and Redhat K.K.'],
        self.get_test_loc('ics/freetype-src-base/ftrfork.c'),
    )

def test_ics_freetype_src_base_ftsnames_c(self):
    check_detection(
        [u'Copyright 1996-2001, 2002, 2009 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-base/ftsnames.c'),
    )

def test_ics_freetype_src_base_ftstream_c(self):
    check_detection(
        [u'Copyright 2000-2002, 2004-2006, 2008-2011 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-base/ftstream.c'),
    )

def test_ics_freetype_src_base_ftstroke_c(self):
    check_detection(
        [u'Copyright 2002-2006, 2008-2011 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-base/ftstroke.c'),
    )

def test_ics_freetype_src_base_ftsynth_c(self):
    check_detection(
        [u'Copyright 2000-2001, 2002, 2003, 2004, 2005, 2006, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-base/ftsynth.c'),
    )

def test_ics_freetype_src_base_ftsystem_c(self):
    check_detection(
        [u'Copyright 1996-2002, 2006, 2008-2011 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-base/ftsystem.c'),
    )

def test_ics_freetype_src_base_fttrigon_c(self):
    check_detection(
        [u'Copyright 2001, 2002, 2003, 2004, 2005 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-base/fttrigon.c'),
    )

def test_ics_freetype_src_base_ftutil_c(self):
    check_detection(
        [u'Copyright 2002, 2004, 2005, 2006, 2007 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-base/ftutil.c'),
    )

def test_ics_freetype_src_base_ftxf86_c(self):
    check_detection(
        [u'Copyright 2002, 2003, 2004 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-base/ftxf86.c'),
    )
# Copyright detection on FreeType src/cff sources.
def test_ics_freetype_src_cff_cff_c(self):
    check_detection(
        [u'Copyright 1996-2001, 2002 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-cff/cff.c'),
    )

def test_ics_freetype_src_cff_cffcmap_c(self):
    check_detection(
        [u'Copyright 2002, 2003, 2004, 2005, 2006, 2007, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-cff/cffcmap.c'),
    )

def test_ics_freetype_src_cff_cffcmap_h(self):
    check_detection(
        [u'Copyright 2002, 2003, 2006 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-cff/cffcmap.h'),
    )

def test_ics_freetype_src_cff_cfferrs_h(self):
    check_detection(
        [u'Copyright 2001 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-cff/cfferrs.h'),
    )

def test_ics_freetype_src_cff_cffload_c(self):
    check_detection(
        [u'Copyright 1996-2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-cff/cffload.c'),
    )

def test_ics_freetype_src_cff_cffload_h(self):
    check_detection(
        [u'Copyright 1996-2001, 2002, 2003, 2007, 2008, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-cff/cffload.h'),
    )

def test_ics_freetype_src_cff_cffobjs_h(self):
    check_detection(
        [u'Copyright 1996-2001, 2002, 2003, 2004, 2006, 2007, 2008 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-cff/cffobjs.h'),
    )

def test_ics_freetype_src_cff_cffparse_c(self):
    check_detection(
        [u'Copyright 1996-2004, 2007-2011 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-cff/cffparse.c'),
    )

def test_ics_freetype_src_cff_cffparse_h(self):
    check_detection(
        [u'Copyright 1996-2001, 2002, 2003 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-cff/cffparse.h'),
    )

def test_ics_freetype_src_cff_cffpic_c(self):
    check_detection(
        [u'Copyright 2009, 2010 by Oran Agra and Mickey Gabel.'],
        self.get_test_loc('ics/freetype-src-cff/cffpic.c'),
    )

def test_ics_freetype_src_cff_cfftypes_h(self):
    check_detection(
        [u'Copyright 1996-2003, 2006-2008, 2010-2011 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-cff/cfftypes.h'),
    )
# Copyright detection on FreeType src/psaux sources.
def test_ics_freetype_src_psaux_afmparse_c(self):
    check_detection(
        [u'Copyright 2006, 2007, 2008, 2009, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-psaux/afmparse.c'),
    )

def test_ics_freetype_src_psaux_psaux_c(self):
    check_detection(
        [u'Copyright 1996-2001, 2002, 2006 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-psaux/psaux.c'),
    )

def test_ics_freetype_src_psaux_psauxmod_c(self):
    check_detection(
        [u'Copyright 2000-2001, 2002, 2003, 2006 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-psaux/psauxmod.c'),
    )

def test_ics_freetype_src_psaux_psauxmod_h(self):
    check_detection(
        [u'Copyright 2000-2001 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-psaux/psauxmod.h'),
    )

def test_ics_freetype_src_psaux_psconv_c(self):
    check_detection(
        [u'Copyright 2006, 2008, 2009 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-psaux/psconv.c'),
    )

def test_ics_freetype_src_psaux_t1cmap_c(self):
    check_detection(
        [u'Copyright 2002, 2003, 2006, 2007 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-psaux/t1cmap.c'),
    )

def test_ics_freetype_src_psaux_t1decode_c(self):
    check_detection(
        [u'Copyright 2000-2011 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-psaux/t1decode.c'),
    )

def test_ics_freetype_src_psaux_t1decode_h(self):
    check_detection(
        [u'Copyright 2000-2001, 2002, 2003 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-psaux/t1decode.h'),
    )
# Copyright detection on FreeType src/pshinter sources.
def test_ics_freetype_src_pshinter_pshalgo_c(self):
    check_detection(
        [u'Copyright 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-pshinter/pshalgo.c'),
    )

def test_ics_freetype_src_pshinter_pshalgo_h(self):
    check_detection(
        [u'Copyright 2001, 2002, 2003, 2008 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-pshinter/pshalgo.h'),
    )

def test_ics_freetype_src_pshinter_pshglob_c(self):
    check_detection(
        [u'Copyright 2001, 2002, 2003, 2004, 2006, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-pshinter/pshglob.c'),
    )

def test_ics_freetype_src_pshinter_pshglob_h(self):
    check_detection(
        [u'Copyright 2001, 2002, 2003 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-pshinter/pshglob.h'),
    )

def test_ics_freetype_src_pshinter_pshinter_c(self):
    check_detection(
        [u'Copyright 2001, 2003 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-pshinter/pshinter.c'),
    )

def test_ics_freetype_src_pshinter_pshmod_c(self):
    check_detection(
        [u'Copyright 2001, 2002, 2007 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-pshinter/pshmod.c'),
    )

def test_ics_freetype_src_pshinter_pshrec_c(self):
    check_detection(
        [u'Copyright 2001, 2002, 2003, 2004, 2007, 2009 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-pshinter/pshrec.c'),
    )

def test_ics_freetype_src_pshinter_pshrec_h(self):
    check_detection(
        [u'Copyright 2001, 2002, 2003, 2006, 2008 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-pshinter/pshrec.h'),
    )
# Copyright detection on FreeType src/psnames sources.
def test_ics_freetype_src_psnames_psmodule_c(self):
    check_detection(
        [u'Copyright 1996-2001, 2002, 2003, 2005, 2006, 2007, 2008 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-psnames/psmodule.c'),
    )

def test_ics_freetype_src_psnames_psmodule_h(self):
    check_detection(
        [u'Copyright 1996-2001 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-psnames/psmodule.h'),
    )

def test_ics_freetype_src_psnames_pstables_h(self):
    check_detection(
        [u'Copyright 2005, 2008 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-psnames/pstables.h'),
    )
# Copyright detection on FreeType src/raster sources.
def test_ics_freetype_src_raster_ftmisc_h(self):
    check_detection(
        [u'Copyright 2005, 2009, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-raster/ftmisc.h'),
    )

def test_ics_freetype_src_raster_ftraster_c(self):
    check_detection(
        [u'Copyright 1996-2001, 2002, 2003, 2005, 2007, 2008, 2009, 2010, 2011 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-raster/ftraster.c'),
    )

def test_ics_freetype_src_raster_ftrend1_c(self):
    check_detection(
        [u'Copyright 1996-2001, 2002, 2003, 2005, 2006 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-raster/ftrend1.c'),
    )
# Copyright detection on FreeType src/sfnt sources.
def test_ics_freetype_src_sfnt_sfdriver_c(self):
    check_detection(
        [u'Copyright 1996-2007, 2009-2011 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-sfnt/sfdriver.c'),
    )

def test_ics_freetype_src_sfnt_sferrors_h(self):
    check_detection(
        [u'Copyright 2001, 2004 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-sfnt/sferrors.h'),
    )

def test_ics_freetype_src_sfnt_sfobjs_c(self):
    check_detection(
        [u'Copyright 1996-2008, 2010-2011 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-sfnt/sfobjs.c'),
    )

def test_ics_freetype_src_sfnt_ttbdf_c(self):
    check_detection(
        [u'Copyright 2005, 2006, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-sfnt/ttbdf.c'),
    )

def test_ics_freetype_src_sfnt_ttcmap_c(self):
    check_detection(
        [u'Copyright 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-sfnt/ttcmap.c'),
    )

def test_ics_freetype_src_sfnt_ttcmap_h(self):
    check_detection(
        [u'Copyright 2002, 2003, 2004, 2005 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-sfnt/ttcmap.h'),
    )

def test_ics_freetype_src_sfnt_ttkern_c(self):
    check_detection(
        [u'Copyright 1996-2001, 2002, 2003, 2004, 2005, 2006, 2007, 2009, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-sfnt/ttkern.c'),
    )

def test_ics_freetype_src_sfnt_ttkern_h(self):
    check_detection(
        [u'Copyright 1996-2001, 2002, 2005, 2007 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-sfnt/ttkern.h'),
    )

def test_ics_freetype_src_sfnt_ttload_h(self):
    check_detection(
        [u'Copyright 1996-2001, 2002, 2005, 2006 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-sfnt/ttload.h'),
    )

def test_ics_freetype_src_sfnt_ttmtx_c(self):
    check_detection(
        [u'Copyright 2006-2009, 2011 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-sfnt/ttmtx.c'),
    )

def test_ics_freetype_src_sfnt_ttpost_c(self):
    check_detection(
        [u'Copyright 1996-2001, 2002, 2003, 2006, 2007, 2008, 2009, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-sfnt/ttpost.c'),
    )

def test_ics_freetype_src_sfnt_ttsbit_h(self):
    check_detection(
        [u'Copyright 1996-2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-sfnt/ttsbit.h'),
    )

def test_ics_freetype_src_sfnt_ttsbit0_c(self):
    check_detection(
        [u'Copyright 2005, 2006, 2007, 2008, 2009 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-sfnt/ttsbit0.c'),
    )
# Copyright detection on FreeType src/smooth sources.
def test_ics_freetype_src_smooth_ftgrays_c(self):
    check_detection(
        [u'Copyright 2000-2003, 2005-2011 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-smooth/ftgrays.c'),
    )

def test_ics_freetype_src_smooth_ftsmooth_c(self):
    check_detection(
        [u'Copyright 2000-2006, 2009-2011 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-smooth/ftsmooth.c'),
    )
# Copyright detection on FreeType src/truetype sources.
def test_ics_freetype_src_truetype_truetype_c(self):
    check_detection(
        [u'Copyright 1996-2001, 2004, 2006 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-truetype/truetype.c'),
    )

def test_ics_freetype_src_truetype_ttgload_c(self):
    check_detection(
        [u'Copyright 1996-2011 David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-truetype/ttgload.c'),
    )

def test_ics_freetype_src_truetype_ttgload_h(self):
    check_detection(
        [u'Copyright 1996-2006, 2008, 2011 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-truetype/ttgload.h'),
    )

def test_ics_freetype_src_truetype_ttinterp_h(self):
    check_detection(
        [u'Copyright 1996-2001, 2002, 2003, 2004, 2005, 2006, 2007, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-truetype/ttinterp.h'),
    )

def test_ics_freetype_src_truetype_ttobjs_h(self):
    check_detection(
        [u'Copyright 1996-2009, 2011 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-truetype/ttobjs.h'),
    )

def test_ics_freetype_src_truetype_ttpload_c(self):
    check_detection(
        [u'Copyright 1996-2002, 2004-2011 by David Turner, Robert Wilhelm, and Werner Lemberg.'],
        self.get_test_loc('ics/freetype-src-truetype/ttpload.c'),
    )
# Copyright detection on fsck_msdos sources.
def test_ics_fsck_msdos_boot_c(self):
    check_detection(
        [
            u'Copyright (c) 1995, 1997 Wolfgang Solfrank',
            u'Copyright (c) 1995 Martin Husemann',
        ],
        self.get_test_loc('ics/fsck_msdos/boot.c'),
    )

def test_ics_fsck_msdos_check_c(self):
    check_detection(
        [
            u'Copyright (c) 1995, 1996, 1997 Wolfgang Solfrank',
            u'Copyright (c) 1995 Martin Husemann',
        ],
        self.get_test_loc('ics/fsck_msdos/check.c'),
    )

def test_ics_fsck_msdos_main_c(self):
    check_detection(
        [
            u'Copyright (c) 1995 Wolfgang Solfrank',
            u'Copyright (c) 1995 Martin Husemann',
        ],
        self.get_test_loc('ics/fsck_msdos/main.c'),
    )

def test_ics_fsck_msdos_notice(self):
    check_detection(
        [
            u'Copyright (c) 1995, 1997 Wolfgang Solfrank',
            u'Copyright (c) 1995 Martin Husemann',
            u'Copyright (c) 1995, 1996, 1997 Wolfgang Solfrank',
            u'Copyright (c) 1995 Martin Husemann',
            u'Copyright (c) 1995 Wolfgang Solfrank',
            u'Copyright (c) 1995 Martin Husemann',
        ],
        self.get_test_loc('ics/fsck_msdos/NOTICE'),
    )
# Copyright detection on genext2fs sources and build files.
def test_ics_genext2fs_aclocal_m4(self):
    check_detection(
        [
            u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.',
            u'Copyright (c) 2002, 2003, 2005 Free Software Foundation, Inc.',
            u'Copyright (c) 2001, 2003, 2005 Free Software Foundation, Inc.',
            u'Copyright (c) 1997, 2000, 2001, 2003, 2004, 2005 Free Software Foundation, Inc.',
            u'Copyright (c) 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.',
            u'Copyright (c) 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.',
            u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.',
            u'Copyright (c) 2001, 2003, 2005 Free Software Foundation, Inc.',
            u'Copyright (c) 2003, 2005 Free Software Foundation, Inc.',
            u'Copyright (c) 2001, 2002, 2003, 2005 Free Software Foundation, Inc.',
            u'Copyright (c) 1997, 1999, 2000, 2001, 2003, 2005 Free Software Foundation, Inc.',
            u'Copyright (c) 2003, 2004, 2005 Free Software Foundation, Inc.',
            u'Copyright (c) 2001, 2002, 2003, 2005 Free Software Foundation, Inc.',
            u'Copyright (c) 1996, 1997, 2000, 2001, 2003, 2005 Free Software Foundation, Inc.',
            u'Copyright (c) 2001, 2003, 2005 Free Software Foundation, Inc.',
            u'Copyright (c) 2004, 2005 Free Software Foundation, Inc.',
        ],
        self.get_test_loc('ics/genext2fs/aclocal.m4'),
    )

def test_ics_genext2fs_configure(self):
    check_detection(
        [
            u'Copyright (c) 1992, 1993, 1994, 1995, 1996, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
            u'Copyright (c) 1992, 1993, 1994, 1995, 1996, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
            u'Copyright (c) 2006 Free Software Foundation, Inc.',
        ],
        self.get_test_loc('ics/genext2fs/configure'),
    )

def test_ics_genext2fs_genext2fs_c(self):
    check_detection(
        [
            u'Copyright (c) 2000 Xavier Bestel <xavier.bestel@free.fr>',
            u'Copyright (c) 1999,2000 by Lineo, inc. and John Beppu',
            u'Copyright (c) 1999,2000,2001 by John Beppu <beppu@codepoet.org>',
            u'Copyright (c) 2002 Edward Betts <edward@debian.org>',
            u'Copyright (c) 2002 Ixia',
            u'Copyright (c) 2002 Ixia',
            u'Copyright (c) 2002 Ixia',
        ],
        self.get_test_loc('ics/genext2fs/genext2fs.c'),
    )

# Known failure: trailing company/author words are not yet part of the detection.
@expectedFailure
def test_ics_genext2fs_genext2fs_c_trail_name_trail_name_trail_name_trail_name(self):
    check_detection(
        [
            u'Copyright (c) 2000 Xavier Bestel <xavier.bestel@free.fr>',
            u'Copyright (c) 1999,2000 by Lineo, inc. and John Beppu',
            u'Beppu Copyright (c) 1999,2000,2001 by John Beppu <beppu@codepoet.org>',
            u'Copyright (c) 2002 Edward Betts <edward@debian.org>',
            u'Copyright (c) 2002 Ixia communications',
            u'Copyright (c) 2002 Ixia communications',
            u'Copyright (c) 2002 Ixia communications',
        ],
        self.get_test_loc('ics/genext2fs/genext2fs.c'),
    )

def test_ics_genext2fs_m4_ac_func_scanf_can_malloc_m4(self):
    check_detection(
        [u'(c) Finn Thain 2006'],
        self.get_test_loc('ics/genext2fs-m4/ac_func_scanf_can_malloc.m4'),
    )
def test_ics_giflib_gif_lib_private_h(self):
test_file = self.get_test_loc('ics/giflib/gif_lib_private.h')
expected = [
u'(c) Copyright 1997 Eric S. Raymond',
u'(c) Copyright 1997 Eric S. Raymond',
]
check_detection(expected, test_file)
def test_ics_giflib_notice(self):
test_file = self.get_test_loc('ics/giflib/NOTICE')
expected = [
u'Copyright (c) 1997 Eric S. Raymond',
]
check_detection(expected, test_file)
def test_ics_google_diff_match_patch_name_fraser_neil_plaintext_diff_match_patch_java(self):
test_file = self.get_test_loc('ics/google-diff-match-patch-name-fraser-neil-plaintext/diff_match_patch.java')
expected = [
u'Copyright 2006 Google Inc.',
]
check_detection(expected, test_file)
def test_ics_gtest_test_gtest_filter_unittest_py(self):
test_file = self.get_test_loc('ics/gtest-test/gtest_filter_unittest.py')
expected = [
u'Copyright 2005, Google Inc.',
]
check_detection(expected, test_file)
def test_ics_gtest_test_gtest_nc_test_py(self):
test_file = self.get_test_loc('ics/gtest-test/gtest_nc_test.py')
expected = [
u'Copyright 2007, Google Inc.',
]
check_detection(expected, test_file)
# Guava fixtures.  The .ipr project file yields no detection by default;
# the markup variant below is a documented known failure.

def test_ics_guava_guava_ipr(self):
    test_file = self.get_test_loc('ics/guava/guava.ipr')
    expected = []
    check_detection(expected, test_file)

@expectedFailure
def test_ics_guava_guava_ipr_markup(self):
    # Known gap: a copyright embedded inside XML markup is not detected.
    test_file = self.get_test_loc('ics/guava/guava.ipr')
    expected = [
        u'Copyright (c) today.year Google Inc.',
    ]
    check_detection(expected, test_file)

def test_ics_guava_src_com_google_common_annotations_gwtcompatible_java(self):
    test_file = self.get_test_loc('ics/guava-src-com-google-common-annotations/GwtCompatible.java')
    expected = [
        u'Copyright (c) 2009 Google Inc.',
    ]
    check_detection(expected, test_file)

def test_ics_guava_src_com_google_common_annotations_visiblefortesting_java(self):
    test_file = self.get_test_loc('ics/guava-src-com-google-common-annotations/VisibleForTesting.java')
    expected = [
        u'Copyright (c) 2006 Google Inc.',
    ]
    check_detection(expected, test_file)

def test_ics_guava_src_com_google_common_base_charmatcher_java(self):
    test_file = self.get_test_loc('ics/guava-src-com-google-common-base/CharMatcher.java')
    expected = [
        u'Copyright (c) 2008 Google Inc.',
    ]
    check_detection(expected, test_file)

def test_ics_guava_src_com_google_common_base_charsets_java(self):
    test_file = self.get_test_loc('ics/guava-src-com-google-common-base/Charsets.java')
    expected = [
        u'Copyright (c) 2007 Google Inc.',
    ]
    check_detection(expected, test_file)

def test_ics_guava_src_com_google_common_io_nulloutputstream_java(self):
    test_file = self.get_test_loc('ics/guava-src-com-google-common-io/NullOutputStream.java')
    expected = [
        u'Copyright (c) 2004 Google Inc.',
    ]
    check_detection(expected, test_file)
# HarfBuzz fixtures.  Each @expectedFailure *_trail_misc variant documents
# the same known gap: the "and/or its subsidiary(-ies)" trailer of Nokia
# statements is not captured by the detector.

def test_ics_harfbuzz_contrib_harfbuzz_unicode_icu_c(self):
    test_file = self.get_test_loc('ics/harfbuzz-contrib/harfbuzz-unicode-icu.c')
    expected = [
        u'Copyright 2010, The Android Open Source Project',
        u'Copyright 2010, Google Inc.',
    ]
    check_detection(expected, test_file)

def test_ics_harfbuzz_contrib_tables_bidimirroring_txt(self):
    test_file = self.get_test_loc('ics/harfbuzz-contrib-tables/BidiMirroring.txt')
    expected = [
        u'Copyright (c) 1991-2008 Unicode, Inc.',
    ]
    check_detection(expected, test_file)

def test_ics_harfbuzz_src_harfbuzz_arabic_c(self):
    test_file = self.get_test_loc('ics/harfbuzz-src/harfbuzz-arabic.c')
    expected = [
        u'Copyright (c) 2008 Nokia Corporation',
    ]
    check_detection(expected, test_file)

@expectedFailure
def test_ics_harfbuzz_src_harfbuzz_arabic_c_trail_misc(self):
    # Known gap: the "and/or its subsidiary(-ies)" trailer is not captured.
    test_file = self.get_test_loc('ics/harfbuzz-src/harfbuzz-arabic.c')
    expected = [
        u'Copyright (C) 2008 Nokia Corporation and/or its subsidiary(-ies)',
    ]
    check_detection(expected, test_file)

def test_ics_harfbuzz_src_harfbuzz_buffer_private_h(self):
    test_file = self.get_test_loc('ics/harfbuzz-src/harfbuzz-buffer-private.h')
    expected = [
        u'Copyright (c) 1998-2004 David Turner and Werner Lemberg',
        u'Copyright (c) 2004,2007 Red Hat, Inc.',
    ]
    check_detection(expected, test_file)

def test_ics_harfbuzz_src_harfbuzz_dump_c(self):
    test_file = self.get_test_loc('ics/harfbuzz-src/harfbuzz-dump.c')
    expected = [
        u'Copyright (c) 2000, 2007 Red Hat, Inc.',
    ]
    check_detection(expected, test_file)

def test_ics_harfbuzz_src_harfbuzz_external_h(self):
    test_file = self.get_test_loc('ics/harfbuzz-src/harfbuzz-external.h')
    expected = [
        u'Copyright (c) 2008 Nokia Corporation',
    ]
    check_detection(expected, test_file)

@expectedFailure
def test_ics_harfbuzz_src_harfbuzz_external_h_trail_misc(self):
    # Known gap: the "and/or its subsidiary(-ies)" trailer is not captured.
    test_file = self.get_test_loc('ics/harfbuzz-src/harfbuzz-external.h')
    expected = [
        u'Copyright (C) 2008 Nokia Corporation and/or its subsidiary(-ies)',
    ]
    check_detection(expected, test_file)

def test_ics_harfbuzz_src_harfbuzz_gdef_private_h(self):
    test_file = self.get_test_loc('ics/harfbuzz-src/harfbuzz-gdef-private.h')
    expected = [
        u'Copyright (c) 1998-2004 David Turner and Werner Lemberg',
        u'Copyright (c) 2006 Behdad Esfahbod',
    ]
    check_detection(expected, test_file)

def test_ics_harfbuzz_src_harfbuzz_global_h(self):
    test_file = self.get_test_loc('ics/harfbuzz-src/harfbuzz-global.h')
    expected = [
        u'Copyright (c) 2008 Nokia Corporation',
        u'Copyright (c) 2007 Red Hat, Inc.',
    ]
    check_detection(expected, test_file)

@expectedFailure
def test_ics_harfbuzz_src_harfbuzz_global_h_trail_misc(self):
    # Known gap: the "and/or its subsidiary(-ies)" trailer is not captured.
    test_file = self.get_test_loc('ics/harfbuzz-src/harfbuzz-global.h')
    expected = [
        u'Copyright (C) 2008 Nokia Corporation and/or its subsidiary(-ies)',
        u'Copyright (c) 2007 Red Hat, Inc.',
    ]
    check_detection(expected, test_file)

def test_ics_harfbuzz_src_harfbuzz_gpos_c(self):
    test_file = self.get_test_loc('ics/harfbuzz-src/harfbuzz-gpos.c')
    expected = [
        u'Copyright (c) 1998-2004 David Turner and Werner Lemberg',
        u'Copyright (c) 2006 Behdad Esfahbod',
        u'Copyright (c) 2007 Red Hat, Inc.',
    ]
    check_detection(expected, test_file)

def test_ics_harfbuzz_src_harfbuzz_greek_c(self):
    test_file = self.get_test_loc('ics/harfbuzz-src/harfbuzz-greek.c')
    expected = [
        u'Copyright (c) 2010 Nokia Corporation',
    ]
    check_detection(expected, test_file)

@expectedFailure
def test_ics_harfbuzz_src_harfbuzz_greek_c_trail_misc(self):
    # Known gap: the "and/or its subsidiary(-ies)" trailer is not captured.
    test_file = self.get_test_loc('ics/harfbuzz-src/harfbuzz-greek.c')
    expected = [
        u'Copyright (C) 2010 Nokia Corporation and/or its subsidiary(-ies)',
    ]
    check_detection(expected, test_file)

def test_ics_harfbuzz_src_harfbuzz_impl_c(self):
    test_file = self.get_test_loc('ics/harfbuzz-src/harfbuzz-impl.c')
    expected = [
        u'Copyright (c) 1998-2004 David Turner and Werner Lemberg',
        u'Copyright (c) 2008 Nokia Corporation',
        u'Copyright (c) 2007 Red Hat, Inc.',
    ]
    check_detection(expected, test_file)

@expectedFailure
def test_ics_harfbuzz_src_harfbuzz_impl_c_trail_misc(self):
    # Known gap: the "and/or its subsidiary(-ies)" trailer is not captured.
    test_file = self.get_test_loc('ics/harfbuzz-src/harfbuzz-impl.c')
    expected = [
        u'Copyright (c) 1998-2004 David Turner and Werner Lemberg',
        u'Copyright (C) 2008 Nokia Corporation and/or its subsidiary(-ies)',
        u'Copyright (c) 2007 Red Hat, Inc.',
    ]
    check_detection(expected, test_file)

def test_ics_harfbuzz_src_harfbuzz_shape_h(self):
    test_file = self.get_test_loc('ics/harfbuzz-src/harfbuzz-shape.h')
    expected = [
        u'Copyright (c) 2006 Red Hat, Inc.',
    ]
    check_detection(expected, test_file)

def test_ics_harfbuzz_src_harfbuzz_stream_c(self):
    test_file = self.get_test_loc('ics/harfbuzz-src/harfbuzz-stream.c')
    expected = [
        u'Copyright (c) 2005 David Turner',
        u'Copyright (c) 2008 Nokia Corporation',
        u'Copyright (c) 2007 Red Hat, Inc.',
    ]
    check_detection(expected, test_file)

@expectedFailure
def test_ics_harfbuzz_src_harfbuzz_stream_c_trail_misc(self):
    # Known gap: the "and/or its subsidiary(-ies)" trailer is not captured.
    test_file = self.get_test_loc('ics/harfbuzz-src/harfbuzz-stream.c')
    expected = [
        u'Copyright (c) 2005 David Turner',
        u'Copyright (C) 2008 Nokia Corporation and/or its subsidiary(-ies)',
        u'Copyright (c) 2007 Red Hat, Inc.',
    ]
    check_detection(expected, test_file)

def test_ics_harfbuzz_src_harfbuzz_stream_h(self):
    test_file = self.get_test_loc('ics/harfbuzz-src/harfbuzz-stream.h')
    expected = [
        u'Copyright (c) 2005 David Turner',
        u'Copyright (c) 2008 Nokia Corporation',
    ]
    check_detection(expected, test_file)

@expectedFailure
def test_ics_harfbuzz_src_harfbuzz_stream_h_trail_misc(self):
    # Known gap: the "and/or its subsidiary(-ies)" trailer is not captured.
    test_file = self.get_test_loc('ics/harfbuzz-src/harfbuzz-stream.h')
    expected = [
        u'Copyright (c) 2005 David Turner',
        u'Copyright (C) 2008 Nokia Corporation and/or its subsidiary(-ies)',
    ]
    check_detection(expected, test_file)
# Hyphenation library fixtures (hnjalloc/hyphen, Raph Levien et al.).

def test_ics_hyphenation_hnjalloc_c(self):
    test_file = self.get_test_loc('ics/hyphenation/hnjalloc.c')
    expected = [
        u'Copyright (c) 1998 Raph Levien',
        u'(c) 2001 ALTLinux, Moscow',
    ]
    check_detection(expected, test_file)

def test_ics_hyphenation_hnjalloc_h(self):
    test_file = self.get_test_loc('ics/hyphenation/hnjalloc.h')
    expected = [
        u'Copyright (c) 1998 Raph Levien',
    ]
    check_detection(expected, test_file)

def test_ics_hyphenation_hyphen_c(self):
    test_file = self.get_test_loc('ics/hyphenation/hyphen.c')
    expected = [
        u'Copyright (c) 1998 Raph Levien',
        u'(c) 2001 ALTLinux, Moscow',
        u'(c) 2001 Peter Novodvorsky (nidd@cs.msu.su)',
        u'(c) 2006, 2007, 2008 Laszlo Nemeth',
    ]
    check_detection(expected, test_file)

def test_ics_hyphenation_hyphen_h(self):
    test_file = self.get_test_loc('ics/hyphenation/hyphen.h')
    expected = [
        u'(c) 1998 Raph Levien',
        u'(c) 2001 ALTLinux, Moscow',
        u'(c) 2006, 2007, 2008 Laszlo Nemeth',
        u'Copyright (c) 1998 Raph Levien',
    ]
    check_detection(expected, test_file)

def test_ics_hyphenation_readme(self):
    test_file = self.get_test_loc('ics/hyphenation/README')
    expected = [
        u'(c) 1998 Raph Levien',
        u'(c) 2001 ALTLinux, Moscow',
        u'(c) 2006, 2007, 2008 Laszlo Nemeth',
    ]
    check_detection(expected, test_file)
# iproute2 fixtures (README, kernel uapi headers, ip/ and misc/ sources).

def test_ics_iproute2_readme_lnstat(self):
    test_file = self.get_test_loc('ics/iproute2/README.lnstat')
    expected = [
        u'(c) 2004 Harald Welte laforge@gnumonks.org',
    ]
    check_detection(expected, test_file)

def test_ics_iproute2_include_linux_if_addrlabel_h(self):
    test_file = self.get_test_loc('ics/iproute2-include-linux/if_addrlabel.h')
    expected = [
        u'Copyright (c) 2007 USAGI/WIDE Project',
    ]
    check_detection(expected, test_file)

def test_ics_iproute2_include_linux_if_arp_h(self):
    test_file = self.get_test_loc('ics/iproute2-include-linux/if_arp.h')
    expected = [
        u'(c) UCB 1986-1988',
    ]
    check_detection(expected, test_file)

def test_ics_iproute2_include_linux_if_tun_h(self):
    test_file = self.get_test_loc('ics/iproute2-include-linux/if_tun.h')
    expected = [
        u'Copyright (c) 1999-2000 Maxim Krasnyansky <max_mk@yahoo.com>',
    ]
    check_detection(expected, test_file)

def test_ics_iproute2_include_linux_netfilter_ipv4_h(self):
    test_file = self.get_test_loc('ics/iproute2-include-linux/netfilter_ipv4.h')
    expected = [
        u'(c) 1998 Rusty Russell',
    ]
    check_detection(expected, test_file)

def test_ics_iproute2_include_linux_can_netlink_h(self):
    test_file = self.get_test_loc('ics/iproute2-include-linux-can/netlink.h')
    expected = [
        u'Copyright (c) 2009 Wolfgang Grandegger <wg@grandegger.com>',
    ]
    check_detection(expected, test_file)

def test_ics_iproute2_include_linux_tc_act_tc_skbedit_h(self):
    test_file = self.get_test_loc('ics/iproute2-include-linux-tc_act/tc_skbedit.h')
    expected = [
        u'Copyright (c) 2008, Intel Corporation.',
    ]
    check_detection(expected, test_file)

def test_ics_iproute2_include_netinet_icmp6_h(self):
    test_file = self.get_test_loc('ics/iproute2-include-netinet/icmp6.h')
    expected = [
        u'Copyright (c) 1991-1997,2000,2006 Free Software Foundation, Inc.',
    ]
    check_detection(expected, test_file)

def test_ics_iproute2_ip_ip6tunnel_c(self):
    test_file = self.get_test_loc('ics/iproute2-ip/ip6tunnel.c')
    expected = [
        u'Copyright (c) 2006 USAGI/WIDE Project',
    ]
    check_detection(expected, test_file)

def test_ics_iproute2_ip_ipaddrlabel_c(self):
    test_file = self.get_test_loc('ics/iproute2-ip/ipaddrlabel.c')
    expected = [
        u'Copyright (c) 2007 USAGI/WIDE Project',
    ]
    check_detection(expected, test_file)

def test_ics_iproute2_ip_ipprefix_c(self):
    test_file = self.get_test_loc('ics/iproute2-ip/ipprefix.c')
    expected = [
        u'Copyright (c) 2005 USAGI/WIDE Project',
    ]
    check_detection(expected, test_file)

def test_ics_iproute2_ip_ipxfrm_c(self):
    test_file = self.get_test_loc('ics/iproute2-ip/ipxfrm.c')
    expected = [
        u'Copyright (c) 2004 USAGI/WIDE Project',
    ]
    check_detection(expected, test_file)

def test_ics_iproute2_misc_lnstat_c(self):
    test_file = self.get_test_loc('ics/iproute2-misc/lnstat.c')
    expected = [
        u'Copyright (c) 2004 by Harald Welte <laforge@gnumonks.org>',
        u'Copyright 2001 by Robert Olsson <robert.olsson@its.uu.se> Uppsala University, Sweden',
        u'Copyright (c) 2004 by Harald Welte <laforge@gnumonks.org>',
    ]
    check_detection(expected, test_file)

def test_ics_iproute2_misc_lnstat_util_c(self):
    test_file = self.get_test_loc('ics/iproute2-misc/lnstat_util.c')
    expected = [
        u'Copyright (c) 2004 by Harald Welte <laforge@gnumonks.org>',
        u'Copyright 2001 by Robert Olsson <robert.olsson@its.uu.se> Uppsala University, Sweden',
    ]
    check_detection(expected, test_file)
# ipsec-tools fixtures (NOTICE files, libipsec and racoon sources).
# The *_extra_contributed names mark fixtures whose NOTICE aggregates
# statements contributed from several sub-projects.

def test_ics_ipsec_tools_notice_extra_contributed(self):
    test_file = self.get_test_loc('ics/ipsec-tools/NOTICE')
    expected = [
        u'Copyright (c) 1995, 1996, 1997, and 1998 WIDE Project.',
        u'Copyright (c) 2004 Emmanuel Dreyfus',
        u'Copyright (c) 2004-2006 Emmanuel Dreyfus',
        u'Copyright (c) 2000 WIDE Project.',
        u'Copyright (c) 2004-2005 Emmanuel Dreyfus',
        u'Copyright (c) 2000, 2001 WIDE Project.',
        u'Copyright (c) 2004 SuSE Linux AG, Nuernberg, Germany.',
        u'Copyright (c) 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002 and 2003 WIDE Project.',
        u'Copyright 2000 Wasabi Systems, Inc.',
        u'Copyright (c) 2005 International Business Machines Corporation',
        u'Copyright (c) 2005 by Trusted Computer Solutions, Inc.',
        u'Copyright 2000 Aaron D. Gifford.',
        u'Copyright (c) 1995, 1996, 1997, 1998, and 1999 WIDE Project.',
        u'Copyright (c) 1995, 1996, 1997, and 1998 WIDE Project.',
        u'Copyright (c) 1991, 1993 The Regents of the University of California.',
    ]
    check_detection(expected, test_file)

def test_ics_ipsec_tools_src_include_glibc_notice(self):
    test_file = self.get_test_loc('ics/ipsec-tools-src-include-glibc/NOTICE')
    expected = [
        u'Copyright (c) 1991, 1993 The Regents of the University of California.',
    ]
    check_detection(expected, test_file)

def test_ics_ipsec_tools_src_libipsec_ipsec_dump_policy_c(self):
    test_file = self.get_test_loc('ics/ipsec-tools-src-libipsec/ipsec_dump_policy.c')
    expected = [
        u'Copyright (c) 1995, 1996, 1997, 1998, and 1999 WIDE Project.',
    ]
    check_detection(expected, test_file)

def test_ics_ipsec_tools_src_libipsec_ipsec_set_policy_3(self):
    test_file = self.get_test_loc('ics/ipsec-tools-src-libipsec/ipsec_set_policy.3')
    expected = [
        u'Copyright (c) 1995, 1996, 1997, 1998, and 1999 WIDE Project.',
    ]
    check_detection(expected, test_file)

def test_ics_ipsec_tools_src_libipsec_key_debug_c(self):
    test_file = self.get_test_loc('ics/ipsec-tools-src-libipsec/key_debug.c')
    expected = [
        u'Copyright (c) 1995, 1996, 1997, and 1998 WIDE Project.',
    ]
    check_detection(expected, test_file)

def test_ics_ipsec_tools_src_libipsec_notice(self):
    test_file = self.get_test_loc('ics/ipsec-tools-src-libipsec/NOTICE')
    expected = [
        u'Copyright (c) 1995, 1996, 1997, 1998, and 1999 WIDE Project.',
        u'Copyright (c) 1995, 1996, 1997, and 1998 WIDE Project.',
    ]
    check_detection(expected, test_file)

def test_ics_ipsec_tools_src_libipsec_policy_parse_y(self):
    test_file = self.get_test_loc('ics/ipsec-tools-src-libipsec/policy_parse.y')
    expected = [
        u'Copyright (c) 1995, 1996, 1997, 1998, and 1999 WIDE Project.',
    ]
    check_detection(expected, test_file)

def test_ics_ipsec_tools_src_racoon_cfparse_y(self):
    test_file = self.get_test_loc('ics/ipsec-tools-src-racoon/cfparse.y')
    expected = [
        u'Copyright (c) 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002 and 2003 WIDE Project.',
    ]
    check_detection(expected, test_file)

def test_ics_ipsec_tools_src_racoon_dump_h(self):
    test_file = self.get_test_loc('ics/ipsec-tools-src-racoon/dump.h')
    expected = [
        u'Copyright (c) 2000 WIDE Project.',
    ]
    check_detection(expected, test_file)

def test_ics_ipsec_tools_src_racoon_evt_c(self):
    test_file = self.get_test_loc('ics/ipsec-tools-src-racoon/evt.c')
    expected = [
        u'Copyright (c) 2004 Emmanuel Dreyfus',
        u'Copyright (c) 2008 Timo Teras',
    ]
    check_detection(expected, test_file)

def test_ics_ipsec_tools_src_racoon_gcmalloc_h(self):
    test_file = self.get_test_loc('ics/ipsec-tools-src-racoon/gcmalloc.h')
    expected = [
        u'Copyright (c) 2000, 2001 WIDE Project.',
    ]
    check_detection(expected, test_file)

def test_ics_ipsec_tools_src_racoon_genlist_c_extra_contributed(self):
    test_file = self.get_test_loc('ics/ipsec-tools-src-racoon/genlist.c')
    expected = [
        u'Copyright (c) 2004 SuSE Linux AG, Nuernberg, Germany.',
    ]
    check_detection(expected, test_file)

def test_ics_ipsec_tools_src_racoon_grabmyaddr_c(self):
    test_file = self.get_test_loc('ics/ipsec-tools-src-racoon/grabmyaddr.c')
    expected = [
        u'Copyright (c) 1995, 1996, 1997, and 1998 WIDE Project.',
        u'Copyright (c) 2008 Timo Teras <timo.teras@iki.fi>.',
    ]
    check_detection(expected, test_file)

def test_ics_ipsec_tools_src_racoon_gssapi_c(self):
    test_file = self.get_test_loc('ics/ipsec-tools-src-racoon/gssapi.c')
    expected = [
        u'Copyright 2000 Wasabi Systems, Inc.',
    ]
    check_detection(expected, test_file)

def test_ics_ipsec_tools_src_racoon_handler_h(self):
    test_file = self.get_test_loc('ics/ipsec-tools-src-racoon/handler.h')
    expected = [
        u'Copyright (c) 1995, 1996, 1997, and 1998 WIDE Project.',
    ]
    check_detection(expected, test_file)

def test_ics_ipsec_tools_src_racoon_isakmp_cfg_c(self):
    test_file = self.get_test_loc('ics/ipsec-tools-src-racoon/isakmp_cfg.c')
    expected = [
        u'Copyright (c) 2004-2006 Emmanuel Dreyfus',
    ]
    check_detection(expected, test_file)

def test_ics_ipsec_tools_src_racoon_isakmp_cfg_h(self):
    test_file = self.get_test_loc('ics/ipsec-tools-src-racoon/isakmp_cfg.h')
    expected = [
        u'Copyright (c) 2004 Emmanuel Dreyfus',
    ]
    check_detection(expected, test_file)

def test_ics_ipsec_tools_src_racoon_isakmp_xauth_c(self):
    test_file = self.get_test_loc('ics/ipsec-tools-src-racoon/isakmp_xauth.c')
    expected = [
        u'Copyright (c) 2004-2005 Emmanuel Dreyfus',
    ]
    check_detection(expected, test_file)

def test_ics_ipsec_tools_src_racoon_notice_extra_contributed(self):
    test_file = self.get_test_loc('ics/ipsec-tools-src-racoon/NOTICE')
    expected = [
        u'Copyright (c) 1995, 1996, 1997, and 1998 WIDE Project.',
        u'Copyright (c) 2004 Emmanuel Dreyfus',
        u'Copyright (c) 2004-2006 Emmanuel Dreyfus',
        u'Copyright (c) 2000 WIDE Project.',
        u'Copyright (c) 2004-2005 Emmanuel Dreyfus',
        u'Copyright (c) 2000, 2001 WIDE Project.',
        u'Copyright (c) 2004 SuSE Linux AG, Nuernberg, Germany.',
        u'Copyright (c) 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002 and 2003 WIDE Project.',
        u'Copyright 2000 Wasabi Systems, Inc.',
        u'Copyright (c) 2005 International Business Machines Corporation',
        u'Copyright (c) 2005 by Trusted Computer Solutions, Inc.',
        u'Copyright 2000 Aaron D. Gifford.',
    ]
    check_detection(expected, test_file)

def test_ics_ipsec_tools_src_racoon_plainrsa_gen_8(self):
    test_file = self.get_test_loc('ics/ipsec-tools-src-racoon/plainrsa-gen.8')
    expected = [
        u'Copyright (c) 2004 SuSE Linux AG, Nuernberg, Germany.',
    ]
    check_detection(expected, test_file)

def test_ics_ipsec_tools_src_racoon_racoon_8(self):
    test_file = self.get_test_loc('ics/ipsec-tools-src-racoon/racoon.8')
    expected = [
        u'Copyright (c) 1995, 1996, 1997, and 1998 WIDE Project.',
    ]
    check_detection(expected, test_file)

def test_ics_ipsec_tools_src_racoon_racoonctl_8(self):
    test_file = self.get_test_loc('ics/ipsec-tools-src-racoon/racoonctl.8')
    expected = [
        u'Copyright (c) 2004 Emmanuel Dreyfus',
    ]
    check_detection(expected, test_file)

def test_ics_ipsec_tools_src_racoon_racoonctl_c(self):
    test_file = self.get_test_loc('ics/ipsec-tools-src-racoon/racoonctl.c')
    expected = [
        u'Copyright (c) 1995, 1996, 1997, and 1998 WIDE Project.',
        u'Copyright (c) 2008 Timo Teras.',
    ]
    check_detection(expected, test_file)

def test_ics_ipsec_tools_src_racoon_security_c(self):
    test_file = self.get_test_loc('ics/ipsec-tools-src-racoon/security.c')
    expected = [
        u'Copyright (c) 2005 International Business Machines Corporation',
        u'Copyright (c) 2005 by Trusted Computer Solutions, Inc.',
    ]
    check_detection(expected, test_file)

def test_ics_ipsec_tools_src_racoon_missing_crypto_sha2_sha2_c(self):
    test_file = self.get_test_loc('ics/ipsec-tools-src-racoon-missing-crypto-sha2/sha2.c')
    expected = [
        u'Copyright 2000 Aaron D. Gifford.',
    ]
    check_detection(expected, test_file)

def test_ics_ipsec_tools_src_racoon_missing_crypto_sha2_sha2_h(self):
    test_file = self.get_test_loc('ics/ipsec-tools-src-racoon-missing-crypto-sha2/sha2.h')
    expected = [
        u'Copyright 2000 Aaron D. Gifford.',
    ]
    check_detection(expected, test_file)
# iptables extension fixtures (extensions/libipt_*, libip6t_*, libxt_*).

def test_ics_iptables_extensions_libip6t_reject_c(self):
    test_file = self.get_test_loc('ics/iptables-extensions/libip6t_REJECT.c')
    expected = [
        u'(c) 2000 Jozsef Kadlecsik <kadlec@blackhole.kfki.hu>',
    ]
    check_detection(expected, test_file)

def test_ics_iptables_extensions_libipt_clusterip_c(self):
    test_file = self.get_test_loc('ics/iptables-extensions/libipt_CLUSTERIP.c')
    expected = [
        u'(c) 2003 by Harald Welte <laforge@gnumonks.org>',
    ]
    check_detection(expected, test_file)

def test_ics_iptables_extensions_libipt_ecn_c(self):
    test_file = self.get_test_loc('ics/iptables-extensions/libipt_ECN.c')
    expected = [
        u'(c) 2002 by Harald Welte <laforge@gnumonks.org>',
    ]
    check_detection(expected, test_file)

def test_ics_iptables_extensions_libipt_ttl_c(self):
    test_file = self.get_test_loc('ics/iptables-extensions/libipt_TTL.c')
    expected = [
        u'(c) 2000 by Harald Welte <laforge@gnumonks.org>',
    ]
    check_detection(expected, test_file)

def test_ics_iptables_extensions_libxt_audit_c(self):
    test_file = self.get_test_loc('ics/iptables-extensions/libxt_AUDIT.c')
    expected = [
        u'(c) 2010-2011, Thomas Graf <tgraf@redhat.com>',
        u'(c) 2010-2011, Red Hat, Inc.',
    ]
    check_detection(expected, test_file)

def test_ics_iptables_extensions_libxt_checksum_c(self):
    test_file = self.get_test_loc('ics/iptables-extensions/libxt_CHECKSUM.c')
    expected = [
        u'(c) 2002 by Harald Welte <laforge@gnumonks.org>',
        u'(c) 2010 by Red Hat, Inc',
    ]
    check_detection(expected, test_file)

def test_ics_iptables_extensions_libxt_cluster_c(self):
    test_file = self.get_test_loc('ics/iptables-extensions/libxt_cluster.c')
    expected = [
        u'(c) 2009 by Pablo Neira Ayuso <pablo@netfilter.org>',
    ]
    check_detection(expected, test_file)

def test_ics_iptables_extensions_libxt_connmark_c(self):
    test_file = self.get_test_loc('ics/iptables-extensions/libxt_connmark.c')
    expected = [
        u'(c) 2002,2004 MARA Systems AB',
    ]
    check_detection(expected, test_file)

def test_ics_iptables_extensions_libxt_connsecmark_c(self):
    test_file = self.get_test_loc('ics/iptables-extensions/libxt_CONNSECMARK.c')
    expected = [
        u'Copyright (c) 2006 Red Hat, Inc., James Morris <jmorris@redhat.com>',
    ]
    check_detection(expected, test_file)

def test_ics_iptables_extensions_libxt_conntrack_c(self):
    test_file = self.get_test_loc('ics/iptables-extensions/libxt_conntrack.c')
    expected = [
        u'(c) 2001 Marc Boucher (marc@mbsi.ca).',
        u'Copyright (c) CC Computer Consultants GmbH, 2007 - 2008 Jan Engelhardt <jengelh@computergmbh.de>',
    ]
    check_detection(expected, test_file)

def test_ics_iptables_extensions_libxt_dccp_c(self):
    test_file = self.get_test_loc('ics/iptables-extensions/libxt_dccp.c')
    expected = [
        u'(c) 2005 by Harald Welte <laforge@netfilter.org>',
    ]
    check_detection(expected, test_file)

def test_ics_iptables_extensions_libxt_devgroup_c(self):
    test_file = self.get_test_loc('ics/iptables-extensions/libxt_devgroup.c')
    expected = [
        u'Copyright (c) 2011 Patrick McHardy <kaber@trash.net>',
    ]
    check_detection(expected, test_file)

def test_ics_iptables_extensions_libxt_hashlimit_c(self):
    test_file = self.get_test_loc('ics/iptables-extensions/libxt_hashlimit.c')
    expected = [
        u'(c) 2003-2004 by Harald Welte <laforge@netfilter.org>',
    ]
    check_detection(expected, test_file)

def test_ics_iptables_extensions_libxt_idletimer_c(self):
    test_file = self.get_test_loc('ics/iptables-extensions/libxt_IDLETIMER.c')
    expected = [
        u'Copyright (c) 2010 Nokia Corporation.',
    ]
    check_detection(expected, test_file)

def test_ics_iptables_extensions_libxt_led_c(self):
    test_file = self.get_test_loc('ics/iptables-extensions/libxt_LED.c')
    expected = [
        u'(c) 2008 Adam Nielsen <a.nielsen@shikadi.net>',
    ]
    check_detection(expected, test_file)

def test_ics_iptables_extensions_libxt_osf_c(self):
    test_file = self.get_test_loc('ics/iptables-extensions/libxt_osf.c')
    expected = [
        u'Copyright (c) 2003+ Evgeniy Polyakov <zbr@ioremap.net>',
    ]
    check_detection(expected, test_file)

def test_ics_iptables_extensions_libxt_owner_c(self):
    test_file = self.get_test_loc('ics/iptables-extensions/libxt_owner.c')
    expected = [
        u'Copyright (c) CC Computer Consultants GmbH, 2007 - 2008 Jan Engelhardt <jengelh@computergmbh.de>'
    ]
    check_detection(expected, test_file)

def test_ics_iptables_extensions_libxt_set_c(self):
    test_file = self.get_test_loc('ics/iptables-extensions/libxt_SET.c')
    expected = [
        u'Copyright (c) 2000-2002 Joakim Axelsson <gozem@linux.nu> Patrick Schaaf <bof@bof.de> Martin Josefsson <gandalf@wlug.westbo.se>',
        u'Copyright (c) 2003-2010 Jozsef Kadlecsik <kadlec@blackhole.kfki.hu>',
    ]
    check_detection(expected, test_file)

def test_ics_iptables_extensions_libxt_socket_c(self):
    test_file = self.get_test_loc('ics/iptables-extensions/libxt_socket.c')
    expected = [
        u'Copyright (c) 2007 BalaBit IT Ltd.',
    ]
    check_detection(expected, test_file)

def test_ics_iptables_extensions_libxt_string_c(self):
    test_file = self.get_test_loc('ics/iptables-extensions/libxt_string.c')
    expected = [
        u'Copyright (c) 2000 Emmanuel Roger <winfield@freegates.be>',
    ]
    check_detection(expected, test_file)

def test_ics_iptables_extensions_libxt_tcpoptstrip_c(self):
    test_file = self.get_test_loc('ics/iptables-extensions/libxt_TCPOPTSTRIP.c')
    expected = [
        u'Copyright (c) 2007 Sven Schnelle <svens@bitebene.org>',
        u'Copyright (c) CC Computer Consultants GmbH, 2007 Jan Engelhardt <jengelh@computergmbh.de>',
    ]
    check_detection(expected, test_file)

def test_ics_iptables_extensions_libxt_tee_c(self):
    test_file = self.get_test_loc('ics/iptables-extensions/libxt_TEE.c')
    expected = [
        u'Copyright (c) Sebastian Claen , 2007 Jan Engelhardt',
    ]
    check_detection(expected, test_file)

def test_ics_iptables_extensions_libxt_time_c(self):
    test_file = self.get_test_loc('ics/iptables-extensions/libxt_time.c')
    expected = [
        u'Copyright (c) CC Computer Consultants GmbH, 2007',
    ]
    check_detection(expected, test_file)

def test_ics_iptables_extensions_libxt_tproxy_c(self):
    test_file = self.get_test_loc('ics/iptables-extensions/libxt_TPROXY.c')
    expected = [
        u'Copyright (c) 2002-2008 BalaBit IT Ltd.',
    ]
    check_detection(expected, test_file)

def test_ics_iptables_extensions_libxt_u32_c(self):
    test_file = self.get_test_loc('ics/iptables-extensions/libxt_u32.c')
    expected = [
        u'(c) 2002 by Don Cohen <don-netf@isis.cs3-inc.com>',
        u'Copyright (c) CC Computer Consultants GmbH, 2007',
    ]
    check_detection(expected, test_file)
# iptables include/ fixtures (libipq and netfilter kernel headers).

def test_ics_iptables_include_libipq_libipq_h(self):
    test_file = self.get_test_loc('ics/iptables-include-libipq/libipq.h')
    expected = [
        u'Copyright (c) 2000-2001 Netfilter Core Team',
    ]
    check_detection(expected, test_file)

def test_ics_iptables_include_linux_netfilter_ipv6_h(self):
    test_file = self.get_test_loc('ics/iptables-include-linux/netfilter_ipv6.h')
    expected = [
        u'(c) 1998 Rusty Russell',
        u'(c) 1999 David Jeffery',
    ]
    check_detection(expected, test_file)

def test_ics_iptables_include_linux_netfilter_xt_audit_h(self):
    test_file = self.get_test_loc('ics/iptables-include-linux-netfilter/xt_AUDIT.h')
    expected = [
        u'(c) 2010-2011 Thomas Graf <tgraf@redhat.com>',
        u'(c) 2010-2011 Red Hat, Inc.',
    ]
    check_detection(expected, test_file)

def test_ics_iptables_include_linux_netfilter_xt_checksum_h(self):
    test_file = self.get_test_loc('ics/iptables-include-linux-netfilter/xt_CHECKSUM.h')
    expected = [
        u'(c) 2002 by Harald Welte <laforge@gnumonks.org>',
        u'(c) 2010 Red Hat Inc',
    ]
    check_detection(expected, test_file)

def test_ics_iptables_include_linux_netfilter_xt_conntrack_h(self):
    test_file = self.get_test_loc('ics/iptables-include-linux-netfilter/xt_conntrack.h')
    expected = [
        u'(c) 2001 Marc Boucher (marc@mbsi.ca).',
    ]
    check_detection(expected, test_file)

def test_ics_iptables_include_linux_netfilter_xt_dscp_h(self):
    test_file = self.get_test_loc('ics/iptables-include-linux-netfilter/xt_DSCP.h')
    expected = [
        u'(c) 2002 Harald Welte <laforge@gnumonks.org>',
    ]
    check_detection(expected, test_file)

def test_ics_iptables_include_linux_netfilter_xt_idletimer_h(self):
    test_file = self.get_test_loc('ics/iptables-include-linux-netfilter/xt_IDLETIMER.h')
    expected = [
        u'Copyright (c) 2004, 2010 Nokia Corporation',
    ]
    check_detection(expected, test_file)

def test_ics_iptables_include_linux_netfilter_xt_nfqueue_h(self):
    test_file = self.get_test_loc('ics/iptables-include-linux-netfilter/xt_NFQUEUE.h')
    expected = [
        u'(c) 2005 Harald Welte <laforge@netfilter.org>',
    ]
    check_detection(expected, test_file)

def test_ics_iptables_include_linux_netfilter_xt_osf_h(self):
    test_file = self.get_test_loc('ics/iptables-include-linux-netfilter/xt_osf.h')
    expected = [
        u'Copyright (c) 2003+ Evgeniy Polyakov <johnpol@2ka.mxt.ru>',
    ]
    check_detection(expected, test_file)

def test_ics_iptables_include_linux_netfilter_ipv4_ipt_ttl_h(self):
    test_file = self.get_test_loc('ics/iptables-include-linux-netfilter_ipv4/ipt_ttl.h')
    expected = [
        u'(c) 2000 by Harald Welte <laforge@gnumonks.org>',
    ]
    check_detection(expected, test_file)

def test_ics_iptables_include_linux_netfilter_ipv4_ipt_ulog_h(self):
    test_file = self.get_test_loc('ics/iptables-include-linux-netfilter_ipv4/ipt_ULOG.h')
    expected = [
        u'(c) 2000-2002 by Harald Welte <laforge@gnumonks.org>',
    ]
    check_detection(expected, test_file)
# iptables core, libipq man pages and libiptc fixtures.

def test_ics_iptables_iptables_ip6tables_standalone_c(self):
    test_file = self.get_test_loc('ics/iptables-iptables/ip6tables-standalone.c')
    expected = [
        u'(c) 2000-2002 by the netfilter coreteam <coreteam@netfilter.org> Paul Rusty Russell <rusty@rustcorp.com.au> Marc Boucher <marc+nf@mbsi.ca>',
    ]
    check_detection(expected, test_file)

def test_ics_iptables_iptables_iptables_xslt(self):
    test_file = self.get_test_loc('ics/iptables-iptables/iptables.xslt')
    expected = [
        u'Copyright 2006 UfoMechanic Author azez@ufomechanic.net',
    ]
    check_detection(expected, test_file)

@expectedFailure
def test_ics_iptables_iptables_iptables_xslt_extra_author(self):
    # Known gap: the holder is not separated from the trailing
    # "Author ..." clause (compare the passing test above).
    test_file = self.get_test_loc('ics/iptables-iptables/iptables.xslt')
    expected = [
        u'Copyright 2006 UfoMechanic',
    ]
    check_detection(expected, test_file)

def test_ics_iptables_iptables_iptables_apply(self):
    test_file = self.get_test_loc('ics/iptables-iptables/iptables-apply')
    expected = [
        u'Copyright (c) Martin F. Krafft <madduck@madduck.net>',
    ]
    check_detection(expected, test_file)

def test_ics_iptables_iptables_iptables_apply_8(self):
    test_file = self.get_test_loc('ics/iptables-iptables/iptables-apply.8')
    expected = [
        u'copyright by Martin F. Krafft.',
    ]
    check_detection(expected, test_file)

def test_ics_iptables_iptables_iptables_restore_c(self):
    test_file = self.get_test_loc('ics/iptables-iptables/iptables-restore.c')
    expected = [
        u'(c) 2000-2002 by Harald Welte <laforge@gnumonks.org>',
    ]
    check_detection(expected, test_file)

def test_ics_iptables_iptables_iptables_save_c(self):
    test_file = self.get_test_loc('ics/iptables-iptables/iptables-save.c')
    expected = [
        u'(c) 1999 by Paul Rusty Russell <rusty@rustcorp.com.au>',
        u'(c) 2000-2002 by Harald Welte <laforge@gnumonks.org>',
    ]
    check_detection(expected, test_file)

def test_ics_iptables_iptables_iptables_xml_c(self):
    test_file = self.get_test_loc('ics/iptables-iptables/iptables-xml.c')
    expected = [
        u'(c) 2006 Ufo Mechanic <azez@ufomechanic.net>',
        u'(c) 2000-2002 by Harald Welte <laforge@gnumonks.org>',
    ]
    check_detection(expected, test_file)

def test_ics_iptables_iptables_xtables_c_trail_name(self):
    test_file = self.get_test_loc('ics/iptables-iptables/xtables.c')
    expected = [
        u'(c) 2000-2006 by the netfilter coreteam <coreteam@netfilter.org>',
    ]
    check_detection(expected, test_file)

def test_ics_iptables_iptables_xtoptions_c(self):
    test_file = self.get_test_loc('ics/iptables-iptables/xtoptions.c')
    expected = [
        u'Copyright (c) Jan Engelhardt, 2011',
    ]
    check_detection(expected, test_file)

def test_ics_iptables_libipq_ipq_create_handle_3(self):
    test_file = self.get_test_loc('ics/iptables-libipq/ipq_create_handle.3')
    expected = [
        u'Copyright (c) 2000-2001 Netfilter Core Team',
        u'Copyright (c) 2000-2001 Netfilter Core Team.',
    ]
    check_detection(expected, test_file)

def test_ics_iptables_libipq_ipq_errstr_3(self):
    test_file = self.get_test_loc('ics/iptables-libipq/ipq_errstr.3')
    expected = [
        u'Copyright (c) 2000 Netfilter Core Team',
        u'Copyright (c) 2000-2001 Netfilter Core Team.',
    ]
    check_detection(expected, test_file)

def test_ics_iptables_libiptc_libip4tc_c(self):
    test_file = self.get_test_loc('ics/iptables-libiptc/libip4tc.c')
    expected = [
        u'(c) 1999 Paul Rusty Russell',
    ]
    check_detection(expected, test_file)
def test_ics_iptables_libiptc_libiptc_c(self):
test_file = self.get_test_loc('ics/iptables-libiptc/libiptc.c')
expected = [
u'(c) 1999 Paul Rusty Russell',
u'(c) 2000-2004 by the Netfilter Core Team <coreteam@netfilter.org>',
]
check_detection(expected, test_file)
def test_ics_iptables_m4_ax_check_linker_flags_m4(self):
test_file = self.get_test_loc('ics/iptables-m4/ax_check_linker_flags.m4')
expected = [
u'Copyright (c) 2009 Mike Frysinger <vapier@gentoo.org>',
u'Copyright (c) 2009 Steven G. Johnson <stevenj@alum.mit.edu>',
u'Copyright (c) 2009 Matteo Frigo',
]
check_detection(expected, test_file)
def test_ics_iptables_utils_nfnl_osf_c(self):
test_file = self.get_test_loc('ics/iptables-utils/nfnl_osf.c')
expected = [
u'Copyright (c) 2005 Evgeniy Polyakov <johnpol@2ka.mxt.ru>',
]
check_detection(expected, test_file)
def test_ics_iptables_utils_pf_os(self):
test_file = self.get_test_loc('ics/iptables-utils/pf.os')
expected = [
u'(c) Copyright 2000-2003 by Michal Zalewski <lcamtuf@coredump.cx>',
u'(c) Copyright 2003 by Mike Frantzen <frantzen@w4g.org>',
]
check_detection(expected, test_file)
    # --- javasqlite fixture: copyright detection in ics/javasqlite-* test files ---
    def test_ics_javasqlite_src_main_native_sqlite_jni_defs_h(self):
        test_file = self.get_test_loc('ics/javasqlite-src-main-native/sqlite_jni_defs.h')
        expected = [
            u'Copyright 2007, The Android Open Source Project',
        ]
        check_detection(expected, test_file)
    # --- javassist fixtures: copyright detection in ics/javassist* test files ---
    def test_ics_javassist_license_html(self):
        test_file = self.get_test_loc('ics/javassist/License.html')
        expected = [
            u'Copyright (c) 1999-2010 Shigeru Chiba.',
        ]
        check_detection(expected, test_file)

    def test_ics_javassist_notice(self):
        test_file = self.get_test_loc('ics/javassist/NOTICE')
        expected = [
            u'Copyright (c) 1999-2010 Shigeru Chiba.',
        ]
        check_detection(expected, test_file)

    def test_ics_javassist_readme_html(self):
        test_file = self.get_test_loc('ics/javassist/Readme.html')
        expected = [
            u'Copyright (c) 1999-2010 by Shigeru Chiba',
            u'Copyright (c) 1999-2010 Shigeru Chiba.',
            u'Copyright (c) 1999-2010 Shigeru Chiba.',
        ]
        check_detection(expected, test_file)

    def test_ics_javassist_sample_preproc_assistant_java(self):
        test_file = self.get_test_loc('ics/javassist-sample-preproc/Assistant.java')
        expected = [
            u'Copyright (c) 1999-2005 Shigeru Chiba.',
        ]
        check_detection(expected, test_file)

    def test_ics_javassist_src_main_javassist_bytearrayclasspath_java(self):
        test_file = self.get_test_loc('ics/javassist-src-main-javassist/ByteArrayClassPath.java')
        expected = [
            u'Copyright (c) 1999-2007 Shigeru Chiba.',
        ]
        check_detection(expected, test_file)

    def test_ics_javassist_src_main_javassist_ctclass_java(self):
        test_file = self.get_test_loc('ics/javassist-src-main-javassist/CtClass.java')
        expected = [
            u'Copyright (c) 1999-2007 Shigeru Chiba.',
            u'(c) 1999-2010 Shigeru Chiba.',
        ]
        check_detection(expected, test_file)

    # Known failure: the leading 'Copyright' word of the second notice is not
    # recovered (compare with the passing variant above).
    @expectedFailure
    def test_ics_javassist_src_main_javassist_ctclass_java_lead_copy(self):
        test_file = self.get_test_loc('ics/javassist-src-main-javassist/CtClass.java')
        expected = [
            u'Copyright (c) 1999-2007 Shigeru Chiba.',
            u'Copyright (c) 1999-2010 Shigeru Chiba.',
        ]
        check_detection(expected, test_file)

    def test_ics_javassist_src_main_javassist_bytecode_bytestream_java(self):
        test_file = self.get_test_loc('ics/javassist-src-main-javassist-bytecode/ByteStream.java')
        expected = [
            u'Copyright (c) 1999-2010 Shigeru Chiba.',
        ]
        check_detection(expected, test_file)

    def test_ics_javassist_src_main_javassist_bytecode_instructionprinter_java(self):
        test_file = self.get_test_loc('ics/javassist-src-main-javassist-bytecode/InstructionPrinter.java')
        expected = [
            u'Copyright (c) 1999-2007 Shigeru Chiba, and others.',
        ]
        check_detection(expected, test_file)

    def test_ics_javassist_src_main_javassist_bytecode_annotation_annotation_java(self):
        test_file = self.get_test_loc('ics/javassist-src-main-javassist-bytecode-annotation/Annotation.java')
        expected = [
            u'Copyright (c) 2004 Bill Burke.',
        ]
        check_detection(expected, test_file)

    def test_ics_javassist_src_main_javassist_bytecode_annotation_nosuchclasserror_java(self):
        test_file = self.get_test_loc('ics/javassist-src-main-javassist-bytecode-annotation/NoSuchClassError.java')
        expected = [
            u'Copyright (c) 1999-2009 Shigeru Chiba.',
        ]
        check_detection(expected, test_file)

    def test_ics_javassist_tutorial_tutorial_html(self):
        test_file = self.get_test_loc('ics/javassist-tutorial/tutorial.html')
        expected = [
            u'Copyright (c) 2000-2010 by Shigeru Chiba',
        ]
        check_detection(expected, test_file)
    # --- jdiff fixture: copyright detection in ics/jdiff-* test files ---
    def test_ics_jdiff_src_jdiff_diffmyers_java(self):
        test_file = self.get_test_loc('ics/jdiff-src-jdiff/DiffMyers.java')
        expected = [
            u'Copyright (c) 2000 Business Management Systems, Inc.',
        ]
        check_detection(expected, test_file)
    # --- jhead fixture: copyright detection in ics/jhead test files ---
    def test_ics_jhead_main_c(self):
        test_file = self.get_test_loc('ics/jhead/main.c')
        expected = [
            u'Copyright (c) 2008, The Android Open Source Project',
        ]
        check_detection(expected, test_file)
    # --- jpeg fixtures: copyright detection in ics/jpeg test files ---
    def test_ics_jpeg_ansi2knr_c(self):
        test_file = self.get_test_loc('ics/jpeg/ansi2knr.c')
        expected = [
            u'Copyright (c) 1988 Richard M. Stallman',
            u'Copyright (c) 1989 Aladdin Enterprises.',
        ]
        check_detection(expected, test_file)

    def test_ics_jpeg_cderror_h(self):
        test_file = self.get_test_loc('ics/jpeg/cderror.h')
        expected = [
            u'Copyright (c) 1994-1997, Thomas G. Lane.',
        ]
        check_detection(expected, test_file)

    def test_ics_jpeg_cdjpeg_c(self):
        test_file = self.get_test_loc('ics/jpeg/cdjpeg.c')
        expected = [
            u'Copyright (c) 1991-1997, Thomas G. Lane.',
        ]
        check_detection(expected, test_file)

    def test_ics_jpeg_cjpeg_c(self):
        test_file = self.get_test_loc('ics/jpeg/cjpeg.c')
        expected = [
            u'Copyright (c) 1991-1998, Thomas G. Lane.',
        ]
        check_detection(expected, test_file)

    def test_ics_jpeg_ckconfig_c(self):
        test_file = self.get_test_loc('ics/jpeg/ckconfig.c')
        expected = [
            u'Copyright (c) 1991-1994, Thomas G. Lane.',
        ]
        check_detection(expected, test_file)

    def test_ics_jpeg_coderules_doc(self):
        test_file = self.get_test_loc('ics/jpeg/coderules.doc')
        expected = [
            u'Copyright (c) 1991-1996, Thomas G. Lane.',
        ]
        check_detection(expected, test_file)

    def test_ics_jpeg_config_guess(self):
        test_file = self.get_test_loc('ics/jpeg/config.guess')
        expected = [
            u'Copyright (c) 1992, 93, 94, 95, 96, 1997 Free Software Foundation, Inc.',
        ]
        check_detection(expected, test_file)

    def test_ics_jpeg_config_sub(self):
        test_file = self.get_test_loc('ics/jpeg/config.sub')
        expected = [
            u'Copyright (c) 1991, 92, 93, 94, 95, 96, 1997 Free Software Foundation, Inc.',
        ]
        check_detection(expected, test_file)

    def test_ics_jpeg_configure(self):
        test_file = self.get_test_loc('ics/jpeg/configure')
        expected = [
            u'Copyright (c) 1992, 93, 94, 95, 96 Free Software Foundation, Inc.',
        ]
        check_detection(expected, test_file)

    def test_ics_jpeg_filelist_doc(self):
        test_file = self.get_test_loc('ics/jpeg/filelist.doc')
        expected = [
            u'Copyright (c) 1994-1998, Thomas G. Lane.',
        ]
        check_detection(expected, test_file)

    def test_ics_jpeg_install_doc(self):
        test_file = self.get_test_loc('ics/jpeg/install.doc')
        expected = [
            u'Copyright (c) 1991-1998, Thomas G. Lane.',
        ]
        check_detection(expected, test_file)

    def test_ics_jpeg_jcapimin_c(self):
        test_file = self.get_test_loc('ics/jpeg/jcapimin.c')
        expected = [
            u'Copyright (c) 1994-1998, Thomas G. Lane.',
        ]
        check_detection(expected, test_file)

    def test_ics_jpeg_jcapistd_c(self):
        test_file = self.get_test_loc('ics/jpeg/jcapistd.c')
        expected = [
            u'Copyright (c) 1994-1996, Thomas G. Lane.',
        ]
        check_detection(expected, test_file)

    def test_ics_jpeg_jccolor_c(self):
        test_file = self.get_test_loc('ics/jpeg/jccolor.c')
        expected = [
            u'Copyright (c) 1991-1996, Thomas G. Lane.',
        ]
        check_detection(expected, test_file)

    def test_ics_jpeg_jcphuff_c(self):
        test_file = self.get_test_loc('ics/jpeg/jcphuff.c')
        expected = [
            u'Copyright (c) 1995-1997, Thomas G. Lane.',
        ]
        check_detection(expected, test_file)

    def test_ics_jpeg_jctrans_c(self):
        test_file = self.get_test_loc('ics/jpeg/jctrans.c')
        expected = [
            u'Copyright (c) 1995-1998, Thomas G. Lane.',
        ]
        check_detection(expected, test_file)

    def test_ics_jpeg_jmem_android_c(self):
        test_file = self.get_test_loc('ics/jpeg/jmem-android.c')
        expected = [
            u'Copyright (c) 2007-2008 The Android Open Source Project',
        ]
        check_detection(expected, test_file)

    def test_ics_jpeg_jmemansi_c(self):
        test_file = self.get_test_loc('ics/jpeg/jmemansi.c')
        expected = [
            u'Copyright (c) 1992-1996, Thomas G. Lane.',
        ]
        check_detection(expected, test_file)

    def test_ics_jpeg_jmemdos_c(self):
        test_file = self.get_test_loc('ics/jpeg/jmemdos.c')
        expected = [
            u'Copyright (c) 1992-1997, Thomas G. Lane.',
        ]
        check_detection(expected, test_file)

    def test_ics_jpeg_jversion_h(self):
        test_file = self.get_test_loc('ics/jpeg/jversion.h')
        expected = [
            u'Copyright (c) 1991-1998, Thomas G. Lane.',
            u'Copyright (c) 1998, Thomas G. Lane',
        ]
        check_detection(expected, test_file)

    # Known failure: the expected libtool-style statements (holder plus
    # trailing author name and year) are not detected as written.
    @expectedFailure
    def test_ics_jpeg_ltconfig(self):
        test_file = self.get_test_loc('ics/jpeg/ltconfig')
        expected = [
            u'Copyright (c) 1996-1998 Free Software Foundation, Inc. Gordon Matzigkeit <gord@gnu.ai.mit.edu>, 1996',
            u'Copyright (c) 1996-1998 Free Software Foundation, Inc. Gordon Matzigkeit <gord@gnu.ai.mit.edu>, 1996',
        ]
        check_detection(expected, test_file)

    # Known failure: same libtool-style statement shape as ltconfig above.
    @expectedFailure
    def test_ics_jpeg_ltmain_sh(self):
        test_file = self.get_test_loc('ics/jpeg/ltmain.sh')
        expected = [
            u'Copyright (c) 1996-1998 Free Software Foundation, Inc. Gordon Matzigkeit <gord@gnu.ai.mit.edu>, 1996',
        ]
        check_detection(expected, test_file)

    def test_ics_jpeg_notice(self):
        test_file = self.get_test_loc('ics/jpeg/NOTICE')
        expected = [
            u'copyright (c) 1991-1998, Thomas G. Lane.',
        ]
        check_detection(expected, test_file)

    def test_ics_jpeg_rdcolmap_c(self):
        test_file = self.get_test_loc('ics/jpeg/rdcolmap.c')
        expected = [
            u'Copyright (c) 1994-1996, Thomas G. Lane.',
            u'Copyright (c) 1988 by Jef Poskanzer.',
        ]
        check_detection(expected, test_file)

    def test_ics_jpeg_rdppm_c(self):
        test_file = self.get_test_loc('ics/jpeg/rdppm.c')
        expected = [
            u'Copyright (c) 1991-1997, Thomas G. Lane.',
            u'Copyright (c) 1988 by Jef Poskanzer.',
        ]
        check_detection(expected, test_file)

    def test_ics_jpeg_readme(self):
        test_file = self.get_test_loc('ics/jpeg/README')
        expected = [
            u'copyright (c) 1991-1998, Thomas G. Lane.',
            u'copyright by the Free Software Foundation',
        ]
        check_detection(expected, test_file)

    def test_ics_jpeg_structure_doc(self):
        test_file = self.get_test_loc('ics/jpeg/structure.doc')
        expected = [
            u'Copyright (c) 1991-1995, Thomas G. Lane.',
        ]
        check_detection(expected, test_file)

    def test_ics_jpeg_transupp_c(self):
        test_file = self.get_test_loc('ics/jpeg/transupp.c')
        expected = [
            u'Copyright (c) 1997, Thomas G. Lane.',
        ]
        check_detection(expected, test_file)

    def test_ics_jpeg_wrgif_c(self):
        test_file = self.get_test_loc('ics/jpeg/wrgif.c')
        expected = [
            u'Copyright (c) 1991-1997, Thomas G. Lane.',
            u'Copyright (c) 1989 by Jef Poskanzer.',
        ]
        check_detection(expected, test_file)

    def test_ics_jpeg_wrjpgcom_c(self):
        test_file = self.get_test_loc('ics/jpeg/wrjpgcom.c')
        expected = [
            u'Copyright (c) 1994-1997, Thomas G. Lane.',
        ]
        check_detection(expected, test_file)
    # --- jsr305 fixtures: copyright detection in ics/jsr305* test files ---
    def test_ics_jsr305_notice_trail_name(self):
        test_file = self.get_test_loc('ics/jsr305/NOTICE')
        expected = [
            u'Copyright (c) 2007-2009, JSR305 expert group',
        ]
        check_detection(expected, test_file)

    def test_ics_jsr305_ri_src_main_java_javax_annotation_concurrent_guardedby_java(self):
        test_file = self.get_test_loc('ics/jsr305-ri-src-main-java-javax-annotation-concurrent/GuardedBy.java')
        expected = [
            u'Copyright (c) 2005 Brian Goetz',
        ]
        check_detection(expected, test_file)
    # --- kernel-headers fixtures: copyright detection in ics/kernel-headers-* files ---
    def test_ics_kernel_headers_original_asm_arm_atomic_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm/atomic.h')
        expected = [
            u'Copyright (c) 1996 Russell King.',
            u'Copyright (c) 2002 Deep Blue Solutions Ltd.',
        ]
        check_detection(expected, test_file)

    def test_ics_kernel_headers_original_asm_arm_bitops_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm/bitops.h')
        expected = [
            u'Copyright 1995, Russell King. Various',
            u'Copyright 2001, Nicolas Pitre',
        ]
        check_detection(expected, test_file)

    # Known failure: the trailing 'Various' word is not stripped from the
    # first statement (see the passing variant above).
    @expectedFailure
    def test_ics_kernel_headers_original_asm_arm_bitops_h_extra_various(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm/bitops.h')
        expected = [
            u'Copyright 1995, Russell King.',
            u'Copyright 2001, Nicolas Pitre',
        ]
        check_detection(expected, test_file)

    def test_ics_kernel_headers_original_asm_arm_cacheflush_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm/cacheflush.h')
        expected = [
            u'Copyright (c) 1999-2002 Russell King',
        ]
        check_detection(expected, test_file)

    def test_ics_kernel_headers_original_asm_arm_delay_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm/delay.h')
        expected = [
            u'Copyright (c) 1995-2004 Russell King',
        ]
        check_detection(expected, test_file)

    def test_ics_kernel_headers_original_asm_arm_domain_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm/domain.h')
        expected = [
            u'Copyright (c) 1999 Russell King.',
        ]
        check_detection(expected, test_file)

    def test_ics_kernel_headers_original_asm_arm_fpstate_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm/fpstate.h')
        expected = [
            u'Copyright (c) 1995 Russell King',
        ]
        check_detection(expected, test_file)

    def test_ics_kernel_headers_original_asm_arm_glue_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm/glue.h')
        expected = [
            u'Copyright (c) 1997-1999 Russell King',
            u'Copyright (c) 2000-2002 Deep Blue Solutions Ltd.',
        ]
        check_detection(expected, test_file)

    def test_ics_kernel_headers_original_asm_arm_hardware_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm/hardware.h')
        expected = [
            u'Copyright (c) 1996 Russell King',
        ]
        check_detection(expected, test_file)

    def test_ics_kernel_headers_original_asm_arm_ide_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm/ide.h')
        expected = [
            u'Copyright (c) 1994-1996 Linus Torvalds',
        ]
        check_detection(expected, test_file)

    def test_ics_kernel_headers_original_asm_arm_io_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm/io.h')
        expected = [
            u'Copyright (c) 1996-2000 Russell King',
        ]
        check_detection(expected, test_file)

    def test_ics_kernel_headers_original_asm_arm_locks_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm/locks.h')
        expected = [
            u'Copyright (c) 2000 Russell King',
        ]
        check_detection(expected, test_file)

    def test_ics_kernel_headers_original_asm_arm_memory_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm/memory.h')
        expected = [
            u'Copyright (c) 2000-2002 Russell King',
        ]
        check_detection(expected, test_file)

    def test_ics_kernel_headers_original_asm_arm_mtd_xip_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm/mtd-xip.h')
        expected = [
            u'Copyright (c) 2004 MontaVista Software, Inc.',
        ]
        check_detection(expected, test_file)

    def test_ics_kernel_headers_original_asm_arm_page_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm/page.h')
        expected = [
            u'Copyright (c) 1995-2003 Russell King',
        ]
        check_detection(expected, test_file)

    def test_ics_kernel_headers_original_asm_arm_param_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm/param.h')
        expected = [
            u'Copyright (c) 1995-1999 Russell King',
        ]
        check_detection(expected, test_file)

    def test_ics_kernel_headers_original_asm_arm_pgalloc_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm/pgalloc.h')
        expected = [
            u'Copyright (c) 2000-2001 Russell King',
        ]
        check_detection(expected, test_file)

    def test_ics_kernel_headers_original_asm_arm_pgtable_hwdef_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm/pgtable-hwdef.h')
        expected = [
            u'Copyright (c) 1995-2002 Russell King',
        ]
        check_detection(expected, test_file)

    def test_ics_kernel_headers_original_asm_arm_posix_types_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm/posix_types.h')
        expected = [
            u'Copyright (c) 1996-1998 Russell King.',
        ]
        check_detection(expected, test_file)

    def test_ics_kernel_headers_original_asm_arm_proc_fns_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm/proc-fns.h')
        expected = [
            u'Copyright (c) 1997-1999 Russell King',
            u'Copyright (c) 2000 Deep Blue Solutions Ltd',
        ]
        check_detection(expected, test_file)

    def test_ics_kernel_headers_original_asm_arm_procinfo_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm/procinfo.h')
        expected = [
            u'Copyright (c) 1996-1999 Russell King',
        ]
        check_detection(expected, test_file)

    def test_ics_kernel_headers_original_asm_arm_ptrace_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm/ptrace.h')
        expected = [
            u'Copyright (c) 1996-2003 Russell King',
        ]
        check_detection(expected, test_file)

    def test_ics_kernel_headers_original_asm_arm_sizes_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm/sizes.h')
        expected = [
            u'Copyright (c) ARM Limited 1998.',
        ]
        check_detection(expected, test_file)

    def test_ics_kernel_headers_original_asm_arm_smp_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm/smp.h')
        expected = [
            u'Copyright (c) 2004-2005 ARM Ltd.',
        ]
        check_detection(expected, test_file)

    def test_ics_kernel_headers_original_asm_arm_thread_info_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm/thread_info.h')
        expected = [
            u'Copyright (c) 2002 Russell King.',
        ]
        check_detection(expected, test_file)

    def test_ics_kernel_headers_original_asm_arm_timex_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm/timex.h')
        expected = [
            u'Copyright (c) 1997,1998 Russell King',
        ]
        check_detection(expected, test_file)

    def test_ics_kernel_headers_original_asm_arm_tlbflush_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm/tlbflush.h')
        expected = [
            u'Copyright (c) 1999-2003 Russell King',
        ]
        check_detection(expected, test_file)

    def test_ics_kernel_headers_original_asm_arm_unistd_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm/unistd.h')
        expected = [
            u'Copyright (c) 2001-2005 Russell King',
        ]
        check_detection(expected, test_file)

    def test_ics_kernel_headers_original_asm_arm_arch_board_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm-arch/board.h')
        expected = [
            u'Copyright (c) 2004 Nokia Corporation',
        ]
        check_detection(expected, test_file)

    def test_ics_kernel_headers_original_asm_arm_arch_board_perseus2_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm-arch/board-perseus2.h')
        expected = [
            u'Copyright 2003 by Texas Instruments Incorporated OMAP730 / Perseus2',
            u'Copyright (c) 2001 RidgeRun, Inc.',
        ]
        check_detection(expected, test_file)

    # Known failure: trailing product name is not stripped from the first
    # statement, and the URL is not kept on the second.
    @expectedFailure
    def test_ics_kernel_headers_original_asm_arm_arch_board_perseus2_h_extra_name(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm-arch/board-perseus2.h')
        expected = [
            u'Copyright 2003 by Texas Instruments Incorporated',
            u'Copyright (c) 2001 RidgeRun, Inc. (http://www.ridgerun.com)',
        ]
        check_detection(expected, test_file)

    def test_ics_kernel_headers_original_asm_arm_arch_dma_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm-arch/dma.h')
        expected = [
            u'Copyright (c) 2003 Nokia Corporation',
        ]
        check_detection(expected, test_file)

    def test_ics_kernel_headers_original_asm_arm_arch_fpga_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm-arch/fpga.h')
        expected = [
            u'Copyright (c) 2001 RidgeRun, Inc.',
            u'Copyright (c) 2002 MontaVista Software, Inc.',
            u'Copyright (c) 2004 Nokia Corporation',
        ]
        check_detection(expected, test_file)

    def test_ics_kernel_headers_original_asm_arm_arch_gpio_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm-arch/gpio.h')
        expected = [
            u'Copyright (c) 2003-2005 Nokia Corporation',
        ]
        check_detection(expected, test_file)

    def test_ics_kernel_headers_original_asm_arm_arch_gpio_switch_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm-arch/gpio-switch.h')
        expected = [
            u'Copyright (c) 2006 Nokia Corporation',
        ]
        check_detection(expected, test_file)

    def test_ics_kernel_headers_original_asm_arm_arch_hardware_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm-arch/hardware.h')
        expected = [
            u'Copyright (c) 2001 RidgeRun, Inc. Author RidgeRun, Inc.',
        ]
        check_detection(expected, test_file)

    # Known failure: the trailing 'Author RidgeRun, Inc.' text is not
    # stripped (see the passing variant above).
    @expectedFailure
    def test_ics_kernel_headers_original_asm_arm_arch_hardware_h_extra_author(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm-arch/hardware.h')
        expected = [
            u'Copyright (c) 2001 RidgeRun, Inc.',
        ]
        check_detection(expected, test_file)

    def test_ics_kernel_headers_original_asm_arm_arch_io_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm-arch/io.h')
        expected = [
            u'Copyright (c) 1997-1999 Russell King',
        ]
        check_detection(expected, test_file)

    def test_ics_kernel_headers_original_asm_arm_arch_irqs_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm-arch/irqs.h')
        expected = [
            u'Copyright (c) Greg Lonnon 2001',
        ]
        check_detection(expected, test_file)

    def test_ics_kernel_headers_original_asm_arm_arch_mcbsp_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm-arch/mcbsp.h')
        expected = [
            u'Copyright (c) 2002 RidgeRun, Inc. Author Steve Johnson',
        ]
        check_detection(expected, test_file)

    # Known failure: the trailing 'Author Steve Johnson' text is not
    # stripped (see the passing variant above).
    @expectedFailure
    def test_ics_kernel_headers_original_asm_arm_arch_mcbsp_h_extra_author(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm-arch/mcbsp.h')
        expected = [
            u'Copyright (c) 2002 RidgeRun, Inc.',
        ]
        check_detection(expected, test_file)

    def test_ics_kernel_headers_original_asm_arm_arch_memory_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm-arch/memory.h')
        expected = [
            u'Copyright (c) 2000 RidgeRun, Inc.',
            u'Copyright (c) 1999 ARM Limited',
        ]
        check_detection(expected, test_file)

    def test_ics_kernel_headers_original_asm_arm_arch_mtd_xip_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm-arch/mtd-xip.h')
        expected = [
            u'(c) 2005 MontaVista Software, Inc.',
        ]
        check_detection(expected, test_file)

    def test_ics_kernel_headers_original_asm_arm_arch_mux_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm-arch/mux.h')
        expected = [
            u'Copyright (c) 2003 - 2005 Nokia Corporation',
            u'Copyright (c) 2004 Texas Instruments',
            u'Copyright (c) 2004 Texas Instruments',
        ]
        check_detection(expected, test_file)

    def test_ics_kernel_headers_original_asm_arm_arch_timex_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm-arch/timex.h')
        expected = [
            u'Copyright (c) 2000 RidgeRun, Inc.',
        ]
        check_detection(expected, test_file)

    def test_ics_kernel_headers_original_asm_arm_arch_vmalloc_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm-arch/vmalloc.h')
        expected = [
            u'Copyright (c) 2000 Russell King.',
        ]
        check_detection(expected, test_file)

    def test_ics_kernel_headers_original_asm_generic_tlb_h_trail_other(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-asm-generic/tlb.h')
        expected = [
            u'Copyright 2001 Red Hat, Inc.',
            u'Copyright Linus Torvalds and others.',
        ]
        check_detection(expected, test_file)

    def test_ics_kernel_headers_original_asm_generic_topology_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-asm-generic/topology.h')
        expected = [
            u'Copyright (c) 2002, IBM Corp.',
        ]
        check_detection(expected, test_file)

    def test_ics_kernel_headers_original_asm_x86_acpi_32_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-asm-x86/acpi_32.h')
        expected = [
            u'Copyright (c) 2001 Paul Diefenbaugh <paul.s.diefenbaugh@intel.com>',
            u'Copyright (c) 2001 Patrick Mochel <mochel@osdl.org>',
        ]
        check_detection(expected, test_file)

    def test_ics_kernel_headers_original_asm_x86_bitops_32_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-asm-x86/bitops_32.h')
        expected = [
            u'Copyright 1992, Linus Torvalds.',
        ]
        check_detection(expected, test_file)

    def test_ics_kernel_headers_original_asm_x86_delay_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-asm-x86/delay.h')
        expected = [
            u'Copyright (c) 1993 Linus Torvalds',
        ]
        check_detection(expected, test_file)

    def test_ics_kernel_headers_original_asm_x86_fixmap_32_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-asm-x86/fixmap_32.h')
        expected = [
            u'Copyright (c) 1998 Ingo Molnar',
        ]
        check_detection(expected, test_file)

    def test_ics_kernel_headers_original_asm_x86_genapic_32_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-asm-x86/genapic_32.h')
        expected = [
            u'Copyright 2003 Andi Kleen, SuSE Labs.',
        ]
        check_detection(expected, test_file)

    def test_ics_kernel_headers_original_asm_x86_highmem_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-asm-x86/highmem.h')
        expected = [
            u'Copyright (c) 1999 Gerhard Wichert, Siemens AG',
            u'Copyright (c) 1999 Ingo Molnar <mingo@redhat.com>',
        ]
        check_detection(expected, test_file)

    def test_ics_kernel_headers_original_asm_x86_hw_irq_32_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-asm-x86/hw_irq_32.h')
        expected = [
            u'(c) 1992, 1993 Linus Torvalds',
            u'(c) 1997 Ingo Molnar',
        ]
        check_detection(expected, test_file)

    def test_ics_kernel_headers_original_asm_x86_i387_32_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-asm-x86/i387_32.h')
        expected = [
            u'Copyright (c) 1994 Linus Torvalds',
        ]
        check_detection(expected, test_file)

    def test_ics_kernel_headers_original_asm_x86_io_apic_32_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-asm-x86/io_apic_32.h')
        expected = [
            u'Copyright (c) 1997, 1998, 1999, 2000 Ingo Molnar',
        ]
        check_detection(expected, test_file)

    def test_ics_kernel_headers_original_asm_x86_ist_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-asm-x86/ist.h')
        expected = [
            u'Copyright 2002 Andy Grover <andrew.grover@intel.com>',
        ]
        check_detection(expected, test_file)

    def test_ics_kernel_headers_original_asm_x86_semaphore_32_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-asm-x86/semaphore_32.h')
        expected = [
            u'(c) Copyright 1996 Linus Torvalds',
        ]
        check_detection(expected, test_file)

    def test_ics_kernel_headers_original_asm_x86_thread_info_32_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-asm-x86/thread_info_32.h')
        expected = [
            u'Copyright (c) 2002 David Howells (dhowells@redhat.com)',
        ]
        check_detection(expected, test_file)

    def test_ics_kernel_headers_original_asm_x86_voyager_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-asm-x86/voyager.h')
        expected = [
            u'Copyright (c) 1999,2001',
        ]
        check_detection(expected, test_file)

    def test_ics_kernel_headers_original_asm_x86_xen_hypercall_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-asm-x86-xen/hypercall.h')
        expected = [
            u'Copyright (c) 2002-2004, K A Fraser',
        ]
        check_detection(expected, test_file)

    def test_ics_kernel_headers_original_linux_a1026_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/a1026.h')
        expected = [
            u'Copyright (c) 2009 HTC Corporation.',
        ]
        check_detection(expected, test_file)

    def test_ics_kernel_headers_original_linux_aio_abi_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/aio_abi.h')
        expected = [
            u'Copyright 2000,2001,2002 Red Hat.',
        ]
        check_detection(expected, test_file)

    def test_ics_kernel_headers_original_linux_android_alarm_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/android_alarm.h')
        expected = [
            u'Copyright 2006, The Android Open Source Project',
        ]
        check_detection(expected, test_file)

    def test_ics_kernel_headers_original_linux_android_pmem_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/android_pmem.h')
        expected = [
            u'Copyright (c) 2007 Google, Inc.',
        ]
        check_detection(expected, test_file)

    def test_ics_kernel_headers_original_linux_android_power_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/android_power.h')
        expected = [
            u'Copyright 2005-2006, The Android Open Source Project',
        ]
        check_detection(expected, test_file)

    def test_ics_kernel_headers_original_linux_apm_bios_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/apm_bios.h')
        expected = [
            u'Copyright 1994-2001 Stephen Rothwell (sfr@canb.auug.org.au)',
        ]
        check_detection(expected, test_file)

    def test_ics_kernel_headers_original_linux_ashmem_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/ashmem.h')
        expected = [
            u'Copyright 2008 The Android Open Source Project',
        ]
        check_detection(expected, test_file)

    def test_ics_kernel_headers_original_linux_ata_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/ata.h')
        expected = [
            u'Copyright 2003-2004 Red Hat, Inc.',
            u'Copyright 2003-2004 Jeff Garzik',
        ]
        check_detection(expected, test_file)

    def test_ics_kernel_headers_original_linux_attribute_container_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/attribute_container.h')
        expected = [
            u'Copyright (c) 2005 - James Bottomley <James.Bottomley@steeleye.com>',
        ]
        check_detection(expected, test_file)

    def test_ics_kernel_headers_original_linux_auto_fs_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/auto_fs.h')
        expected = [
            u'Copyright 1997 Transmeta Corporation',
        ]
        check_detection(expected, test_file)

    def test_ics_kernel_headers_original_linux_binder_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/binder.h')
        expected = [
            u'Copyright (c) 2008 The Android Open Source Project',
            u'Copyright (c) 2005 Palmsource, Inc.',
        ]
        check_detection(expected, test_file)

    def test_ics_kernel_headers_original_linux_bio_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/bio.h')
        expected = [
            u'Copyright (c) 2001 Jens Axboe <axboe@suse.de>',
        ]
        check_detection(expected, test_file)

    def test_ics_kernel_headers_original_linux_bmp085_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/bmp085.h')
        expected = [
            u'Copyright (c) 2010 Motorola, Inc.',
        ]
        check_detection(expected, test_file)

    def test_ics_kernel_headers_original_linux_capella_cm3602_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/capella_cm3602.h')
        expected = [
            u'Copyright (c) 2009 Google, Inc.',
        ]
        check_detection(expected, test_file)

    def test_ics_kernel_headers_original_linux_capi_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/capi.h')
        expected = [
            u'Copyright 1997 by Carsten Paeth (calle@calle.in-berlin.de)',
        ]
        check_detection(expected, test_file)

    def test_ics_kernel_headers_original_linux_cdrom_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/cdrom.h')
        expected = [
            u'Copyright (c) 1992 David Giller, rafetmad@oxy.edu 1994, 1995 Eberhard Moenkeberg, emoenke@gwdg.de 1996 David van Leeuwen',
        ]
        check_detection(expected, test_file)

    # Known failure: the trailing 'david@tm.tno.nl' email is not kept at the
    # end of the detected statement (see the passing variant above).
    @expectedFailure
    def test_ics_kernel_headers_original_linux_cdrom_h_trail_email(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/cdrom.h')
        expected = [
            u'Copyright (c) 1992 David Giller, rafetmad@oxy.edu 1994, 1995 Eberhard Moenkeberg, emoenke@gwdg.de 1996 David van Leeuwen, david@tm.tno.nl',
        ]
        check_detection(expected, test_file)

    def test_ics_kernel_headers_original_linux_clk_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/clk.h')
        expected = [
            u'Copyright (c) 2004 ARM Limited.',
        ]
        check_detection(expected, test_file)

    def test_ics_kernel_headers_original_linux_coda_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/coda.h')
        expected = [
            u'Copyright (c) 1987-1999 Carnegie Mellon University',
            u'Copyright (c) 1987-1999 Carnegie Mellon University',
        ]
        check_detection(expected, test_file)

    def test_ics_kernel_headers_original_linux_coda_fs_i_h(self):
        test_file = self.get_test_loc('ics/kernel-headers-original-linux/coda_fs_i.h')
        expected = [
            u'Copyright (c) 1998 Carnegie Mellon University',
        ]
        check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_completion_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux/completion.h')
expected = [
u'(c) Copyright 2001 Linus Torvalds',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_cpcap_audio_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux/cpcap_audio.h')
expected = [
u'Copyright (c) 2010 Google, Inc.',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_device_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux/device.h')
expected = [
u'Copyright (c) 2001-2003 Patrick Mochel <mochel@osdl.org>',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_dmaengine_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux/dmaengine.h')
expected = [
u'Copyright (c) 2004 - 2006 Intel Corporation.',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_dm_ioctl_h_trail_name(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux/dm-ioctl.h')
expected = [
u'Copyright (c) 2001 - 2003 Sistina Software (UK) Limited.',
u'Copyright (c) 2004 - 2005 Red Hat, Inc.',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_efs_dir_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux/efs_dir.h')
expected = [
u'Copyright (c) 1999 Al Smith',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_efs_fs_i_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux/efs_fs_i.h')
expected = [
u'Copyright (c) 1999 Al Smith',
u'(c) 1988 Silicon Graphics',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_ethtool_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux/ethtool.h')
expected = [
u'Copyright (c) 1998 David S. Miller (davem@redhat.com)',
u'Copyright 2001 Jeff Garzik <jgarzik@pobox.com>',
u'Portions Copyright 2001 Sun Microsystems',
u'Portions Copyright 2002 Intel',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_ext2_fs_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux/ext2_fs.h')
expected = [
u'Copyright (c) 1992, 1993, 1994, 1995 Remy Card (card@masi.ibp.fr) Laboratoire MASI',
u'Copyright (c) 1991, 1992 Linus Torvalds',
]
check_detection(expected, test_file)
@expectedFailure
def test_ics_kernel_headers_original_linux_ext2_fs_h_trail_name(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux/ext2_fs.h')
expected = [
u'Copyright (C) 1992, 1993, 1994, 1995 Remy Card (card@masi.ibp.fr) Laboratoire MASI - Institut Blaise Pascal',
u'Copyright (c) 1991, 1992 Linus Torvalds',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_ext3_fs_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux/ext3_fs.h')
expected = [
u'Copyright (c) 1992, 1993, 1994, 1995 Remy Card (card@masi.ibp.fr) Laboratoire MASI',
u'Copyright (c) 1991, 1992 Linus Torvalds',
u'(c) Daniel Phillips, 2001',
]
check_detection(expected, test_file)
@expectedFailure
def test_ics_kernel_headers_original_linux_ext3_fs_h_trail_name(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux/ext3_fs.h')
expected = [
u'Copyright (C) 1992, 1993, 1994, 1995 Remy Card (card@masi.ibp.fr) Laboratoire MASI - Institut Blaise Pascal',
u'Copyright (c) 1991, 1992 Linus Torvalds',
u'(c) Daniel Phillips, 2001',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_ftape_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux/ftape.h')
expected = [
u'Copyright (c) 1994-1996 Bas Laarhoven',
u'(c) 1996-1997 Claus-Justus Heine.',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_genhd_h_extra_generic(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux/genhd.h')
expected = [
u'Copyright (c) 1992 Drew Eckhardt',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_hdsmart_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux/hdsmart.h')
expected = [
u'Copyright (c) 1999-2000 Michael Cornwell <cornwell@acm.org>',
u'Copyright (c) 2000 Andre Hedrick <andre@linux-ide.org>',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_hid_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux/hid.h')
expected = [
u'Copyright (c) 1999 Andreas Gal',
u'Copyright (c) 2000-2001 Vojtech Pavlik',
u'Copyright (c) 2006-2007 Jiri Kosina',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_hidraw_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux/hidraw.h')
expected = [
u'Copyright (c) 2007 Jiri Kosina',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_hil_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux/hil.h')
expected = [
u'Copyright (c) 2001 Brian S. Julin',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_i2c_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux/i2c.h')
expected = [
u'Copyright (c) 1995-2000 Simon G. Vogl',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_if_ppp_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux/if_ppp.h')
expected = [
u'Copyright (c) 1989 Carnegie Mellon University.',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_inotify_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux/inotify.h')
expected = [
u'Copyright (c) 2005 John McCutchan',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_input_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux/input.h')
expected = [
u'Copyright (c) 1999-2002 Vojtech Pavlik',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_ion_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux/ion.h')
expected = [
u'Copyright (c) 2011 Google, Inc.',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_ipmi_msgdefs_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux/ipmi_msgdefs.h')
expected = [
u'Copyright 2002 MontaVista Software Inc.',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_jbd_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux/jbd.h')
expected = [
u'Copyright 1998-2000 Red Hat, Inc',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_kernelcapi_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux/kernelcapi.h')
expected = [
u'(c) Copyright 1997 by Carsten Paeth (calle@calle.in-berlin.de)',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_keychord_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux/keychord.h')
expected = [
u'Copyright (c) 2008 Google, Inc.',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_klist_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux/klist.h')
expected = [
u'Copyright (c) 2005 Patrick Mochel',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_kobject_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux/kobject.h')
expected = [
u'Copyright (c) 2002-2003 Patrick Mochel',
u'Copyright (c) 2002-2003 Open Source Development Labs',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_kref_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux/kref.h')
expected = [
u'Copyright (c) 2004 Greg Kroah-Hartman <greg@kroah.com>',
u'Copyright (c) 2004 IBM Corp.',
u'Copyright (c) 2002-2003 Patrick Mochel <mochel@osdl.org>',
u'Copyright (c) 2002-2003 Open Source Development Labs',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_ktime_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux/ktime.h')
expected = [
u'Copyright (c) 2005, Thomas Gleixner <tglx@linutronix.de>',
u'Copyright (c) 2005, Red Hat, Inc., Ingo Molnar',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_kxtf9_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux/kxtf9.h')
expected = [
u'Copyright (c) 2008-2009, Kionix, Inc.',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_leds_an30259a_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux/leds-an30259a.h')
expected = [
u'Copyright (c) 2011 Samsung Electronics Co. Ltd.',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_lis331dlh_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux/lis331dlh.h')
expected = [
u'Copyright (c) 2008-2009, Motorola',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_lockdep_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux/lockdep.h')
expected = [
u'Copyright (c) 2006 Red Hat, Inc., Ingo Molnar <mingo@redhat.com>',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_loop_h_trail_name(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux/loop.h')
expected = [
u"Copyright 1993 by Theodore Ts'o.",
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_mc146818rtc_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux/mc146818rtc.h')
expected = [
u'Copyright Torsten Duwe <duwe@informatik.uni-erlangen.de> 1993',
u'Copyright Motorola 1984',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_mempolicy_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux/mempolicy.h')
expected = [
u'Copyright 2003,2004 Andi Kleen SuSE Labs',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_moduleparam_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux/moduleparam.h')
expected = [
u'(c) Copyright 2001, 2002 Rusty Russell IBM Corporation',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_msm_kgsl_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux/msm_kgsl.h')
expected = [
u'(c) Copyright Advanced Micro Devices, Inc. 2002, 2007',
u'Copyright (c) 2008-2009 QUALCOMM USA, INC.',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_msm_mdp_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux/msm_mdp.h')
expected = [
u'Copyright (c) 2007 Google Incorporated',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_msm_q6vdec_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux/msm_q6vdec.h')
expected = [
u'Copyright (c) 2008-2009, Code Aurora Forum.',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_msm_vidc_dec_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux/msm_vidc_dec.h')
expected = [
u'Copyright (c) 2010, Code Aurora Forum.',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_msm_vidc_enc_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux/msm_vidc_enc.h')
expected = [
u'Copyright (c) 2009, Code Aurora Forum.',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_mt9t013_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux/mt9t013.h')
expected = [
u'Copyright (c) 2007, 2008 HTC, Inc',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_mutex_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux/mutex.h')
expected = [
u'Copyright (c) 2004, 2005, 2006 Red Hat, Inc., Ingo Molnar <mingo@redhat.com>',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_ncp_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux/ncp.h')
expected = [
u'Copyright (c) 1995 by Volker Lendecke',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_ncp_mount_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux/ncp_mount.h')
expected = [
u'Copyright (c) 1995, 1996 by Volker Lendecke',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_netfilter_arp_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux/netfilter_arp.h')
expected = [
u'(c) 2002 Rusty Russell',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_nfs4_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux/nfs4.h')
expected = [
u'Copyright (c) 2002 The Regents of the University of Michigan.',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_nfsacl_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux/nfsacl.h')
expected = [
u'(c) 2003 Andreas Gruenbacher <agruen@suse.de>',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_nvhdcp_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux/nvhdcp.h')
expected = [
u'Copyright (c) 2010-2011, NVIDIA Corporation.',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_pagemap_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux/pagemap.h')
expected = [
u'Copyright 1995 Linus Torvalds',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_patchkey_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux/patchkey.h')
expected = [
u'Copyright (c) 2005 Stuart Brady',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_pci_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux/pci.h')
expected = [
u'Copyright 1994, Drew Eckhardt',
u'Copyright 1997 1999 Martin Mares <mj@ucw.cz>',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_perf_event_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux/perf_event.h')
expected = [
u'Copyright (c) 2008-2009, Thomas Gleixner <tglx@linutronix.de>',
u'Copyright (c) 2008-2009, Red Hat, Inc., Ingo Molnar',
u'Copyright (c) 2008-2009, Red Hat, Inc., Peter Zijlstra',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_plist_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux/plist.h')
expected = [
u'(c) 2002-2003 Intel Corp Inaky Perez-Gonzalez <inaky.perez-gonzalez@intel.com>.',
u'(c) MontaVista Software, Inc.',
u'(c) 2005 Thomas Gleixner <tglx@linutronix.de>',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_pm_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux/pm.h')
expected = [
u'Copyright (c) 2000 Andrew Henroid',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_pn544_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux/pn544.h')
expected = [
u'Copyright (c) 2010 Trusted Logic S.A.',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_posix_acl_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux/posix_acl.h')
expected = [
u'(c) 2002 Andreas Gruenbacher, <a.gruenbacher@computer.org>',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_ppdev_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux/ppdev.h')
expected = [
u'Copyright (c) 1998-9 Tim Waugh <tim@cyberelk.demon.co.uk>',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_ppp_defs_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux/ppp_defs.h')
expected = [
u'Copyright (c) 1994 The Australian National University.',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_qic117_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux/qic117.h')
expected = [
u'Copyright (c) 1993-1996 Bas Laarhoven',
u'(c) 1997 Claus-Justus Heine.',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_quota_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux/quota.h')
expected = [
u'Copyright (c) 1982, 1986 Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_rcupdate_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux/rcupdate.h')
expected = [
u'Copyright (c) IBM Corporation, 2001',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_relay_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux/relay.h')
expected = [
u'Copyright (c) 2002, 2003 - Tom Zanussi (zanussi@us.ibm.com), IBM Corp',
u'Copyright (c) 1999, 2000, 2001, 2002 - Karim Yaghmour (karim@opersys.com)',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_rpmsg_omx_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux/rpmsg_omx.h')
expected = [
u'Copyright (c) 2011 Texas Instruments.',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_rtc_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux/rtc.h')
expected = [
u'Copyright (c) 1999 Hewlett-Packard Co.',
u'Copyright (c) 1999 Stephane Eranian <eranian@hpl.hp.com>',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_serial_core_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux/serial_core.h')
expected = [
u'Copyright (c) 2000 Deep Blue Solutions Ltd.',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_serial_reg_h_trail_name(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux/serial_reg.h')
expected = [
u"Copyright (c) 1992, 1994 by Theodore Ts'o.",
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_sfh7743_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux/sfh7743.h')
expected = [
u'Copyright (c) 2009 Motorola, Inc.',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_smb_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux/smb.h')
expected = [
u'Copyright (c) 1995, 1996 by Paal-Kr. Engstad and Volker Lendecke',
u'Copyright (c) 1997 by Volker Lendecke',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_soundcard_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux/soundcard.h')
expected = [
u'Copyright by Hannu Savolainen 1993-1997',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_spinlock_api_smp_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux/spinlock_api_smp.h')
expected = [
u'portions Copyright 2005, Red Hat, Inc., Ingo Molnar',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_sysfs_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux/sysfs.h')
expected = [
u'Copyright (c) 2001,2002 Patrick Mochel',
u'Copyright (c) 2004 Silicon Graphics, Inc.',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_taskstats_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux/taskstats.h')
expected = [
u'Copyright (c) Shailabh Nagar, IBM Corp. 2006',
u'(c) Balbir Singh, IBM Corp. 2006',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_telephony_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux/telephony.h')
expected = [
u'(c) Copyright 1999-2001 Quicknet Technologies, Inc.',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_timex_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux/timex.h')
expected = [
u'Copyright (c) David L. Mills 1993',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_ufs_fs_i_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux/ufs_fs_i.h')
expected = [
u'Copyright (c) 1996 Adrian Rodriguez (adrian@franklins-tower.rutgers.edu) Laboratory for Computer Science Research Computing Facility',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_usbdevice_fs_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux/usbdevice_fs.h')
expected = [
u'Copyright (c) 2000 Thomas Sailer (sailer@ife.ee.ethz.ch)',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_videodev2_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux/videodev2.h')
expected = [
u'Copyright (c) 1999-2007 the contributors',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_vt_buffer_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux/vt_buffer.h')
expected = [
u'(c) 1998 Martin Mares <mj@ucw.cz>',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_wanrouter_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux/wanrouter.h')
expected = [
u'Copyright (c) 1995-2000 Sangoma Technologies Inc.',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_wireless_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux/wireless.h')
expected = [
u'Copyright (c) 1997-2006 Jean Tourrilhes',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_xattr_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux/xattr.h')
expected = [
u'Copyright (c) 2001 by Andreas Gruenbacher <a.gruenbacher@computer.org>',
u'Copyright (c) 2001-2002 Silicon Graphics, Inc.',
u'Copyright (c) 2004 Red Hat, Inc., James Morris <jmorris@redhat.com>',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_zconf_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux/zconf.h')
expected = [
u'Copyright (c) 1995-1998 Jean-loup Gailly.',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_lockd_nlm_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux-lockd/nlm.h')
expected = [
u'Copyright (c) 1996, Olaf Kirch <okir@monad.swb.de>',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_lockd_xdr_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux-lockd/xdr.h')
expected = [
u'Copyright (c) 1996 Olaf Kirch <okir@monad.swb.de>',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_mtd_bbm_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux-mtd/bbm.h')
expected = [
u'Copyright (c) 2005 Samsung Electronics Kyungmin Park <kyungmin.park@samsung.com>',
u'Copyright (c) 2000-2005 Thomas Gleixner <tglx@linuxtronix.de>',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_mtd_blktrans_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux-mtd/blktrans.h')
expected = [
u'(c) 2003 David Woodhouse <dwmw2@infradead.org>',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_mtd_flashchip_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux-mtd/flashchip.h')
expected = [
u'(c) 2000 Red Hat.',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_mtd_mtd_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux-mtd/mtd.h')
expected = [
u'Copyright (c) 1999-2003 David Woodhouse <dwmw2@infradead.org>',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_mtd_nand_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux-mtd/nand.h')
expected = [
u'Copyright (c) 2000 David Woodhouse <dwmw2@mvhi.com> Steven J. Hill <sjhill@realitydiluted.com> Thomas Gleixner <tglx@linutronix.de>',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_mtd_nand_ecc_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux-mtd/nand_ecc.h')
expected = [
u'Copyright (c) 2000 Steven J. Hill (sjhill@realitydiluted.com)',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_mtd_nftl_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux-mtd/nftl.h')
expected = [
u'(c) 1999-2003 David Woodhouse <dwmw2@infradead.org>',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_mtd_onenand_regs_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux-mtd/onenand_regs.h')
expected = [
u'Copyright (c) 2005 Samsung Electronics',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_mtd_partitions_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux-mtd/partitions.h')
expected = [
u'(c) 2000 Nicolas Pitre <nico@cam.org>',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_netfilter_xt_connmark_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux-netfilter/xt_CONNMARK.h')
expected = [
u'Copyright (c) 2002,2004 MARA Systems AB',
]
check_detection(expected, test_file)
@expectedFailure
def test_ics_kernel_headers_original_linux_netfilter_xt_connmark_h_trail_url(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux-netfilter/xt_CONNMARK.h')
expected = [
u'Copyright (c) 2002,2004 MARA Systems AB <http://www.marasystems.com>',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_netfilter_ipv4_ip_queue_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux-netfilter_ipv4/ip_queue.h')
expected = [
u'(c) 2000 James Morris',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_netfilter_ipv4_ipt_dscp_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux-netfilter_ipv4/ipt_DSCP.h')
expected = [
u'(c) 2002 Harald Welte <laforge@gnumonks.org>',
u'(c) 2000 by Matthew G. Marsh <mgm@paktronix.com>',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_netfilter_ipv4_ipt_ttl_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux-netfilter_ipv4/ipt_TTL.h')
expected = [
u'(c) 2000 by Harald Welte <laforge@netfilter.org>',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_nfsd_auth_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux-nfsd/auth.h')
expected = [
u'Copyright (c) 1995, 1996 Olaf Kirch <okir@monad.swb.de>',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_nfsd_const_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux-nfsd/const.h')
expected = [
u'Copyright (c) 1995-1997 Olaf Kirch <okir@monad.swb.de>',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_nfsd_debug_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux-nfsd/debug.h')
expected = [
u'Copyright (c) 1995 Olaf Kirch <okir@monad.swb.de>',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_nfsd_interface_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux-nfsd/interface.h')
expected = [
u'Copyright (c) 2000 Neil Brown <neilb@cse.unsw.edu.au>',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_nfsd_nfsfh_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux-nfsd/nfsfh.h')
expected = [
u'Copyright (c) 1995, 1996, 1997 Olaf Kirch <okir@monad.swb.de>',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_raid_md_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux-raid/md.h')
expected = [
u'Copyright (c) 1996-98 Ingo Molnar, Gadi Oxman',
u'Copyright (c) 1994-96 Marc ZYNGIER <zyngier@ufr-info-p7.ibp.fr>',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_raid_md_k_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux-raid/md_k.h')
expected = [
u'Copyright (c) 1996-98 Ingo Molnar, Gadi Oxman',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_sunrpc_auth_gss_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux-sunrpc/auth_gss.h')
expected = [
u'Copyright (c) 2000 The Regents of the University of Michigan',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_sunrpc_clnt_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux-sunrpc/clnt.h')
expected = [
u'Copyright (c) 1995, 1996, Olaf Kirch <okir@monad.swb.de>',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_sunrpc_gss_asn1_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux-sunrpc/gss_asn1.h')
expected = [
u'Copyright (c) 2000 The Regents of the University of Michigan.',
u'Copyright 1995 by the Massachusetts Institute of Technology.',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_sunrpc_gss_err_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux-sunrpc/gss_err.h')
expected = [
u'Copyright (c) 2002 The Regents of the University of Michigan.',
u'Copyright 1993 by OpenVision Technologies, Inc.',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_linux_sunrpc_timer_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-linux-sunrpc/timer.h')
expected = [
u'Copyright (c) 2002 Trond Myklebust <trond.myklebust@fys.uio.no>',
]
check_detection(expected, test_file)
def test_ics_kernel_headers_original_sound_asound_h(self):
test_file = self.get_test_loc('ics/kernel-headers-original-sound/asound.h')
expected = [
u'Copyright (c) 1994-2003 by Jaroslav Kysela <perex@perex.cz>, Abramo Bagnara <abramo@alsa-project.org>',
]
check_detection(expected, test_file)
def test_ics_libffi_aclocal_m4(self):
test_file = self.get_test_loc('ics/libffi/aclocal.m4')
expected = [
u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.',
u'Copyright (c) 2002, 2003, 2005, 2006 Free Software Foundation, Inc.',
u'Copyright (c) 2001, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
u'Copyright (c) 2001, 2003, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 1997, 2000, 2001, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
u'Copyright (c) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
u'Copyright (c) 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
u'Copyright (c) 2001, 2003, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 2003, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 1996, 1998, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 2001, 2002, 2003, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 1999, 2000, 2001, 2003, 2004, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 1997, 1999, 2000, 2001, 2003, 2004, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
u'Copyright (c) 2001, 2002, 2003, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 1996, 1997, 2000, 2001, 2003, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 2001, 2003, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 2006 Free Software Foundation, Inc.',
u'Copyright (c) 2004, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_libffi_android_mk(self):
test_file = self.get_test_loc('ics/libffi/Android.mk')
expected = [
u'Copyright 2007 The Android Open Source Project',
]
check_detection(expected, test_file)
def test_ics_libffi_configure(self):
test_file = self.get_test_loc('ics/libffi/configure')
expected = [
u'Copyright (c) 1992, 1993, 1994, 1995, 1996, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
u'Copyright (c) 1992, 1993, 1994, 1995, 1996, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.',
u'Copyright (c) 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_libffi_depcomp(self):
test_file = self.get_test_loc('ics/libffi/depcomp')
expected = [
u'Copyright (c) 1999, 2000, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_libffi_license(self):
test_file = self.get_test_loc('ics/libffi/LICENSE')
expected = [
u'Copyright (c) 1996-2008 Red Hat, Inc and others.',
]
check_detection(expected, test_file)
def test_ics_libffi_ltcf_c_sh(self):
test_file = self.get_test_loc('ics/libffi/ltcf-c.sh')
expected = [
u'Copyright (c) 1996-2000, 2001 Free Software Foundation, Inc.',
u'Gordon Matzigkeit <gord@gnu.ai.mit.edu>, 1996',
]
check_detection(expected, test_file)
def test_ics_libffi_ltcf_cxx_sh(self):
test_file = self.get_test_loc('ics/libffi/ltcf-cxx.sh')
expected = [
u'Copyright (c) 1996-1999, 2000, 2001, 2003 Free Software Foundation, Inc.',
u'Gordon Matzigkeit <gord@gnu.ai.mit.edu>, 1996',
]
check_detection(expected, test_file)
def test_ics_libffi_ltconfig(self):
test_file = self.get_test_loc('ics/libffi/ltconfig')
expected = [
u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001 Free Software Foundation, Inc.',
u'Gordon Matzigkeit <gord@gnu.ai.mit.edu>, 1996',
u'Copyright (c) 1996-2000 Free Software Foundation, Inc.',
u'Gordon Matzigkeit <gord@gnu.ai.mit.edu>, 1996',
u'Copyright (c) 1999-2000 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_libffi_ltmain_sh(self):
test_file = self.get_test_loc('ics/libffi/ltmain.sh')
expected = [
u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.',
u'Gordon Matzigkeit <gord@gnu.ai.mit.edu>, 1996',
u'Copyright (c) 2007 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_libffi_missing(self):
test_file = self.get_test_loc('ics/libffi/missing')
expected = [
u'Copyright (c) 1996, 1997, 1999, 2000, 2002, 2003, 2004 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_libffi_darwin_x86_ffi_h(self):
test_file = self.get_test_loc('ics/libffi-darwin-x86/ffi.h')
expected = [
u'Copyright (c) 1996-2003, 2007, 2008 Red Hat, Inc.',
]
check_detection(expected, test_file)
def test_ics_libffi_darwin_x86_ffitarget_h(self):
test_file = self.get_test_loc('ics/libffi-darwin-x86/ffitarget.h')
expected = [
u'Copyright (c) 1996-2003 Red Hat, Inc.',
u'Copyright (c) 2008 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_libffi_doc_libffi_texi(self):
test_file = self.get_test_loc('ics/libffi-doc/libffi.texi')
expected = [
u'Copyright 2008 Red Hat, Inc.',
]
check_detection(expected, test_file)
def test_ics_libffi_include_ffi_h_in(self):
test_file = self.get_test_loc('ics/libffi-include/ffi.h.in')
expected = [
u'Copyright (c) 1996-2003, 2007, 2008 Red Hat, Inc.',
]
check_detection(expected, test_file)
def test_ics_libffi_include_ffi_common_h(self):
test_file = self.get_test_loc('ics/libffi-include/ffi_common.h')
expected = [
u'Copyright (c) 1996 Red Hat, Inc.',
u'Copyright (c) 2007 Free Software Foundation, Inc',
]
check_detection(expected, test_file)
def test_ics_libffi_src_closures_c(self):
test_file = self.get_test_loc('ics/libffi-src/closures.c')
expected = [
u'Copyright (c) 2007 Red Hat, Inc.',
u'Copyright (c) 2007 Free Software Foundation, Inc',
]
check_detection(expected, test_file)
def test_ics_libffi_src_debug_c(self):
test_file = self.get_test_loc('ics/libffi-src/debug.c')
expected = [
u'Copyright (c) 1996 Red Hat, Inc.',
]
check_detection(expected, test_file)
def test_ics_libffi_src_java_raw_api_c(self):
test_file = self.get_test_loc('ics/libffi-src/java_raw_api.c')
expected = [
u'Copyright (c) 1999, 2007, 2008 Red Hat, Inc.',
]
check_detection(expected, test_file)
def test_ics_libffi_src_prep_cif_c(self):
test_file = self.get_test_loc('ics/libffi-src/prep_cif.c')
expected = [
u'Copyright (c) 1996, 1998, 2007 Red Hat, Inc.',
]
check_detection(expected, test_file)
def test_ics_libffi_src_raw_api_c(self):
test_file = self.get_test_loc('ics/libffi-src/raw_api.c')
expected = [
u'Copyright (c) 1999, 2008 Red Hat, Inc.',
]
check_detection(expected, test_file)
def test_ics_libffi_src_types_c(self):
test_file = self.get_test_loc('ics/libffi-src/types.c')
expected = [
u'Copyright (c) 1996, 1998 Red Hat, Inc.',
]
check_detection(expected, test_file)
def test_ics_libffi_src_alpha_ffi_c(self):
test_file = self.get_test_loc('ics/libffi-src-alpha/ffi.c')
expected = [
u'Copyright (c) 1998, 2001, 2007, 2008 Red Hat, Inc.',
]
check_detection(expected, test_file)
def test_ics_libffi_src_alpha_ffitarget_h(self):
test_file = self.get_test_loc('ics/libffi-src-alpha/ffitarget.h')
expected = [
u'Copyright (c) 1996-2003 Red Hat, Inc.',
]
check_detection(expected, test_file)
def test_ics_libffi_src_arm_ffi_c(self):
test_file = self.get_test_loc('ics/libffi-src-arm/ffi.c')
expected = [
u'Copyright (c) 1998, 2008 Red Hat, Inc.',
]
check_detection(expected, test_file)
def test_ics_libffi_src_cris_ffi_c(self):
test_file = self.get_test_loc('ics/libffi-src-cris/ffi.c')
expected = [
u'Copyright (c) 1998 Cygnus Solutions',
u'Copyright (c) 2004 Simon Posnjak',
u'Copyright (c) 2005 Axis Communications AB',
u'Copyright (c) 2007 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_libffi_src_frv_ffi_c(self):
test_file = self.get_test_loc('ics/libffi-src-frv/ffi.c')
expected = [
u'Copyright (c) 2004 Anthony Green',
u'Copyright (c) 2007 Free Software Foundation, Inc.',
u'Copyright (c) 2008 Red Hat, Inc.',
]
check_detection(expected, test_file)
def test_ics_libffi_src_frv_ffitarget_h(self):
test_file = self.get_test_loc('ics/libffi-src-frv/ffitarget.h')
expected = [
u'Copyright (c) 1996-2004 Red Hat, Inc.',
]
check_detection(expected, test_file)
def test_ics_libffi_src_ia64_ffi_c(self):
test_file = self.get_test_loc('ics/libffi-src-ia64/ffi.c')
expected = [
u'Copyright (c) 1998, 2007, 2008 Red Hat, Inc.',
u'Copyright (c) 2000 Hewlett Packard Company',
]
check_detection(expected, test_file)
def test_ics_libffi_src_ia64_ia64_flags_h(self):
test_file = self.get_test_loc('ics/libffi-src-ia64/ia64_flags.h')
expected = [
u'Copyright (c) 2000 Hewlett Packard Company',
]
check_detection(expected, test_file)
def test_ics_libffi_src_m32r_ffi_c(self):
test_file = self.get_test_loc('ics/libffi-src-m32r/ffi.c')
expected = [
u'Copyright (c) 2004 Renesas Technology',
u'Copyright (c) 2008 Red Hat, Inc.',
]
check_detection(expected, test_file)
def test_ics_libffi_src_m32r_ffitarget_h(self):
test_file = self.get_test_loc('ics/libffi-src-m32r/ffitarget.h')
expected = [
u'Copyright (c) 2004 Renesas Technology.',
]
check_detection(expected, test_file)
def test_ics_libffi_src_mips_ffi_c(self):
test_file = self.get_test_loc('ics/libffi-src-mips/ffi.c')
expected = [
u'Copyright (c) 1996, 2007, 2008 Red Hat, Inc.',
u'Copyright (c) 2008 David Daney',
]
check_detection(expected, test_file)
def test_ics_libffi_src_pa_ffi_c(self):
test_file = self.get_test_loc('ics/libffi-src-pa/ffi.c')
expected = [
u'(c) 2003-2004 Randolph Chung <tausq@debian.org>',
u'(c) 2008 Red Hat, Inc.',
u'(c) 2006 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_libffi_src_powerpc_asm_h(self):
test_file = self.get_test_loc('ics/libffi-src-powerpc/asm.h')
expected = [
u'Copyright (c) 1998 Geoffrey Keating',
]
check_detection(expected, test_file)
def test_ics_libffi_src_powerpc_ffi_c(self):
test_file = self.get_test_loc('ics/libffi-src-powerpc/ffi.c')
expected = [
u'Copyright (c) 1998 Geoffrey Keating',
u'Copyright (c) 2007 Free Software Foundation, Inc',
u'Copyright (c) 2008 Red Hat, Inc',
]
check_detection(expected, test_file)
def test_ics_libffi_src_powerpc_ffi_darwin_c(self):
test_file = self.get_test_loc('ics/libffi-src-powerpc/ffi_darwin.c')
expected = [
u'Copyright (c) 1998 Geoffrey Keating',
u'Copyright (c) 2001 John Hornkvist',
u'Copyright (c) 2002, 2006, 2007 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
@expectedFailure
def test_ics_libffi_src_powerpc_ffitarget_h(self):
test_file = self.get_test_loc('ics/libffi-src-powerpc/ffitarget.h')
expected = [
u'Copyright (c) 1996-2003 Red Hat, Inc.',
u'Copyright (c) 2007 Free Software Foundation, Inc',
]
check_detection(expected, test_file)
def test_ics_libffi_src_s390_ffi_c(self):
test_file = self.get_test_loc('ics/libffi-src-s390/ffi.c')
expected = [
u'Copyright (c) 2000, 2007 Software AG',
u'Copyright (c) 2008 Red Hat, Inc',
]
check_detection(expected, test_file)
def test_ics_libffi_src_sh_ffi_c(self):
test_file = self.get_test_loc('ics/libffi-src-sh/ffi.c')
expected = [
u'Copyright (c) 2002, 2003, 2004, 2005, 2006, 2007, 2008 Kaz Kojima',
u'Copyright (c) 2008 Red Hat, Inc.',
]
check_detection(expected, test_file)
def test_ics_libffi_src_sh64_ffi_c(self):
test_file = self.get_test_loc('ics/libffi-src-sh64/ffi.c')
expected = [
u'Copyright (c) 2003, 2004 Kaz Kojima',
u'Copyright (c) 2008 Anthony Green',
]
check_detection(expected, test_file)
def test_ics_libffi_src_sparc_ffi_c(self):
test_file = self.get_test_loc('ics/libffi-src-sparc/ffi.c')
expected = [
u'Copyright (c) 1996, 2003, 2004, 2007, 2008 Red Hat, Inc.',
]
check_detection(expected, test_file)
def test_ics_libffi_src_x86_ffi_c(self):
test_file = self.get_test_loc('ics/libffi-src-x86/ffi.c')
expected = [
u'Copyright (c) 1996, 1998, 1999, 2001, 2007, 2008 Red Hat, Inc.',
u'Copyright (c) 2002 Ranjit Mathew',
u'Copyright (c) 2002 Bo Thorsen',
u'Copyright (c) 2002 Roger Sayle',
u'Copyright (c) 2008 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_libffi_src_x86_ffi64_c(self):
test_file = self.get_test_loc('ics/libffi-src-x86/ffi64.c')
expected = [
u'Copyright (c) 2002, 2007 Bo Thorsen <bo@suse.de>',
u'Copyright (c) 2008 Red Hat, Inc.',
]
check_detection(expected, test_file)
def test_ics_libffi_testsuite_run_all_tests(self):
test_file = self.get_test_loc('ics/libffi-testsuite/run-all-tests')
expected = [
u'Copyright 2009 The Android Open Source Project',
]
check_detection(expected, test_file)
def test_ics_libffi_testsuite_lib_libffi_dg_exp(self):
test_file = self.get_test_loc('ics/libffi-testsuite-lib/libffi-dg.exp')
expected = [
u'Copyright (c) 2003, 2005, 2008 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_libffi_testsuite_lib_target_libpath_exp(self):
test_file = self.get_test_loc('ics/libffi-testsuite-lib/target-libpath.exp')
expected = [
u'Copyright (c) 2004, 2005, 2007 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_libffi_testsuite_lib_wrapper_exp(self):
test_file = self.get_test_loc('ics/libffi-testsuite-lib/wrapper.exp')
expected = [
u'Copyright (c) 2004, 2007 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_libgsm_changelog(self):
test_file = self.get_test_loc('ics/libgsm/ChangeLog')
expected = [
u'Copyright 1992 by Jutta Degener and Carsten Bormann, Technische Universitaet Berlin.',
]
check_detection(expected, test_file)
def test_ics_libgsm_notice(self):
test_file = self.get_test_loc('ics/libgsm/NOTICE')
expected = [
u'Copyright 1992, 1993, 1994 by Jutta Degener and Carsten Bormann, Technische Universitaet Berlin',
]
check_detection(expected, test_file)
def test_ics_libgsm_readme(self):
test_file = self.get_test_loc('ics/libgsm/README')
expected = [
u'Copyright 1992 by Jutta Degener and Carsten Bormann, Technische Universitaet Berlin.',
]
check_detection(expected, test_file)
def test_ics_libgsm_inc_config_h(self):
test_file = self.get_test_loc('ics/libgsm-inc/config.h')
expected = [
u'Copyright 1992 by Jutta Degener and Carsten Bormann, Technische Universitaet Berlin.',
]
check_detection(expected, test_file)
def test_ics_libgsm_man_gsm_3(self):
test_file = self.get_test_loc('ics/libgsm-man/gsm.3')
expected = [
u'Copyright 1992 by Jutta Degener and Carsten Bormann, Technische Universitaet Berlin.',
]
check_detection(expected, test_file)
def test_ics_libgsm_man_gsm_option_3(self):
test_file = self.get_test_loc('ics/libgsm-man/gsm_option.3')
expected = [
u'Copyright 1992-1995 by Jutta Degener and Carsten Bormann, Technische Universitaet Berlin.',
]
check_detection(expected, test_file)
def test_ics_liblzf_license(self):
test_file = self.get_test_loc('ics/liblzf/LICENSE')
expected = [
u'Copyright (c) 2000-2009 Marc Alexander Lehmann <schmorp@schmorp.de>',
]
check_detection(expected, test_file)
def test_ics_liblzf_lzf_c(self):
test_file = self.get_test_loc('ics/liblzf/lzf.c')
expected = [
u'Copyright (c) 2006 Stefan Traby <stefan@hello-penguin.com>',
]
check_detection(expected, test_file)
def test_ics_liblzf_lzf_h(self):
test_file = self.get_test_loc('ics/liblzf/lzf.h')
expected = [
u'Copyright (c) 2000-2008 Marc Alexander Lehmann <schmorp@schmorp.de>',
]
check_detection(expected, test_file)
def test_ics_liblzf_lzf_c_c(self):
test_file = self.get_test_loc('ics/liblzf/lzf_c.c')
expected = [
u'Copyright (c) 2000-2010 Marc Alexander Lehmann <schmorp@schmorp.de>',
]
check_detection(expected, test_file)
def test_ics_liblzf_lzfp_h(self):
test_file = self.get_test_loc('ics/liblzf/lzfP.h')
expected = [
u'Copyright (c) 2000-2007 Marc Alexander Lehmann <schmorp@schmorp.de>',
]
check_detection(expected, test_file)
def test_ics_liblzf_cs_clzf_cs(self):
test_file = self.get_test_loc('ics/liblzf-cs/CLZF.cs')
expected = [
u'Copyright (c) 2005 Oren J. Maurice <oymaurice@hazorea.org.il>',
]
check_detection(expected, test_file)
def test_ics_libnfc_nxp_inc_nfc_custom_config_h(self):
test_file = self.get_test_loc('ics/libnfc-nxp-inc/nfc_custom_config.h')
expected = [
u'Copyright (c) 2010 NXP Semiconductors',
]
check_detection(expected, test_file)
def test_ics_libnl_headers_netlink_generic_h(self):
test_file = self.get_test_loc('ics/libnl-headers/netlink-generic.h')
expected = [
u'Copyright (c) 2003-2006 Thomas Graf <tgraf@suug.ch>',
]
check_detection(expected, test_file)
def test_ics_libnl_headers_netlink_local_h(self):
test_file = self.get_test_loc('ics/libnl-headers/netlink-local.h')
expected = [
u'Copyright (c) 2003-2008 Thomas Graf <tgraf@suug.ch>',
]
check_detection(expected, test_file)
def test_ics_libnl_headers_netlink_errno_h(self):
test_file = self.get_test_loc('ics/libnl-headers-netlink/errno.h')
expected = [
u'Copyright (c) 2008 Thomas Graf <tgraf@suug.ch>',
]
check_detection(expected, test_file)
def test_ics_libnl_headers_netlink_object_api_h(self):
test_file = self.get_test_loc('ics/libnl-headers-netlink/object-api.h')
expected = [
u'Copyright (c) 2003-2007 Thomas Graf <tgraf@suug.ch>',
]
check_detection(expected, test_file)
def test_ics_libnl_headers_netlink_cli_utils_h(self):
test_file = self.get_test_loc('ics/libnl-headers-netlink-cli/utils.h')
expected = [
u'Copyright (c) 2003-2009 Thomas Graf <tgraf@suug.ch>',
]
check_detection(expected, test_file)
def test_ics_libnl_headers_netlink_netfilter_ct_h(self):
test_file = self.get_test_loc('ics/libnl-headers-netlink-netfilter/ct.h')
expected = [
u'Copyright (c) 2003-2008 Thomas Graf <tgraf@suug.ch>',
u'Copyright (c) 2007 Philip Craig <philipc@snapgear.com>',
u'Copyright (c) 2007 Secure Computing Corporation',
]
check_detection(expected, test_file)
def test_ics_libnl_headers_netlink_route_addr_h(self):
test_file = self.get_test_loc('ics/libnl-headers-netlink-route/addr.h')
expected = [
u'Copyright (c) 2003-2008 Thomas Graf <tgraf@suug.ch>',
u'Copyright (c) 2003-2006 Baruch Even <baruch@ev-en.org>, Mediatrix Telecom, inc.',
]
check_detection(expected, test_file)
@expectedFailure
def test_ics_libnl_headers_netlink_route_addr_h_trail_email(self):
test_file = self.get_test_loc('ics/libnl-headers-netlink-route/addr.h')
expected = [
u'Copyright (c) 2003-2008 Thomas Graf <tgraf@suug.ch>',
u'Copyright (c) 2003-2006 Baruch Even <baruch@ev-en.org>, Mediatrix Telecom, inc. <ericb@mediatrix.com>',
]
check_detection(expected, test_file)
def test_ics_libpcap_aclocal_m4_trail_name(self):
test_file = self.get_test_loc('ics/libpcap/aclocal.m4')
expected = [
u'Copyright (c) 1995, 1996, 1997, 1998 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_libpcap_atmuni31_h(self):
test_file = self.get_test_loc('ics/libpcap/atmuni31.h')
expected = [
u'Copyright (c) 1997 Yen Yen Lim and North Dakota State University',
]
check_detection(expected, test_file)
def test_ics_libpcap_bpf_dump_c(self):
test_file = self.get_test_loc('ics/libpcap/bpf_dump.c')
expected = [
u'Copyright (c) 1992, 1993, 1994, 1995, 1996 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_libpcap_bpf_image_c(self):
test_file = self.get_test_loc('ics/libpcap/bpf_image.c')
expected = [
u'Copyright (c) 1990, 1991, 1992, 1994, 1995, 1996 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_libpcap_config_guess(self):
test_file = self.get_test_loc('ics/libpcap/config.guess')
expected = [
u'Copyright (c) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.',
u'Copyright (c) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_libpcap_configure_in_trail_name(self):
test_file = self.get_test_loc('ics/libpcap/configure.in')
expected = [
u'Copyright (c) 1994, 1995, 1996, 1997 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_libpcap_etherent_c(self):
test_file = self.get_test_loc('ics/libpcap/etherent.c')
expected = [
u'Copyright (c) 1990, 1993, 1994, 1995, 1996 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_libpcap_ethertype_h(self):
test_file = self.get_test_loc('ics/libpcap/ethertype.h')
expected = [
u'Copyright (c) 1993, 1994, 1996 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_libpcap_fad_getad_c(self):
test_file = self.get_test_loc('ics/libpcap/fad-getad.c')
expected = [
u'Copyright (c) 1994, 1995, 1996, 1997, 1998 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_libpcap_fad_win32_c(self):
test_file = self.get_test_loc('ics/libpcap/fad-win32.c')
expected = [
u'Copyright (c) 2002 - 2005 NetGroup, Politecnico di Torino (Italy)',
u'Copyright (c) 2005 - 2006 CACE Technologies, Davis (California)',
]
check_detection(expected, test_file)
def test_ics_libpcap_gencode_c(self):
test_file = self.get_test_loc('ics/libpcap/gencode.c')
expected = [
u'Copyright (c) 1990, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_libpcap_gencode_h(self):
test_file = self.get_test_loc('ics/libpcap/gencode.h')
expected = [
u'Copyright (c) 1990, 1991, 1992, 1993, 1994, 1995, 1996 The Regents of the University of California.',
u'Copyright (c) 1997 Yen Yen Lim and North Dakota State University',
]
check_detection(expected, test_file)
def test_ics_libpcap_grammar_c(self):
test_file = self.get_test_loc('ics/libpcap/grammar.c')
expected = [
u'Copyright (c) 1984, 1989, 1990, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 1988, 1989, 1990, 1991, 1992, 1993, 1994, 1995, 1996 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_libpcap_llc_h(self):
test_file = self.get_test_loc('ics/libpcap/llc.h')
expected = [
u'Copyright (c) 1993, 1994, 1997 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_libpcap_makefile_in(self):
test_file = self.get_test_loc('ics/libpcap/Makefile.in')
expected = [
u'Copyright (c) 1993, 1994, 1995, 1996 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_libpcap_mkdep(self):
test_file = self.get_test_loc('ics/libpcap/mkdep')
expected = [
u'Copyright (c) 1994, 1996 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_libpcap_nlpid_h(self):
test_file = self.get_test_loc('ics/libpcap/nlpid.h')
expected = [
u'Copyright (c) 1996 Juniper Networks, Inc.',
]
check_detection(expected, test_file)
def test_ics_libpcap_optimize_c(self):
test_file = self.get_test_loc('ics/libpcap/optimize.c')
expected = [
u'Copyright (c) 1988, 1989, 1990, 1991, 1993, 1994, 1995, 1996 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_libpcap_pcap_3_trail_name(self):
test_file = self.get_test_loc('ics/libpcap/pcap.3')
expected = [
u'Copyright (c) 1994, 1996, 1997 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_libpcap_pcap_c(self):
test_file = self.get_test_loc('ics/libpcap/pcap.c')
expected = [
u'Copyright (c) 1993, 1994, 1995, 1996, 1997, 1998 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_libpcap_pcap_h(self):
test_file = self.get_test_loc('ics/libpcap/pcap.h')
expected = [
u'Copyright (c) 1993, 1994, 1995, 1996, 1997 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_libpcap_pcap_bpf_c(self):
test_file = self.get_test_loc('ics/libpcap/pcap-bpf.c')
expected = [
u'Copyright (c) 1993, 1994, 1995, 1996, 1998 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_libpcap_pcap_bpf_h(self):
test_file = self.get_test_loc('ics/libpcap/pcap-bpf.h')
expected = [
u'Copyright (c) 1990, 1991, 1992, 1993, 1994, 1995, 1996, 1997 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_libpcap_pcap_dlpi_c(self):
test_file = self.get_test_loc('ics/libpcap/pcap-dlpi.c')
expected = [
u'Copyright (c) 1993, 1994, 1995, 1996, 1997 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_libpcap_pcap_int_h(self):
test_file = self.get_test_loc('ics/libpcap/pcap-int.h')
expected = [
u'Copyright (c) 1994, 1995, 1996 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_libpcap_pcap_linux_c(self):
test_file = self.get_test_loc('ics/libpcap/pcap-linux.c')
expected = [
u'Copyright (c) 2000 Torsten Landschoff <torsten@debian.org> Sebastian Krahmer <krahmer@cs.uni-potsdam.de>',
]
check_detection(expected, test_file)
def test_ics_libpcap_pcap_namedb_h(self):
test_file = self.get_test_loc('ics/libpcap/pcap-namedb.h')
expected = [
u'Copyright (c) 1994, 1996 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_libpcap_pcap_nit_c(self):
test_file = self.get_test_loc('ics/libpcap/pcap-nit.c')
expected = [
u'Copyright (c) 1990, 1991, 1992, 1993, 1994, 1995, 1996 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_libpcap_pcap_nit_h(self):
test_file = self.get_test_loc('ics/libpcap/pcap-nit.h')
expected = [
u'Copyright (c) 1990, 1994 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_libpcap_pcap_null_c(self):
test_file = self.get_test_loc('ics/libpcap/pcap-null.c')
expected = [
u'Copyright (c) 1994, 1995, 1996 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_libpcap_pcap_stdinc_h_trail_name(self):
test_file = self.get_test_loc('ics/libpcap/pcap-stdinc.h')
expected = [
u'Copyright (c) 2002 - 2003 NetGroup, Politecnico di Torino (Italy)',
]
check_detection(expected, test_file)
def test_ics_libpcap_pcap_win32_c(self):
test_file = self.get_test_loc('ics/libpcap/pcap-win32.c')
expected = [
u'Copyright (c) 1999 - 2005 NetGroup, Politecnico di Torino (Italy)',
u'Copyright (c) 2005 - 2007 CACE Technologies, Davis (California)',
]
check_detection(expected, test_file)
def test_ics_libpcap_ppp_h(self):
test_file = self.get_test_loc('ics/libpcap/ppp.h')
expected = [
u'Copyright 1989 by Carnegie Mellon.',
]
check_detection(expected, test_file)
def test_ics_libpcap_scanner_c(self):
test_file = self.get_test_loc('ics/libpcap/scanner.c')
expected = [
u'Copyright (c) 1988, 1989, 1990, 1991, 1992, 1993, 1994, 1995, 1996, 1997 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_libpcap_tokdefs_h(self):
test_file = self.get_test_loc('ics/libpcap/tokdefs.h')
expected = [
u'Copyright (c) 1984, 1989, 1990, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_libpcap_doc_pcap_html(self):
test_file = self.get_test_loc('ics/libpcap-doc/pcap.html')
expected = [
u'Copyright (c) The Internet Society (2004).',
u'Copyright (c) The Internet Society (2004).',
]
check_detection(expected, test_file)
def test_ics_libpcap_doc_pcap_txt(self):
test_file = self.get_test_loc('ics/libpcap-doc/pcap.txt')
expected = [
u'Copyright (c) The Internet Society (2004).',
u'Full Copyright Statement',
u'Copyright (c) The Internet Society (2004).',
]
check_detection(expected, test_file)
def test_ics_libpcap_lbl_os_sunos4_h(self):
test_file = self.get_test_loc('ics/libpcap-lbl/os-sunos4.h')
expected = [
u'Copyright (c) 1989, 1990, 1993, 1994, 1995, 1996 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_libpcap_lbl_os_ultrix4_h(self):
test_file = self.get_test_loc('ics/libpcap-lbl/os-ultrix4.h')
expected = [
u'Copyright (c) 1990, 1993, 1994, 1995, 1996 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_libpcap_missing_snprintf_c(self):
test_file = self.get_test_loc('ics/libpcap-missing/snprintf.c')
expected = [
u'Copyright (c) 1995-1999 Kungliga Tekniska Hogskolan (Royal Institute of Technology, Stockholm, Sweden).',
]
check_detection(expected, test_file)
def test_ics_libvpx_args_c(self):
test_file = self.get_test_loc('ics/libvpx/args.c')
expected = [
u'Copyright (c) 2010 The WebM project',
]
check_detection(expected, test_file)
def test_ics_libvpx_docs_mk(self):
test_file = self.get_test_loc('ics/libvpx/docs.mk')
expected = [
u'Copyright (c) 2010 The WebM project',
]
check_detection(expected, test_file)
def test_ics_libvpx_license(self):
test_file = self.get_test_loc('ics/libvpx/LICENSE')
expected = [
u'Copyright (c) 2010, Google Inc.',
]
check_detection(expected, test_file)
def test_ics_libvpx_y4minput_c(self):
test_file = self.get_test_loc('ics/libvpx/y4minput.c')
expected = [
u'Copyright (c) 2010 The WebM project',
u'Copyright (c) 2002-2010 The Xiph.Org Foundation and contributors.',
]
check_detection(expected, test_file)
def test_ics_libvpx_build_x86_msvs_obj_int_extract_bat(self):
test_file = self.get_test_loc('ics/libvpx-build-x86-msvs/obj_int_extract.bat')
expected = [
u'Copyright (c) 2011 The WebM project',
]
check_detection(expected, test_file)
def test_ics_libvpx_examples_includes_asciimathphp_2_0_htmlmathml_js(self):
test_file = self.get_test_loc('ics/libvpx-examples-includes-ASCIIMathPHP-2.0/htmlMathML.js')
expected = [
u'(c) Peter Jipsen',
]
check_detection(expected, test_file)
# Copyright-detection fixtures: geshi docs, phpdoc, HTML-Toc, PHP-Markdown and
# SmartyPants files bundled in the libvpx examples tree. Each test asserts the
# exact ordered list of copyright statements detected in one fixture file.
def test_ics_libvpx_examples_includes_geshi_docs_geshi_doc_html(self):
    check_detection(
        [
            u'(c) Peter Jipsen',
            u'(c) 2004 - 2007 Nigel McNie, 2007 - 2008 Benny Baumann',
            u'(c) 2004 - 2007 Nigel McNie, 2007 - 2008 Benny Baumann',
            u'(c) 2004 - 2007 Nigel McNie, 2007 - 2008 Benny Baumann',
            u'(c) 2004 - 2007 Nigel McNie, 2007 - 2008 Benny Baumann',
            u'(c) 2004 - 2007 Nigel McNie, 2007 - 2008 Benny Baumann',
            u'Copyright (c) 2004 Nigel McNie',
            u"Copyright (c) 2008 < name> (< website URL> ) <span class coMULTI'> ",
        ],
        self.get_test_loc('ics/libvpx-examples-includes-geshi-docs/geshi-doc.html'),
    )

def test_ics_libvpx_examples_includes_geshi_docs_geshi_doc_txt(self):
    check_detection(
        [
            u'Copyright (c) 2004 - 2007 Nigel McNie, 2007 - 2008 Benny Baumann Email nigel@geshi.org',
            u'Copyright (c) 2004 Nigel McNie',
            u'Copyright (c) 2004 ( )',
        ],
        self.get_test_loc('ics/libvpx-examples-includes-geshi-docs/geshi-doc.txt'),
    )

@expectedFailure
def test_ics_libvpx_examples_includes_geshi_docs_geshi_doc_txt_trail_email_trail_url_misc(self):
    check_detection(
        [
            u'Copyright (c) 2004 - 2007 Nigel McNie, 2007 - 2008 Benny Baumann Email nigel@geshi.org, BenBE@omorphia.de',
            u'Copyright: (c) 2004 Nigel McNie (http://qbnz.com/highlighter/)',
            u'Copyright: (c) 2004 <name> (<website URL>)',
        ],
        self.get_test_loc('ics/libvpx-examples-includes-geshi-docs/geshi-doc.txt'),
    )

def test_ics_libvpx_examples_includes_geshi_docs_phpdoc_ini(self):
    check_detection(
        [u'Copyright 2002, Greg Beaver <cellog@users.sourceforge.net>'],
        self.get_test_loc('ics/libvpx-examples-includes-geshi-docs/phpdoc.ini'),
    )

def test_ics_libvpx_examples_includes_html_toc_0_91_toc_pod(self):
    check_detection(
        [u'Copyright (c) 2001 Freddy Vulto.'],
        self.get_test_loc('ics/libvpx-examples-includes-HTML-Toc-0.91/Toc.pod'),
    )

def test_ics_libvpx_examples_includes_php_markdown_extra_1_2_3_license_text(self):
    check_detection(
        [
            u'Copyright (c) 2004-2008 Michel Fortin',
            u'Copyright (c) 2003-2006 John Gruber',
        ],
        self.get_test_loc('ics/libvpx-examples-includes-PHP-Markdown-Extra-1.2.3/License.text'),
    )

def test_ics_libvpx_examples_includes_php_markdown_extra_1_2_3_markdown_php(self):
    check_detection(
        [
            u'Copyright (c) 2004-2008 Michel Fortin',
            u'Copyright (c) 2004-2006 John Gruber',
            u'Copyright (c) 2004-2008 Michel Fortin',
            u'Copyright (c) 2003-2006 John Gruber',
        ],
        self.get_test_loc('ics/libvpx-examples-includes-PHP-Markdown-Extra-1.2.3/markdown.php'),
    )

def test_ics_libvpx_examples_includes_php_markdown_extra_1_2_3_php_markdown_extra_readme_text(self):
    check_detection(
        [
            u'Copyright (c) 2004-2005 Michel Fortin',
            u'Copyright (c) 2003-2005 John Gruber',
        ],
        self.get_test_loc('ics/libvpx-examples-includes-PHP-Markdown-Extra-1.2.3/PHP Markdown Extra Readme.text'),
    )

def test_ics_libvpx_examples_includes_php_smartypants_1_5_1e_php_smartypants_readme_txt(self):
    check_detection(
        [
            u'Copyright (c) 2005 Michel Fortin',
            u'Copyright (c) 2003-2004 John Gruber',
        ],
        self.get_test_loc('ics/libvpx-examples-includes-PHP-SmartyPants-1.5.1e/PHP SmartyPants Readme.txt'),
    )

def test_ics_libvpx_examples_includes_php_smartypants_1_5_1e_smartypants_php(self):
    check_detection(
        [
            u'Copyright (c) 2003-2004 John Gruber',
            u'Copyright (c) 2004-2005 Michel Fortin',
            u'Copyright (c) 2003 John Gruber',
            u'Copyright (c) 2004-2005 Michel Fortin',
        ],
        self.get_test_loc('ics/libvpx-examples-includes-PHP-SmartyPants-1.5.1e/smartypants.php'),
    )
# Copyright-detection fixtures: libvpx libmkv, nestegg and vp8 sources.
# NOTE: '2004i-2010' below is the literal (typo'd) text in the fixture file,
# which the detector is expected to reproduce verbatim.
def test_ics_libvpx_libmkv_ebmlids_h(self):
    check_detection(
        [u'Copyright (c) 2010 The WebM project'],
        self.get_test_loc('ics/libvpx-libmkv/EbmlIDs.h'),
    )

def test_ics_libvpx_nestegg_license(self):
    check_detection(
        [u'Copyright (c) 2010 Mozilla Foundation'],
        self.get_test_loc('ics/libvpx-nestegg/LICENSE'),
    )

def test_ics_libvpx_nestegg_halloc_halloc_h(self):
    check_detection(
        [u'Copyright (c) 2004-2010 Alex Pankratov.'],
        self.get_test_loc('ics/libvpx-nestegg-halloc/halloc.h'),
    )

def test_ics_libvpx_nestegg_halloc_readme(self):
    check_detection(
        [u'Copyright (c) 2004-2010, Alex Pankratov (ap@swapped.cc).'],
        self.get_test_loc('ics/libvpx-nestegg-halloc/README'),
    )

def test_ics_libvpx_nestegg_halloc_src_halloc_c(self):
    check_detection(
        [u'Copyright (c) 2004i-2010 Alex Pankratov.'],
        self.get_test_loc('ics/libvpx-nestegg-halloc-src/halloc.c'),
    )

def test_ics_libvpx_nestegg_include_nestegg_nestegg_h(self):
    check_detection(
        [u'Copyright (c) 2010 Mozilla Foundation'],
        self.get_test_loc('ics/libvpx-nestegg-include-nestegg/nestegg.h'),
    )

def test_ics_libvpx_nestegg_m4_pkg_m4(self):
    check_detection(
        [u'Copyright (c) 2004 Scott James Remnant <scott@netsplit.com>.'],
        self.get_test_loc('ics/libvpx-nestegg-m4/pkg.m4'),
    )

def test_ics_libvpx_vp8_common_asm_com_offsets_c(self):
    check_detection(
        [u'Copyright (c) 2011 The WebM project'],
        self.get_test_loc('ics/libvpx-vp8-common/asm_com_offsets.c'),
    )
# Copyright-detection fixtures: libxml2 and libxslt sources.
def test_ics_libxml2_dict_c(self):
    check_detection(
        [u'Copyright (c) 2003 Daniel Veillard.'],
        self.get_test_loc('ics/libxml2/dict.c'),
    )

def test_ics_libxml2_hash_c(self):
    check_detection(
        [u'Copyright (c) 2000 Bjorn Reese and Daniel Veillard.'],
        self.get_test_loc('ics/libxml2/hash.c'),
    )

def test_ics_libxml2_list_c(self):
    check_detection(
        [u'Copyright (c) 2000 Gary Pennington and Daniel Veillard.'],
        self.get_test_loc('ics/libxml2/list.c'),
    )

def test_ics_libxml2_notice(self):
    check_detection(
        [u'Copyright (c) 1998-2003 Daniel Veillard.'],
        self.get_test_loc('ics/libxml2/NOTICE'),
    )

def test_ics_libxml2_trio_c(self):
    check_detection(
        [u'Copyright (c) 1998 Bjorn Reese and Daniel Stenberg.'],
        self.get_test_loc('ics/libxml2/trio.c'),
    )

def test_ics_libxml2_triodef_h(self):
    check_detection(
        [u'Copyright (c) 2001 Bjorn Reese <breese@users.sourceforge.net>'],
        self.get_test_loc('ics/libxml2/triodef.h'),
    )

def test_ics_libxml2_triop_h(self):
    check_detection(
        [u'Copyright (c) 2000 Bjorn Reese and Daniel Stenberg.'],
        self.get_test_loc('ics/libxml2/triop.h'),
    )

def test_ics_libxml2_triostr_c(self):
    check_detection(
        [u'Copyright (c) 2001 Bjorn Reese and Daniel Stenberg.'],
        self.get_test_loc('ics/libxml2/triostr.c'),
    )

def test_ics_libxslt_copyright(self):
    check_detection(
        [
            u'Copyright (c) 2001-2002 Daniel Veillard.',
            u'Copyright (c) 2001-2002 Thomas Broyer, Charlie Bozeman and Daniel Veillard.',
        ],
        self.get_test_loc('ics/libxslt/Copyright'),
    )
# Copyright-detection fixtures: lohit-fonts and python-markdown notices.
# The @expectedFailure variants document trailing-URL forms not yet detected.
def test_ics_lohit_fonts_notice(self):
    check_detection(
        [u'Copyright 2011 Lohit Fonts Project contributors'],
        self.get_test_loc('ics/lohit-fonts/NOTICE'),
    )

@expectedFailure
def test_ics_lohit_fonts_notice_trail_url(self):
    check_detection(
        [u'Copyright 2011 Lohit Fonts Project contributors <http://fedorahosted.org/lohit>'],
        self.get_test_loc('ics/lohit-fonts/NOTICE'),
    )

def test_ics_lohit_fonts_lohit_bengali_ttf_copyright(self):
    check_detection(
        [u'Copyright 2011 Lohit Fonts Project contributors.'],
        self.get_test_loc('ics/lohit-fonts-lohit-bengali-ttf/COPYRIGHT'),
    )

@expectedFailure
def test_ics_lohit_fonts_lohit_bengali_ttf_copyright_trail_url(self):
    check_detection(
        [u'Copyright 2011 Lohit Fonts Project contributors. <http://fedorahosted.org/lohit>'],
        self.get_test_loc('ics/lohit-fonts-lohit-bengali-ttf/COPYRIGHT'),
    )

def test_ics_markdown_notice(self):
    check_detection(
        [
            u'Copyright 2007, 2008 The Python Markdown Project',
            u'Copyright 2004, 2005, 2006 Yuri Takhteyev',
            u'Copyright 2004 Manfred Stienstra',
        ],
        self.get_test_loc('ics/markdown/NOTICE'),
    )

def test_ics_markdown_bin_markdown(self):
    check_detection(
        [
            u'Copyright 2007, 2008 The Python Markdown Project',
            u'Copyright 2004, 2005, 2006 Yuri Takhteyev',
            u'Copyright 2004 Manfred Stienstra',
        ],
        self.get_test_loc('ics/markdown-bin/markdown'),
    )
# Copyright-detection fixtures: python-markdown extensions. Each extension has
# a passing test for the plain statement and an @expectedFailure twin for the
# markdown-link / trailing-URL form the detector does not yet capture.
def test_ics_markdown_markdown_html4_py(self):
    check_detection(
        [
            u'Copyright (c) 1999-2007 by Fredrik Lundh.',
            u'Copyright (c) 1999-2007 by Fredrik Lundh',
        ],
        self.get_test_loc('ics/markdown-markdown/html4.py'),
    )

def test_ics_markdown_markdown_extensions_abbr_py(self):
    check_detection(
        [u'Copyright 2007-2008 Waylan Limberg'],
        self.get_test_loc('ics/markdown-markdown-extensions/abbr.py'),
    )

@expectedFailure
def test_ics_markdown_markdown_extensions_abbr_py_trail_url(self):
    check_detection(
        [u'Copyright 2007-2008 [Waylan Limberg](http://achinghead.com/) [Seemant Kulleen](http://www.kulleen.org/)'],
        self.get_test_loc('ics/markdown-markdown-extensions/abbr.py'),
    )

def test_ics_markdown_markdown_extensions_codehilite_py(self):
    check_detection(
        [u'Copyright 2006-2008 Waylan Limberg'],
        self.get_test_loc('ics/markdown-markdown-extensions/codehilite.py'),
    )

@expectedFailure
def test_ics_markdown_markdown_extensions_codehilite_py_trail_url(self):
    check_detection(
        [u'Copyright 2006-2008 [Waylan Limberg](http://achinghead.com/).'],
        self.get_test_loc('ics/markdown-markdown-extensions/codehilite.py'),
    )

def test_ics_markdown_markdown_extensions_def_list_py(self):
    check_detection(
        [u'Copyright 2008 - Waylan Limberg'],
        self.get_test_loc('ics/markdown-markdown-extensions/def_list.py'),
    )

@expectedFailure
def test_ics_markdown_markdown_extensions_def_list_py_trail_url(self):
    check_detection(
        [u'Copyright 2008 - [Waylan Limberg](http://achinghead.com)'],
        self.get_test_loc('ics/markdown-markdown-extensions/def_list.py'),
    )

def test_ics_markdown_markdown_extensions_html_tidy_py(self):
    check_detection(
        [u'Copyright (c) 2008 Waylan Limberg'],
        self.get_test_loc('ics/markdown-markdown-extensions/html_tidy.py'),
    )

@expectedFailure
def test_ics_markdown_markdown_extensions_html_tidy_py_trail_url(self):
    check_detection(
        [u'Copyright (c)2008 [Waylan Limberg](http://achinghead.com)'],
        self.get_test_loc('ics/markdown-markdown-extensions/html_tidy.py'),
    )

def test_ics_markdown_markdown_extensions_tables_py(self):
    check_detection(
        [u'Copyright 2009 - Waylan Limberg'],
        self.get_test_loc('ics/markdown-markdown-extensions/tables.py'),
    )

@expectedFailure
def test_ics_markdown_markdown_extensions_tables_py_trail_url(self):
    check_detection(
        [u'Copyright 2009 - [Waylan Limberg](http://achinghead.com)'],
        self.get_test_loc('ics/markdown-markdown-extensions/tables.py'),
    )

def test_ics_markdown_markdown_extensions_toc_py(self):
    check_detection(
        [u'(c) 2008 Jack Miller'],
        self.get_test_loc('ics/markdown-markdown-extensions/toc.py'),
    )

@expectedFailure
def test_ics_markdown_markdown_extensions_toc_py_trail_url(self):
    check_detection(
        [u'(c) 2008 [Jack Miller](http://codezen.org)'],
        self.get_test_loc('ics/markdown-markdown-extensions/toc.py'),
    )
# Copyright-detection fixtures: mesa3d (part 1 — NOTICE, docs, includes, glsl).
# NOTE: '2008-1010' below is the literal text of the fixture's NOTICE file;
# the detector reports it verbatim.
def test_ics_mesa3d_notice(self):
    check_detection(
        [
            u'Copyright (c) 1999-2008 Brian Paul',
            u'Copyright (c) 2008-1010 Intel Corporation',
            u'Copyright (c) 2007-2010 VMware, Inc.',
            u'Copyright (c) 2010 Luca Barbieri',
            u'Copyright (c) 2006 Alexander Chemeris',
            u'Copyright 2007,2010,2011 The Android Open Source Project',
        ],
        self.get_test_loc('ics/mesa3d/NOTICE'),
    )

def test_ics_mesa3d_docs_license_html(self):
    check_detection(
        [
            u'copyrighted by Mark Kilgard',
            u'Copyright (c) 1999-2007 Brian Paul',
        ],
        self.get_test_loc('ics/mesa3d-docs/license.html'),
    )

def test_ics_mesa3d_docs_subset_a_html(self):
    check_detection(
        [u'Copyright (c) 2002-2003 by Tungsten Graphics, Inc., Cedar Park, Texas.'],
        self.get_test_loc('ics/mesa3d-docs/subset-A.html'),
    )

def test_ics_mesa3d_include_c99_inttypes_h(self):
    check_detection(
        [u'Copyright (c) 2006 Alexander Chemeris'],
        self.get_test_loc('ics/mesa3d-include-c99/inttypes.h'),
    )

def test_ics_mesa3d_include_c99_stdbool_h(self):
    check_detection(
        [u'Copyright 2007-2010 VMware, Inc.'],
        self.get_test_loc('ics/mesa3d-include-c99/stdbool.h'),
    )

def test_ics_mesa3d_include_c99_stdint_h(self):
    check_detection(
        [u'Copyright (c) 2006-2008 Alexander Chemeris'],
        self.get_test_loc('ics/mesa3d-include-c99/stdint.h'),
    )

def test_ics_mesa3d_include_pixelflinger2_pixelflinger2_interface_h(self):
    check_detection(
        [u'Copyright 2010, The Android Open Source Project'],
        self.get_test_loc('ics/mesa3d-include-pixelflinger2/pixelflinger2_interface.h'),
    )

def test_ics_mesa3d_src_glsl_ast_h(self):
    check_detection(
        [u'Copyright (c) 2009 Intel Corporation'],
        self.get_test_loc('ics/mesa3d-src-glsl/ast.h'),
    )

def test_ics_mesa3d_src_glsl_ast_expr_cpp(self):
    check_detection(
        [u'Copyright (c) 2010 Intel Corporation'],
        self.get_test_loc('ics/mesa3d-src-glsl/ast_expr.cpp'),
    )

def test_ics_mesa3d_src_glsl_glsl_compiler_cpp(self):
    check_detection(
        [u'Copyright (c) 2008, 2009 Intel Corporation'],
        self.get_test_loc('ics/mesa3d-src-glsl/glsl_compiler.cpp'),
    )

def test_ics_mesa3d_src_glsl_glsl_parser_cpp(self):
    check_detection(
        [
            u'Copyright (c) 1984, 1989, 1990, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2009, 2010 Free Software Foundation, Inc.',
            u'Copyright (c) 2008, 2009 Intel Corporation',
        ],
        self.get_test_loc('ics/mesa3d-src-glsl/glsl_parser.cpp'),
    )

def test_ics_mesa3d_src_glsl_glsl_parser_h(self):
    check_detection(
        [u'Copyright (c) 1984, 1989, 1990, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2009, 2010 Free Software Foundation, Inc.'],
        self.get_test_loc('ics/mesa3d-src-glsl/glsl_parser.h'),
    )

def test_ics_mesa3d_src_glsl_ir_to_llvm_cpp(self):
    check_detection(
        [
            u'Copyright (c) 2005-2007 Brian Paul',
            u'Copyright (c) 2008 VMware, Inc.',
            u'Copyright (c) 2010 Intel Corporation',
            u'Copyright (c) 2010 Luca Barbieri',
        ],
        self.get_test_loc('ics/mesa3d-src-glsl/ir_to_llvm.cpp'),
    )

def test_ics_mesa3d_src_glsl_list_h(self):
    check_detection(
        [u'Copyright (c) 2008, 2010 Intel Corporation'],
        self.get_test_loc('ics/mesa3d-src-glsl/list.h'),
    )

def test_ics_mesa3d_src_glsl_lower_jumps_cpp(self):
    check_detection(
        [u'Copyright (c) 2010 Luca Barbieri'],
        self.get_test_loc('ics/mesa3d-src-glsl/lower_jumps.cpp'),
    )

def test_ics_mesa3d_src_glsl_program_h(self):
    check_detection(
        [
            u'Copyright (c) 1999-2008 Brian Paul',
            u'Copyright (c) 2009 VMware, Inc.',
            u'Copyright (c) 2010 Intel Corporation',
        ],
        self.get_test_loc('ics/mesa3d-src-glsl/program.h'),
    )

def test_ics_mesa3d_src_glsl_strtod_c(self):
    check_detection(
        [u'Copyright 2010 VMware, Inc.'],
        self.get_test_loc('ics/mesa3d-src-glsl/strtod.c'),
    )
# Copyright-detection fixtures: mesa3d (part 2 — glcpp, mesa/main, program,
# pixelflinger2 and test sources).
def test_ics_mesa3d_src_glsl_glcpp_glcpp_lex_c(self):
    check_detection(
        [u'Copyright (c) 2010 Intel Corporation'],
        self.get_test_loc('ics/mesa3d-src-glsl-glcpp/glcpp-lex.c'),
    )

def test_ics_mesa3d_src_glsl_glcpp_glcpp_parse_c(self):
    check_detection(
        [
            u'Copyright (c) 1984, 1989, 1990, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2009, 2010 Free Software Foundation, Inc.',
            u'Copyright (c) 2010 Intel Corporation',
        ],
        self.get_test_loc('ics/mesa3d-src-glsl-glcpp/glcpp-parse.c'),
    )

def test_ics_mesa3d_src_glsl_glcpp_makefile_am(self):
    check_detection(
        [u'Copyright (c) 2010 Intel Corporation'],
        self.get_test_loc('ics/mesa3d-src-glsl-glcpp/Makefile.am'),
    )

def test_ics_mesa3d_src_mesa_main_compiler_h(self):
    check_detection(
        [
            u'Copyright (c) 1999-2008 Brian Paul',
            u'Copyright (c) 2009 VMware, Inc.',
        ],
        self.get_test_loc('ics/mesa3d-src-mesa-main/compiler.h'),
    )

def test_ics_mesa3d_src_mesa_main_config_h(self):
    check_detection(
        [
            u'Copyright (c) 1999-2007 Brian Paul',
            u'Copyright (c) 2008 VMware, Inc.',
        ],
        self.get_test_loc('ics/mesa3d-src-mesa-main/config.h'),
    )

def test_ics_mesa3d_src_mesa_main_core_h(self):
    check_detection(
        [u'Copyright (c) 2010 LunarG Inc.'],
        self.get_test_loc('ics/mesa3d-src-mesa-main/core.h'),
    )

def test_ics_mesa3d_src_mesa_main_debug_h(self):
    check_detection(
        [u'Copyright (c) 1999-2004 Brian Paul'],
        self.get_test_loc('ics/mesa3d-src-mesa-main/debug.h'),
    )

def test_ics_mesa3d_src_mesa_main_get_h(self):
    check_detection(
        [u'Copyright (c) 1999-2001 Brian Paul'],
        self.get_test_loc('ics/mesa3d-src-mesa-main/get.h'),
    )

def test_ics_mesa3d_src_mesa_main_glheader_h(self):
    check_detection(
        [u'Copyright (c) 1999-2008 Brian Paul'],
        self.get_test_loc('ics/mesa3d-src-mesa-main/glheader.h'),
    )

def test_ics_mesa3d_src_mesa_main_hash_h(self):
    check_detection(
        [u'Copyright (c) 1999-2006 Brian Paul'],
        self.get_test_loc('ics/mesa3d-src-mesa-main/hash.h'),
    )

def test_ics_mesa3d_src_mesa_main_shaderobj_h(self):
    check_detection(
        [u'Copyright (c) 2004-2007 Brian Paul'],
        self.get_test_loc('ics/mesa3d-src-mesa-main/shaderobj.h'),
    )

def test_ics_mesa3d_src_mesa_main_simple_list_h(self):
    check_detection(
        [
            u'(c) 1997, Keith Whitwell',
            u'Copyright (c) 1999-2001 Brian Paul',
        ],
        self.get_test_loc('ics/mesa3d-src-mesa-main/simple_list.h'),
    )

def test_ics_mesa3d_src_mesa_program_hash_table_c(self):
    check_detection(
        [u'Copyright (c) 2008 Intel Corporation'],
        self.get_test_loc('ics/mesa3d-src-mesa-program/hash_table.c'),
    )

def test_ics_mesa3d_src_mesa_program_prog_statevars_h(self):
    check_detection(
        [u'Copyright (c) 1999-2007 Brian Paul'],
        self.get_test_loc('ics/mesa3d-src-mesa-program/prog_statevars.h'),
    )

def test_ics_mesa3d_src_pixelflinger2_pixelflinger2_cpp(self):
    check_detection(
        [u'Copyright 2010, The Android Open Source Project'],
        self.get_test_loc('ics/mesa3d-src-pixelflinger2/pixelflinger2.cpp'),
    )

def test_ics_mesa3d_test_egl_cpp(self):
    check_detection(
        [u'Copyright 2007 The Android Open Source Project'],
        self.get_test_loc('ics/mesa3d-test/egl.cpp'),
    )

def test_ics_mesa3d_test_m_matrix_c(self):
    check_detection(
        [u'Copyright (c) 1999-2005 Brian Paul'],
        self.get_test_loc('ics/mesa3d-test/m_matrix.c'),
    )

def test_ics_mesa3d_test_m_matrix_h(self):
    check_detection(
        [u'Copyright (c) 1999-2005 Brian Paul'],
        self.get_test_loc('ics/mesa3d-test/m_matrix.h'),
    )
# Copyright-detection fixtures: mksh shell sources.
def test_ics_mksh_android_mk(self):
    check_detection(
        [u'Copyright (c) 2010 Thorsten Glaser <t.glaser@tarent.de>'],
        self.get_test_loc('ics/mksh/Android.mk'),
    )

def test_ics_mksh_mkshrc(self):
    check_detection(
        [u'Copyright (c) 2010 Thorsten Glaser <t.glaser@tarent.de>'],
        self.get_test_loc('ics/mksh/mkshrc'),
    )

def test_ics_mksh_notice(self):
    check_detection(
        [u'Copyright (c) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 Thorsten Glaser <tg@mirbsd.org>'],
        self.get_test_loc('ics/mksh/NOTICE'),
    )

def test_ics_mksh_src_build_sh(self):
    check_detection(
        [u'Copyright (c) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 Thorsten Glaser <tg@mirbsd.org>'],
        self.get_test_loc('ics/mksh-src/Build.sh'),
    )

def test_ics_mksh_src_edit_c(self):
    check_detection(
        [u'Copyright (c) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 Thorsten Glaser <tg@mirbsd.org>'],
        self.get_test_loc('ics/mksh-src/edit.c'),
    )

def test_ics_mksh_src_funcs_c(self):
    check_detection(
        [u'Copyright (c) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 Thorsten Glaser <tg@mirbsd.org>'],
        self.get_test_loc('ics/mksh-src/funcs.c'),
    )

def test_ics_mksh_src_jobs_c(self):
    check_detection(
        [u'Copyright (c) 2003, 2004, 2005, 2006, 2007, 2008, 2009 Thorsten Glaser <tg@mirbsd.org>'],
        self.get_test_loc('ics/mksh-src/jobs.c'),
    )

def test_ics_mksh_src_lalloc_c(self):
    check_detection(
        [u'Copyright (c) 2009 Thorsten Glaser <tg@mirbsd.org>'],
        self.get_test_loc('ics/mksh-src/lalloc.c'),
    )

def test_ics_mksh_src_sh_h(self):
    check_detection(
        [u'Copyright (c) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 Thorsten Glaser <tg@mirbsd.org>'],
        self.get_test_loc('ics/mksh-src/sh.h'),
    )
# Copyright-detection fixtures: mtpd and netperf (part 1).
def test_ics_mtpd_l2tp_c(self):
    check_detection(
        [u'Copyright (c) 2009 The Android Open Source Project'],
        self.get_test_loc('ics/mtpd/l2tp.c'),
    )

def test_ics_mtpd_notice(self):
    check_detection(
        [u'Copyright (c) 2009, The Android Open Source Project'],
        self.get_test_loc('ics/mtpd/NOTICE'),
    )

def test_ics_netperf_module_license_hp(self):
    check_detection(
        [u'Copyright (c) 1993 Hewlett-Packard Company'],
        self.get_test_loc('ics/netperf/MODULE_LICENSE_HP'),
    )

def test_ics_netperf_netcpu_kstat10_c(self):
    check_detection(
        [u'(c) Copyright 2005-2007, Hewlett-Packard Company'],
        self.get_test_loc('ics/netperf/netcpu_kstat10.c'),
    )

def test_ics_netperf_netcpu_looper_c(self):
    check_detection(
        [u'(c) Copyright 2005-2007. version 2.4.3'],
        self.get_test_loc('ics/netperf/netcpu_looper.c'),
    )

def test_ics_netperf_netcpu_none_c(self):
    check_detection(
        [u'(c) Copyright 2005, Hewlett-Packard Company'],
        self.get_test_loc('ics/netperf/netcpu_none.c'),
    )

def test_ics_netperf_netcpu_procstat_c(self):
    check_detection(
        [u'(c) Copyright 2005-2007 version 2.4.3'],
        self.get_test_loc('ics/netperf/netcpu_procstat.c'),
    )

def test_ics_netperf_netlib_c(self):
    check_detection(
        [u'(c) Copyright 1993-2007 Hewlett-Packard Company.'],
        self.get_test_loc('ics/netperf/netlib.c'),
    )

def test_ics_netperf_netlib_h(self):
    check_detection(
        [u'Copyright (c) 1993-2005 Hewlett-Packard Company'],
        self.get_test_loc('ics/netperf/netlib.h'),
    )

def test_ics_netperf_netperf_c(self):
    check_detection(
        [
            u'Copyright (c) 1993-2007 Hewlett-Packard Company',
            u'(c) Copyright 1993-2007 Hewlett-Packard Company.',
        ],
        self.get_test_loc('ics/netperf/netperf.c'),
    )

def test_ics_netperf_netserver_c(self):
    check_detection(
        [
            u'Copyright (c) 1993-2007 Hewlett-Packard Company',
            u'(c) Copyright 1993-2007 Hewlett-Packard Co.',
        ],
        self.get_test_loc('ics/netperf/netserver.c'),
    )

def test_ics_netperf_netsh_h(self):
    check_detection(
        [u'Copyright (c) 1993,1995 Hewlett-Packard Company'],
        self.get_test_loc('ics/netperf/netsh.h'),
    )
# Copyright-detection fixtures: netperf (part 2 — nettest sources).
def test_ics_netperf_nettest_bsd_c(self):
    check_detection(
        [u'(c) Copyright 1993-2004 Hewlett-Packard Co.'],
        self.get_test_loc('ics/netperf/nettest_bsd.c'),
    )

def test_ics_netperf_nettest_bsd_h(self):
    check_detection(
        [u'Copyright (c) 1993-2004 Hewlett-Packard Company'],
        self.get_test_loc('ics/netperf/nettest_bsd.h'),
    )

def test_ics_netperf_nettest_dlpi_c(self):
    check_detection(
        [u'(c) Copyright 1993,1995,2004 Hewlett-Packard Co.'],
        self.get_test_loc('ics/netperf/nettest_dlpi.c'),
    )

def test_ics_netperf_nettest_dlpi_h(self):
    check_detection(
        [u'Copyright (c) 1993, Hewlett-Packard Company'],
        self.get_test_loc('ics/netperf/nettest_dlpi.h'),
    )

def test_ics_netperf_nettest_sctp_c(self):
    check_detection(
        [u'(c) Copyright 2005-2007 Hewlett-Packard Co.'],
        self.get_test_loc('ics/netperf/nettest_sctp.c'),
    )

def test_ics_netperf_nettest_sctp_h(self):
    check_detection(
        [u'Copyright (c) 1993-2003 Hewlett-Packard Company'],
        self.get_test_loc('ics/netperf/nettest_sctp.h'),
    )

def test_ics_netperf_nettest_sdp_c(self):
    check_detection(
        [u'(c) Copyright 2007 Hewlett-Packard Co.'],
        self.get_test_loc('ics/netperf/nettest_sdp.c'),
    )

def test_ics_netperf_nettest_sdp_h(self):
    check_detection(
        [u'Copyright (c) 2007 Hewlett-Packard Company'],
        self.get_test_loc('ics/netperf/nettest_sdp.h'),
    )

def test_ics_netperf_nettest_unix_c(self):
    check_detection(
        [u'(c) Copyright 1994-2007 Hewlett-Packard Co.'],
        self.get_test_loc('ics/netperf/nettest_unix.c'),
    )

def test_ics_netperf_nettest_xti_c(self):
    check_detection(
        [u'(c) Copyright 1995-2007 Hewlett-Packard Co.'],
        self.get_test_loc('ics/netperf/nettest_xti.c'),
    )

def test_ics_netperf_nettest_xti_h(self):
    check_detection(
        [u'Copyright (c) 1995,2004 Hewlett-Packard Company'],
        self.get_test_loc('ics/netperf/nettest_xti.h'),
    )
# Copyright-detection fixtures: neven, nist-sip and oauth sources.
def test_ics_neven_facedetector_jni_cpp(self):
    check_detection(
        [u'Copyright (c) 2006 The Android Open Source Project'],
        self.get_test_loc('ics/neven/FaceDetector_jni.cpp'),
    )

def test_ics_neven_notice(self):
    check_detection(
        [u'Copyright (c) 2008 The Android Open Source Project'],
        self.get_test_loc('ics/neven/NOTICE'),
    )

def test_ics_nist_sip_java_gov_nist_core_multimap_java(self):
    check_detection(
        [u'Copyright 1999-2004 The Apache Software Foundation'],
        self.get_test_loc('ics/nist-sip-java-gov-nist-core/MultiMap.java'),
    )

def test_ics_oauth_core_src_main_java_net_oauth_consumerproperties_java(self):
    check_detection(
        [u'Copyright 2007 Netflix, Inc.'],
        self.get_test_loc('ics/oauth-core-src-main-java-net-oauth/ConsumerProperties.java'),
    )

def test_ics_oauth_core_src_main_java_net_oauth_oauthexception_java(self):
    check_detection(
        [u'Copyright 2008 Google, Inc.'],
        self.get_test_loc('ics/oauth-core-src-main-java-net-oauth/OAuthException.java'),
    )

def test_ics_oauth_core_src_main_java_net_oauth_oauthmessage_java(self):
    check_detection(
        [u'Copyright 2007, 2008 Netflix, Inc.'],
        self.get_test_loc('ics/oauth-core-src-main-java-net-oauth/OAuthMessage.java'),
    )

def test_ics_oauth_core_src_main_java_net_oauth_client_oauthresponsemessage_java(self):
    check_detection(
        [u'Copyright 2008 Netflix, Inc.'],
        self.get_test_loc('ics/oauth-core-src-main-java-net-oauth-client/OAuthResponseMessage.java'),
    )

def test_ics_oauth_core_src_main_java_net_oauth_client_httpclient4_httpclient4_java(self):
    check_detection(
        [u'Copyright 2008 Sean Sullivan'],
        self.get_test_loc('ics/oauth-core-src-main-java-net-oauth-client-httpclient4/HttpClient4.java'),
    )

def test_ics_oauth_core_src_main_java_net_oauth_signature_rsa_sha1_java(self):
    check_detection(
        [u'Copyright 2007 Google, Inc.'],
        self.get_test_loc('ics/oauth-core-src-main-java-net-oauth-signature/RSA_SHA1.java'),
    )
# Copyright-detection fixtures: opencv (part 1). The NOTICE fixture aggregates
# many upstream notices, so its expected list preserves order and duplicates.
# Oddly-spaced entries such as u'Copyright( C) ...' are verbatim fixture text.
def test_ics_opencv_cvjni_cpp(self):
    check_detection(
        [u'Copyright (c) 2006-2009 SIProp Project http://www.siprop.org'],
        self.get_test_loc('ics/opencv/cvjni.cpp'),
    )

def test_ics_opencv_license_opencv(self):
    check_detection(
        [u'Copyright (c) 2000-2006, Intel Corporation'],
        self.get_test_loc('ics/opencv/LICENSE_OpenCV'),
    )

def test_ics_opencv_notice(self):
    check_detection(
        [
            u'Copyright (c) 2000-2006, Intel Corporation',
            u'Copyright (c) 2006-2009 SIProp Project http://www.siprop.org',
            u'Copyright (c) 1992, 1993 The Regents of the University of California.',
            u'Copyright (c) 2008, Liu Liu',
            u'Copyright (c) 2008, Google',
            u'Copyright (c) 1992, 1993 The Regents of the University of California.',
            u'Copyright (c) 2002, MD-Mathematische Dienste GmbH Im Defdahl',
            u'Copyright (c) 2000-2003 Chih-Chung Chang and Chih-Jen Lin',
            u'Copyright (c) 2000, Intel Corporation',
            u'Copyright (c) 2000, Intel Corporation',
            u'Copyright (c) 2002, Intel Corporation',
            u'Copyright( C) 2000, Intel Corporation',
            u'Copyright (c) 2008, Xavier Delacour',
            u'Copyright( C) 2000, Intel Corporation',
            u'Copyright (c) 2000, Intel Corporation',
            u'Copyright (c) 2008, Nils Hasler',
            u'Copyright (c) 2000, Intel Corporation',
            u'Copyright (c) 2000, Intel Corporation',
            u'Copyright (c) 2000, Intel Corporation',
            u'Copyright (c) 2000, Intel Corporation',
            u'Copyright (c) 1978-1999 Ken Turkowski. <turk@computer.org>',
            u'Copyright (c) 1981-1999 Ken Turkowski. <turk@computer.org>',
            u'Copyright (c) 1998 Yossi Rubner Computer Science Department, Stanford University',
            u'Copyright (c) 2006 Simon Perreault',
            u'Copyright (c) 1995 Intel Corporation.',
        ],
        self.get_test_loc('ics/opencv/NOTICE'),
    )

def test_ics_opencv_cvaux_src_cv3dtracker_cpp(self):
    check_detection(
        [u'Copyright (c) 2002, Intel Corporation'],
        self.get_test_loc('ics/opencv-cvaux-src/cv3dtracker.cpp'),
    )

def test_ics_opencv_cvaux_src_cvdpstereo_cpp(self):
    check_detection(
        [u'Copyright (c) 2000, Intel Corporation'],
        self.get_test_loc('ics/opencv-cvaux-src/cvdpstereo.cpp'),
    )

def test_ics_opencv_cv_include_cv_h(self):
    check_detection(
        [u'Copyright (c) 2000, Intel Corporation'],
        self.get_test_loc('ics/opencv-cv-include/cv.h'),
    )

def test_ics_opencv_cv_src_cvkdtree_hpp(self):
    check_detection(
        [u'Copyright (c) 2008, Xavier Delacour'],
        self.get_test_loc('ics/opencv-cv-src/_cvkdtree.hpp'),
    )

def test_ics_opencv_cv_src_cvcolor_cpp(self):
    check_detection(
        [
            u'Copyright (c) 2000, Intel Corporation',
            u'Copyright (c) 2002, MD-Mathematische Dienste GmbH Im Defdahl',
        ],
        self.get_test_loc('ics/opencv-cv-src/cvcolor.cpp'),
    )

def test_ics_opencv_cv_src_cvdistransform_cpp(self):
    check_detection(
        [
            u'Copyright (c) 2000, Intel Corporation',
            u'(c) 2006 by Jay Stavinzky.',
        ],
        self.get_test_loc('ics/opencv-cv-src/cvdistransform.cpp'),
    )
def test_ics_opencv_cv_src_cvemd_cpp(self):
test_file = self.get_test_loc('ics/opencv-cv-src/cvemd.cpp')
expected = [
u'Copyright (c) 2000, Intel Corporation',
u'Copyright (c) 1998 Yossi Rubner Computer Science Department, Stanford University',
]
check_detection(expected, test_file)
def test_ics_opencv_cv_src_cvkdtree_cpp(self):
test_file = self.get_test_loc('ics/opencv-cv-src/cvkdtree.cpp')
expected = [
u'Copyright (c) 2008, Xavier Delacour',
]
check_detection(expected, test_file)
def test_ics_opencv_cv_src_cvsmooth_cpp(self):
test_file = self.get_test_loc('ics/opencv-cv-src/cvsmooth.cpp')
expected = [
u'Copyright (c) 2000, Intel Corporation',
u'Copyright (c) 2006 Simon Perreault',
]
check_detection(expected, test_file)
def test_ics_opencv_cv_src_cvsurf_cpp(self):
test_file = self.get_test_loc('ics/opencv-cv-src/cvsurf.cpp')
expected = [
u'Copyright (c) 2008, Liu Liu',
]
check_detection(expected, test_file)
def test_ics_opencv_cxcore_include_cvwimage_h(self):
test_file = self.get_test_loc('ics/opencv-cxcore-include/cvwimage.h')
expected = [
u'Copyright (c) 2008, Google',
]
check_detection(expected, test_file)
def test_ics_opencv_cxcore_include_cxmisc_h(self):
test_file = self.get_test_loc('ics/opencv-cxcore-include/cxmisc.h')
expected = [
u'Copyright (c) 2000, Intel Corporation',
u'Copyright (c) 1992, 1993 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_opencv_cxcore_include_cxtypes_h(self):
test_file = self.get_test_loc('ics/opencv-cxcore-include/cxtypes.h')
expected = [
u'Copyright (c) 2000, Intel Corporation',
u'Copyright (c) 1995 Intel Corporation.',
]
check_detection(expected, test_file)
def test_ics_opencv_cxcore_src_cxdatastructs_cpp(self):
test_file = self.get_test_loc('ics/opencv-cxcore-src/cxdatastructs.cpp')
expected = [
u'Copyright (c) 2000, Intel Corporation',
u'Copyright (c) 1992, 1993 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_opencv_cxcore_src_cxutils_cpp(self):
test_file = self.get_test_loc('ics/opencv-cxcore-src/cxutils.cpp')
expected = [
u'Copyright (c) 2000, Intel Corporation',
u'Copyright (c) 1978-1999 Ken Turkowski. <turk@computer.org>',
u'Copyright (c) 1981-1999 Ken Turkowski. <turk@computer.org>',
]
check_detection(expected, test_file)
def test_ics_opencv_ml_src_mlsvm_cpp(self):
test_file = self.get_test_loc('ics/opencv-ml-src/mlsvm.cpp')
expected = [
u'Copyright (c) 2000, Intel Corporation',
u'Copyright (c) 2000-2003 Chih-Chung Chang and Chih-Jen Lin',
]
check_detection(expected, test_file)
def test_ics_opencv_otherlibs_highgui_cvcap_socket_cpp(self):
test_file = self.get_test_loc('ics/opencv-otherlibs-highgui/cvcap_socket.cpp')
expected = [
u'Copyright (c) 2008, Nils Hasler',
]
check_detection(expected, test_file)
def test_ics_opencv_otherlibs_highgui_grfmt_png_cpp(self):
test_file = self.get_test_loc('ics/opencv-otherlibs-highgui/grfmt_png.cpp')
expected = [
u'Copyright (c) 2000, Intel Corporation',
u'(Copyright (c) 1999-2001 MIYASAKA Masaru)',
]
check_detection(expected, test_file)
# Copyright-detection checks for the ics OpenSSL core and apps files.
def test_ics_openssl_e_os_h(self):
    check_detection([u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)'], self.get_test_loc('ics/openssl/e_os.h'))
def test_ics_openssl_e_os2_h(self):
    check_detection([u'Copyright (c) 1998-2000 The OpenSSL Project.'], self.get_test_loc('ics/openssl/e_os2.h'))
def test_ics_openssl_notice(self):
    check_detection([u'Copyright (c) 1998-2011 The OpenSSL Project.', u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)'], self.get_test_loc('ics/openssl/NOTICE'))
def test_ics_openssl_apps_app_rand_c(self):
    check_detection([u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)', u'Copyright (c) 1998-2000 The OpenSSL Project.'], self.get_test_loc('ics/openssl-apps/app_rand.c'))
def test_ics_openssl_apps_apps_c(self):
    check_detection([u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)', u'Copyright (c) 1998-2001 The OpenSSL Project.'], self.get_test_loc('ics/openssl-apps/apps.c'))
def test_ics_openssl_apps_apps_h(self):
    check_detection([u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)', u'Copyright (c) 1998-2001 The OpenSSL Project.'], self.get_test_loc('ics/openssl-apps/apps.h'))
def test_ics_openssl_apps_asn1pars_c(self):
    check_detection([u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)'], self.get_test_loc('ics/openssl-apps/asn1pars.c'))
def test_ics_openssl_apps_cms_c(self):
    check_detection([u'Copyright (c) 2008 The OpenSSL Project.'], self.get_test_loc('ics/openssl-apps/cms.c'))
def test_ics_openssl_apps_ec_c(self):
    check_detection([u'Copyright (c) 1998-2005 The OpenSSL Project.'], self.get_test_loc('ics/openssl-apps/ec.c'))
def test_ics_openssl_apps_ecparam_c(self):
    check_detection([u'Copyright (c) 1998-2005 The OpenSSL Project.', u'Copyright 2002 Sun Microsystems, Inc.'], self.get_test_loc('ics/openssl-apps/ecparam.c'))
def test_ics_openssl_apps_engine_c(self):
    check_detection([u'Copyright (c) 2000 The OpenSSL Project.'], self.get_test_loc('ics/openssl-apps/engine.c'))
def test_ics_openssl_apps_genpkey_c(self):
    check_detection([u'Copyright (c) 2006 The OpenSSL Project.'], self.get_test_loc('ics/openssl-apps/genpkey.c'))
def test_ics_openssl_apps_nseq_c(self):
    check_detection([u'Copyright (c) 1999 The OpenSSL Project.'], self.get_test_loc('ics/openssl-apps/nseq.c'))
def test_ics_openssl_apps_openssl_c(self):
    check_detection([u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)', u'Copyright (c) 1998-2006 The OpenSSL Project.'], self.get_test_loc('ics/openssl-apps/openssl.c'))
# Copyright-detection checks for more ics OpenSSL apps/crypto files.
def test_ics_openssl_apps_pkcs12_c(self):
    check_detection([u'Copyright (c) 1999-2006 The OpenSSL Project.'], self.get_test_loc('ics/openssl-apps/pkcs12.c'))
def test_ics_openssl_apps_prime_c(self):
    check_detection([u'Copyright (c) 2004 The OpenSSL Project.'], self.get_test_loc('ics/openssl-apps/prime.c'))
def test_ics_openssl_apps_rand_c(self):
    check_detection([u'Copyright (c) 1998-2001 The OpenSSL Project.'], self.get_test_loc('ics/openssl-apps/rand.c'))
def test_ics_openssl_apps_s_client_c(self):
    check_detection([u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)', u'Copyright (c) 1998-2006 The OpenSSL Project.', u'Copyright 2005 Nokia.'], self.get_test_loc('ics/openssl-apps/s_client.c'))
def test_ics_openssl_apps_s_server_c(self):
    check_detection([u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)', u'Copyright (c) 1998-2006 The OpenSSL Project.', u'Copyright 2002 Sun Microsystems, Inc.', u'Copyright 2005 Nokia.'], self.get_test_loc('ics/openssl-apps/s_server.c'))
def test_ics_openssl_apps_smime_c(self):
    check_detection([u'Copyright (c) 1999-2004 The OpenSSL Project.'], self.get_test_loc('ics/openssl-apps/smime.c'))
def test_ics_openssl_apps_speed_c(self):
    check_detection([u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)', u'Copyright 2002 Sun Microsystems, Inc.'], self.get_test_loc('ics/openssl-apps/speed.c'))
def test_ics_openssl_apps_timeouts_h(self):
    check_detection([u'Copyright (c) 1999-2005 The OpenSSL Project.'], self.get_test_loc('ics/openssl-apps/timeouts.h'))
def test_ics_openssl_crypto_cryptlib_c(self):
    check_detection([u'Copyright (c) 1998-2006 The OpenSSL Project.', u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)', u'Copyright 2002 Sun Microsystems, Inc.'], self.get_test_loc('ics/openssl-crypto/cryptlib.c'))
def test_ics_openssl_crypto_lpdir_nyi_c(self):
    check_detection([u'Copyright (c) 2004, Richard Levitte <richard@levitte.org>'], self.get_test_loc('ics/openssl-crypto/LPdir_nyi.c'))
def test_ics_openssl_crypto_md32_common_h(self):
    check_detection([u'Copyright (c) 1999-2007 The OpenSSL Project.'], self.get_test_loc('ics/openssl-crypto/md32_common.h'))
def test_ics_openssl_crypto_mem_clr_c(self):
    check_detection([u'Copyright (c) 2001 The OpenSSL Project.'], self.get_test_loc('ics/openssl-crypto/mem_clr.c'))
def test_ics_openssl_crypto_o_str_c(self):
    check_detection([u'Copyright (c) 2003 The OpenSSL Project.'], self.get_test_loc('ics/openssl-crypto/o_str.c'))
def test_ics_openssl_crypto_aes_aes_h(self):
    check_detection([u'Copyright (c) 1998-2002 The OpenSSL Project.'], self.get_test_loc('ics/openssl-crypto-aes/aes.h'))
# Copyright-detection checks for ics OpenSSL crypto asn1/bf/bio files.
def test_ics_openssl_crypto_aes_aes_cfb_c(self):
    check_detection([u'Copyright (c) 2002-2006 The OpenSSL Project.'], self.get_test_loc('ics/openssl-crypto-aes/aes_cfb.c'))
def test_ics_openssl_crypto_asn1_a_sign_c(self):
    check_detection([u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)', u'Copyright (c) 1998-2003 The OpenSSL Project.'], self.get_test_loc('ics/openssl-crypto-asn1/a_sign.c'))
def test_ics_openssl_crypto_asn1_asn_mime_c(self):
    check_detection([u'Copyright (c) 1999-2008 The OpenSSL Project.'], self.get_test_loc('ics/openssl-crypto-asn1/asn_mime.c'))
def test_ics_openssl_crypto_asn1_asn_moid_c(self):
    check_detection([u'Copyright (c) 2001-2004 The OpenSSL Project.'], self.get_test_loc('ics/openssl-crypto-asn1/asn_moid.c'))
def test_ics_openssl_crypto_asn1_asn1_err_c(self):
    check_detection([u'Copyright (c) 1999-2009 The OpenSSL Project.'], self.get_test_loc('ics/openssl-crypto-asn1/asn1_err.c'))
def test_ics_openssl_crypto_asn1_asn1_gen_c(self):
    check_detection([u'Copyright (c) 2002 The OpenSSL Project.'], self.get_test_loc('ics/openssl-crypto-asn1/asn1_gen.c'))
def test_ics_openssl_crypto_asn1_asn1t_h(self):
    check_detection([u'Copyright (c) 2000-2005 The OpenSSL Project.'], self.get_test_loc('ics/openssl-crypto-asn1/asn1t.h'))
def test_ics_openssl_crypto_asn1_tasn_dec_c(self):
    check_detection([u'Copyright (c) 2000-2005 The OpenSSL Project.'], self.get_test_loc('ics/openssl-crypto-asn1/tasn_dec.c'))
def test_ics_openssl_crypto_asn1_tasn_enc_c(self):
    check_detection([u'Copyright (c) 2000-2004 The OpenSSL Project.'], self.get_test_loc('ics/openssl-crypto-asn1/tasn_enc.c'))
def test_ics_openssl_crypto_asn1_tasn_prn_c(self):
    check_detection([u'Copyright (c) 2000,2005 The OpenSSL Project.'], self.get_test_loc('ics/openssl-crypto-asn1/tasn_prn.c'))
def test_ics_openssl_crypto_asn1_x_nx509_c(self):
    check_detection([u'Copyright (c) 2005 The OpenSSL Project.'], self.get_test_loc('ics/openssl-crypto-asn1/x_nx509.c'))
def test_ics_openssl_crypto_bf_bf_locl_h(self):
    check_detection([u'Copyright (c) 1995-1997 Eric Young (eay@cryptsoft.com)'], self.get_test_loc('ics/openssl-crypto-bf/bf_locl.h'))
def test_ics_openssl_crypto_bf_copyright(self):
    check_detection([u'Copyright (c) 1995-1997 Eric Young (eay@cryptsoft.com)'], self.get_test_loc('ics/openssl-crypto-bf/COPYRIGHT'))
def test_ics_openssl_crypto_bio_b_print_c(self):
    check_detection([u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)', u'Copyright Patrick Powell 1995'], self.get_test_loc('ics/openssl-crypto-bio/b_print.c'))
def test_ics_openssl_crypto_bio_bss_bio_c(self):
    check_detection([u'Copyright (c) 1998-2003 The OpenSSL Project.'], self.get_test_loc('ics/openssl-crypto-bio/bss_bio.c'))
# Copyright-detection checks for ics OpenSSL crypto bn/des/dsa files.
def test_ics_openssl_crypto_bn_bn_h(self):
    check_detection([u'Copyright (c) 1995-1997 Eric Young (eay@cryptsoft.com)', u'Copyright (c) 1998-2006 The OpenSSL Project.', u'Copyright 2002 Sun Microsystems, Inc.'], self.get_test_loc('ics/openssl-crypto-bn/bn.h'))
def test_ics_openssl_crypto_bn_bn_blind_c(self):
    check_detection([u'Copyright (c) 1998-2006 The OpenSSL Project.', u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)'], self.get_test_loc('ics/openssl-crypto-bn/bn_blind.c'))
def test_ics_openssl_crypto_bn_bn_ctx_c(self):
    check_detection([u'Copyright (c) 1998-2004 The OpenSSL Project.'], self.get_test_loc('ics/openssl-crypto-bn/bn_ctx.c'))
def test_ics_openssl_crypto_bn_bn_err_c(self):
    check_detection([u'Copyright (c) 1999-2007 The OpenSSL Project.'], self.get_test_loc('ics/openssl-crypto-bn/bn_err.c'))
def test_ics_openssl_crypto_bn_bn_exp_c(self):
    check_detection([u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)', u'Copyright (c) 1998-2005 The OpenSSL Project.'], self.get_test_loc('ics/openssl-crypto-bn/bn_exp.c'))
def test_ics_openssl_crypto_bn_bn_gf2m_c(self):
    check_detection([u'Copyright 2002 Sun Microsystems, Inc.', u'Copyright (c) 1998-2002 The OpenSSL Project.'], self.get_test_loc('ics/openssl-crypto-bn/bn_gf2m.c'))
def test_ics_openssl_crypto_bn_bn_lcl_h(self):
    check_detection([u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)', u'Copyright (c) 1998-2000 The OpenSSL Project.'], self.get_test_loc('ics/openssl-crypto-bn/bn_lcl.h'))
def test_ics_openssl_crypto_bn_bn_mod_c(self):
    check_detection([u'Copyright (c) 1998-2000 The OpenSSL Project.', u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)'], self.get_test_loc('ics/openssl-crypto-bn/bn_mod.c'))
def test_ics_openssl_crypto_des_read2pwd_c(self):
    check_detection([u'Copyright (c) 2001-2002 The OpenSSL Project.', u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)'], self.get_test_loc('ics/openssl-crypto-des/read2pwd.c'))
def test_ics_openssl_crypto_des_readme(self):
    check_detection([u'Copyright (c) 1997, Eric Young'], self.get_test_loc('ics/openssl-crypto-des/README'))
def test_ics_openssl_crypto_des_rpc_des_h(self):
    check_detection([u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)', u'Copyright (c) 1986 by Sun Microsystems, Inc.'], self.get_test_loc('ics/openssl-crypto-des/rpc_des.h'))
def test_ics_openssl_crypto_des_asm_des_enc_m4(self):
    check_detection([u'Copyright Svend Olaf Mikkelsen.', u'Copyright Eric A. Young.'], self.get_test_loc('ics/openssl-crypto-des-asm/des_enc.m4'))
def test_ics_openssl_crypto_dsa_dsa_locl_h(self):
    check_detection([u'Copyright (c) 2007 The OpenSSL Project.'], self.get_test_loc('ics/openssl-crypto-dsa/dsa_locl.h'))
# Copyright-detection checks for ics OpenSSL crypto ec/ecdh/ecdsa/engine files.
def test_ics_openssl_crypto_ec_ec_h(self):
    check_detection([u'Copyright (c) 1998-2005 The OpenSSL Project.', u'Copyright 2002 Sun Microsystems, Inc.'], self.get_test_loc('ics/openssl-crypto-ec/ec.h'))
def test_ics_openssl_crypto_ec_ec_asn1_c(self):
    check_detection([u'Copyright (c) 2000-2003 The OpenSSL Project.'], self.get_test_loc('ics/openssl-crypto-ec/ec_asn1.c'))
def test_ics_openssl_crypto_ec_ec_curve_c(self):
    check_detection([u'Copyright (c) 1998-2004 The OpenSSL Project.', u'Copyright 2002 Sun Microsystems, Inc.'], self.get_test_loc('ics/openssl-crypto-ec/ec_curve.c'))
def test_ics_openssl_crypto_ec_ec_mult_c(self):
    check_detection([u'Copyright (c) 1998-2007 The OpenSSL Project.', u'Copyright 2002 Sun Microsystems, Inc.'], self.get_test_loc('ics/openssl-crypto-ec/ec_mult.c'))
def test_ics_openssl_crypto_ec_ec2_mult_c(self):
    check_detection([u'Copyright 2002 Sun Microsystems, Inc.', u'Copyright (c) 1998-2003 The OpenSSL Project.'], self.get_test_loc('ics/openssl-crypto-ec/ec2_mult.c'))
def test_ics_openssl_crypto_ec_ec2_smpl_c(self):
    check_detection([u'Copyright 2002 Sun Microsystems, Inc.', u'Copyright (c) 1998-2005 The OpenSSL Project.'], self.get_test_loc('ics/openssl-crypto-ec/ec2_smpl.c'))
def test_ics_openssl_crypto_ec_ecp_mont_c(self):
    check_detection([u'Copyright (c) 1998-2001 The OpenSSL Project.', u'Copyright 2002 Sun Microsystems, Inc.'], self.get_test_loc('ics/openssl-crypto-ec/ecp_mont.c'))
def test_ics_openssl_crypto_ec_ecp_nist_c(self):
    check_detection([u'Copyright (c) 1998-2003 The OpenSSL Project.', u'Copyright 2002 Sun Microsystems, Inc.'], self.get_test_loc('ics/openssl-crypto-ec/ecp_nist.c'))
def test_ics_openssl_crypto_ec_ecp_smpl_c(self):
    check_detection([u'Copyright (c) 1998-2002 The OpenSSL Project.', u'Copyright 2002 Sun Microsystems, Inc.'], self.get_test_loc('ics/openssl-crypto-ec/ecp_smpl.c'))
def test_ics_openssl_crypto_ecdh_ecdh_h(self):
    check_detection([u'Copyright 2002 Sun Microsystems, Inc.', u'Copyright (c) 2000-2002 The OpenSSL Project.'], self.get_test_loc('ics/openssl-crypto-ecdh/ecdh.h'))
def test_ics_openssl_crypto_ecdsa_ecdsatest_c(self):
    check_detection([u'Copyright (c) 2000-2005 The OpenSSL Project.', u'Copyright 2002 Sun Microsystems, Inc.'], self.get_test_loc('ics/openssl-crypto-ecdsa/ecdsatest.c'))
def test_ics_openssl_crypto_ecdsa_ecs_asn1_c(self):
    check_detection([u'Copyright (c) 2000-2002 The OpenSSL Project.'], self.get_test_loc('ics/openssl-crypto-ecdsa/ecs_asn1.c'))
def test_ics_openssl_crypto_engine_eng_all_c(self):
    check_detection([u'Copyright (c) 2000-2001 The OpenSSL Project.'], self.get_test_loc('ics/openssl-crypto-engine/eng_all.c'))
def test_ics_openssl_crypto_engine_eng_cryptodev_c(self):
    check_detection([u'Copyright (c) 2002 Bob Beck <beck@openbsd.org>', u'Copyright (c) 2002 Theo de Raadt', u'Copyright (c) 2002 Markus Friedl'], self.get_test_loc('ics/openssl-crypto-engine/eng_cryptodev.c'))
# Copyright-detection checks for ics OpenSSL engine/evp/pem/rand/ui/x509 files.
def test_ics_openssl_crypto_engine_eng_dyn_c(self):
    check_detection([u'Copyright (c) 1999-2001 The OpenSSL Project.'], self.get_test_loc('ics/openssl-crypto-engine/eng_dyn.c'))
def test_ics_openssl_crypto_engine_eng_err_c(self):
    check_detection([u'Copyright (c) 1999-2010 The OpenSSL Project.'], self.get_test_loc('ics/openssl-crypto-engine/eng_err.c'))
def test_ics_openssl_crypto_engine_eng_fat_c(self):
    check_detection([u'Copyright (c) 1999-2001 The OpenSSL Project.', u'Copyright 2002 Sun Microsystems, Inc.'], self.get_test_loc('ics/openssl-crypto-engine/eng_fat.c'))
def test_ics_openssl_crypto_engine_engine_h(self):
    check_detection([u'Copyright (c) 1999-2004 The OpenSSL Project.', u'Copyright 2002 Sun Microsystems, Inc.'], self.get_test_loc('ics/openssl-crypto-engine/engine.h'))
def test_ics_openssl_crypto_evp_m_ecdsa_c(self):
    check_detection([u'Copyright (c) 1998-2002 The OpenSSL Project.', u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)'], self.get_test_loc('ics/openssl-crypto-evp/m_ecdsa.c'))
def test_ics_openssl_crypto_evp_m_sigver_c(self):
    check_detection([u'Copyright (c) 2006,2007 The OpenSSL Project.'], self.get_test_loc('ics/openssl-crypto-evp/m_sigver.c'))
def test_ics_openssl_crypto_pem_pem_all_c(self):
    check_detection([u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)', u'Copyright (c) 1998-2002 The OpenSSL Project.'], self.get_test_loc('ics/openssl-crypto-pem/pem_all.c'))
def test_ics_openssl_crypto_pkcs12_p12_crt_c(self):
    check_detection([u'Copyright (c) 1999-2002 The OpenSSL Project.'], self.get_test_loc('ics/openssl-crypto-pkcs12/p12_crt.c'))
def test_ics_openssl_crypto_rand_rand_win_c(self):
    check_detection([u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)', u'Copyright (c) 1998-2000 The OpenSSL Project.', u'(c) Copyright Microsoft Corp. 1993.'], self.get_test_loc('ics/openssl-crypto-rand/rand_win.c'))
def test_ics_openssl_crypto_rc4_asm_rc4_ia64_pl(self):
    check_detection([u'Copyright (c) 2005 Hewlett-Packard Development Company, L.P.', u'(c) 2005 Hewlett-Packard Development Company'], self.get_test_loc('ics/openssl-crypto-rc4-asm/rc4-ia64.pl'))
def test_ics_openssl_crypto_ui_ui_compat_c(self):
    check_detection([u'Copyright (c) 2001-2002 The OpenSSL Project.'], self.get_test_loc('ics/openssl-crypto-ui/ui_compat.c'))
def test_ics_openssl_crypto_ui_ui_openssl_c(self):
    check_detection([u'Copyright (c) 2001 The OpenSSL Project.', u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)'], self.get_test_loc('ics/openssl-crypto-ui/ui_openssl.c'))
def test_ics_openssl_crypto_x509_x509_h(self):
    check_detection([u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)', u'Copyright 2002 Sun Microsystems, Inc.'], self.get_test_loc('ics/openssl-crypto-x509/x509.h'))
def test_ics_openssl_crypto_x509v3_v3_alt_c(self):
    check_detection([u'Copyright (c) 1999-2003 The OpenSSL Project.'], self.get_test_loc('ics/openssl-crypto-x509v3/v3_alt.c'))
def test_ics_openssl_crypto_x509v3_v3_pci_c(self):
    check_detection([u'Copyright (c) 2004 Kungliga Tekniska Hogskolan (Royal Institute of Technology, Stockholm, Sweden).'], self.get_test_loc('ics/openssl-crypto-x509v3/v3_pci.c'))
# Copyright-detection checks for ics OpenSSL public headers and ssl/ files.
def test_ics_openssl_include_openssl_modes_h(self):
    check_detection([u'Copyright (c) 2008 The OpenSSL Project.'], self.get_test_loc('ics/openssl-include-openssl/modes.h'))
def test_ics_openssl_include_openssl_ssl_h(self):
    check_detection([u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)', u'Copyright (c) 1998-2007 The OpenSSL Project.', u'Copyright 2002 Sun Microsystems, Inc.', u'Copyright 2005 Nokia.'], self.get_test_loc('ics/openssl-include-openssl/ssl.h'))
def test_ics_openssl_include_openssl_ssl3_h(self):
    check_detection([u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)', u'Copyright (c) 1998-2002 The OpenSSL Project.', u'Copyright 2002 Sun Microsystems, Inc.'], self.get_test_loc('ics/openssl-include-openssl/ssl3.h'))
def test_ics_openssl_include_openssl_tls1_h(self):
    check_detection([u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)', u'Copyright (c) 1998-2006 The OpenSSL Project.', u'Copyright 2002 Sun Microsystems, Inc.', u'Copyright 2005 Nokia.'], self.get_test_loc('ics/openssl-include-openssl/tls1.h'))
def test_ics_openssl_ssl_d1_both_c(self):
    check_detection([u'Copyright (c) 1998-2005 The OpenSSL Project.', u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)'], self.get_test_loc('ics/openssl-ssl/d1_both.c'))
def test_ics_openssl_ssl_d1_clnt_c(self):
    check_detection([u'Copyright (c) 1999-2007 The OpenSSL Project.', u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)'], self.get_test_loc('ics/openssl-ssl/d1_clnt.c'))
def test_ics_openssl_ssl_s2_lib_c(self):
    check_detection([u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)', u'Copyright (c) 1998-2007 The OpenSSL Project.'], self.get_test_loc('ics/openssl-ssl/s2_lib.c'))
def test_ics_openssl_ssl_s3_enc_c(self):
    check_detection([u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)', u'Copyright (c) 1998-2007 The OpenSSL Project.', u'Copyright 2005 Nokia.'], self.get_test_loc('ics/openssl-ssl/s3_enc.c'))
def test_ics_openssl_ssl_s3_lib_c(self):
    check_detection([u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)', u'Copyright (c) 1998-2007 The OpenSSL Project.', u'Copyright 2002 Sun Microsystems, Inc.', u'Copyright 2005 Nokia.'], self.get_test_loc('ics/openssl-ssl/s3_lib.c'))
def test_ics_openssl_ssl_ssl_asn1_c(self):
    check_detection([u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)', u'Copyright 2005 Nokia.'], self.get_test_loc('ics/openssl-ssl/ssl_asn1.c'))
def test_ics_openssl_ssl_ssl_cert_c(self):
    check_detection([u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)', u'Copyright (c) 1998-2007 The OpenSSL Project.', u'Copyright 2002 Sun Microsystems, Inc.'], self.get_test_loc('ics/openssl-ssl/ssl_cert.c'))
def test_ics_openssl_ssl_ssltest_c(self):
    check_detection([u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)', u'Copyright (c) 1998-2000 The OpenSSL Project.', u'Copyright 2002 Sun Microsystems, Inc.', u'Copyright 2005 Nokia.'], self.get_test_loc('ics/openssl-ssl/ssltest.c'))
def test_ics_openssl_ssl_t1_reneg_c(self):
    check_detection([u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)', u'Copyright (c) 1998-2009 The OpenSSL Project.'], self.get_test_loc('ics/openssl-ssl/t1_reneg.c'))
# Copyright-detection checks for ics OProfile core and daemon files.
def test_ics_oprofile_changelog_2002(self):
    check_detection([u'copyright for 2002'], self.get_test_loc('ics/oprofile/ChangeLog-2002'))
def test_ics_oprofile_configure_in(self):
    check_detection([u'Copyright 1999 Olaf Titz <olaf@bigred.inka.de>'], self.get_test_loc('ics/oprofile/configure.in'))
def test_ics_oprofile_popt_h(self):
    check_detection([u'(c) 1998-2000 Red Hat, Inc.'], self.get_test_loc('ics/oprofile/popt.h'))
def test_ics_oprofile_agents_jvmpi_jvmpi_oprofile_cpp(self):
    check_detection([u'Copyright 2007 OProfile authors', u'Copyright IBM Corporation 2007'], self.get_test_loc('ics/oprofile-agents-jvmpi/jvmpi_oprofile.cpp'))
def test_ics_oprofile_daemon_init_c(self):
    check_detection([u'Copyright 2002 OProfile authors', u'Copyright (c) 2005 Hewlett-Packard Co.'], self.get_test_loc('ics/oprofile-daemon/init.c'))
def test_ics_oprofile_daemon_opd_anon_c(self):
    check_detection([u'Copyright 2005 OProfile authors'], self.get_test_loc('ics/oprofile-daemon/opd_anon.c'))
def test_ics_oprofile_daemon_opd_cookie_c(self):
    check_detection([u'Copyright 2002, 2005 OProfile authors'], self.get_test_loc('ics/oprofile-daemon/opd_cookie.c'))
def test_ics_oprofile_daemon_opd_events_c(self):
    check_detection([u'Copyright 2002, 2003 OProfile authors'], self.get_test_loc('ics/oprofile-daemon/opd_events.c'))
def test_ics_oprofile_daemon_opd_extended_c(self):
    check_detection([u'Copyright 2007-2009 OProfile authors', u'Copyright (c) 2009 Advanced Micro Devices, Inc.'], self.get_test_loc('ics/oprofile-daemon/opd_extended.c'))
def test_ics_oprofile_daemon_opd_ibs_c(self):
    check_detection([u'Copyright 2007-2010 OProfile authors', u'Copyright (c) 2008 Advanced Micro Devices, Inc.'], self.get_test_loc('ics/oprofile-daemon/opd_ibs.c'))
def test_ics_oprofile_daemon_opd_ibs_h(self):
    check_detection([u'Copyright 2008-2010 OProfile authors', u'Copyright (c) 2008 Advanced Micro Devices, Inc.'], self.get_test_loc('ics/oprofile-daemon/opd_ibs.h'))
def test_ics_oprofile_daemon_opd_ibs_trans_c(self):
    check_detection([u'Copyright 2008 - 2010 OProfile authors', u'Copyright (c) 2008 Advanced Micro Devices, Inc.'], self.get_test_loc('ics/oprofile-daemon/opd_ibs_trans.c'))
def test_ics_oprofile_daemon_opd_ibs_trans_h(self):
    check_detection([u'Copyright 2008 OProfile authors', u'Copyright (c) 2008 Advanced Micro Devices, Inc.'], self.get_test_loc('ics/oprofile-daemon/opd_ibs_trans.h'))
def test_ics_oprofile_daemon_opd_mangling_c(self):
    check_detection([u'Copyright 2002 OProfile authors'], self.get_test_loc('ics/oprofile-daemon/opd_mangling.c'))
# Copyright-detection checks for ics OProfile daemon, doc, and events files.
def test_ics_oprofile_daemon_opd_perfmon_c(self):
    check_detection([u'Copyright 2003 OProfile authors'], self.get_test_loc('ics/oprofile-daemon/opd_perfmon.c'))
def test_ics_oprofile_daemon_opd_pipe_c(self):
    check_detection([u'Copyright 2008 OProfile authors'], self.get_test_loc('ics/oprofile-daemon/opd_pipe.c'))
def test_ics_oprofile_daemon_opd_spu_c(self):
    check_detection([u'Copyright 2007 OProfile authors', u'(c) Copyright IBM Corporation 2007'], self.get_test_loc('ics/oprofile-daemon/opd_spu.c'))
def test_ics_oprofile_daemon_opd_trans_c(self):
    check_detection([u'Copyright 2002 OProfile authors', u'Copyright (c) 2005 Hewlett-Packard Co.', u'(c) Copyright IBM Corporation 2007'], self.get_test_loc('ics/oprofile-daemon/opd_trans.c'))
def test_ics_oprofile_daemon_opd_trans_h(self):
    check_detection([u'Copyright 2002 OProfile authors', u'(c) Copyright IBM Corporation 2007'], self.get_test_loc('ics/oprofile-daemon/opd_trans.h'))
def test_ics_oprofile_daemon_oprofiled_c(self):
    check_detection([u'Copyright 2002, 2003 OProfile authors', u'Copyright (c) 2005 Hewlett-Packard Co.'], self.get_test_loc('ics/oprofile-daemon/oprofiled.c'))
def test_ics_oprofile_daemon_liblegacy_p_module_h(self):
    check_detection([u'Copyright 1996, 1997 Linux International.'], self.get_test_loc('ics/oprofile-daemon-liblegacy/p_module.h'))
def test_ics_oprofile_doc_oprofile_1_in(self):
    check_detection([u'Copyright (c) 1998-2004 University of Manchester, UK, John Levon, and others.'], self.get_test_loc('ics/oprofile-doc/oprofile.1.in'))
def test_ics_oprofile_events_ppc64_970mp_events(self):
    check_detection([u'Copyright OProfile authors', u'Copyright (c) International Business Machines, 2007.'], self.get_test_loc('ics/oprofile-events-ppc64-970MP/events'))
def test_ics_oprofile_events_ppc64_970mp_unit_masks(self):
    check_detection([u'Copyright OProfile authors', u'Copyright (c) International Business Machines, 2006.'], self.get_test_loc('ics/oprofile-events-ppc64-970MP/unit_masks'))
def test_ics_oprofile_events_ppc64_cell_be_events(self):
    check_detection([u'Copyright OProfile authors', u'(c) COPYRIGHT International Business Machines Corp. 2006'], self.get_test_loc('ics/oprofile-events-ppc64-cell-be/events'))
def test_ics_oprofile_events_ppc64_ibm_compat_v1_events(self):
    check_detection([u'Copyright OProfile authors', u'Copyright (c) International Business Machines, 2009.'], self.get_test_loc('ics/oprofile-events-ppc64-ibm-compat-v1/events'))
def test_ics_oprofile_events_x86_64_family10_events_extra_contributed(self):
    check_detection([u'Copyright OProfile authors', u'Copyright (c) 2006-2008 Advanced Micro Devices'], self.get_test_loc('ics/oprofile-events-x86-64-family10/events'))
def test_ics_oprofile_events_x86_64_family11h_unit_masks(self):
    check_detection([u'Copyright OProfile authors', u'Copyright (c) Advanced Micro Devices, 2006-2008'], self.get_test_loc('ics/oprofile-events-x86-64-family11h/unit_masks'))
def test_ics_oprofile_events_x86_64_family12h_events_extra_contributed(self):
    check_detection([u'Copyright OProfile authors', u'Copyright (c) 2006-2010 Advanced Micro Devices'], self.get_test_loc('ics/oprofile-events-x86-64-family12h/events'))
def test_ics_oprofile_include_sstream(self):
test_file = self.get_test_loc('ics/oprofile-include/sstream')
expected = [
u'Copyright (c) 2000 Free Software Foundation',
]
check_detection(expected, test_file)
def test_ics_oprofile_libop_op_hw_specific_h(self):
test_file = self.get_test_loc('ics/oprofile-libop/op_hw_specific.h')
expected = [
u'Copyright 2008 Intel Corporation',
]
check_detection(expected, test_file)
def test_ics_oprofile_libpopt_findme_c(self):
test_file = self.get_test_loc('ics/oprofile-libpopt/findme.c')
expected = [
u'(c) 1998-2002 Red Hat, Inc.',
]
check_detection(expected, test_file)
def test_ics_oprofile_libpp_callgraph_container_cpp(self):
test_file = self.get_test_loc('ics/oprofile-libpp/callgraph_container.cpp')
expected = [
u'Copyright 2004 OProfile authors',
]
check_detection(expected, test_file)
def test_ics_oprofile_libpp_format_output_h(self):
test_file = self.get_test_loc('ics/oprofile-libpp/format_output.h')
expected = [
u'Copyright 2002 OProfile authors',
]
check_detection(expected, test_file)
def test_ics_oprofile_libpp_populate_cpp(self):
test_file = self.get_test_loc('ics/oprofile-libpp/populate.cpp')
expected = [
u'Copyright 2003 OProfile authors',
u'(c) Copyright IBM Corporation 2007',
]
check_detection(expected, test_file)
def test_ics_oprofile_libpp_symbol_cpp(self):
test_file = self.get_test_loc('ics/oprofile-libpp/symbol.cpp')
expected = [
u'Copyright 2002, 2004 OProfile authors',
]
check_detection(expected, test_file)
def test_ics_oprofile_libpp_xml_utils_cpp(self):
test_file = self.get_test_loc('ics/oprofile-libpp/xml_utils.cpp')
expected = [
u'Copyright 2006 OProfile authors',
]
check_detection(expected, test_file)
def test_ics_oprofile_libregex_demangle_java_symbol_cpp(self):
test_file = self.get_test_loc('ics/oprofile-libregex/demangle_java_symbol.cpp')
expected = [
u'Copyright 2007 OProfile authors',
]
check_detection(expected, test_file)
def test_ics_oprofile_libutil_sparse_array_h(self):
test_file = self.get_test_loc('ics/oprofile-libutil++/sparse_array.h')
expected = [
u'Copyright 2007 OProfile authors',
u'Copyright (c) International Business Machines, 2007.',
]
check_detection(expected, test_file)
def test_ics_oprofile_libutil_string_manip_cpp(self):
test_file = self.get_test_loc('ics/oprofile-libutil++/string_manip.cpp')
expected = [
u'Copyright 2002 OProfile authors',
]
check_detection(expected, test_file)
def test_ics_oprofile_libutil_utility_h(self):
test_file = self.get_test_loc('ics/oprofile-libutil++/utility.h')
expected = [
u'Copyright 2002 OProfile authors',
u'(c) Copyright boost.org 1999.',
]
check_detection(expected, test_file)
def test_ics_oprofile_module_ia64_op_pmu_c(self):
test_file = self.get_test_loc('ics/oprofile-module-ia64/op_pmu.c')
expected = [
u'Copyright 2002 OProfile authors',
u'Copyright (c) 1999 Ganesh Venkitachalam <venkitac@us.ibm.com>',
u'Copyright (c) 1999-2002 Hewlett Packard Co Stephane Eranian <eranian@hpl.hp.com> David Mosberger-Tang <davidm@hpl.hp.com>',
]
check_detection(expected, test_file)
def test_ics_oprofile_opcontrol_opcontrol_cpp(self):
test_file = self.get_test_loc('ics/oprofile-opcontrol/opcontrol.cpp')
expected = [
u'Copyright 2008, The Android Open Source Project',
]
check_detection(expected, test_file)
def test_ics_oprofile_opjitconv_conversion_c(self):
test_file = self.get_test_loc('ics/oprofile-opjitconv/conversion.c')
expected = [
u'Copyright 2008 OProfile authors',
u'Copyright IBM Corporation 2008',
]
check_detection(expected, test_file)
def test_ics_oprofile_pp_oparchive_cpp(self):
test_file = self.get_test_loc('ics/oprofile-pp/oparchive.cpp')
expected = [
u'Copyright 2003, 2004 OProfile authors',
]
check_detection(expected, test_file)
def test_ics_oprofile_pp_oparchive_options_cpp(self):
test_file = self.get_test_loc('ics/oprofile-pp/oparchive_options.cpp')
expected = [
u'Copyright 2002, 2003, 2004 OProfile authors',
]
check_detection(expected, test_file)
def test_ics_oprofile_utils_opcontrol(self):
test_file = self.get_test_loc('ics/oprofile-utils/opcontrol')
expected = [
u'Copyright 2002 Read',
u'Copyright IBM Corporation 2007',
]
check_detection(expected, test_file)
@expectedFailure
def test_ics_oprofile_utils_opcontrol_misc(self):
test_file = self.get_test_loc('ics/oprofile-utils/opcontrol')
expected = [
u'Copyright 2002 Read the file COPYING',
u'Copyright IBM Corporation 2007',
]
check_detection(expected, test_file)
    # --- ICS ping / ping6 fixtures ---
    # Duplicate entries in an expected list are intentional: the same
    # copyright statement appears more than once in the source file and
    # the detector must report each occurrence.
    def test_ics_ping_notice(self):
        test_file = self.get_test_loc('ics/ping/NOTICE')
        expected = [
            u'Copyright (c) 1989 The Regents of the University of California.',
        ]
        check_detection(expected, test_file)
    def test_ics_ping_ping_c(self):
        test_file = self.get_test_loc('ics/ping/ping.c')
        expected = [
            u'Copyright (c) 1989 The Regents of the University of California.',
            u'Copyright (c) 1989 The Regents of the University of California.',
        ]
        check_detection(expected, test_file)
    def test_ics_ping6_notice(self):
        test_file = self.get_test_loc('ics/ping6/NOTICE')
        expected = [
            u'Copyright (c) 1995, 1996, 1997, and 1998 WIDE Project.',
            u'Copyright (c) 1989, 1993 The Regents of the University of California.',
        ]
        check_detection(expected, test_file)
    def test_ics_ping6_ping6_c(self):
        test_file = self.get_test_loc('ics/ping6/ping6.c')
        expected = [
            u'Copyright (c) 1995, 1996, 1997, and 1998 WIDE Project.',
            u'Copyright (c) 1989, 1993 The Regents of the University of California.',
            u'Copyright (c) 1989, 1993 The Regents of the University of California.',
        ]
        check_detection(expected, test_file)
    # --- ICS ppp/pppd core fixtures ---
    # One test per source file of the pppd daemon; expected lists give
    # the detector's exact output in file order.
    def test_ics_ppp_pppd_auth_c(self):
        test_file = self.get_test_loc('ics/ppp-pppd/auth.c')
        expected = [
            u'Copyright (c) 1993-2002 Paul Mackerras.',
            u'Copyright (c) 1984-2000 Carnegie Mellon University.',
        ]
        check_detection(expected, test_file)
    def test_ics_ppp_pppd_cbcp_c(self):
        test_file = self.get_test_loc('ics/ppp-pppd/cbcp.c')
        expected = [
            u'Copyright (c) 1995 Pedro Roque Marques.',
        ]
        check_detection(expected, test_file)
    def test_ics_ppp_pppd_ccp_c(self):
        test_file = self.get_test_loc('ics/ppp-pppd/ccp.c')
        expected = [
            u'Copyright (c) 1994-2002 Paul Mackerras.',
        ]
        check_detection(expected, test_file)
    def test_ics_ppp_pppd_chap_ms_c(self):
        test_file = self.get_test_loc('ics/ppp-pppd/chap_ms.c')
        expected = [
            u'Copyright (c) 1995 Eric Rosenquist.',
            u'Copyright (c) 2002 The Android Open Source Project',
        ]
        check_detection(expected, test_file)
    def test_ics_ppp_pppd_chap_ms_h(self):
        test_file = self.get_test_loc('ics/ppp-pppd/chap_ms.h')
        expected = [
            u'Copyright (c) 1995 Eric Rosenquist.',
        ]
        check_detection(expected, test_file)
    def test_ics_ppp_pppd_chap_md5_c(self):
        test_file = self.get_test_loc('ics/ppp-pppd/chap-md5.c')
        expected = [
            u'Copyright (c) 2003 Paul Mackerras.',
        ]
        check_detection(expected, test_file)
    def test_ics_ppp_pppd_demand_c(self):
        test_file = self.get_test_loc('ics/ppp-pppd/demand.c')
        expected = [
            u'Copyright (c) 1996-2002 Paul Mackerras.',
        ]
        check_detection(expected, test_file)
    def test_ics_ppp_pppd_eap_c(self):
        test_file = self.get_test_loc('ics/ppp-pppd/eap.c')
        expected = [
            u'Copyright (c) 2001 by Sun Microsystems, Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_ppp_pppd_ecp_c(self):
        test_file = self.get_test_loc('ics/ppp-pppd/ecp.c')
        expected = [
            u'Copyright (c) 2002 The Android Open Source Project',
            u'Copyright (c) 1994-2002 Paul Mackerras.',
        ]
        check_detection(expected, test_file)
    def test_ics_ppp_pppd_ecp_h(self):
        test_file = self.get_test_loc('ics/ppp-pppd/ecp.h')
        expected = [
            u'Copyright (c) 2002 The Android Open Source Project',
        ]
        check_detection(expected, test_file)
    def test_ics_ppp_pppd_eui64_c(self):
        test_file = self.get_test_loc('ics/ppp-pppd/eui64.c')
        expected = [
            u'Copyright (c) 1999 Tommi Komulainen.',
        ]
        check_detection(expected, test_file)
    def test_ics_ppp_pppd_fsm_c(self):
        test_file = self.get_test_loc('ics/ppp-pppd/fsm.c')
        expected = [
            u'Copyright (c) 1984-2000 Carnegie Mellon University.',
        ]
        check_detection(expected, test_file)
    def test_ics_ppp_pppd_ipv6cp_c(self):
        test_file = self.get_test_loc('ics/ppp-pppd/ipv6cp.c')
        expected = [
            u'Copyright (c) 1999 Tommi Komulainen.',
            u'Copyright (c) 1995, 1996, 1997 Francis.Dupont@inria.fr, INRIA',
            u'Copyright (c) 1998, 1999 Francis.Dupont@inria.fr',
            u'Copyright (c) 1984-2000 Carnegie Mellon University.',
        ]
        check_detection(expected, test_file)
    def test_ics_ppp_pppd_main_c(self):
        test_file = self.get_test_loc('ics/ppp-pppd/main.c')
        expected = [
            u'Copyright (c) 1984-2000 Carnegie Mellon University.',
            u'Copyright (c) 1999-2004 Paul Mackerras.',
        ]
        check_detection(expected, test_file)
    def test_ics_ppp_pppd_md4_c(self):
        test_file = self.get_test_loc('ics/ppp-pppd/md4.c')
        expected = [
            u'(c) 1990 RSA Data Security, Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_ppp_pppd_md5_c(self):
        test_file = self.get_test_loc('ics/ppp-pppd/md5.c')
        expected = [
            u'Copyright (c) 1990, RSA Data Security, Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_ppp_pppd_md5_h(self):
        test_file = self.get_test_loc('ics/ppp-pppd/md5.h')
        expected = [
            u'Copyright (c) 1990, RSA Data Security, Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_ppp_pppd_notice(self):
        test_file = self.get_test_loc('ics/ppp-pppd/NOTICE')
        expected = [
            u'Copyright (c) 1984-2000 Carnegie Mellon University.',
            u'Copyright (c) 1995 Pedro Roque Marques.',
            u'Copyright (c) 2000-2004 Paul Mackerras.',
            u'Copyright (c) 1994-2002 Paul Mackerras.',
            u'Copyright (c) 2003 Paul Mackerras.',
            u'Copyright (c) 1996-2002 Paul Mackerras.',
            u'Copyright (c) 1999-2004 Paul Mackerras.',
            u'Copyright (c) 2000-2002 Paul Mackerras.',
            u'Copyright (c) 1999-2002 Paul Mackerras.',
            u'Copyright (c) 1995 Eric Rosenquist.',
            u'Copyright (c) 2002 The Android Open Source Project',
            u'Copyright (c) 1990, RSA Data Security, Inc.',
            u'Copyright (c) 2001 by Sun Microsystems, Inc.',
            u'Copyright (c) 1999 Tommi Komulainen.',
            u'Copyright (c) 1995, 1996, 1997 Francis.Dupont@inria.fr, INRIA',
            u'Copyright (c) 1998, 1999 Francis.Dupont@inria.fr',
        ]
        check_detection(expected, test_file)
    def test_ics_ppp_pppd_pppd_8(self):
        test_file = self.get_test_loc('ics/ppp-pppd/pppd.8')
        expected = [
            u'Copyright (c) 1993-2003 Paul Mackerras <paulus@samba.org>',
            u'Copyright (c) 1984-2000 Carnegie Mellon University.',
            u'Copyright (c) 1993-2004 Paul Mackerras.',
            u'Copyright (c) 1995 Pedro Roque Marques.',
            u'Copyright (c) 1995 Eric Rosenquist.',
            u'Copyright (c) 1999 Tommi Komulainen.',
            u'Copyright (c) Andrew Tridgell 1999',
            u'Copyright (c) 2000 by Sun Microsystems, Inc.',
            u'Copyright (c) 2001 by Sun Microsystems, Inc.',
            u'Copyright (c) 2002 The Android Open Source Project',
        ]
        check_detection(expected, test_file)
    def test_ics_ppp_pppd_pppd_h(self):
        test_file = self.get_test_loc('ics/ppp-pppd/pppd.h')
        expected = [
            u'Copyright (c) 1984-2000 Carnegie Mellon University.',
        ]
        check_detection(expected, test_file)
    def test_ics_ppp_pppd_sys_linux_c(self):
        test_file = self.get_test_loc('ics/ppp-pppd/sys-linux.c')
        expected = [
            u'Copyright (c) 1994-2004 Paul Mackerras.',
            u'Copyright (c) 1984-2000 Carnegie Mellon University.',
        ]
        check_detection(expected, test_file)
    def test_ics_ppp_pppd_sys_solaris_c(self):
        test_file = self.get_test_loc('ics/ppp-pppd/sys-solaris.c')
        expected = [
            u'Copyright (c) 2000 by Sun Microsystems, Inc.',
            u'Copyright (c) 1995-2002 Paul Mackerras.',
            u'Copyright (c) 1984-2000 Carnegie Mellon University.',
        ]
        check_detection(expected, test_file)
    def test_ics_ppp_pppd_tty_c(self):
        test_file = self.get_test_loc('ics/ppp-pppd/tty.c')
        expected = [
            u'Copyright (c) 2000-2004 Paul Mackerras.',
            u'Copyright (c) 1984-2000 Carnegie Mellon University.',
        ]
        check_detection(expected, test_file)
    def test_ics_ppp_pppd_utils_c(self):
        test_file = self.get_test_loc('ics/ppp-pppd/utils.c')
        expected = [
            u'Copyright (c) 1999-2002 Paul Mackerras.',
        ]
        check_detection(expected, test_file)
    # --- ICS ppp/pppd headers and plugin fixtures (include/net, plugins,
    # pppoatm) ---
    def test_ics_ppp_pppd_include_net_ppp_defs_h(self):
        test_file = self.get_test_loc('ics/ppp-pppd-include-net/ppp_defs.h')
        expected = [
            u'Copyright (c) 1984 Paul Mackerras.',
        ]
        check_detection(expected, test_file)
    def test_ics_ppp_pppd_include_net_pppio_h(self):
        test_file = self.get_test_loc('ics/ppp-pppd-include-net/pppio.h')
        expected = [
            u'Copyright (c) 1994 Paul Mackerras.',
        ]
        check_detection(expected, test_file)
    def test_ics_ppp_pppd_include_net_slcompress_h(self):
        test_file = self.get_test_loc('ics/ppp-pppd-include-net/slcompress.h')
        expected = [
            u'Copyright (c) 1989 Regents of the University of California.',
        ]
        check_detection(expected, test_file)
    def test_ics_ppp_pppd_plugins_minconn_c(self):
        test_file = self.get_test_loc('ics/ppp-pppd-plugins/minconn.c')
        expected = [
            u'Copyright (c) 1999 Paul Mackerras.',
        ]
        check_detection(expected, test_file)
    def test_ics_ppp_pppd_plugins_passprompt_c(self):
        test_file = self.get_test_loc('ics/ppp-pppd-plugins/passprompt.c')
        expected = [
            u'Copyright 1999 Paul Mackerras, Alan Curry.',
        ]
        check_detection(expected, test_file)
    def test_ics_ppp_pppd_plugins_winbind_c(self):
        test_file = self.get_test_loc('ics/ppp-pppd-plugins/winbind.c')
        expected = [
            u'Copyright (c) 2003 Andrew Bartlet <abartlet@samba.org>',
            u'Copyright 1999 Paul Mackerras, Alan Curry.',
            u'Copyright (c) 2002 Roaring Penguin Software Inc.',
            u'Copyright (c) 1996, Matjaz Godec <gody@elgo.si>',
            u'Copyright (c) 1996, Lars Fenneberg <in5y050@public.uni-hamburg.de>',
            u'Copyright (c) 1997, Miguel A.L. Paraz <map@iphil.net>',
            u'Copyright (c) 1995,1996,1997,1998 Lars Fenneberg <lf@elemental.net>',
            u'Copyright (c) 2002 Roaring Penguin Software Inc.',
            u'Copyright (c) 2003, Sean E. Millichamp',
            u'Copyright (c) Andrew Tridgell 1992-2001',
            u'Copyright (c) Simo Sorce 2001-2002',
            u'Copyright (c) Martin Pool 2003',
        ]
        check_detection(expected, test_file)
    def test_ics_ppp_pppd_plugins_pppoatm_copying(self):
        test_file = self.get_test_loc('ics/ppp-pppd-plugins-pppoatm/COPYING')
        expected = [
            u'Copyright 1995-2000 EPFL-LRC/ICA',
        ]
        check_detection(expected, test_file)
    def test_ics_ppp_pppd_plugins_pppoatm_pppoatm_c(self):
        test_file = self.get_test_loc('ics/ppp-pppd-plugins-pppoatm/pppoatm.c')
        expected = [
            u'Copyright 2000 Mitchell Blank Jr.',
        ]
        check_detection(expected, test_file)
    # --- ICS ppp/pppd RADIUS plugin fixtures ---
    def test_ics_ppp_pppd_plugins_radius_avpair_c(self):
        test_file = self.get_test_loc('ics/ppp-pppd-plugins-radius/avpair.c')
        expected = [
            u'Copyright (c) 1995 Lars Fenneberg',
            u'Copyright 1992 Livingston Enterprises, Inc.',
            u'Copyright 1992,1993, 1994,1995 The Regents of the University of Michigan and Merit Network, Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_ppp_pppd_plugins_radius_buildreq_c(self):
        test_file = self.get_test_loc('ics/ppp-pppd-plugins-radius/buildreq.c')
        expected = [
            u'Copyright (c) 1995,1997 Lars Fenneberg',
        ]
        check_detection(expected, test_file)
    def test_ics_ppp_pppd_plugins_radius_clientid_c(self):
        test_file = self.get_test_loc('ics/ppp-pppd-plugins-radius/clientid.c')
        expected = [
            u'Copyright (c) 1995,1996,1997 Lars Fenneberg',
        ]
        check_detection(expected, test_file)
    def test_ics_ppp_pppd_plugins_radius_config_c(self):
        test_file = self.get_test_loc('ics/ppp-pppd-plugins-radius/config.c')
        expected = [
            u'Copyright (c) 1995,1996,1997 Lars Fenneberg',
            u'Copyright 1992 Livingston Enterprises, Inc.',
            u'Copyright 1992,1993, 1994,1995 The Regents of the University of Michigan and Merit Network, Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_ppp_pppd_plugins_radius_copyright(self):
        test_file = self.get_test_loc('ics/ppp-pppd-plugins-radius/COPYRIGHT')
        expected = [
            u'Copyright (c) 2002 Roaring Penguin Software Inc.',
            u'Copyright (c) 1995,1996,1997,1998 Lars Fenneberg <lf@elemental.net>',
            u'Copyright 1992 Livingston Enterprises, Inc. Livingston Enterprises, Inc.',
            u'Copyright (c) 1991-2, RSA Data Security, Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_ppp_pppd_plugins_radius_dict_c(self):
        test_file = self.get_test_loc('ics/ppp-pppd-plugins-radius/dict.c')
        expected = [
            u'Copyright (c) 2002 Roaring Penguin Software Inc.',
            u'Copyright (c) 1995,1996,1997 Lars Fenneberg',
            u'Copyright 1992 Livingston Enterprises, Inc.',
            u'Copyright 1992,1993, 1994,1995 The Regents of the University of Michigan and Merit Network, Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_ppp_pppd_plugins_radius_includes_h(self):
        test_file = self.get_test_loc('ics/ppp-pppd-plugins-radius/includes.h')
        expected = [
            u'Copyright (c) 1997 Lars Fenneberg',
            u'Copyright 1992 Livingston Enterprises, Inc.',
            u'Copyright 1992,1993, 1994,1995 The Regents of the University of Michigan and Merit Network, Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_ppp_pppd_plugins_radius_lock_c(self):
        test_file = self.get_test_loc('ics/ppp-pppd-plugins-radius/lock.c')
        expected = [
            u'Copyright (c) 1997 Lars Fenneberg',
        ]
        check_detection(expected, test_file)
    def test_ics_ppp_pppd_plugins_radius_makefile_linux(self):
        test_file = self.get_test_loc('ics/ppp-pppd-plugins-radius/Makefile.linux')
        expected = [
            u'Copyright 2002 Roaring Penguin Software Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_ppp_pppd_plugins_radius_options_h(self):
        test_file = self.get_test_loc('ics/ppp-pppd-plugins-radius/options.h')
        expected = [
            u'Copyright (c) 1996 Lars Fenneberg',
        ]
        check_detection(expected, test_file)
    def test_ics_ppp_pppd_plugins_radius_pathnames_h(self):
        test_file = self.get_test_loc('ics/ppp-pppd-plugins-radius/pathnames.h')
        expected = [
            u'Copyright (c) 1995,1996 Lars Fenneberg',
            u'Copyright 1992 Livingston Enterprises, Inc.',
            u'Copyright 1992,1993, 1994,1995 The Regents of the University of Michigan and Merit Network, Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_ppp_pppd_plugins_radius_radattr_c(self):
        test_file = self.get_test_loc('ics/ppp-pppd-plugins-radius/radattr.c')
        expected = [
            u'Copyright (c) 2002 Roaring Penguin Software Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_ppp_pppd_plugins_radius_radius_c(self):
        test_file = self.get_test_loc('ics/ppp-pppd-plugins-radius/radius.c')
        expected = [
            u'Copyright (c) 2002 Roaring Penguin Software Inc.',
            u'Copyright (c) 1996, Matjaz Godec <gody@elgo.si>',
            u'Copyright (c) 1996, Lars Fenneberg <in5y050@public.uni-hamburg.de>',
            u'Copyright (c) 1997, Miguel A.L. Paraz <map@iphil.net>',
            u'Copyright (c) 1995,1996,1997,1998 Lars Fenneberg <lf@elemental.net>',
            u'Copyright (c) 2002 Roaring Penguin Software Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_ppp_pppd_plugins_radius_radiusclient_h(self):
        test_file = self.get_test_loc('ics/ppp-pppd-plugins-radius/radiusclient.h')
        expected = [
            u'Copyright (c) 1995,1996,1997,1998 Lars Fenneberg',
            u'Copyright 1992 Livingston Enterprises, Inc.',
            u'Copyright 1992,1993, 1994,1995 The Regents of the University of Michigan and Merit Network, Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_ppp_pppd_plugins_radius_radrealms_c(self):
        test_file = self.get_test_loc('ics/ppp-pppd-plugins-radius/radrealms.c')
        expected = [
            u'Copyright (c) 2002 Netservers',
        ]
        check_detection(expected, test_file)
    # --- ICS ppp/pppd rp-pppoe plugin fixtures ---
    def test_ics_ppp_pppd_plugins_rp_pppoe_common_c(self):
        test_file = self.get_test_loc('ics/ppp-pppd-plugins-rp-pppoe/common.c')
        expected = [
            u'Copyright (c) 2000 by Roaring Penguin Software Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_ppp_pppd_plugins_rp_pppoe_discovery_c(self):
        test_file = self.get_test_loc('ics/ppp-pppd-plugins-rp-pppoe/discovery.c')
        expected = [
            u'Copyright (c) 1999 by Roaring Penguin Software Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_ppp_pppd_plugins_rp_pppoe_makefile_linux(self):
        test_file = self.get_test_loc('ics/ppp-pppd-plugins-rp-pppoe/Makefile.linux')
        expected = [
            u'Copyright (c) 2001 Roaring Penguin Software Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_ppp_pppd_plugins_rp_pppoe_plugin_c(self):
        test_file = self.get_test_loc('ics/ppp-pppd-plugins-rp-pppoe/plugin.c')
        expected = [
            u'Copyright (c) 2001 by Roaring Penguin Software Inc., Michal Ostrowski and Jamal Hadi Salim.',
            u'Copyright 2000 Michal Ostrowski <mostrows@styx.uwaterloo.ca>, Jamal Hadi Salim <hadi@cyberus.ca>',
        ]
        check_detection(expected, test_file)
    def test_ics_ppp_pppd_plugins_rp_pppoe_pppoe_h(self):
        test_file = self.get_test_loc('ics/ppp-pppd-plugins-rp-pppoe/pppoe.h')
        expected = [
            u'Copyright (c) 2000 Roaring Penguin Software Inc.',
        ]
        check_detection(expected, test_file)
    # "trail_name" in the test name flags a holder name that trails the
    # statement ("Marco d'Itri"); the apostrophe forces a double-quoted
    # Python literal below.
    def test_ics_ppp_pppd_plugins_rp_pppoe_pppoe_discovery_c_trail_name(self):
        test_file = self.get_test_loc('ics/ppp-pppd-plugins-rp-pppoe/pppoe-discovery.c')
        expected = [
            u'Copyright (c) 2000-2001 by Roaring Penguin Software Inc.',
            u"Copyright (c) 2004 Marco d'Itri <md@linux.it>",
        ]
        check_detection(expected, test_file)
    # --- ICS ProGuard fixtures ---
    def test_ics_proguard_notice(self):
        test_file = self.get_test_loc('ics/proguard/NOTICE')
        expected = [
            u'Copyright (c) 2002-2009 Eric Lafortune.',
            u'Copyright (c) 1989, 1991 Free Software Foundation, Inc.',
            u'copyrighted by the Free Software Foundation',
        ]
        check_detection(expected, test_file)
    def test_ics_proguard_readme(self):
        test_file = self.get_test_loc('ics/proguard/README')
        expected = [
            u'Copyright (c) 2002-2009 Eric Lafortune (eric@graphics.cornell.edu)',
        ]
        check_detection(expected, test_file)
    def test_ics_proguard_docs_acknowledgements_html(self):
        # Current (imperfect) behavior on HTML input: the anchor markup
        # leaks into the detected statement.
        test_file = self.get_test_loc('ics/proguard-docs/acknowledgements.html')
        expected = [
            u"Copyright (c) 2002-2009 <a href http://www.graphics.cornell.edu/~eric/'>Eric",
        ]
        check_detection(expected, test_file)
    # Same HTML file as the test just above, but asserting the DESIRED
    # output with markup stripped — expected to fail until HTML handling
    # improves.
    @expectedFailure
    def test_ics_proguard_docs_acknowledgements_html_markup(self):
        test_file = self.get_test_loc('ics/proguard-docs/acknowledgements.html')
        expected = [
            u'Copyright (c) 2002-2009 http://www.graphics.cornell.edu/~eric/ Eric Lafortune',
        ]
        check_detection(expected, test_file)
    def test_ics_proguard_docs_gpl_html(self):
        test_file = self.get_test_loc('ics/proguard-docs/GPL.html')
        expected = [
            u'Copyright (c) 1989, 1991 Free Software Foundation, Inc.',
            u'copyrighted by the Free Software Foundation',
        ]
        check_detection(expected, test_file)
    def test_ics_proguard_docs_gpl_exception_html(self):
        test_file = self.get_test_loc('ics/proguard-docs/GPL_exception.html')
        expected = [
            u'Copyright (c) 2002-2009 Eric Lafortune',
        ]
        check_detection(expected, test_file)
    def test_ics_proguard_examples_annotations_src_proguard_annotation_keep_java(self):
        test_file = self.get_test_loc('ics/proguard-examples-annotations-src-proguard-annotation/Keep.java')
        expected = [
            u'Copyright (c) 2002-2007 Eric Lafortune (eric@graphics.cornell.edu)',
        ]
        check_detection(expected, test_file)
    def test_ics_proguard_src_proguard_argumentwordreader_java(self):
        test_file = self.get_test_loc('ics/proguard-src-proguard/ArgumentWordReader.java')
        expected = [
            u'Copyright (c) 2002-2009 Eric Lafortune (eric@graphics.cornell.edu)',
        ]
        check_detection(expected, test_file)
    def test_ics_proguard_src_proguard_gui_guiresources_properties(self):
        test_file = self.get_test_loc('ics/proguard-src-proguard-gui/GUIResources.properties')
        expected = [
            u'Copyright (c) 2002-2009 Eric Lafortune (eric@graphics.cornell.edu)',
            u'Copyright (c) 2002-2009.',
        ]
        check_detection(expected, test_file)
    # --- ICS protobuf fixtures ---
    # The autotools files (aclocal.m4, configure, libtool.m4, ...) carry
    # many near-identical FSF statements; each occurrence is listed in
    # file order, duplicates included.
    def test_ics_protobuf_aclocal_m4(self):
        test_file = self.get_test_loc('ics/protobuf/aclocal.m4')
        expected = [
            u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc.',
            u'Copyright (c) 2002, 2003, 2005, 2006, 2007 Free Software Foundation, Inc.',
            u'Copyright (c) 2001, 2003, 2005 Free Software Foundation, Inc.',
            u'Copyright (c) 1997, 2000, 2001, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
            u'Copyright (c) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
            u'Copyright (c) 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.',
            u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2008 Free Software Foundation, Inc.',
            u'Copyright (c) 2001, 2003, 2005 Free Software Foundation, Inc.',
            u'Copyright (c) 2003, 2005 Free Software Foundation, Inc.',
            u'Copyright (c) 2001, 2002, 2003, 2005 Free Software Foundation, Inc.',
            u'Copyright (c) 1997, 1999, 2000, 2001, 2003, 2004, 2005 Free Software Foundation, Inc.',
            u'Copyright (c) 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
            u'Copyright (c) 2001, 2002, 2003, 2005 Free Software Foundation, Inc.',
            u'Copyright (c) 1996, 1997, 2000, 2001, 2003, 2005 Free Software Foundation, Inc.',
            u'Copyright (c) 2001, 2003, 2005 Free Software Foundation, Inc.',
            u'Copyright (c) 2006 Free Software Foundation, Inc.',
            u'Copyright (c) 2004, 2005 Free Software Foundation, Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_protobuf_configure(self):
        test_file = self.get_test_loc('ics/protobuf/configure')
        expected = [
            u'Copyright (c) 1992, 1993, 1994, 1995, 1996, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
            u'Copyright (c) 1992, 1993, 1994, 1995, 1996, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
            u'Copyright (c) 2006 Free Software Foundation, Inc.',
            u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_protobuf_install_txt(self):
        test_file = self.get_test_loc('ics/protobuf/INSTALL.txt')
        expected = [
            u'Copyright 1994, 1995, 1996, 1999, 2000, 2001, 2002 Free Software Foundation, Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_protobuf_ltmain_sh(self):
        test_file = self.get_test_loc('ics/protobuf/ltmain.sh')
        expected = [
            u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, 2006, 2007 2008 Free Software Foundation, Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_protobuf_readme_txt(self):
        test_file = self.get_test_loc('ics/protobuf/README.txt')
        expected = [
            u'Copyright 2008 Google Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_protobuf_editors_proto_vim(self):
        test_file = self.get_test_loc('ics/protobuf-editors/proto.vim')
        expected = [
            u'Copyright 2008 Google Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_protobuf_gtest_aclocal_m4(self):
        test_file = self.get_test_loc('ics/protobuf-gtest/aclocal.m4')
        expected = [
            u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc.',
            u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc.',
            u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc.',
            u'Copyright (c) 2008 Free Software Foundation, Inc.',
            u'Copyright (c) 2004, 2005, 2007, 2008 Free Software Foundation, Inc.',
            u'Copyright (c) 2004, 2005, 2007 Free Software Foundation, Inc.',
            u'Copyright (c) 2004 Free Software Foundation, Inc.',
            u'Copyright (c) 2004, 2005, 2007 Free Software Foundation, Inc.',
            u'Copyright (c) 2002, 2003, 2005, 2006, 2007 Free Software Foundation, Inc.',
            u'Copyright (c) 2001, 2003, 2005 Free Software Foundation, Inc.',
            u'Copyright (c) 1997, 2000, 2001, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
            u'Copyright (c) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
            u'Copyright (c) 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.',
            u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2008 Free Software Foundation, Inc.',
            u'Copyright (c) 2001, 2003, 2005 Free Software Foundation, Inc.',
            u'Copyright (c) 2003, 2005 Free Software Foundation, Inc.',
            u'Copyright (c) 2001, 2002, 2003, 2005 Free Software Foundation, Inc.',
            u'Copyright (c) 1997, 1999, 2000, 2001, 2003, 2004, 2005 Free Software Foundation, Inc.',
            u'Copyright (c) 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
            u'Copyright (c) 2001, 2002, 2003, 2005 Free Software Foundation, Inc.',
            u'Copyright (c) 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.',
            u'Copyright (c) 2001, 2003, 2005 Free Software Foundation, Inc.',
            u'Copyright (c) 1996, 1997, 2000, 2001, 2003, 2005 Free Software Foundation, Inc.',
            u'Copyright (c) 2001, 2003, 2005 Free Software Foundation, Inc.',
            u'Copyright (c) 2006 Free Software Foundation, Inc.',
            u'Copyright (c) 2004, 2005 Free Software Foundation, Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_protobuf_gtest_scons_sconscript(self):
        test_file = self.get_test_loc('ics/protobuf-gtest-scons/SConscript')
        expected = [
            u'Copyright 2008 Google Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_protobuf_java_src_main_java_com_google_protobuf_abstractmessage_java(self):
        test_file = self.get_test_loc('ics/protobuf-java-src-main-java-com-google-protobuf/AbstractMessage.java')
        expected = [
            u'Copyright 2008 Google Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_protobuf_m4_libtool_m4(self):
        test_file = self.get_test_loc('ics/protobuf-m4/libtool.m4')
        expected = [
            u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc.',
            u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc.',
            u'Copyright (c) 2008 Free Software Foundation, Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_protobuf_m4_ltoptions_m4(self):
        test_file = self.get_test_loc('ics/protobuf-m4/ltoptions.m4')
        expected = [
            u'Copyright (c) 2004, 2005, 2007, 2008 Free Software Foundation, Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_protobuf_m4_ltsugar_m4(self):
        test_file = self.get_test_loc('ics/protobuf-m4/ltsugar.m4')
        expected = [
            u'Copyright (c) 2004, 2005, 2007 Free Software Foundation, Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_protobuf_m4_ltversion_m4(self):
        test_file = self.get_test_loc('ics/protobuf-m4/ltversion.m4')
        expected = [
            u'Copyright (c) 2004 Free Software Foundation, Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_protobuf_src_google_protobuf_compiler_javamicro_javamicro_params_h(self):
        test_file = self.get_test_loc('ics/protobuf-src-google-protobuf-compiler-javamicro/javamicro_params.h')
        expected = [
            u'Copyright 2010 Google Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_protobuf_src_google_protobuf_io_tokenizer_cc(self):
        test_file = self.get_test_loc('ics/protobuf-src-google-protobuf-io/tokenizer.cc')
        expected = [
            u'Copyright 2008 Google Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_protobuf_src_google_protobuf_stubs_structurally_valid_cc(self):
        test_file = self.get_test_loc('ics/protobuf-src-google-protobuf-stubs/structurally_valid.cc')
        expected = [
            u'Copyright 2005-2008 Google Inc.',
        ]
        check_detection(expected, test_file)
    # --- ICS qemu fixtures ---
    def test_ics_qemu_a_out_h(self):
        test_file = self.get_test_loc('ics/qemu/a.out.h')
        expected = [
            u'Copyright 1997, 1998, 1999, 2001 Red Hat, Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_acl_c(self):
        test_file = self.get_test_loc('ics/qemu/acl.c')
        expected = [
            u'Copyright (c) 2009 Red Hat, Inc',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_aio_android_c(self):
        test_file = self.get_test_loc('ics/qemu/aio-android.c')
        expected = [
            u'Copyright IBM, Corp. 2008',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_android_trace_h(self):
        test_file = self.get_test_loc('ics/qemu/android-trace.h')
        expected = [
            u'Copyright (c) 2006-2007 The Android Open Source Project',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_arch_init_c(self):
        test_file = self.get_test_loc('ics/qemu/arch_init.c')
        expected = [
            u'Copyright (c) 2003-2008 Fabrice Bellard',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_arm_dis_c(self):
        test_file = self.get_test_loc('ics/qemu/arm-dis.c')
        expected = [
            u'Copyright 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004 2007, Free Software Foundation, Inc.',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_arm_semi_c(self):
        test_file = self.get_test_loc('ics/qemu/arm-semi.c')
        expected = [
            u'Copyright (c) 2005, 2007 CodeSourcery.',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_block_c(self):
        test_file = self.get_test_loc('ics/qemu/block.c')
        expected = [
            u'Copyright (c) 2003 Fabrice Bellard',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_bt_host_c(self):
        test_file = self.get_test_loc('ics/qemu/bt-host.c')
        expected = [
            u'Copyright (c) 2008 Andrzej Zaborowski <balrog@zabor.org>',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_console_c(self):
        test_file = self.get_test_loc('ics/qemu/console.c')
        expected = [
            u'Copyright (c) 2004 Fabrice Bellard',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_cpu_all_h(self):
        test_file = self.get_test_loc('ics/qemu/cpu-all.h')
        expected = [
            u'Copyright (c) 2003 Fabrice Bellard',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_cpu_exec_c(self):
        test_file = self.get_test_loc('ics/qemu/cpu-exec.c')
        expected = [
            u'Copyright (c) 2003-2005 Fabrice Bellard',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_curses_c(self):
        test_file = self.get_test_loc('ics/qemu/curses.c')
        expected = [
            u'Copyright (c) 2005 Andrzej Zaborowski <balrog@zabor.org>',
        ]
        check_detection(expected, test_file)
    def test_ics_qemu_curses_keys_h(self):
        test_file = self.get_test_loc('ics/qemu/curses_keys.h')
        expected = [
            u'Copyright (c) 2005 Andrzej Zaborowski <balrog@zabor.org>',
        ]
        check_detection(expected, test_file)
def test_ics_qemu_cutils_c(self):
test_file = self.get_test_loc('ics/qemu/cutils.c')
expected = [
u'Copyright (c) 2006 Fabrice Bellard',
]
check_detection(expected, test_file)
def test_ics_qemu_d3des_c(self):
test_file = self.get_test_loc('ics/qemu/d3des.c')
expected = [
u'Copyright (c) 1999 AT&T Laboratories Cambridge.',
u'Copyright (c) 1988,1989,1990,1991,1992 by Richard Outerbridge.',
]
check_detection(expected, test_file)
def test_ics_qemu_d3des_h(self):
test_file = self.get_test_loc('ics/qemu/d3des.h')
expected = [
u'Copyright (c) 1999 AT&T Laboratories Cambridge.',
u'Copyright (c) 1988,1989,1990,1991,1992 by Richard Outerbridge',
]
check_detection(expected, test_file)
def test_ics_qemu_device_tree_c(self):
test_file = self.get_test_loc('ics/qemu/device_tree.c')
expected = [
u'Copyright 2008 IBM Corporation. Authors Jerone Young <jyoung5@us.ibm.com> Hollis Blanchard <hollisb@us.ibm.com>',
]
check_detection(expected, test_file)
@expectedFailure
def test_ics_qemu_device_tree_c_extra_author(self):
test_file = self.get_test_loc('ics/qemu/device_tree.c')
expected = [
u'Copyright 2008 IBM Corporation.',
]
check_detection(expected, test_file)
def test_ics_qemu_dma_helpers_c(self):
test_file = self.get_test_loc('ics/qemu/dma-helpers.c')
expected = [
u'Copyright (c) 2009 Red Hat',
]
check_detection(expected, test_file)
def test_ics_qemu_dynlink_h(self):
test_file = self.get_test_loc('ics/qemu/dynlink.h')
expected = [
u'Copyright (c) 2008 The Android Open Source Project',
]
check_detection(expected, test_file)
def test_ics_qemu_dynlink_static_c(self):
test_file = self.get_test_loc('ics/qemu/dynlink-static.c')
expected = [
u'Copyright (c) 2010 The Android Open Source Project',
]
check_detection(expected, test_file)
def test_ics_qemu_feature_to_c_sh(self):
test_file = self.get_test_loc('ics/qemu/feature_to_c.sh')
expected = [
u'Copyright (c) 2007 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_qemu_hostregs_helper_h(self):
test_file = self.get_test_loc('ics/qemu/hostregs_helper.h')
expected = [
u'Copyright (c) 2007 CodeSourcery',
]
check_detection(expected, test_file)
def test_ics_qemu_host_utils_c(self):
test_file = self.get_test_loc('ics/qemu/host-utils.c')
expected = [
u'Copyright (c) 2003 Fabrice Bellard',
u'Copyright (c) 2007 Aurelien Jarno',
]
check_detection(expected, test_file)
def test_ics_qemu_host_utils_h(self):
test_file = self.get_test_loc('ics/qemu/host-utils.h')
expected = [
u'Copyright (c) 2007 Thiemo Seufer',
u'Copyright (c) 2007 Jocelyn Mayer',
]
check_detection(expected, test_file)
def test_ics_qemu_i386_dis_c(self):
test_file = self.get_test_loc('ics/qemu/i386-dis.c')
expected = [
u'Copyright 1988, 1989, 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
u'Copyright 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_qemu_json_lexer_c(self):
test_file = self.get_test_loc('ics/qemu/json-lexer.c')
expected = [
u'Copyright IBM, Corp. 2009',
]
check_detection(expected, test_file)
def test_ics_qemu_keymaps_c(self):
test_file = self.get_test_loc('ics/qemu/keymaps.c')
expected = [
u'Copyright (c) 2004 Johannes Schindelin',
]
check_detection(expected, test_file)
def test_ics_qemu_kqemu_c(self):
test_file = self.get_test_loc('ics/qemu/kqemu.c')
expected = [
u'Copyright (c) 2005-2008 Fabrice Bellard',
]
check_detection(expected, test_file)
def test_ics_qemu_kqemu_h(self):
test_file = self.get_test_loc('ics/qemu/kqemu.h')
expected = [
u'Copyright (c) 2004-2008 Fabrice Bellard',
]
check_detection(expected, test_file)
def test_ics_qemu_loader_c(self):
test_file = self.get_test_loc('ics/qemu/loader.c')
expected = [
u'Copyright (c) 2006 Fabrice Bellard',
u'(c) Copyright 2008 Semihalf',
u'(c) Copyright 2000-2005 Wolfgang Denk',
]
check_detection(expected, test_file)
@expectedFailure
def test_ics_qemu_loader_c_trail_name(self):
test_file = self.get_test_loc('ics/qemu/loader.c')
expected = [
u'Copyright (c) 2006 Fabrice Bellard',
u'(c) Copyright 2008 Semihalf',
u'(C) Copyright 2000-2005 Wolfgang Denk, DENX Software Engineering, wd@denx.de.',
]
check_detection(expected, test_file)
def test_ics_qemu_migration_exec_c(self):
test_file = self.get_test_loc('ics/qemu/migration-exec.c')
expected = [
u'Copyright IBM, Corp. 2008',
u'Copyright Dell MessageOne 2008',
]
check_detection(expected, test_file)
def test_ics_qemu_monitor_c(self):
test_file = self.get_test_loc('ics/qemu/monitor.c')
expected = [
u'Copyright (c) 2003-2004 Fabrice Bellard',
]
check_detection(expected, test_file)
def test_ics_qemu_net_checksum_c(self):
test_file = self.get_test_loc('ics/qemu/net-checksum.c')
expected = [
u'(c) 2008 Gerd Hoffmann <kraxel@redhat.com>',
]
check_detection(expected, test_file)
def test_ics_qemu_notify_c(self):
test_file = self.get_test_loc('ics/qemu/notify.c')
expected = [
u'Copyright IBM, Corp. 2010',
]
check_detection(expected, test_file)
def test_ics_qemu_os_posix_c(self):
test_file = self.get_test_loc('ics/qemu/os-posix.c')
expected = [
u'Copyright (c) 2003-2008 Fabrice Bellard',
u'Copyright (c) 2010 Red Hat, Inc.',
]
check_detection(expected, test_file)
def test_ics_qemu_ppc_dis_c(self):
test_file = self.get_test_loc('ics/qemu/ppc-dis.c')
expected = [
u'Copyright 1994, 1995, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.',
u'Copyright 1994, 1995, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.',
u'Copyright 1994, 1995, 1996, 1997, 1998, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_qemu_qdict_c(self):
test_file = self.get_test_loc('ics/qemu/qdict.c')
expected = [
u'Copyright (c) 2009 Red Hat Inc.',
]
check_detection(expected, test_file)
def test_ics_qemu_qemu_error_c(self):
test_file = self.get_test_loc('ics/qemu/qemu-error.c')
expected = [
u'Copyright (c) 2010 Red Hat Inc.',
]
check_detection(expected, test_file)
def test_ics_qemu_qemu_io_c(self):
test_file = self.get_test_loc('ics/qemu/qemu-io.c')
expected = [
u'Copyright (c) 2009 Red Hat, Inc.',
u'Copyright (c) 2003-2005 Silicon Graphics, Inc.',
]
check_detection(expected, test_file)
def test_ics_qemu_qemu_option_c(self):
test_file = self.get_test_loc('ics/qemu/qemu-option.c')
expected = [
u'Copyright (c) 2003-2008 Fabrice Bellard',
u'Copyright (c) 2009 Kevin Wolf <kwolf@redhat.com>',
]
check_detection(expected, test_file)
def test_ics_qemu_qemu_options_h(self):
test_file = self.get_test_loc('ics/qemu/qemu-options.h')
expected = [
u'Copyright (c) 2003-2008 Fabrice Bellard',
u'Copyright (c) 2010 Jes Sorensen <Jes.Sorensen@redhat.com>',
]
check_detection(expected, test_file)
def test_ics_qemu_qemu_thread_c(self):
test_file = self.get_test_loc('ics/qemu/qemu-thread.c')
expected = [
u'Copyright Red Hat, Inc. 2009',
]
check_detection(expected, test_file)
def test_ics_qemu_softmmu_outside_jit_c(self):
test_file = self.get_test_loc('ics/qemu/softmmu_outside_jit.c')
expected = [
u'Copyright (c) 2007-2009 The Android Open Source Project',
]
check_detection(expected, test_file)
def test_ics_qemu_softmmu_semi_h(self):
test_file = self.get_test_loc('ics/qemu/softmmu-semi.h')
expected = [
u'Copyright (c) 2007 CodeSourcery.',
]
check_detection(expected, test_file)
def test_ics_qemu_sys_tree_h(self):
test_file = self.get_test_loc('ics/qemu/sys-tree.h')
expected = [
u'Copyright 2002 Niels Provos <provos@citi.umich.edu>',
]
check_detection(expected, test_file)
def test_ics_qemu_tap_win32_c(self):
test_file = self.get_test_loc('ics/qemu/tap-win32.c')
expected = [
u'Copyright (c) Damion K. Wilson, 2003',
u'Copyright (c) James Yonan, 2003-2004',
]
check_detection(expected, test_file)
def test_ics_qemu_tcpdump_c(self):
test_file = self.get_test_loc('ics/qemu/tcpdump.c')
expected = [
u'Copyright (c) 2008 The Android Open Source Project',
]
check_detection(expected, test_file)
def test_ics_qemu_uboot_image_h(self):
test_file = self.get_test_loc('ics/qemu/uboot_image.h')
expected = [
u'(c) Copyright 2000-2005 Wolfgang Denk',
]
check_detection(expected, test_file)
def test_ics_qemu_usb_linux_c(self):
test_file = self.get_test_loc('ics/qemu/usb-linux.c')
expected = [
u'Copyright (c) 2005 Fabrice Bellard',
u'Copyright (c) 2008 Max Krasnyansky Support',
u'Copyright 2008 TJ',
]
check_detection(expected, test_file)
@expectedFailure
def test_ics_qemu_usb_linux_c_extra_support_trail_email(self):
test_file = self.get_test_loc('ics/qemu/usb-linux.c')
expected = [
u'Copyright (c) 2005 Fabrice Bellard',
u'Copyright (c) 2008 Max Krasnyansky',
u'Copyright 2008 TJ <linux@tjworld.net>',
]
check_detection(expected, test_file)
def test_ics_qemu_vl_android_c(self):
test_file = self.get_test_loc('ics/qemu/vl-android.c')
expected = [
u'Copyright (c) 2003-2008 Fabrice Bellard',
u'Copyright (c) 2003-2008 Fabrice Bellard',
]
check_detection(expected, test_file)
def test_ics_qemu_vnc_android_c(self):
test_file = self.get_test_loc('ics/qemu/vnc-android.c')
expected = [
u'Copyright (c) 2006 Anthony Liguori <anthony@codemonkey.ws>',
u'Copyright (c) 2006 Fabrice Bellard',
u'Copyright (c) 2009 Red Hat, Inc',
]
check_detection(expected, test_file)
def test_ics_qemu_android_android_h(self):
test_file = self.get_test_loc('ics/qemu-android/android.h')
expected = [
u'Copyright (c) 2007 The Android Open Source Project',
]
check_detection(expected, test_file)
def test_ics_qemu_android_main_c(self):
test_file = self.get_test_loc('ics/qemu-android/main.c')
expected = [
u'Copyright (c) 2006-2008 The Android Open Source Project',
u'Copyright (c) 2006-2011 The Android Open Source Project',
]
check_detection(expected, test_file)
def test_ics_qemu_android_main_common_c(self):
test_file = self.get_test_loc('ics/qemu-android/main-common.c')
expected = [
u'Copyright (c) 2011 The Android Open Source Project',
]
check_detection(expected, test_file)
def test_ics_qemu_android_qemu_setup_c(self):
test_file = self.get_test_loc('ics/qemu-android/qemu-setup.c')
expected = [
u'Copyright (c) 2006-2010 The Android Open Source Project',
]
check_detection(expected, test_file)
def test_ics_qemu_android_snapshot_c(self):
test_file = self.get_test_loc('ics/qemu-android/snapshot.c')
expected = [
u'Copyright (c) 2010 The Android Open Source Project',
u'copyright (c) 2003 Fabrice Bellard',
]
check_detection(expected, test_file)
def test_ics_qemu_android_utils_mapfile_c(self):
test_file = self.get_test_loc('ics/qemu-android-utils/mapfile.c')
expected = [
u'Copyright (c) 2007-2010 The Android Open Source Project',
]
check_detection(expected, test_file)
def test_ics_qemu_audio_alsaaudio_c(self):
test_file = self.get_test_loc('ics/qemu-audio/alsaaudio.c')
expected = [
u'Copyright (c) 2008-2010 The Android Open Source Project',
u'Copyright (c) 2005 Vassili Karpov',
]
check_detection(expected, test_file)
def test_ics_qemu_audio_audio_c(self):
test_file = self.get_test_loc('ics/qemu-audio/audio.c')
expected = [
u'Copyright (c) 2007-2008 The Android Open Source Project',
u'Copyright (c) 2003-2005 Vassili Karpov',
]
check_detection(expected, test_file)
def test_ics_qemu_audio_audio_h(self):
test_file = self.get_test_loc('ics/qemu-audio/audio.h')
expected = [
u'Copyright (c) 2003-2005 Vassili Karpov',
]
check_detection(expected, test_file)
def test_ics_qemu_audio_audio_template_h(self):
test_file = self.get_test_loc('ics/qemu-audio/audio_template.h')
expected = [
u'Copyright (c) 2005 Vassili Karpov',
]
check_detection(expected, test_file)
def test_ics_qemu_audio_coreaudio_c(self):
test_file = self.get_test_loc('ics/qemu-audio/coreaudio.c')
expected = [
u'Copyright (c) 2008 The Android Open Source Project',
u'Copyright (c) 2005 Mike Kronenberg',
]
check_detection(expected, test_file)
def test_ics_qemu_audio_esdaudio_c(self):
test_file = self.get_test_loc('ics/qemu-audio/esdaudio.c')
expected = [
u'Copyright (c) 2008-2009 The Android Open Source Project',
u'Copyright (c) 2006 Frederick Reeve',
]
check_detection(expected, test_file)
def test_ics_qemu_audio_fmodaudio_c(self):
test_file = self.get_test_loc('ics/qemu-audio/fmodaudio.c')
expected = [
u'Copyright (c) 2004-2005 Vassili Karpov',
]
check_detection(expected, test_file)
def test_ics_qemu_audio_mixeng_c(self):
test_file = self.get_test_loc('ics/qemu-audio/mixeng.c')
expected = [
u'Copyright (c) 2004-2005 Vassili Karpov',
u'Copyright (c) 1998 Fabrice Bellard',
u'Copyright 1998 Fabrice Bellard.',
]
check_detection(expected, test_file)
def test_ics_qemu_audio_rate_template_h(self):
test_file = self.get_test_loc('ics/qemu-audio/rate_template.h')
expected = [
u'Copyright (c) 2004-2005 Vassili Karpov',
u'Copyright (c) 1998 Fabrice Bellard',
]
check_detection(expected, test_file)
def test_ics_qemu_audio_wavaudio_c(self):
test_file = self.get_test_loc('ics/qemu-audio/wavaudio.c')
expected = [
u'Copyright (c) 2007 The Android Open Source Project',
u'Copyright (c) 2004-2005 Vassili Karpov',
]
check_detection(expected, test_file)
def test_ics_qemu_audio_winaudio_c(self):
test_file = self.get_test_loc('ics/qemu-audio/winaudio.c')
expected = [
u'Copyright (c) 2007 The Android Open Source Project',
]
check_detection(expected, test_file)
def test_ics_qemu_block_bochs_c(self):
test_file = self.get_test_loc('ics/qemu-block/bochs.c')
expected = [
u'Copyright (c) 2005 Alex Beregszaszi',
]
check_detection(expected, test_file)
def test_ics_qemu_block_cloop_c(self):
test_file = self.get_test_loc('ics/qemu-block/cloop.c')
expected = [
u'Copyright (c) 2004 Johannes E. Schindelin',
]
check_detection(expected, test_file)
def test_ics_qemu_block_nbd_c(self):
test_file = self.get_test_loc('ics/qemu-block/nbd.c')
expected = [
u'Copyright (c) 2008 Bull S.A.S.',
u'Copyright (c) 2007 Anthony Liguori <anthony@codemonkey.ws>',
]
check_detection(expected, test_file)
def test_ics_qemu_block_parallels_c(self):
test_file = self.get_test_loc('ics/qemu-block/parallels.c')
expected = [
u'Copyright (c) 2007 Alex Beregszaszi',
]
check_detection(expected, test_file)
def test_ics_qemu_block_qcow_c(self):
test_file = self.get_test_loc('ics/qemu-block/qcow.c')
expected = [
u'Copyright (c) 2004-2006 Fabrice Bellard',
]
check_detection(expected, test_file)
def test_ics_qemu_block_vmdk_c(self):
test_file = self.get_test_loc('ics/qemu-block/vmdk.c')
expected = [
u'Copyright (c) 2004 Fabrice Bellard',
u'Copyright (c) 2005 Filip Navara',
]
check_detection(expected, test_file)
def test_ics_qemu_block_vpc_c(self):
test_file = self.get_test_loc('ics/qemu-block/vpc.c')
expected = [
u'Copyright (c) 2005 Alex Beregszaszi',
u'Copyright (c) 2009 Kevin Wolf <kwolf@suse.de>',
]
check_detection(expected, test_file)
def test_ics_qemu_block_vvfat_c(self):
test_file = self.get_test_loc('ics/qemu-block/vvfat.c')
expected = [
u'Copyright (c) 2004,2005 Johannes E. Schindelin',
]
check_detection(expected, test_file)
def test_ics_qemu_distrib_libpng_1_2_19_png_c(self):
test_file = self.get_test_loc('ics/qemu-distrib-libpng-1.2.19/png.c')
expected = [
u'Copyright (c) 1998-2007 Glenn Randers-Pehrson',
u'Copyright (c) 1996, 1997 Andreas Dilger',
u'Copyright (c) 1995, 1996 Guy Eric Schalnat, Group 42, Inc.',
u'Copyright (c) 1998-2007 Glenn Randers-Pehrson',
u'Copyright (c) 1996-1997 Andreas Dilger',
u'Copyright (c) 1995-1996 Guy Eric Schalnat, Group 42, Inc.',
]
check_detection(expected, test_file)
def test_ics_qemu_distrib_libpng_1_2_19_png_h(self):
test_file = self.get_test_loc('ics/qemu-distrib-libpng-1.2.19/png.h')
expected = [
u'Copyright (c) 1998-2007 Glenn Randers-Pehrson',
u'Copyright (c) 1996, 1997 Andreas Dilger',
u'Copyright (c) 1995, 1996 Guy Eric Schalnat, Group 42, Inc.',
u'Copyright (c) 2004, 2006-2007 Glenn Randers-Pehrson',
u'Copyright (c) 2000-2002 Glenn Randers-Pehrson',
u'Copyright (c) 1998, 1999, 2000 Glenn Randers-Pehrson',
u'Copyright (c) 1996, 1997 Andreas Dilger',
u'Copyright (c) 1995, 1996 Guy Eric Schalnat, Group 42, Inc.',
]
check_detection(expected, test_file)
def test_ics_qemu_distrib_libpng_1_2_19_pngconf_h(self):
test_file = self.get_test_loc('ics/qemu-distrib-libpng-1.2.19/pngconf.h')
expected = [
u'Copyright (c) 1998-2007 Glenn Randers-Pehrson',
u'Copyright (c) 1996, 1997 Andreas Dilger',
u'Copyright (c) 1995, 1996 Guy Eric Schalnat, Group 42, Inc.',
]
check_detection(expected, test_file)
def test_ics_qemu_distrib_libpng_1_2_19_pngerror_c(self):
test_file = self.get_test_loc('ics/qemu-distrib-libpng-1.2.19/pngerror.c')
expected = [
u'Copyright (c) 1998-2007 Glenn Randers-Pehrson',
u'Copyright (c) 1996, 1997 Andreas Dilger',
u'Copyright (c) 1995, 1996 Guy Eric Schalnat, Group 42, Inc.',
]
check_detection(expected, test_file)
def test_ics_qemu_distrib_libpng_1_2_19_pnggccrd_c(self):
test_file = self.get_test_loc('ics/qemu-distrib-libpng-1.2.19/pnggccrd.c')
expected = [
u'Copyright (c) 1998 Intel Corporation',
u'Copyright (c) 1999-2002,2007 Greg Roelofs',
u'Copyright (c) 1998-2007 Glenn Randers-Pehrson',
]
check_detection(expected, test_file)
def test_ics_qemu_distrib_libpng_1_2_19_pngmem_c(self):
test_file = self.get_test_loc('ics/qemu-distrib-libpng-1.2.19/pngmem.c')
expected = [
u'Copyright (c) 1998-2006 Glenn Randers-Pehrson',
u'Copyright (c) 1996, 1997 Andreas Dilger',
u'Copyright (c) 1995, 1996 Guy Eric Schalnat, Group 42, Inc.',
]
check_detection(expected, test_file)
def test_ics_qemu_distrib_libpng_1_2_19_pngrtran_c(self):
test_file = self.get_test_loc('ics/qemu-distrib-libpng-1.2.19/pngrtran.c')
expected = [
u'Copyright (c) 1998-2007 Glenn Randers-Pehrson',
u'Copyright (c) 1996, 1997 Andreas Dilger',
u'Copyright (c) 1995, 1996 Guy Eric Schalnat, Group 42, Inc.',
u'Copyright (c) 1998-01-04 Charles Poynton poynton at inforamp.net',
]
check_detection(expected, test_file)
def test_ics_qemu_distrib_libpng_1_2_19_pngvcrd_c(self):
test_file = self.get_test_loc('ics/qemu-distrib-libpng-1.2.19/pngvcrd.c')
expected = [
u'Copyright (c) 1998-2007 Glenn Randers-Pehrson',
u'Copyright (c) 1998, Intel Corporation',
]
check_detection(expected, test_file)
def test_ics_qemu_distrib_sdl_1_2_12_copying(self):
test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12/COPYING')
expected = [
u'Copyright (c) 1991, 1999 Free Software Foundation, Inc.',
u'copyrighted by the Free Software Foundation',
]
check_detection(expected, test_file)
def test_ics_qemu_distrib_sdl_1_2_12_include_begin_code_h(self):
test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-include/begin_code.h')
expected = [
u'Copyright (c) 1997-2004 Sam Lantinga',
]
check_detection(expected, test_file)
def test_ics_qemu_distrib_sdl_1_2_12_include_sdl_h(self):
test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-include/SDL.h')
expected = [
u'Copyright (c) 1997-2006 Sam Lantinga',
]
check_detection(expected, test_file)
def test_ics_qemu_distrib_sdl_1_2_12_include_sdl_opengl_h(self):
test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-include/SDL_opengl.h')
expected = [
u'Copyright (c) 1997-2006 Sam Lantinga',
u'Copyright (c) 1991-2004 Silicon Graphics, Inc.',
]
check_detection(expected, test_file)
def test_ics_qemu_distrib_sdl_1_2_12_src_audio_sdl_mixer_mmx_c(self):
test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-audio/SDL_mixer_MMX.c')
expected = [
u'Copyright (c) 1997-2006 Sam Lantinga',
u'Copyright 2002 Stephane Marchesin (stephane.marchesin@wanadoo.fr)',
]
check_detection(expected, test_file)
def test_ics_qemu_distrib_sdl_1_2_12_src_audio_sdl_mixer_mmx_h(self):
test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-audio/SDL_mixer_MMX.h')
expected = [
u'Copyright 2002 Stephane Marchesin (stephane.marchesin@wanadoo.fr)',
]
check_detection(expected, test_file)
def test_ics_qemu_distrib_sdl_1_2_12_src_audio_dc_aica_c(self):
test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-audio-dc/aica.c')
expected = [
u'(c) 2000 Dan Potter',
]
check_detection(expected, test_file)
def test_ics_qemu_distrib_sdl_1_2_12_src_audio_sun_sdl_sunaudio_c_trail_name(self):
test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-audio-sun/SDL_sunaudio.c')
expected = [
u'Copyright (c) 1997-2006 Sam Lantinga',
u'Copyright 1989 by Rich Gopstein and Harris Corporation',
]
check_detection(expected, test_file)
def test_ics_qemu_distrib_sdl_1_2_12_src_cdrom_macosx_audiofileplayer_c(self):
test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-cdrom-macosx/AudioFilePlayer.c')
expected = [
u'Copyright (c) 1997, 1998, 1999, 2000, 2001, 2002 Sam Lantinga',
]
check_detection(expected, test_file)
def test_ics_qemu_distrib_sdl_1_2_12_src_cdrom_macosx_sdlosxcaguard_c(self):
test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-cdrom-macosx/SDLOSXCAGuard.c')
expected = [
u'Copyright (c) 1997, 1998, 1999, 2000, 2001, 2002 Sam Lantinga',
u'(c) Copyright 2002 Apple Computer, Inc.',
]
check_detection(expected, test_file)
def test_ics_qemu_distrib_sdl_1_2_12_src_cdrom_macosx_sdlosxcaguard_h(self):
test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-cdrom-macosx/SDLOSXCAGuard.h')
expected = [
u'Copyright (c) 1997-2004 Sam Lantinga',
u'(c) Copyright 2002 Apple Computer, Inc.',
]
check_detection(expected, test_file)
def test_ics_qemu_distrib_sdl_1_2_12_src_cdrom_osf_sdl_syscdrom_c(self):
test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-cdrom-osf/SDL_syscdrom.c')
expected = [
u'DirectMedia Layer Copyright (c) 2003',
]
check_detection(expected, test_file)
def test_ics_qemu_distrib_sdl_1_2_12_src_hermes_copying_lib(self):
test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-hermes/COPYING.LIB')
expected = [
u'Copyright (c) 1991 Free Software Foundation, Inc.',
u'copyrighted by the Free Software Foundation',
]
check_detection(expected, test_file)
def test_ics_qemu_distrib_sdl_1_2_12_src_hermes_headmmx_h(self):
test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-hermes/HeadMMX.h')
expected = [
u'Copyright (c) 1998 Christian Nentwich (c.nentwich@cs.ucl.ac.uk)',
]
check_detection(expected, test_file)
def test_ics_qemu_distrib_sdl_1_2_12_src_hermes_headx86_h(self):
test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-hermes/HeadX86.h')
expected = [
u'Copyright (c) 1998 Christian Nentwich (brn@eleet.mcb.at)',
]
check_detection(expected, test_file)
def test_ics_qemu_distrib_sdl_1_2_12_src_hermes_readme(self):
test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-hermes/README')
expected = [
u'(c) 1998 Christian Nentwich',
u'(c) Glenn Fielder (gaffer@gaffer.org)',
]
check_detection(expected, test_file)
def test_ics_qemu_distrib_sdl_1_2_12_src_joystick_os2_joyos2_h(self):
test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-joystick-os2/joyos2.h')
expected = [
u'Copyright (c) 1995 IBM Corporation',
]
check_detection(expected, test_file)
def test_ics_qemu_distrib_sdl_1_2_12_src_loadso_macosx_sdl_dlcompat_c(self):
test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-loadso-macosx/SDL_dlcompat.c')
expected = [
u'Copyright (c) 1997-2006 Sam Lantinga',
u"Copyright (c) 2002 Jorge Acereda <jacereda@users.sourceforge.net> & Peter O'Gorman <ogorman@users.sourceforge.net>",
]
check_detection(expected, test_file)
def test_ics_qemu_distrib_sdl_1_2_12_src_main_win32_version_rc(self):
test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-main-win32/version.rc')
expected = [
u'Copyright (c) 2007 Sam Lantinga',
]
check_detection(expected, test_file)
def test_ics_qemu_distrib_sdl_1_2_12_src_stdlib_sdl_qsort_c(self):
test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-stdlib/SDL_qsort.c')
expected = [
u'(c) 1998 Gareth McCaughan',
]
check_detection(expected, test_file)
def test_ics_qemu_distrib_sdl_1_2_12_src_thread_win32_win_ce_semaphore_c(self):
test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-thread-win32/win_ce_semaphore.c')
expected = [
u'Copyright (c) 1998, Johnson M. Hart',
]
check_detection(expected, test_file)
def test_ics_qemu_distrib_sdl_1_2_12_src_timer_macos_fasttimes_c(self):
test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-timer-macos/FastTimes.c')
expected = [
u'Copyright (c) Matt Slot, 1999-2000.',
]
check_detection(expected, test_file)
def test_ics_qemu_distrib_sdl_1_2_12_src_video_sdl_yuv_sw_c(self):
test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-video/SDL_yuv_sw.c')
expected = [
u'Copyright (c) 1997-2006 Sam Lantinga',
u'Copyright (c) 1995 The Regents of the University of California.',
u'Copyright (c) 1995 Erik Corry',
u'Copyright (c) 1995 Brown University.',
]
check_detection(expected, test_file)
def test_ics_qemu_distrib_sdl_1_2_12_src_video_fbcon_matrox_regs_h(self):
test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-video-fbcon/matrox_regs.h')
expected = [
u'Copyright 1996 The XFree86 Project, Inc.',
]
check_detection(expected, test_file)
def test_ics_qemu_distrib_sdl_1_2_12_src_video_fbcon_riva_mmio_h(self):
test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-video-fbcon/riva_mmio.h')
expected = [
u'Copyright 1993-1999 NVIDIA, Corporation.',
u'Copyright 1993-1999 NVIDIA, Corporation.',
]
check_detection(expected, test_file)
def test_ics_qemu_distrib_sdl_1_2_12_src_video_maccommon_sdl_macwm_c(self):
test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-video-maccommon/SDL_macwm.c')
expected = [
u'Copyright (c) 1997-2006 Sam Lantinga',
u'Copyright (c) 1999 Apple Computer, Inc.',
]
check_detection(expected, test_file)
def test_ics_qemu_distrib_sdl_1_2_12_src_video_nanox_sdl_nxevents_c(self):
test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-video-nanox/SDL_nxevents.c')
expected = [
u'Copyright (c) 1997-2004 Sam Lantinga',
u'Copyright (c) 2001 Hsieh-Fu Tsai',
u'Copyright (c) 2002 Greg Haerr <greg@censoft.com>',
]
check_detection(expected, test_file)
def test_ics_qemu_distrib_sdl_1_2_12_src_video_nanox_sdl_nxevents_c_h(self):
test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-video-nanox/SDL_nxevents_c.h')
expected = [
u'Copyright (c) 1997-2004 Sam Lantinga',
u'Copyright (c) 2001 Hsieh-Fu Tsai',
]
check_detection(expected, test_file)
def test_ics_qemu_distrib_sdl_1_2_12_src_video_quartz_cgs_h(self):
test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-video-quartz/CGS.h')
expected = [
u'Copyright (c) 1997-2003 Sam Lantinga',
]
check_detection(expected, test_file)
def test_ics_qemu_distrib_sdl_1_2_12_src_video_xext_extensions_extutil_h(self):
test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-video-Xext-extensions/extutil.h')
expected = [
u'Copyright 1989, 1998 The Open Group',
]
check_detection(expected, test_file)
def test_ics_qemu_distrib_sdl_1_2_12_src_video_xext_extensions_panoramixext_h(self):
test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-video-Xext-extensions/panoramiXext.h')
expected = [
u'Copyright (c) 1991, 1997 Digital Equipment Corporation, Maynard, Massachusetts.',
]
check_detection(expected, test_file)
def test_ics_qemu_distrib_sdl_1_2_12_src_video_xext_extensions_xf86dga_h(self):
test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-video-Xext-extensions/xf86dga.h')
expected = [
u'Copyright (c) 1999 XFree86 Inc',
]
check_detection(expected, test_file)
def test_ics_qemu_distrib_sdl_1_2_12_src_video_xext_extensions_xf86dga1_h(self):
test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-video-Xext-extensions/xf86dga1.h')
expected = [
u'Copyright (c) 1995 Jon Tombs',
u'Copyright (c) 1995 XFree86 Inc',
]
check_detection(expected, test_file)
def test_ics_qemu_distrib_sdl_1_2_12_src_video_xext_extensions_xf86dga1str_h(self):
test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-video-Xext-extensions/xf86dga1str.h')
expected = [
u'Copyright (c) 1995 Jon Tombs',
u'Copyright (c) 1995 XFree86 Inc.',
]
check_detection(expected, test_file)
def test_ics_qemu_distrib_sdl_1_2_12_src_video_xext_extensions_xf86vmode_h_trail_caps(self):
test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-video-Xext-extensions/xf86vmode.h')
expected = [
u'Copyright 1995 Kaleb S. KEITHLEY',
]
check_detection(expected, test_file)
def test_ics_qemu_distrib_sdl_1_2_12_src_video_xext_extensions_xme_h(self):
test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-video-Xext-extensions/xme.h')
expected = [
u'Copyright 1993-2001 by Xi Graphics, Inc.',
]
check_detection(expected, test_file)
def test_ics_qemu_distrib_sdl_1_2_12_src_video_xext_extensions_xv_h_trail_name(self):
test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-video-Xext-extensions/Xv.h')
expected = [
u'Copyright 1991 by Digital Equipment Corporation, Maynard, Massachusetts, and the Massachusetts Institute of Technology, Cambridge, Massachusetts.',
]
check_detection(expected, test_file)
def test_ics_qemu_distrib_sdl_1_2_12_src_video_xext_xv_xvlibint_h(self):
test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-video-Xext-Xv/Xvlibint.h')
expected = [
u'Copyright 1987 by Digital Equipment Corporation, Maynard, Massachusetts, and the Massachusetts Institute of Technology, Cambridge, Massachusetts.',
]
check_detection(expected, test_file)
def test_ics_qemu_distrib_sdl_1_2_12_src_video_xext_xxf86dga_xf86dga_c(self):
test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-video-Xext-Xxf86dga/XF86DGA.c')
expected = [
u'Copyright (c) 1995 Jon Tombs',
u'Copyright (c) 1995,1996 The XFree86 Project, Inc',
]
check_detection(expected, test_file)
def test_ics_qemu_distrib_sdl_1_2_12_src_video_xext_xxf86vm_xf86vmode_c_trail_caps(self):
test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-video-Xext-Xxf86vm/XF86VMode.c')
expected = [
u'Copyright (c) 1995 Kaleb S. KEITHLEY',
]
check_detection(expected, test_file)
def test_ics_qemu_distrib_zlib_1_2_3_compress_c(self):
test_file = self.get_test_loc('ics/qemu-distrib-zlib-1.2.3/compress.c')
expected = [
u'Copyright (c) 1995-2003 Jean-loup Gailly.',
]
check_detection(expected, test_file)
def test_ics_qemu_distrib_zlib_1_2_3_crc32_c(self):
test_file = self.get_test_loc('ics/qemu-distrib-zlib-1.2.3/crc32.c')
expected = [
u'Copyright (c) 1995-2005 Mark Adler',
]
check_detection(expected, test_file)
def test_ics_qemu_distrib_zlib_1_2_3_deflate_c(self):
    loc = self.get_test_loc('ics/qemu-distrib-zlib-1.2.3/deflate.c')
    check_detection(
        [u'Copyright (c) 1995-2005 Jean-loup Gailly.',
         u'Copyright 1995-2005 Jean-loup Gailly'],
        loc)
def test_ics_qemu_distrib_zlib_1_2_3_deflate_h(self):
    loc = self.get_test_loc('ics/qemu-distrib-zlib-1.2.3/deflate.h')
    check_detection([u'Copyright (c) 1995-2004 Jean-loup Gailly'], loc)
def test_ics_qemu_distrib_zlib_1_2_3_gzio_c(self):
    loc = self.get_test_loc('ics/qemu-distrib-zlib-1.2.3/gzio.c')
    check_detection([u'Copyright (c) 1995-2005 Jean-loup Gailly.'], loc)
def test_ics_qemu_distrib_zlib_1_2_3_inffast_h(self):
    loc = self.get_test_loc('ics/qemu-distrib-zlib-1.2.3/inffast.h')
    check_detection([u'Copyright (c) 1995-2003 Mark Adler'], loc)
def test_ics_qemu_distrib_zlib_1_2_3_inftrees_c(self):
    loc = self.get_test_loc('ics/qemu-distrib-zlib-1.2.3/inftrees.c')
    check_detection(
        [u'Copyright (c) 1995-2005 Mark Adler',
         u'Copyright 1995-2005 Mark Adler'],
        loc)
def test_ics_qemu_distrib_zlib_1_2_3_trees_c(self):
    loc = self.get_test_loc('ics/qemu-distrib-zlib-1.2.3/trees.c')
    check_detection([u'Copyright (c) 1995-2005 Jean-loup Gailly'], loc)
def test_ics_qemu_elff_dwarf_h(self):
    loc = self.get_test_loc('ics/qemu-elff/dwarf.h')
    check_detection(
        [u'Copyright (c) 2000,2001,2003,2004,2005,2006 Silicon Graphics, Inc.',
         u'Portions Copyright 2002,2007 Sun Microsystems, Inc.',
         u'Portions Copyright 2007-2009 David Anderson.'],
        loc)
def test_ics_qemu_gdb_xml_arm_core_xml(self):
    loc = self.get_test_loc('ics/qemu-gdb-xml/arm-core.xml')
    check_detection([u'Copyright (c) 2008 Free Software Foundation, Inc.'], loc)
def test_ics_qemu_gdb_xml_power_altivec_xml(self):
    loc = self.get_test_loc('ics/qemu-gdb-xml/power-altivec.xml')
    check_detection([u'Copyright (c) 2007, 2008 Free Software Foundation, Inc.'], loc)
def test_ics_qemu_hw_apic_c(self):
    loc = self.get_test_loc('ics/qemu-hw/apic.c')
    check_detection([u'Copyright (c) 2004-2005 Fabrice Bellard'], loc)
def test_ics_qemu_hw_arm_misc_h(self):
    loc = self.get_test_loc('ics/qemu-hw/arm-misc.h')
    check_detection([u'Copyright (c) 2006 CodeSourcery.'], loc)
def test_ics_qemu_hw_armv7m_c(self):
    loc = self.get_test_loc('ics/qemu-hw/armv7m.c')
    check_detection([u'Copyright (c) 2006-2007 CodeSourcery.'], loc)
def test_ics_qemu_hw_baum_h(self):
    loc = self.get_test_loc('ics/qemu-hw/baum.h')
    check_detection([u'Copyright (c) 2008 Samuel Thibault'], loc)
def test_ics_qemu_hw_bt_h(self):
    loc = self.get_test_loc('ics/qemu-hw/bt.h')
    check_detection(
        [u'Copyright (c) 2007 OpenMoko, Inc.',
         u'Copyright (c) 2000-2001 Qualcomm Incorporated',
         u'Copyright (c) 2002-2003 Maxim Krasnyansky <maxk@qualcomm.com>',
         u'Copyright (c) 2002-2006 Marcel Holtmann <marcel@holtmann.org>'],
        loc)
def test_ics_qemu_hw_bt_hci_c(self):
    loc = self.get_test_loc('ics/qemu-hw/bt-hci.c')
    check_detection(
        [u'Copyright (c) 2007 OpenMoko, Inc.',
         u'Copyright (c) 2008 Andrzej Zaborowski <balrog@zabor.org>'],
        loc)
def test_ics_qemu_hw_bt_hid_c(self):
    loc = self.get_test_loc('ics/qemu-hw/bt-hid.c')
    check_detection([u'Copyright (c) 2007-2008 OpenMoko, Inc.'], loc)
def test_ics_qemu_hw_dma_c(self):
    loc = self.get_test_loc('ics/qemu-hw/dma.c')
    check_detection([u'Copyright (c) 2003-2004 Vassili Karpov'], loc)
def test_ics_qemu_hw_fw_cfg_c(self):
    loc = self.get_test_loc('ics/qemu-hw/fw_cfg.c')
    check_detection([u'Copyright (c) 2008 Gleb Natapov'], loc)
def test_ics_qemu_hw_irq_c(self):
    loc = self.get_test_loc('ics/qemu-hw/irq.c')
    check_detection([u'Copyright (c) 2007 CodeSourcery.'], loc)
def test_ics_qemu_hw_mmc_h(self):
    loc = self.get_test_loc('ics/qemu-hw/mmc.h')
    check_detection([u'Copyright 2002 Hewlett-Packard Company'], loc)
def test_ics_qemu_hw_msmouse_c(self):
    loc = self.get_test_loc('ics/qemu-hw/msmouse.c')
    check_detection([u'Copyright (c) 2008 Lubomir Rintel'], loc)
def test_ics_qemu_hw_power_supply_h(self):
    loc = self.get_test_loc('ics/qemu-hw/power_supply.h')
    check_detection(
        [u'Copyright (c) 2007 Anton Vorontsov <cbou@mail.ru>',
         u'Copyright (c) 2004 Szabolcs Gyurko',
         u'Copyright (c) 2003 Ian Molton <spyro@f2s.com>'],
        loc)
def test_ics_qemu_hw_pxa_h(self):
    loc = self.get_test_loc('ics/qemu-hw/pxa.h')
    check_detection([u'Copyright (c) 2006 Openedhand Ltd.'], loc)
def test_ics_qemu_hw_qdev_c(self):
    loc = self.get_test_loc('ics/qemu-hw/qdev.c')
    check_detection([u'Copyright (c) 2009 CodeSourcery'], loc)
def test_ics_qemu_hw_sd_h(self):
    loc = self.get_test_loc('ics/qemu-hw/sd.h')
    check_detection([u'Copyright (c) 2005-2007 Pierre Ossman'], loc)
def test_ics_qemu_hw_smbios_c(self):
    loc = self.get_test_loc('ics/qemu-hw/smbios.c')
    check_detection([u'Copyright (c) 2009 Hewlett-Packard Development Company, L.P.'], loc)
def test_ics_qemu_hw_smc91c111_c(self):
    loc = self.get_test_loc('ics/qemu-hw/smc91c111.c')
    check_detection([u'Copyright (c) 2005 CodeSourcery, LLC.'], loc)
def test_ics_qemu_hw_usb_hid_c(self):
    loc = self.get_test_loc('ics/qemu-hw/usb-hid.c')
    check_detection(
        [u'Copyright (c) 2005 Fabrice Bellard',
         u'Copyright (c) 2007 OpenMoko, Inc.'],
        loc)
def test_ics_qemu_hw_usb_hub_c(self):
    loc = self.get_test_loc('ics/qemu-hw/usb-hub.c')
    check_detection([u'Copyright (c) 2005 Fabrice Bellard'], loc)
def test_ics_qemu_hw_usb_ohci_c(self):
    loc = self.get_test_loc('ics/qemu-hw/usb-ohci.c')
    check_detection(
        [u'Copyright (c) 2004 Gianni Tedesco',
         u'Copyright (c) 2006 CodeSourcery',
         u'Copyright (c) 2006 Openedhand Ltd.'],
        loc)
def test_ics_qemu_pc_bios_bochs_bochs_h(self):
    loc = self.get_test_loc('ics/qemu-pc-bios-bochs/bochs.h')
    check_detection([u'Copyright (c) 2002 MandrakeSoft S.A.'], loc)
def test_ics_qemu_pc_bios_bochs_config_h_in(self):
    loc = self.get_test_loc('ics/qemu-pc-bios-bochs/config.h.in')
    check_detection([u'Copyright (c) 2001 MandrakeSoft S.A.'], loc)
def test_ics_qemu_pc_bios_bochs_configure(self):
    loc = self.get_test_loc('ics/qemu-pc-bios-bochs/configure')
    # The same long FSF statement is detected twice; express that with list repetition.
    check_detection(
        [u'Copyright (c) 1992, 1993, 1994, 1995, 1996, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.'] * 2 +
        [u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001 Free Software Foundation, Inc.',
         u'Copyright (c) 2006 Free Software Foundation, Inc.'],
        loc)
def test_ics_qemu_pc_bios_bochs_makefile_in(self):
    loc = self.get_test_loc('ics/qemu-pc-bios-bochs/Makefile.in')
    check_detection([u'Copyright (c) 2002 MandrakeSoft S.A.'], loc)
def test_ics_qemu_pc_bios_bochs_bios_acpi_dsdt_dsl(self):
    loc = self.get_test_loc('ics/qemu-pc-bios-bochs-bios/acpi-dsdt.dsl')
    check_detection([u'Copyright (c) 2006 Fabrice Bellard'], loc)
def test_ics_qemu_pc_bios_bochs_bios_acpi_dsdt_hex_extra_support(self):
    loc = self.get_test_loc('ics/qemu-pc-bios-bochs-bios/acpi-dsdt.hex')
    check_detection([u'Copyright (c) 2000 - 2006 Intel Corporation'], loc)
def test_ics_qemu_pc_bios_bochs_bios_rombios_c(self):
    loc = self.get_test_loc('ics/qemu-pc-bios-bochs-bios/rombios.c')
    check_detection(
        [u'Copyright (c) 2002 MandrakeSoft S.A.',
         u'(c) 2002 MandrakeSoft S.A.',
         u'(c) by Joseph Gil'],
        loc)
def test_ics_qemu_pc_bios_bochs_bios_rombios_h(self):
    loc = self.get_test_loc('ics/qemu-pc-bios-bochs-bios/rombios.h')
    check_detection([u'Copyright (c) 2006 Volker Ruppert'], loc)
def test_ics_qemu_pc_bios_vgabios_clext_c(self):
    loc = self.get_test_loc('ics/qemu-pc-bios-vgabios/clext.c')
    check_detection([u'Copyright (c) 2004 Makoto Suzuki'], loc)
def test_ics_qemu_pc_bios_vgabios_readme(self):
    loc = self.get_test_loc('ics/qemu-pc-bios-vgabios/README')
    check_detection([u'(c) by Joseph Gil'], loc)
def test_ics_qemu_pc_bios_vgabios_vbe_c_extra_byte(self):
    loc = self.get_test_loc('ics/qemu-pc-bios-vgabios/vbe.c')
    check_detection(
        [u'Copyright (c) 2002 Jeroen Janssen',
         u'(c) 2003 http://savannah.nongnu.org/projects/vgabios/'],
        loc)
def test_ics_qemu_pc_bios_vgabios_vgabios_c(self):
    loc = self.get_test_loc('ics/qemu-pc-bios-vgabios/vgabios.c')
    check_detection(
        [u'Copyright (c) 2001-2008 the LGPL VGABios developers Team',
         u'(c) by Joseph Gil',
         u'(c) 2008 the LGPL VGABios developers Team'],
        loc)
def test_ics_qemu_pc_bios_vgabios_vgafonts_h(self):
    loc = self.get_test_loc('ics/qemu-pc-bios-vgabios/vgafonts.h')
    check_detection([u'(c) by Joseph Gil'], loc)
def test_ics_qemu_slirp_cksum_c(self):
    loc = self.get_test_loc('ics/qemu-slirp/cksum.c')
    check_detection([u'Copyright (c) 1988, 1992, 1993 The Regents of the University of California.'], loc)
def test_ics_qemu_slirp_copyright(self):
    loc = self.get_test_loc('ics/qemu-slirp/COPYRIGHT')
    check_detection(
        [u'Danny Gasparovski. Copyright (c), 1995,1996',
         u'Copyright (c) 1995,1996 Danny Gasparovski.'],
        loc)
def test_ics_qemu_slirp_debug_c(self):
    loc = self.get_test_loc('ics/qemu-slirp/debug.c')
    check_detection(
        [u'Copyright (c) 1995 Danny Gasparovski.',
         u'Portions copyright (c) 2000 Kelly Price.'],
        loc)
def test_ics_qemu_slirp_debug_h(self):
    loc = self.get_test_loc('ics/qemu-slirp/debug.h')
    check_detection([u'Copyright (c) 1995 Danny Gasparovski.'], loc)
def test_ics_qemu_slirp_ip_icmp_c(self):
    loc = self.get_test_loc('ics/qemu-slirp/ip_icmp.c')
    check_detection([u'Copyright (c) 1982, 1986, 1988, 1993 The Regents of the University of California.'], loc)
def test_ics_qemu_slirp_ip_input_c(self):
    loc = self.get_test_loc('ics/qemu-slirp/ip_input.c')
    check_detection(
        [u'Copyright (c) 1982, 1986, 1988, 1993 The Regents of the University of California.',
         u'Copyright (c) 1995 Danny Gasparovski.'],
        loc)
def test_ics_qemu_slirp_ip_output_c(self):
    loc = self.get_test_loc('ics/qemu-slirp/ip_output.c')
    check_detection(
        [u'Copyright (c) 1982, 1986, 1988, 1990, 1993 The Regents of the University of California.',
         u'Copyright (c) 1995 Danny Gasparovski.'],
        loc)
def test_ics_qemu_slirp_mbuf_c(self):
    loc = self.get_test_loc('ics/qemu-slirp/mbuf.c')
    check_detection([u'Copyright (c) 1995 Danny Gasparovski'], loc)
def test_ics_qemu_slirp_misc_c(self):
    loc = self.get_test_loc('ics/qemu-slirp/misc.c')
    check_detection([u'Copyright (c) 1995 Danny Gasparovski.'], loc)
def test_ics_qemu_slirp_tcp_input_c(self):
    loc = self.get_test_loc('ics/qemu-slirp/tcp_input.c')
    check_detection(
        [u'Copyright (c) 1982, 1986, 1988, 1990, 1993, 1994 The Regents of the University of California.',
         u'Copyright (c) 1995 Danny Gasparovski.'],
        loc)
def test_ics_qemu_slirp_tcp_timer_c(self):
    loc = self.get_test_loc('ics/qemu-slirp/tcp_timer.c')
    check_detection([u'Copyright (c) 1982, 1986, 1988, 1990, 1993 The Regents of the University of California.'], loc)
def test_ics_qemu_slirp_tcp_var_h(self):
    loc = self.get_test_loc('ics/qemu-slirp/tcp_var.h')
    check_detection([u'Copyright (c) 1982, 1986, 1993, 1994 The Regents of the University of California.'], loc)
def test_ics_qemu_slirp_tftp_c(self):
    loc = self.get_test_loc('ics/qemu-slirp/tftp.c')
    check_detection([u'Copyright (c) 2004 Magnus Damm <damm@opensource.se>'], loc)
def test_ics_qemu_slirp_android_helper_h(self):
    loc = self.get_test_loc('ics/qemu-slirp-android/helper.h')
    check_detection([u'Copyright (c) 2009 The Android Open Source Project'], loc)
def test_ics_qemu_target_arm_iwmmxt_helper_c(self):
    loc = self.get_test_loc('ics/qemu-target-arm/iwmmxt_helper.c')
    check_detection(
        [u'Copyright (c) 2007 OpenedHand, Ltd.',
         u'Copyright (c) 2008 CodeSourcery'],
        loc)
def test_ics_qemu_target_arm_neon_helper_c(self):
    loc = self.get_test_loc('ics/qemu-target-arm/neon_helper.c')
    check_detection([u'Copyright (c) 2007, 2008 CodeSourcery.'], loc)
def test_ics_qemu_target_arm_op_helper_c(self):
    loc = self.get_test_loc('ics/qemu-target-arm/op_helper.c')
    check_detection([u'Copyright (c) 2005-2007 CodeSourcery, LLC'], loc)
def test_ics_qemu_target_arm_translate_c(self):
    loc = self.get_test_loc('ics/qemu-target-arm/translate.c')
    check_detection(
        [u'Copyright (c) 2003 Fabrice Bellard',
         u'Copyright (c) 2005-2007 CodeSourcery',
         u'Copyright (c) 2007 OpenedHand, Ltd.'],
        loc)
def test_ics_qemu_target_i386_helper_template_h(self):
    loc = self.get_test_loc('ics/qemu-target-i386/helper_template.h')
    check_detection([u'Copyright (c) 2008 Fabrice Bellard'], loc)
def test_ics_qemu_target_i386_kvm_c(self):
    loc = self.get_test_loc('ics/qemu-target-i386/kvm.c')
    check_detection(
        [u'Copyright (c) 2006-2008 Qumranet Technologies',
         u'Copyright IBM, Corp. 2008'],
        loc)
def test_ics_qemu_target_i386_ops_sse_h(self):
    loc = self.get_test_loc('ics/qemu-target-i386/ops_sse.h')
    check_detection(
        [u'Copyright (c) 2005 Fabrice Bellard',
         u'Copyright (c) 2008 Intel Corporation'],
        loc)
def test_ics_qemu_target_i386_ops_sse_header_h(self):
    loc = self.get_test_loc('ics/qemu-target-i386/ops_sse_header.h')
    check_detection([u'Copyright (c) 2005 Fabrice Bellard'], loc)
def test_ics_qemu_tcg_tcg_c(self):
    loc = self.get_test_loc('ics/qemu-tcg/tcg.c')
    check_detection([u'Copyright (c) 2008 Fabrice Bellard'], loc)
def test_ics_qemu_tcg_arm_tcg_target_c(self):
    loc = self.get_test_loc('ics/qemu-tcg-arm/tcg-target.c')
    check_detection([u'Copyright (c) 2008 Andrzej Zaborowski'], loc)
def test_ics_qemu_tcg_arm_tcg_target_h(self):
    loc = self.get_test_loc('ics/qemu-tcg-arm/tcg-target.h')
    check_detection(
        [u'Copyright (c) 2008 Fabrice Bellard',
         u'Copyright (c) 2008 Andrzej Zaborowski'],
        loc)
def test_ics_quake_androidmanifest_xml(self):
    loc = self.get_test_loc('ics/quake/AndroidManifest.xml')
    check_detection([u'Copyright 2007, The Android Open Source Project'], loc)
def test_ics_quake_notice(self):
    loc = self.get_test_loc('ics/quake/NOTICE')
    # The FSF statement pair appears twice; list repetition keeps it obvious.
    check_detection(
        [u'Copyright (c) 1996-2000 Id Software Inc.'] +
        [u'Copyright (c) 1989, 1991 Free Software Foundation, Inc.',
         u'copyrighted by the Free Software Foundation'] * 2,
        loc)
def test_ics_quake_quake_src_gnu_txt(self):
    loc = self.get_test_loc('ics/quake-quake-src/gnu.txt')
    check_detection(
        [u'Copyright (c) 1989, 1991 Free Software Foundation, Inc.',
         u'copyrighted by the Free Software Foundation'],
        loc)
def test_ics_quake_quake_src_qw_glqwcl_spec_sh(self):
    loc = self.get_test_loc('ics/quake-quake-src-QW/glqwcl.spec.sh')
    check_detection([u'Copyright Restricted Icon'], loc)
def test_ics_quake_quake_src_qw_client_adivtab_h(self):
    loc = self.get_test_loc('ics/quake-quake-src-QW-client/adivtab.h')
    check_detection([u'Copyright (c) 1999, 2000 Id Software Inc.'], loc)
def test_ics_quake_quake_src_qw_client_anorms_h(self):
    loc = self.get_test_loc('ics/quake-quake-src-QW-client/anorms.h')
    check_detection([u'Copyright (c) 1996-1997 Id Software, Inc.'], loc)
def test_ics_quake_quake_src_qw_client_cd_linux_c(self):
    loc = self.get_test_loc('ics/quake-quake-src-QW-client/cd_linux.c')
    check_detection(
        [u'Copyright (c) 1996-1997 Id Software, Inc.',
         u'(c) 1996 Id Software, Inc.'],
        loc)
def test_ics_quake_quake_src_qw_client_cl_demo_c(self):
    loc = self.get_test_loc('ics/quake-quake-src-QW-client/cl_demo.c')
    check_detection([u'Copyright (c) 1996-1997 Id Software, Inc.'], loc)
def test_ics_quake_quake_src_qw_client_exitscrn_txt(self):
    loc = self.get_test_loc('ics/quake-quake-src-QW-client/exitscrn.txt')
    check_detection([u'(c) 1996, 1997 Id Software, inc.'], loc)
def test_ics_quake_quake_src_qw_client_keys_h(self):
    loc = self.get_test_loc('ics/quake-quake-src-QW-client/keys.h')
    check_detection(
        [u'Copyright (c) 1996-1997 Id Software, Inc.',
         u'(c) Mouse Wheel Support'],
        loc)
def test_ics_quake_quake_src_qw_client_md4_c(self):
    loc = self.get_test_loc('ics/quake-quake-src-QW-client/md4.c')
    check_detection(
        [u'Copyright (c) 1996-1997 Id Software, Inc.',
         u'Copyright (c) 1991-2, RSA Data Security, Inc.',
         u'Copyright (c) 1990-2, RSA Data Security, Inc.'],
        loc)
def test_ics_quake_quake_src_qw_client_menu_c(self):
    loc = self.get_test_loc('ics/quake-quake-src-QW-client/menu.c')
    check_detection(
        [u'Copyright (c) 1996-1997 Id Software, Inc.',
         u'(c) 1996 Id'],
        loc)
@expectedFailure
def test_ics_quake_quake_src_qw_client_menu_c_trail_name(self):
    # Known miss: the trailing company name is not yet detected in full.
    loc = self.get_test_loc('ics/quake-quake-src-QW-client/menu.c')
    check_detection(
        [u'Copyright (c) 1996-1997 Id Software, Inc.',
         u'(c) 1996 Id Software'],
        loc)
def test_ics_quake_quake_src_qw_client_qwcl_plg(self):
    loc = self.get_test_loc('ics/quake-quake-src-QW-client/qwcl.plg')
    # The same Microsoft statement pair repeats 17 times in the build log.
    check_detection(
        [u'Copyright (c) Microsoft Corp 1984-1998.',
         u'Copyright (c) Microsoft Corp 1981-1993.'] * 17,
        loc)
def test_ics_quake_quake_src_qw_dxsdk_sdk_inc_d3d_h(self):
    loc = self.get_test_loc('ics/quake-quake-src-QW-dxsdk-sdk-inc/d3d.h')
    check_detection([u'Copyright (c) 1995-1996 Microsoft Corporation.'], loc)
def test_ics_quake_quake_src_qw_dxsdk_sdk_inc_ddraw_h(self):
    loc = self.get_test_loc('ics/quake-quake-src-QW-dxsdk-sdk-inc/ddraw.h')
    check_detection([u'Copyright (c) 1994-1996 Microsoft Corporation.'], loc)
def test_ics_quake_quake_src_qw_dxsdk_sdk_inc_dinput_h(self):
    loc = self.get_test_loc('ics/quake-quake-src-QW-dxsdk-sdk-inc/dinput.h')
    check_detection([u'Copyright (c) 1996 Microsoft Corporation.'], loc)
def test_ics_quake_quake_src_qw_dxsdk_sdk_inc_dplay_h(self):
    loc = self.get_test_loc('ics/quake-quake-src-QW-dxsdk-sdk-inc/dplay.h')
    check_detection([u'Copyright (c) 1994-1995 Microsoft Corporation.'], loc)
def test_ics_quake_quake_src_qw_dxsdk_sdk_inc_dsound_h(self):
    loc = self.get_test_loc('ics/quake-quake-src-QW-dxsdk-sdk-inc/dsound.h')
    check_detection([u'Copyright (c) 1995,1996 Microsoft Corporation.'], loc)
def test_ics_quake_quake_src_qw_scitech_include_debug_h(self):
    loc = self.get_test_loc('ics/quake-quake-src-QW-scitech-include/debug.h')
    check_detection([u'Copyright (c) 1996 SciTech Software'], loc)
def test_ics_quake_quake_src_qw_scitech_include_mgldos_h(self):
    loc = self.get_test_loc('ics/quake-quake-src-QW-scitech-include/mgldos.h')
    check_detection([u'Copyright (c) 1996 SciTech Software.'], loc)
def test_ics_quake_quake_src_winquake_3dfx_txt_trail_name(self):
    loc = self.get_test_loc('ics/quake-quake-src-WinQuake/3dfx.txt')
    check_detection([u'Copyright 1997 3Dfx Interactive, Inc.'], loc)
def test_ics_quake_quake_src_winquake_cl_input_cpp(self):
    loc = self.get_test_loc('ics/quake-quake-src-WinQuake/cl_input.cpp')
    check_detection(
        [u'Copyright (c) 1996-1997 Id Software, Inc.',
         u'(c) 1996 Id Software, Inc.'],
        loc)
def test_ics_quake_quake_src_winquake_conproc_cpp(self):
    loc = self.get_test_loc('ics/quake-quake-src-WinQuake/conproc.cpp')
    check_detection([u'Copyright (c) 1996-1997 Id Software, Inc.'], loc)
def test_ics_quake_quake_src_winquake_menu_cpp(self):
    loc = self.get_test_loc('ics/quake-quake-src-WinQuake/menu.cpp')
    check_detection(
        [u'Copyright (c) 1996-1997 Id Software, Inc.',
         u'(c) 1996 Id Software, inc.'],
        loc)
def test_ics_quake_quake_src_winquake_mpdosock_h(self):
    loc = self.get_test_loc('ics/quake-quake-src-WinQuake/mpdosock.h')
    check_detection(
        [u'Copyright (c) 1993-1995, Microsoft Corp.',
         u'Copyright (c) 1982-1986 Regents of the University of California.'],
        loc)
def test_ics_quake_quake_src_winquake_sys_linux_cpp(self):
    loc = self.get_test_loc('ics/quake-quake-src-WinQuake/sys_linux.cpp')
    # The identical notice is expected twice.
    check_detection([u'(c) 1996 Id Software, inc.'] * 2, loc)
def test_ics_quake_quake_src_winquake_winquake_plg(self):
    loc = self.get_test_loc('ics/quake-quake-src-WinQuake/WinQuake.plg')
    # The same Microsoft statement pair repeats 18 times in the build log.
    check_detection(
        [u'Copyright (c) Microsoft Corp 1984-1998.',
         u'Copyright (c) Microsoft Corp 1981-1993.'] * 18,
        loc)
def test_ics_quake_src_com_android_quake_quakeactivity_java(self):
    loc = self.get_test_loc('ics/quake-src-com-android-quake/QuakeActivity.java')
    check_detection([u'Copyright (c) 2007 The Android Open Source Project'], loc)
def test_ics_quake_src_com_android_quake_quakelib_java(self):
    loc = self.get_test_loc('ics/quake-src-com-android-quake/QuakeLib.java')
    check_detection(
        [u'Copyright (c) 2007 The Android Open Source Project',
         u'(c) Mouse Wheel Support'],
        loc)
def test_ics_quake_src_com_android_quake_quakeview_java(self):
    loc = self.get_test_loc('ics/quake-src-com-android-quake/QuakeView.java')
    check_detection(
        [u'Copyright (c) 2007 The Android Open Source Project',
         u'Copyright (c) 2008 The Android Open Source Project'],
        loc)
def test_ics_safe_iop_notice(self):
    loc = self.get_test_loc('ics/safe-iop/NOTICE')
    check_detection([u'Copyright (c) 2007,2008 Will Drewry <redpig@dataspill.org>'], loc)
def test_ics_safe_iop_include_safe_iop_h_lead_portion(self):
    loc = self.get_test_loc('ics/safe-iop-include/safe_iop.h')
    check_detection(
        [u'Copyright 2007,2008 redpig@dataspill.org',
         u'portions copyright The Android Open Source Project'],
        loc)
def test_ics_safe_iop_src_safe_iop_c_lead_portion(self):
    loc = self.get_test_loc('ics/safe-iop-src/safe_iop.c')
    check_detection(
        [u'Copyright 2007,2008 redpig@dataspill.org',
         u'portions copyright The Android Open Source Project'],
        loc)
def test_ics_skia_android_sample_sampleapp_androidmanifest_xml(self):
    loc = self.get_test_loc('ics/skia-android_sample-SampleApp/AndroidManifest.xml')
    check_detection([u'Copyright (c) 2011 Skia'], loc)
def test_ics_skia_android_sample_sampleapp_jni_sample_jni_cpp(self):
    loc = self.get_test_loc('ics/skia-android_sample-SampleApp-jni/sample-jni.cpp')
    check_detection([u'Copyright (c) 2011 Skia'], loc)
def test_ics_skia_emoji_emojifont_cpp(self):
    loc = self.get_test_loc('ics/skia-emoji/EmojiFont.cpp')
    check_detection([u'Copyright 2009, The Android Open Source Project'], loc)
def test_ics_skia_gm_strokerects_cpp(self):
    loc = self.get_test_loc('ics/skia-gm/strokerects.cpp')
    check_detection([u'Copyright 2011 Google Inc.'], loc)
def test_ics_skia_gpu_src_grgpu_cpp(self):
    loc = self.get_test_loc('ics/skia-gpu-src/GrGpu.cpp')
    check_detection([u'Copyright 2010 Google Inc.'], loc)
def test_ics_skia_include_core_skbitmap_h(self):
    loc = self.get_test_loc('ics/skia-include-core/SkBitmap.h')
    check_detection(
        [u'Copyright (c) 2006 The Android Open Source Project',
         u'SkColorGetR (c), SkColorGetG'],
        loc)
def test_ics_skia_include_core_skcolorpriv_h(self):
    loc = self.get_test_loc('ics/skia-include-core/SkColorPriv.h')
    # The macro-like false positive is detected twice.
    check_detection(
        [u'Copyright (c) 2006 The Android Open Source Project'] +
        [u'SkGetPackedG32 (c), SkGetPackedB32'] * 2,
        loc)
def test_ics_skia_include_core_skregion_h(self):
    loc = self.get_test_loc('ics/skia-include-core/SkRegion.h')
    check_detection([u'Copyright (c) 2005 The Android Open Source Project'], loc)
def test_ics_skia_include_core_skscalar_h(self):
    loc = self.get_test_loc('ics/skia-include-core/SkScalar.h')
    check_detection([u'Copyright (c) 2006 The Android Open Source Project'], loc)
def test_ics_skia_include_core_sktregistry_h(self):
    loc = self.get_test_loc('ics/skia-include-core/SkTRegistry.h')
    check_detection([u'Copyright 2009, The Android Open Source Project'], loc)
def test_ics_skia_include_ports_skharfbuzzfont_h(self):
    loc = self.get_test_loc('ics/skia-include-ports/SkHarfBuzzFont.h')
    check_detection([u'Copyright (c) 2009, Google Inc.'], loc)
def test_ics_skia_include_views_skoswindow_wxwidgets_h(self):
    loc = self.get_test_loc('ics/skia-include-views/SkOSWindow_wxwidgets.h')
    check_detection(
        [u'Copyright (c) 2006 The Android Open Source Project',
         u'Copyright 2005 MyCompanyName'],
        loc)
def test_ics_skia_src_animator_skoperanditerpolator_cpp(self):
    loc = self.get_test_loc('ics/skia-src-animator/SkOperandIterpolator.cpp')
    check_detection([u'Copyright 2006, The Android Open Source Project'], loc)
def test_ics_skia_src_core_skbitmap_cpp(self):
    loc = self.get_test_loc('ics/skia-src-core/SkBitmap.cpp')
    check_detection([u'Copyright (c) 2006-2008 The Android Open Source Project'], loc)
def test_ics_skia_src_core_skbitmapprocstate_matrixprocs_cpp(self):
    loc = self.get_test_loc('ics/skia-src-core/SkBitmapProcState_matrixProcs.cpp')
    check_detection([u'(c) COPYRIGHT 2009 Motorola'], loc)
def test_ics_skia_src_core_skblitter_4444_cpp(self):
    loc = self.get_test_loc('ics/skia-src-core/SkBlitter_4444.cpp')
    check_detection(
        [u'Copyright 2006, The Android Open Source Project',
         u'SkColorGetG (c), SkColorGetB'],
        loc)
def test_ics_skia_src_core_skcolortable_cpp(self):
    loc = self.get_test_loc('ics/skia-src-core/SkColorTable.cpp')
    check_detection([u'Copyright (c) 2006-2009 The Android Open Source Project'], loc)
def test_ics_skia_src_core_skfilterproc_h(self):
    loc = self.get_test_loc('ics/skia-src-core/SkFilterProc.h')
    check_detection([u'Copyright (c) 2006-2008 The Android Open Source Project'], loc)
def test_ics_skia_src_images_skimagedecoder_libjpeg_cpp(self):
    loc = self.get_test_loc('ics/skia-src-images/SkImageDecoder_libjpeg.cpp')
    check_detection([u'Copyright 2007, The Android Open Source Project'], loc)
def test_ics_skia_src_opts_opts_check_arm_cpp(self):
    loc = self.get_test_loc('ics/skia-src-opts/opts_check_arm.cpp')
    check_detection(
        [u'Copyright (c) 2010, Code Aurora Forum.',
         u'Copyright 2006-2010, The Android Open Source Project'],
        loc)
def test_ics_skia_src_pdf_skpdffont_cpp(self):
    loc = self.get_test_loc('ics/skia-src-pdf/SkPDFFont.cpp')
    check_detection([u'Copyright (c) 2011 Google Inc.'], loc)
def test_ics_skia_src_ports_skdebug_brew_cpp(self):
test_file = self.get_test_loc('ics/skia-src-ports/SkDebug_brew.cpp')
expected = [
u'Copyright 2009, The Android Open Source Project',
u'Copyright 2009, Company 100, Inc.',
]
check_detection(expected, test_file)
def test_ics_skia_src_ports_skfonthost_fontconfig_cpp(self):
test_file = self.get_test_loc('ics/skia-src-ports/SkFontHost_fontconfig.cpp')
expected = [
u'Copyright 2008, Google Inc.',
]
check_detection(expected, test_file)
def test_ics_skia_src_ports_skfonthost_none_cpp(self):
test_file = self.get_test_loc('ics/skia-src-ports/SkFontHost_none.cpp')
expected = [
u'Copyright 2006-2008, The Android Open Source Project',
]
check_detection(expected, test_file)
def test_ics_skia_src_ports_skosfile_brew_cpp(self):
test_file = self.get_test_loc('ics/skia-src-ports/SkOSFile_brew.cpp')
expected = [
u'Copyright 2006, The Android Open Source Project',
u'Copyright 2009, Company 100, Inc.',
]
check_detection(expected, test_file)
def test_ics_skia_src_ports_skxmlparser_empty_cpp(self):
test_file = self.get_test_loc('ics/skia-src-ports/SkXMLParser_empty.cpp')
expected = [
u'Copyright 2006, The Android Open Source Project',
u'Copyright Skia Inc. 2004 - 2005',
]
check_detection(expected, test_file)
def test_ics_skia_tests_fillpathtest_cpp(self):
test_file = self.get_test_loc('ics/skia-tests/FillPathTest.cpp')
expected = [
u'Copyright (c) 2010 The Chromium Authors.',
]
check_detection(expected, test_file)
def test_ics_sonivox_notice(self):
test_file = self.get_test_loc('ics/sonivox/NOTICE')
expected = [
u'Copyright (c) 2004-2006 Sonic Network Inc.',
]
check_detection(expected, test_file)
# -- copyright detection tests: ICS sonivox sources, docs and tools --
def test_ics_sonivox_arm_fm_22k_host_src_eas_h(self):
    test_file = self.get_test_loc('ics/sonivox-arm-fm-22k-host_src/eas.h')
    expected = [
        u'Copyright Sonic Network Inc. 2005, 2006',
    ]
    check_detection(expected, test_file)

def test_ics_sonivox_arm_fm_22k_host_src_eas_build_h(self):
    test_file = self.get_test_loc('ics/sonivox-arm-fm-22k-host_src/eas_build.h')
    expected = [
        u'Copyright Sonic Network Inc. 2006',
    ]
    check_detection(expected, test_file)

def test_ics_sonivox_arm_fm_22k_host_src_eas_config_c(self):
    test_file = self.get_test_loc('ics/sonivox-arm-fm-22k-host_src/eas_config.c')
    expected = [
        u'Copyright Sonic Network Inc. 2004-2006',
    ]
    check_detection(expected, test_file)

def test_ics_sonivox_arm_fm_22k_host_src_eas_config_h(self):
    test_file = self.get_test_loc('ics/sonivox-arm-fm-22k-host_src/eas_config.h')
    expected = [
        u'Copyright 2005 Sonic Network Inc.',
    ]
    check_detection(expected, test_file)

def test_ics_sonivox_arm_fm_22k_host_src_eas_main_c(self):
    test_file = self.get_test_loc('ics/sonivox-arm-fm-22k-host_src/eas_main.c')
    expected = [
        u'Copyright Sonic Network Inc. 2004',
    ]
    check_detection(expected, test_file)

def test_ics_sonivox_arm_fm_22k_host_src_eas_types_h(self):
    test_file = self.get_test_loc('ics/sonivox-arm-fm-22k-host_src/eas_types.h')
    expected = [
        u'Copyright Sonic Network Inc. 2004',
    ]
    check_detection(expected, test_file)

def test_ics_sonivox_arm_fm_22k_host_src_eas_wave_c(self):
    test_file = self.get_test_loc('ics/sonivox-arm-fm-22k-host_src/eas_wave.c')
    expected = [
        u'Copyright Sonic Network Inc. 2005',
    ]
    check_detection(expected, test_file)

def test_ics_sonivox_arm_fm_22k_lib_src_eas_ctype_h(self):
    test_file = self.get_test_loc('ics/sonivox-arm-fm-22k-lib_src/eas_ctype.h')
    expected = [
        u'Copyright (c) 2005 Sonic Network Inc.',
    ]
    check_detection(expected, test_file)

def test_ics_sonivox_arm_fm_22k_lib_src_eas_data_h(self):
    test_file = self.get_test_loc('ics/sonivox-arm-fm-22k-lib_src/eas_data.h')
    expected = [
        u'Copyright 2004 Sonic Network Inc.',
    ]
    check_detection(expected, test_file)

def test_ics_sonivox_arm_fm_22k_lib_src_eas_fmengine_c(self):
    test_file = self.get_test_loc('ics/sonivox-arm-fm-22k-lib_src/eas_fmengine.c')
    expected = [
        u'Copyright Sonic Network Inc. 2004, 2005',
    ]
    check_detection(expected, test_file)

def test_ics_sonivox_arm_fm_22k_lib_src_eas_fmsndlib_c(self):
    test_file = self.get_test_loc('ics/sonivox-arm-fm-22k-lib_src/eas_fmsndlib.c')
    expected = [
        u'(c) Copyright 2005 Sonic Network, Inc.',
    ]
    check_detection(expected, test_file)

def test_ics_sonivox_arm_fm_22k_lib_src_eas_smfdata_h(self):
    test_file = self.get_test_loc('ics/sonivox-arm-fm-22k-lib_src/eas_smfdata.h')
    expected = [
        u'Copyright Sonic Network Inc. 2005',
    ]
    check_detection(expected, test_file)

def test_ics_sonivox_arm_hybrid_22k_lib_src_eas_wtengine_c(self):
    test_file = self.get_test_loc('ics/sonivox-arm-hybrid-22k-lib_src/eas_wtengine.c')
    expected = [
        u'Copyright Sonic Network Inc. 2004-2005',
    ]
    check_detection(expected, test_file)

def test_ics_sonivox_arm_hybrid_22k_lib_src_hybrid_22khz_mcu_c(self):
    test_file = self.get_test_loc('ics/sonivox-arm-hybrid-22k-lib_src/hybrid_22khz_mcu.c')
    expected = [
        u'Copyright (c) 2006 Sonic Network Inc.',
    ]
    check_detection(expected, test_file)

def test_ics_sonivox_arm_wt_22k_lib_src_dls_h(self):
    test_file = self.get_test_loc('ics/sonivox-arm-wt-22k-lib_src/dls.h')
    expected = [
        u'Copyright (c) 1996 Sonic Foundry',
    ]
    check_detection(expected, test_file)

def test_ics_sonivox_arm_wt_22k_lib_src_jet_data_h(self):
    test_file = self.get_test_loc('ics/sonivox-arm-wt-22k-lib_src/jet_data.h')
    expected = [
        u'Copyright (c) 2006 Sonic Network Inc.',
    ]
    check_detection(expected, test_file)

def test_ics_sonivox_arm_wt_22k_lib_src_wt_22khz_c(self):
    test_file = self.get_test_loc('ics/sonivox-arm-wt-22k-lib_src/wt_22khz.c')
    expected = [
        u'Copyright (c) 2009 Sonic Network Inc.',
    ]
    check_detection(expected, test_file)

def test_ics_sonivox_docs_jet_authoring_guidelines_html(self):
    test_file = self.get_test_loc('ics/sonivox-docs/JET_Authoring_Guidelines.html')
    expected = [
        u'Copyright 2009 techdoc.dot Jennifer Hruska',
        u'Copyright (c) 2009 The Android Open Source Project',
    ]
    check_detection(expected, test_file)

def test_ics_sonivox_docs_jet_creator_user_manual_html(self):
    test_file = self.get_test_loc('ics/sonivox-docs/JET_Creator_User_Manual.html')
    expected = [
        u'Copyright 2009 Confidential Information',
        u'Copyright (c) 2009 The Android Open Source Project',
    ]
    check_detection(expected, test_file)

# Known failure: the author name leading the copyright in the HTML markup is
# presumably not yet captured by detection -- see the passing variant above.
@expectedFailure
def test_ics_sonivox_docs_jet_creator_user_manual_html_markup_lead_name(self):
    test_file = self.get_test_loc('ics/sonivox-docs/JET_Creator_User_Manual.html')
    expected = [
        u'Jennifer Hruska Copyright 2009 Confidential Information',
        u'Copyright (c) 2009 The Android Open Source Project',
    ]
    check_detection(expected, test_file)

def test_ics_sonivox_docs_jet_programming_manual_html(self):
    test_file = self.get_test_loc('ics/sonivox-docs/JET_Programming_Manual.html')
    expected = [
        u'Copyright (c) 2009 The Android Open Source Project',
    ]
    check_detection(expected, test_file)

def test_ics_sonivox_jet_tools_jetcreator_jetaudition_py(self):
    test_file = self.get_test_loc('ics/sonivox-jet_tools-JetCreator/JetAudition.py')
    expected = [
        u'Copyright (c) 2008 Android Open Source Project',
    ]
    check_detection(expected, test_file)
# -- copyright detection tests: ICS speex sources --
def test_ics_speex_notice(self):
    test_file = self.get_test_loc('ics/speex/NOTICE')
    expected = [
        u'Copyright (c) 2002-2008 Jean-Marc Valin',
        u'Copyright (c) 2002 Jean-Marc Valin & David Rowe',
        u'Copyright (c) 2003 Epic Games',
        u'Copyright (c) 2003 Epic Games',
        u'Copyright (c) 2004-2006 Epic Games',
        u'Copyright (c) 2005 Analog Devices',
        u'Copyright (c) 2005 Jean-Marc Valin, CSIRO, Christopher Montgomery',
        u'Copyright (c) 2006 David Rowe',
        u'Copyright (c) 2006-2008 CSIRO, Jean-Marc Valin, Xiph.Org Foundation',
        u'Copyright (c) 2008 Thorvald Natvig',
        u'Copyright (c) 2003-2004, Mark Borgerding',
        u'Copyright (c) 2005-2007, Jean-Marc Valin',
        u'Copyright (c) 2011 Jyri Sarha, Texas Instruments',
        u'Copyright 1992, 1993, 1994 by Jutta Degener and Carsten Bormann, Technische Universitaet Berlin',
    ]
    check_detection(expected, test_file)

def test_ics_speex_include_speex_speex_h(self):
    test_file = self.get_test_loc('ics/speex-include-speex/speex.h')
    expected = [
        u'Copyright (c) 2002-2006 Jean-Marc Valin',
    ]
    check_detection(expected, test_file)

def test_ics_speex_include_speex_speex_bits_h(self):
    test_file = self.get_test_loc('ics/speex-include-speex/speex_bits.h')
    expected = [
        u'Copyright (c) 2002 Jean-Marc Valin',
    ]
    check_detection(expected, test_file)

def test_ics_speex_include_speex_speex_buffer_h(self):
    test_file = self.get_test_loc('ics/speex-include-speex/speex_buffer.h')
    expected = [
        u'Copyright (c) 2007 Jean-Marc Valin',
    ]
    check_detection(expected, test_file)

def test_ics_speex_include_speex_speex_echo_h(self):
    test_file = self.get_test_loc('ics/speex-include-speex/speex_echo.h')
    expected = [
        # no year in the original notice
        u'Copyright (c) Jean-Marc Valin',
    ]
    check_detection(expected, test_file)

def test_ics_speex_include_speex_speex_preprocess_h(self):
    test_file = self.get_test_loc('ics/speex-include-speex/speex_preprocess.h')
    expected = [
        u'Copyright (c) 2003 Epic Games',
    ]
    check_detection(expected, test_file)

def test_ics_speex_include_speex_speex_types_h(self):
    test_file = self.get_test_loc('ics/speex-include-speex/speex_types.h')
    expected = [
        u'(c) COPYRIGHT 1994-2002 by the Xiph.Org Foundation http://www.xiph.org/',
    ]
    check_detection(expected, test_file)

def test_ics_speex_libspeex_kiss_fft_guts_h(self):
    test_file = self.get_test_loc('ics/speex-libspeex/_kiss_fft_guts.h')
    expected = [
        u'Copyright (c) 2003-2004, Mark Borgerding',
    ]
    check_detection(expected, test_file)

def test_ics_speex_libspeex_arch_h(self):
    test_file = self.get_test_loc('ics/speex-libspeex/arch.h')
    expected = [
        u'Copyright (c) 2003 Jean-Marc Valin',
    ]
    check_detection(expected, test_file)

def test_ics_speex_libspeex_bits_c(self):
    test_file = self.get_test_loc('ics/speex-libspeex/bits.c')
    expected = [
        u'Copyright (c) 2002 Jean-Marc Valin',
    ]
    check_detection(expected, test_file)

def test_ics_speex_libspeex_cb_search_c(self):
    test_file = self.get_test_loc('ics/speex-libspeex/cb_search.c')
    expected = [
        u'Copyright (c) 2002-2006 Jean-Marc Valin',
    ]
    check_detection(expected, test_file)

def test_ics_speex_libspeex_cb_search_h(self):
    test_file = self.get_test_loc('ics/speex-libspeex/cb_search.h')
    expected = [
        u'Copyright (c) 2002 Jean-Marc Valin & David Rowe',
    ]
    check_detection(expected, test_file)

def test_ics_speex_libspeex_cb_search_arm4_h(self):
    test_file = self.get_test_loc('ics/speex-libspeex/cb_search_arm4.h')
    expected = [
        u'Copyright (c) 2004 Jean-Marc Valin',
    ]
    check_detection(expected, test_file)

def test_ics_speex_libspeex_cb_search_bfin_h(self):
    test_file = self.get_test_loc('ics/speex-libspeex/cb_search_bfin.h')
    expected = [
        u'Copyright (c) 2005 Analog Devices',
    ]
    check_detection(expected, test_file)

def test_ics_speex_libspeex_fftwrap_c(self):
    test_file = self.get_test_loc('ics/speex-libspeex/fftwrap.c')
    expected = [
        u'Copyright (c) 2005-2006 Jean-Marc Valin',
    ]
    check_detection(expected, test_file)

def test_ics_speex_libspeex_fftwrap_h(self):
    test_file = self.get_test_loc('ics/speex-libspeex/fftwrap.h')
    expected = [
        u'Copyright (c) 2005 Jean-Marc Valin',
    ]
    check_detection(expected, test_file)

def test_ics_speex_libspeex_filterbank_c(self):
    test_file = self.get_test_loc('ics/speex-libspeex/filterbank.c')
    expected = [
        u'Copyright (c) 2006 Jean-Marc Valin',
    ]
    check_detection(expected, test_file)

def test_ics_speex_libspeex_fixed_bfin_h(self):
    test_file = self.get_test_loc('ics/speex-libspeex/fixed_bfin.h')
    expected = [
        # NOTE(review): the trailing 'Author Jean-Marc Valin' is detection
        # run-on text; the stricter variant below is expected to fail.
        u'Copyright (c) 2005 Analog Devices Author Jean-Marc Valin',
    ]
    check_detection(expected, test_file)

# Known failure: detecting the holder without the run-on author text.
@expectedFailure
def test_ics_speex_libspeex_fixed_bfin_h_extra_author(self):
    test_file = self.get_test_loc('ics/speex-libspeex/fixed_bfin.h')
    expected = [
        u'Copyright (c) 2005 Analog Devices',
    ]
    check_detection(expected, test_file)

def test_ics_speex_libspeex_kiss_fft_c(self):
    test_file = self.get_test_loc('ics/speex-libspeex/kiss_fft.c')
    expected = [
        u'Copyright (c) 2003-2004, Mark Borgerding',
        u'Copyright (c) 2005-2007, Jean-Marc Valin',
    ]
    check_detection(expected, test_file)

def test_ics_speex_libspeex_kiss_fftr_c(self):
    test_file = self.get_test_loc('ics/speex-libspeex/kiss_fftr.c')
    expected = [
        u'Copyright (c) 2003-2004, Mark Borgerding',
    ]
    check_detection(expected, test_file)

def test_ics_speex_libspeex_lpc_c(self):
    test_file = self.get_test_loc('ics/speex-libspeex/lpc.c')
    expected = [
        u'Copyright 1992, 1993, 1994 by Jutta Degener and Carsten Bormann, Technische Universitaet Berlin',
    ]
    check_detection(expected, test_file)

def test_ics_speex_libspeex_lsp_c(self):
    test_file = self.get_test_loc('ics/speex-libspeex/lsp.c')
    expected = [
        # holder-first notice form
        u'Jean-Marc Valin (c) 2002-2006',
    ]
    check_detection(expected, test_file)

def test_ics_speex_libspeex_lsp_bfin_h(self):
    test_file = self.get_test_loc('ics/speex-libspeex/lsp_bfin.h')
    expected = [
        u'Copyright (c) 2006 David Rowe',
    ]
    check_detection(expected, test_file)

def test_ics_speex_libspeex_mdf_c(self):
    test_file = self.get_test_loc('ics/speex-libspeex/mdf.c')
    expected = [
        u'Copyright (c) 2003-2008 Jean-Marc Valin',
    ]
    check_detection(expected, test_file)

def test_ics_speex_libspeex_modes_wb_c(self):
    test_file = self.get_test_loc('ics/speex-libspeex/modes_wb.c')
    expected = [
        u'Copyright (c) 2002-2007 Jean-Marc Valin',
    ]
    check_detection(expected, test_file)

def test_ics_speex_libspeex_preprocess_c(self):
    test_file = self.get_test_loc('ics/speex-libspeex/preprocess.c')
    expected = [
        u'Copyright (c) 2003 Epic Games',
        u'Copyright (c) 2004-2006 Epic Games',
    ]
    check_detection(expected, test_file)

def test_ics_speex_libspeex_pseudofloat_h(self):
    test_file = self.get_test_loc('ics/speex-libspeex/pseudofloat.h')
    expected = [
        u'Copyright (c) 2005 Jean-Marc Valin',
    ]
    check_detection(expected, test_file)

def test_ics_speex_libspeex_resample_c(self):
    test_file = self.get_test_loc('ics/speex-libspeex/resample.c')
    expected = [
        u'Copyright (c) 2007-2008 Jean-Marc Valin',
        u'Copyright (c) 2008 Thorvald Natvig',
    ]
    check_detection(expected, test_file)

def test_ics_speex_libspeex_resample_neon_h(self):
    test_file = self.get_test_loc('ics/speex-libspeex/resample_neon.h')
    expected = [
        u'Copyright (c) 2007-2008 Jean-Marc Valin',
        u'Copyright (c) 2008 Thorvald Natvig',
        u'Copyright (c) 2011 Jyri Sarha, Texas Instruments',
    ]
    check_detection(expected, test_file)

def test_ics_speex_libspeex_resample_sse_h(self):
    test_file = self.get_test_loc('ics/speex-libspeex/resample_sse.h')
    expected = [
        u'Copyright (c) 2007-2008 Jean-Marc Valin',
        u'Copyright (c) 2008 Thorvald Natvig',
    ]
    check_detection(expected, test_file)

def test_ics_speex_libspeex_scal_c(self):
    test_file = self.get_test_loc('ics/speex-libspeex/scal.c')
    expected = [
        u'Copyright (c) 2006-2008 CSIRO, Jean-Marc Valin, Xiph.Org Foundation',
    ]
    check_detection(expected, test_file)

def test_ics_speex_libspeex_smallft_c(self):
    test_file = self.get_test_loc('ics/speex-libspeex/smallft.c')
    expected = [
        u'(c) COPYRIGHT 1994-2001 by the XIPHOPHORUS Company http://www.xiph.org/',
    ]
    check_detection(expected, test_file)

def test_ics_speex_libspeex_vorbis_psy_h(self):
    test_file = self.get_test_loc('ics/speex-libspeex/vorbis_psy.h')
    expected = [
        u'Copyright (c) 2005 Jean-Marc Valin, CSIRO, Christopher Montgomery',
    ]
    check_detection(expected, test_file)

def test_ics_speex_libspeex_window_c(self):
    test_file = self.get_test_loc('ics/speex-libspeex/window.c')
    expected = [
        u'Copyright (c) 2006 Jean-Marc Valin',
    ]
    check_detection(expected, test_file)
# -- copyright detection tests: ICS srec sources --
# NOTE(review): 'Communciations' below is a verbatim typo from the scanned
# source files; the expected values must match the files, not correct them.
def test_ics_srec_notice(self):
    test_file = self.get_test_loc('ics/srec/NOTICE')
    expected = [
        u'Copyright 2007, 2008 Nuance Communications',
    ]
    check_detection(expected, test_file)

def test_ics_srec_audio_audioin_unix_include_audioin_h(self):
    test_file = self.get_test_loc('ics/srec-audio-AudioIn-UNIX-include/audioin.h')
    expected = [
        u'Copyright 2007, 2008 Nuance Communciations, Inc.',
    ]
    check_detection(expected, test_file)

def test_ics_srec_audio_audioin_unix_src_audioinwrapper_cpp(self):
    test_file = self.get_test_loc('ics/srec-audio-AudioIn-UNIX-src/audioinwrapper.cpp')
    expected = [
        u'Copyright 2007, 2008 Nuance Communciations, Inc.',
    ]
    check_detection(expected, test_file)

def test_ics_srec_audio_audioin_unix_src_filter_c(self):
    test_file = self.get_test_loc('ics/srec-audio-AudioIn-UNIX-src/filter.c')
    expected = [
        u'Copyright 2007, 2008 Nuance Communciations, Inc.',
    ]
    check_detection(expected, test_file)

def test_ics_srec_doc_srec_doxygen(self):
    test_file = self.get_test_loc('ics/srec-doc/srec.doxygen')
    expected = [
        u'(c) Copyright 2003-2007 Nuance',
    ]
    check_detection(expected, test_file)

def test_ics_srec_srec_srec_doxygen(self):
    test_file = self.get_test_loc('ics/srec-srec/srec.doxygen')
    expected = [
        u'(c) Copyright 2003 Speechworks International',
    ]
    check_detection(expected, test_file)

def test_ics_srec_srec_jni_android_speech_srec_microphoneinputstream_cpp(self):
    test_file = self.get_test_loc('ics/srec-srec_jni/android_speech_srec_MicrophoneInputStream.cpp')
    expected = [
        u'Copyright 2007 Nuance Communciations, Inc.',
    ]
    check_detection(expected, test_file)

# Known failure: both notices in grxmlcompile.cpp are not yet detected.
@expectedFailure
def test_ics_srec_tools_grxmlcompile_grxmlcompile_cpp(self):
    test_file = self.get_test_loc('ics/srec-tools-grxmlcompile/grxmlcompile.cpp')
    expected = [
        u'Copyright 2007, 2008 Nuance Communciations, Inc.',
        u'Copyright (c) 2007 Project Admins leethomason',
    ]
    check_detection(expected, test_file)
# -- copyright detection tests: ICS srtp sources and docs --
def test_ics_srtp_config_guess(self):
    test_file = self.get_test_loc('ics/srtp/config.guess')
    expected = [
        # the same FSF notice appears twice in the file
        u'Copyright (c) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.',
        u'Copyright (c) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.',
    ]
    check_detection(expected, test_file)

def test_ics_srtp_config_log(self):
    test_file = self.get_test_loc('ics/srtp/config.log')
    expected = [
        u'Copyright (c) 2007 Free Software Foundation, Inc.',
    ]
    check_detection(expected, test_file)

def test_ics_srtp_license(self):
    test_file = self.get_test_loc('ics/srtp/LICENSE')
    expected = [
        u'Copyright (c) 2001-2006 Cisco Systems, Inc.',
    ]
    check_detection(expected, test_file)

def test_ics_srtp_crypto_cipher_aes_c(self):
    test_file = self.get_test_loc('ics/srtp-crypto-cipher/aes.c')
    expected = [
        u'Copyright (c) 2001-2006, Cisco Systems, Inc.',
    ]
    check_detection(expected, test_file)

def test_ics_srtp_crypto_hash_hmac_c(self):
    test_file = self.get_test_loc('ics/srtp-crypto-hash/hmac.c')
    expected = [
        u'Copyright (c) 2001-2006 Cisco Systems, Inc.',
    ]
    check_detection(expected, test_file)

def test_ics_srtp_crypto_include_auth_h(self):
    test_file = self.get_test_loc('ics/srtp-crypto-include/auth.h')
    expected = [
        u'Copyright (c) 2001-2006, Cisco Systems, Inc.',
    ]
    check_detection(expected, test_file)

def test_ics_srtp_crypto_include_kernel_compat_h(self):
    test_file = self.get_test_loc('ics/srtp-crypto-include/kernel_compat.h')
    expected = [
        u'Copyright (c) 2005 Ingate Systems AB',
    ]
    check_detection(expected, test_file)

def test_ics_srtp_doc_header_template(self):
    test_file = self.get_test_loc('ics/srtp-doc/header.template')
    expected = [
        # lowercase 'copyright' as it appears in the template
        u'copyright 2001-2005 by David A. McGrew, Cisco Systems, Inc.',
    ]
    check_detection(expected, test_file)

def test_ics_srtp_doc_intro_txt(self):
    test_file = self.get_test_loc('ics/srtp-doc/intro.txt')
    expected = [
        u'Copyright (c) 2001-2005 Cisco Systems, Inc.',
    ]
    check_detection(expected, test_file)

def test_ics_srtp_doc_rfc3711_txt(self):
    test_file = self.get_test_loc('ics/srtp-doc/rfc3711.txt')
    expected = [
        u'Copyright (c) The Internet Society (2004).',
        u'Full Copyright Statement',
        u'Full Copyright Statement',
        u'Copyright (c) The Internet Society (2004).',
    ]
    check_detection(expected, test_file)

def test_ics_srtp_include_ekt_h(self):
    test_file = self.get_test_loc('ics/srtp-include/ekt.h')
    expected = [
        u'Copyright (c) 2001-2005 Cisco Systems, Inc.',
    ]
    check_detection(expected, test_file)
# -- copyright detection tests: ICS STLport sources, headers and tests --
def test_ics_stlport_configure_bat(self):
    test_file = self.get_test_loc('ics/stlport/configure.bat')
    expected = [
        u'Copyright (c) 2004,2005 Michael Fink',
    ]
    check_detection(expected, test_file)

def test_ics_stlport_license(self):
    test_file = self.get_test_loc('ics/stlport/LICENSE')
    expected = [
        u'Copyright 1999,2000 Boris Fomitchev',
        u'Copyright 1994 Hewlett-Packard Company',
        u'Copyright 1996,97 Silicon Graphics Computer Systems, Inc.',
        u'Copyright 1997 Moscow Center for SPARC Technology.',
    ]
    check_detection(expected, test_file)

def test_ics_stlport_makefile(self):
    test_file = self.get_test_loc('ics/stlport/Makefile')
    expected = [
        u'Copyright (c) 2004-2008 Petr Ovtchenkov',
    ]
    check_detection(expected, test_file)

def test_ics_stlport_readme(self):
    test_file = self.get_test_loc('ics/stlport/README')
    expected = [
        u'Copyright (c) 1994 Hewlett-Packard Company',
        u'Copyright (c) 1996-1999 Silicon Graphics Computer Systems, Inc.',
        u'Copyright (c) 1997 Moscow Center for SPARC Technology',
        u'Copyright (c) 1999-2003 Boris Fomitchev',
    ]
    check_detection(expected, test_file)

def test_ics_stlport_src_acquire_release_h(self):
    test_file = self.get_test_loc('ics/stlport-src/acquire_release.h')
    expected = [
        u'Copyright (c) 1999 Silicon Graphics Computer Systems, Inc.',
        u'Copyright (c) 1999 Boris Fomitchev',
    ]
    check_detection(expected, test_file)

def test_ics_stlport_src_allocators_cpp(self):
    test_file = self.get_test_loc('ics/stlport-src/allocators.cpp')
    expected = [
        u'Copyright (c) 1996,1997 Silicon Graphics Computer Systems, Inc.',
        u'Copyright (c) 1997 Moscow Center for SPARC Technology',
        u'Copyright (c) 1999 Boris Fomitchev',
    ]
    check_detection(expected, test_file)

def test_ics_stlport_src_bitset_cpp(self):
    test_file = self.get_test_loc('ics/stlport-src/bitset.cpp')
    expected = [
        u'Copyright (c) 1998 Silicon Graphics Computer Systems, Inc.',
        u'Copyright (c) 1999 Boris Fomitchev',
    ]
    check_detection(expected, test_file)

def test_ics_stlport_src_ctype_cpp(self):
    test_file = self.get_test_loc('ics/stlport-src/ctype.cpp')
    expected = [
        u'Copyright (c) 1999 Silicon Graphics Computer Systems, Inc.',
        u'Copyright (c) 1999 Boris Fomitchev',
    ]
    check_detection(expected, test_file)

def test_ics_stlport_src_dll_main_cpp(self):
    test_file = self.get_test_loc('ics/stlport-src/dll_main.cpp')
    expected = [
        u'Copyright (c) 1994 Hewlett-Packard Company',
        u'Copyright (c) 1996,1997 Silicon Graphics Computer Systems, Inc.',
        u'Copyright (c) 1997 Moscow Center for SPARC Technology',
        u'Copyright (c) 1999 Boris Fomitchev',
    ]
    check_detection(expected, test_file)

def test_ics_stlport_src_lock_free_slist_h(self):
    test_file = self.get_test_loc('ics/stlport-src/lock_free_slist.h')
    expected = [
        u'Copyright (c) 1997-1999 Silicon Graphics Computer Systems, Inc.',
        u'Copyright (c) 1999 Boris Fomitchev',
    ]
    check_detection(expected, test_file)

def test_ics_stlport_src_stlport_rc(self):
    test_file = self.get_test_loc('ics/stlport-src/stlport.rc')
    expected = [
        # no year in the original notice
        u'Copyright (c) Boris Fomitchev',
    ]
    check_detection(expected, test_file)

def test_ics_stlport_src_c_locale_dummy_c_locale_dummy_c(self):
    test_file = self.get_test_loc('ics/stlport-src-c_locale_dummy/c_locale_dummy.c')
    expected = [
        u'Copyright (c) 1999 Silicon Graphics Computer Systems, Inc.',
        u'Copyright (c) 1999 Boris Fomitchev',
    ]
    check_detection(expected, test_file)

def test_ics_stlport_src_c_locale_win32_c_wlocale_win32_c(self):
    test_file = self.get_test_loc('ics/stlport-src-c_locale_win32/c_wlocale_win32.c')
    expected = [
        u'Copyright (c) 2007 2008 Francois Dumont',
    ]
    check_detection(expected, test_file)

def test_ics_stlport_stlport_assert_h(self):
    test_file = self.get_test_loc('ics/stlport-stlport/assert.h')
    expected = [
        u'Copyright (c) 1999 Boris Fomitchev',
    ]
    check_detection(expected, test_file)

def test_ics_stlport_stlport_exception(self):
    test_file = self.get_test_loc('ics/stlport-stlport/exception')
    expected = [
        u'Copyright (c) 1996,1997 Silicon Graphics Computer Systems, Inc.',
        u'Copyright (c) 1999 Boris Fomitchev',
    ]
    check_detection(expected, test_file)

def test_ics_stlport_stlport_limits(self):
    test_file = self.get_test_loc('ics/stlport-stlport/limits')
    expected = [
        u'Copyright (c) 1997 Silicon Graphics Computer Systems, Inc.',
        u'Copyright (c) 1999 Boris Fomitchev',
    ]
    check_detection(expected, test_file)

def test_ics_stlport_stlport_locale(self):
    test_file = self.get_test_loc('ics/stlport-stlport/locale')
    expected = [
        u'Copyright (c) 1999 Silicon Graphics Computer Systems, Inc.',
        u'Copyright (c) 1999 Boris Fomitchev',
    ]
    check_detection(expected, test_file)

def test_ics_stlport_stlport_numeric(self):
    test_file = self.get_test_loc('ics/stlport-stlport/numeric')
    expected = [
        u'Copyright (c) 1994 Hewlett-Packard Company',
        u'Copyright (c) 1996,1997 Silicon Graphics Computer Systems, Inc.',
        u'Copyright (c) 1999 Boris Fomitchev',
    ]
    check_detection(expected, test_file)

def test_ics_stlport_stlport_rope(self):
    test_file = self.get_test_loc('ics/stlport-stlport/rope')
    expected = [
        u'Copyright (c) 1997 Silicon Graphics Computer Systems, Inc.',
    ]
    check_detection(expected, test_file)

def test_ics_stlport_stlport_type_traits(self):
    test_file = self.get_test_loc('ics/stlport-stlport/type_traits')
    expected = [
        u'Copyright (c) 2007, 2008 Petr Ovtchenkov',
    ]
    check_detection(expected, test_file)

def test_ics_stlport_stlport_unordered_map(self):
    test_file = self.get_test_loc('ics/stlport-stlport/unordered_map')
    expected = [
        u'Copyright (c) 2004,2005 Francois Dumont',
    ]
    check_detection(expected, test_file)

def test_ics_stlport_stlport_stl_carray_h(self):
    test_file = self.get_test_loc('ics/stlport-stlport-stl/_carray.h')
    expected = [
        u'Copyright (c) 2005 Francois Dumont',
    ]
    check_detection(expected, test_file)

def test_ics_stlport_stlport_stl_function_h(self):
    test_file = self.get_test_loc('ics/stlport-stlport-stl/_function.h')
    expected = [
        u'Copyright (c) 1994 Hewlett-Packard Company',
        u'Copyright (c) 1996-1998 Silicon Graphics Computer Systems, Inc.',
        u'Copyright (c) 1997 Moscow Center for SPARC Technology',
        u'Copyright (c) 1999 Boris Fomitchev',
    ]
    check_detection(expected, test_file)

def test_ics_stlport_stlport_stl_function_adaptors_h(self):
    test_file = self.get_test_loc('ics/stlport-stlport-stl/_function_adaptors.h')
    expected = [
        u'Copyright (c) 1994 Hewlett-Packard Company',
        u'Copyright (c) 1996-1998 Silicon Graphics Computer Systems, Inc.',
        u'Copyright (c) 1997 Moscow Center for SPARC Technology',
        u'Copyright (c) 1999 Boris Fomitchev',
        u'Copyright (c) 2000 Pavel Kuznetsov',
        # double-quoted because the holder name contains an apostrophe
        u"Copyright (c) 2001 Meridian'93",
    ]
    check_detection(expected, test_file)

def test_ics_stlport_stlport_stl_hash_fun_h(self):
    test_file = self.get_test_loc('ics/stlport-stlport-stl/_hash_fun.h')
    expected = [
        u'Copyright (c) 1996-1998 Silicon Graphics Computer Systems, Inc.',
        u'Copyright (c) 1994 Hewlett-Packard Company',
    ]
    check_detection(expected, test_file)

def test_ics_stlport_stlport_stl_heap_h(self):
    test_file = self.get_test_loc('ics/stlport-stlport-stl/_heap.h')
    expected = [
        u'Copyright (c) 1994 Hewlett-Packard Company',
        u'Copyright (c) 1997 Silicon Graphics Computer Systems, Inc.',
    ]
    check_detection(expected, test_file)

def test_ics_stlport_stlport_stl_limits_c(self):
    test_file = self.get_test_loc('ics/stlport-stlport-stl/_limits.c')
    expected = [
        u'Copyright (c) 1998,1999 Silicon Graphics Computer Systems, Inc.',
        u'Copyright (c) 1999 Boris Fomitchev',
    ]
    check_detection(expected, test_file)

def test_ics_stlport_stlport_stl_string_base_h(self):
    test_file = self.get_test_loc('ics/stlport-stlport-stl/_string_base.h')
    expected = [
        u'Copyright (c) 1997-1999 Silicon Graphics Computer Systems, Inc.',
        u'Copyright (c) 1999 Boris Fomitchev',
        u'Copyright (c) 2003 Francois Dumont',
    ]
    check_detection(expected, test_file)

def test_ics_stlport_stlport_stl_boost_type_traits_h(self):
    test_file = self.get_test_loc('ics/stlport-stlport-stl/boost_type_traits.h')
    expected = [
        u'Copyright (c) 2004 Francois Dumont',
    ]
    check_detection(expected, test_file)

def test_ics_stlport_stlport_stl_concept_checks_h(self):
    test_file = self.get_test_loc('ics/stlport-stlport-stl/concept_checks.h')
    expected = [
        u'Copyright (c) 1999 Silicon Graphics Computer Systems, Inc.',
    ]
    check_detection(expected, test_file)

def test_ics_stlport_stlport_stl_msl_string_h_trail_inc(self):
    test_file = self.get_test_loc('ics/stlport-stlport-stl/msl_string.h')
    expected = [
        u'Copyright (c) 1998 Mark of the Unicorn, Inc.',
    ]
    check_detection(expected, test_file)

def test_ics_stlport_stlport_stl_type_manips_h(self):
    test_file = self.get_test_loc('ics/stlport-stlport-stl/type_manips.h')
    expected = [
        u'Copyright (c) 2003 Francois Dumont',
    ]
    check_detection(expected, test_file)

def test_ics_stlport_stlport_stl_type_traits_h(self):
    test_file = self.get_test_loc('ics/stlport-stlport-stl/type_traits.h')
    expected = [
        u'Copyright (c) 1996,1997 Silicon Graphics Computer Systems, Inc.',
        u'Copyright (c) 1997 Moscow Center for SPARC Technology',
        u'Copyright (c) 1999 Boris Fomitchev',
        u'Copyright 2000 Adobe Systems Incorporated and others.',
    ]
    check_detection(expected, test_file)

def test_ics_stlport_stlport_stl_config_native_headers_h(self):
    test_file = self.get_test_loc('ics/stlport-stlport-stl-config/_native_headers.h')
    expected = [
        u'Copyright (c) 2006 Francois Dumont',
    ]
    check_detection(expected, test_file)

def test_ics_stlport_test_eh_main_cpp_trail_inc(self):
    test_file = self.get_test_loc('ics/stlport-test-eh/main.cpp')
    expected = [
        u'Copyright (c) 1997 Mark of the Unicorn, Inc.',
        u'Copyright (c) 1997 Moscow Center for SPARC Technology',
    ]
    check_detection(expected, test_file)

def test_ics_stlport_test_eh_mwerks_console_os_x_c(self):
    test_file = self.get_test_loc('ics/stlport-test-eh/mwerks_console_OS_X.c')
    expected = [
        u'Copyright (c) 1995-2002 Metrowerks Corporation.',
    ]
    check_detection(expected, test_file)

def test_ics_stlport_test_eh_random_number_h_trail_inc(self):
    test_file = self.get_test_loc('ics/stlport-test-eh/random_number.h')
    expected = [
        u'Copyright (c) 1997-1998 Mark of the Unicorn, Inc.',
    ]
    check_detection(expected, test_file)

def test_ics_stlport_test_eh_test_insert_h_trail_inc(self):
    test_file = self.get_test_loc('ics/stlport-test-eh/test_insert.h')
    expected = [
        u'Copyright (c) 1997 Mark of the Unicorn, Inc.',
    ]
    check_detection(expected, test_file)

def test_ics_stlport_test_unit_limits_test_cpp(self):
    test_file = self.get_test_loc('ics/stlport-test-unit/limits_test.cpp')
    expected = [
        u'Copyright Jens Maurer 2000',
    ]
    check_detection(expected, test_file)

def test_ics_stlport_test_unit_cppunit_cppunit_mini_h(self):
    test_file = self.get_test_loc('ics/stlport-test-unit-cppunit/cppunit_mini.h')
    expected = [
        u'Copyright (c) 2003, 2004 Zdenek Nemec',
    ]
    check_detection(expected, test_file)
def test_ics_strace_aclocal_m4(self):
test_file = self.get_test_loc('ics/strace/aclocal.m4')
expected = [
u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 2002, 2003, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 2001, 2003, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 1997, 2000, 2001, 2003, 2004, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 1996, 1997, 2000, 2001, 2003, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 2001, 2003, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 2003, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 1996, 1998, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 2001, 2002, 2003, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 1997, 1999, 2000, 2001, 2003, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 2003, 2004, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 2001, 2002, 2003, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 1996, 1997, 2000, 2001, 2003, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 2001, 2003, 2005 Free Software Foundation, Inc.',
u'Copyright (c) 2004, 2005 Free Software Foundation, Inc.',
]
check_detection(expected, test_file)
def test_ics_strace_bjm_c(self):
    """Copyright statements expected in strace's bjm.c."""
    check_detection(
        [
            u'Copyright (c) 1991, 1992 Paul Kranenburg <pk@cs.few.eur.nl>',
            u'Copyright (c) 1993 Branko Lankester <branko@hacktic.nl>',
            u'Copyright (c) 1993, 1994, 1995, 1996 Rick Sladkey <jrs@world.std.com>',
            u'Copyright (c) 1996-1999 Wichert Akkerman <wichert@cistron.nl>',
        ],
        self.get_test_loc('ics/strace/bjm.c'))
def test_ics_strace_config_log(self):
    """Copyright statements expected in strace's config.log."""
    check_detection(
        [u'Copyright (c) 2006 Free Software Foundation, Inc.'],
        self.get_test_loc('ics/strace/config.log'))
def test_ics_strace_copyright(self):
    """Copyright statements expected in strace's COPYRIGHT file."""
    check_detection(
        [
            u'Copyright (c) 1991, 1992 Paul Kranenburg <pk@cs.few.eur.nl>',
            u'Copyright (c) 1993 Branko Lankester <branko@hacktic.nl>',
            u'Copyright (c) 1993 Ulrich Pegelow <pegelow@moorea.uni-muenster.de>',
            u'Copyright (c) 1995, 1996 Michael Elizabeth Chastain <mec@duracef.shout.net>',
            u'Copyright (c) 1993, 1994, 1995, 1996 Rick Sladkey <jrs@world.std.com>',
            u'Copyright (c) 1998-2001 Wichert Akkerman <wakkerma@deephackmode.org>',
            # redundant, rare junk deliberately not expected:
            # u'COPYRIGHT,v 1.3 2002/03/31 18:43:00 wichert'
        ],
        self.get_test_loc('ics/strace/COPYRIGHT'))
def test_ics_strace_defs_h(self):
    """Copyright statements expected in strace's defs.h."""
    check_detection(
        [
            u'Copyright (c) 1991, 1992 Paul Kranenburg <pk@cs.few.eur.nl>',
            u'Copyright (c) 1993 Branko Lankester <branko@hacktic.nl>',
            u'Copyright (c) 1993, 1994, 1995, 1996 Rick Sladkey <jrs@world.std.com>',
        ],
        self.get_test_loc('ics/strace/defs.h'))
def test_ics_strace_depcomp(self):
    """Copyright statements expected in strace's depcomp script."""
    check_detection(
        [u'Copyright (c) 1999, 2000, 2003 Free Software Foundation, Inc.'],
        self.get_test_loc('ics/strace/depcomp'))
def test_ics_strace_errnoent_sh(self):
    """Copyright statements expected in strace's errnoent.sh."""
    check_detection(
        [u'Copyright (c) 1993, 1994, 1995 Rick Sladkey <jrs@world.std.com>'],
        self.get_test_loc('ics/strace/errnoent.sh'))
def test_ics_strace_ioctl_c(self):
    """Copyright statements expected in strace's ioctl.c."""
    check_detection(
        [
            u'Copyright (c) 1991, 1992 Paul Kranenburg <pk@cs.few.eur.nl>',
            u'Copyright (c) 1993 Branko Lankester <branko@hacktic.nl>',
            u'Copyright (c) 1993, 1994, 1995, 1996 Rick Sladkey <jrs@world.std.com>',
            u'Copyright (c) 1996-2001 Wichert Akkerman <wichert@cistron.nl>',
        ],
        self.get_test_loc('ics/strace/ioctl.c'))
def test_ics_strace_ioctlsort_c(self):
    """Copyright statements expected in strace's ioctlsort.c."""
    check_detection(
        [
            u'Copyright (c) 1991, 1992 Paul Kranenburg <pk@cs.few.eur.nl>',
            u'Copyright (c) 1993, 1994, 1995 Rick Sladkey <jrs@world.std.com>',
        ],
        self.get_test_loc('ics/strace/ioctlsort.c'))
def test_ics_strace_ipc_c(self):
    """Copyright statements expected in strace's ipc.c."""
    check_detection(
        [
            u'Copyright (c) 1993 Ulrich Pegelow <pegelow@moorea.uni-muenster.de>',
            u'Copyright (c) 1993 Branko Lankester <branko@hacktic.nl>',
            u'Copyright (c) 1993, 1994, 1995, 1996 Rick Sladkey <jrs@world.std.com>',
            u'Copyright (c) 1996-1999 Wichert Akkerman <wichert@cistron.nl>',
        ],
        self.get_test_loc('ics/strace/ipc.c'))
def test_ics_strace_makefile_in(self):
    """Copyright statements expected in strace's Makefile.in."""
    check_detection(
        [u'Copyright (c) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.'],
        self.get_test_loc('ics/strace/Makefile.in'))
def test_ics_strace_mem_c(self):
    """Copyright statements expected in strace's mem.c."""
    check_detection(
        [
            u'Copyright (c) 1991, 1992 Paul Kranenburg <pk@cs.few.eur.nl>',
            u'Copyright (c) 1993 Branko Lankester <branko@hacktic.nl>',
            u'Copyright (c) 1993, 1994, 1995, 1996 Rick Sladkey <jrs@world.std.com>',
            u'Copyright (c) 1996-1999 Wichert Akkerman <wichert@cistron.nl>',
            u'Copyright (c) 2000 PocketPenguins Inc.',
        ],
        self.get_test_loc('ics/strace/mem.c'))
def test_ics_strace_net_c(self):
    """Copyright statements expected in strace's net.c."""
    check_detection(
        [
            u'Copyright (c) 1991, 1992 Paul Kranenburg <pk@cs.few.eur.nl>',
            u'Copyright (c) 1993 Branko Lankester <branko@hacktic.nl>',
            u'Copyright (c) 1993, 1994, 1995, 1996 Rick Sladkey <jrs@world.std.com>',
            u'Copyright (c) 1996-2000 Wichert Akkerman <wichert@cistron.nl>',
        ],
        self.get_test_loc('ics/strace/net.c'))
def test_ics_strace_proc_c(self):
    """Copyright statements expected in strace's proc.c."""
    check_detection(
        [u'Copyright (c) 1993, 1994, 1995 Rick Sladkey <jrs@world.std.com>'],
        self.get_test_loc('ics/strace/proc.c'))
def test_ics_strace_process_c(self):
    """Copyright statements expected in strace's process.c."""
    check_detection(
        [
            u'Copyright (c) 1991, 1992 Paul Kranenburg <pk@cs.few.eur.nl>',
            u'Copyright (c) 1993 Branko Lankester <branko@hacktic.nl>',
            u'Copyright (c) 1993, 1994, 1995, 1996 Rick Sladkey <jrs@world.std.com>',
            u'Copyright (c) 1996-1999 Wichert Akkerman <wichert@cistron.nl>',
            u'Copyright (c) 1999 IBM Deutschland Entwicklung GmbH, IBM Corporation',
            u'Copyright (c) 2000 PocketPenguins Inc.',
        ],
        self.get_test_loc('ics/strace/process.c'))
def test_ics_strace_signal_c(self):
    """Copyright statements expected in strace's signal.c."""
    check_detection(
        [
            u'Copyright (c) 1991, 1992 Paul Kranenburg <pk@cs.few.eur.nl>',
            u'Copyright (c) 1993 Branko Lankester <branko@hacktic.nl>',
            u'Copyright (c) 1993, 1994, 1995, 1996 Rick Sladkey <jrs@world.std.com>',
            u'Copyright (c) 1996-1999 Wichert Akkerman <wichert@cistron.nl>',
            u'Copyright (c) 1999 IBM Deutschland Entwicklung GmbH, IBM Corporation',
        ],
        self.get_test_loc('ics/strace/signal.c'))
def test_ics_strace_signalent_sh(self):
    """Copyright statements expected in strace's signalent.sh."""
    check_detection(
        [u'Copyright (c) 1996 Rick Sladkey <jrs@world.std.com>'],
        self.get_test_loc('ics/strace/signalent.sh'))
def test_ics_strace_sock_c(self):
    """Copyright statements expected in strace's sock.c."""
    check_detection(
        [u'Copyright (c) 1993, 1994, 1995, 1996 Rick Sladkey <jrs@world.std.com>'],
        self.get_test_loc('ics/strace/sock.c'))
def test_ics_strace_strace_1(self):
    """Copyright statements expected in the strace.1 man page."""
    check_detection(
        [
            u'Copyright (c) 1991, 1992 Paul Kranenburg <pk@cs.few.eur.nl>',
            u'Copyright (c) 1993 Branko Lankester <branko@hacktic.nl>',
            u'Copyright (c) 1993, 1994, 1995, 1996 Rick Sladkey <jrs@world.std.com>',
        ],
        self.get_test_loc('ics/strace/strace.1'))
def test_ics_strace_strace_graph(self):
    """Copyright statements expected in the strace-graph script."""
    check_detection(
        [u'Copyright (c) 1998 by Richard Braakman <dark@xs4all.nl>.'],
        self.get_test_loc('ics/strace/strace-graph'))
def test_ics_strace_stream_c(self):
    """Copyright statements expected in strace's stream.c."""
    check_detection(
        [
            u'Copyright (c) 1993, 1994, 1995, 1996 Rick Sladkey <jrs@world.std.com>',
            u'Copyright (c) 1996-1999 Wichert Akkerman <wichert@cistron.nl>',
        ],
        self.get_test_loc('ics/strace/stream.c'))
def test_ics_strace_syscallent_sh(self):
    """Copyright statements expected in strace's syscallent.sh."""
    check_detection(
        [u'Copyright (c) 1993, 1994, 1995, 1996 Rick Sladkey <jrs@world.std.com>'],
        self.get_test_loc('ics/strace/syscallent.sh'))
def test_ics_strace_linux_dummy_h(self):
    """Copyright statements expected in strace-linux dummy.h."""
    check_detection(
        [
            u'Copyright (c) 1993 Branko Lankester <branko@hacktic.nl>',
            u'Copyright (c) 1993, 1994, 1995 Rick Sladkey <jrs@world.std.com>',
        ],
        self.get_test_loc('ics/strace-linux/dummy.h'))
def test_ics_strace_linux_ioctlent_sh(self):
    """Copyright statements expected in strace-linux ioctlent.sh."""
    check_detection(
        [u'Copyright (c) 2001 Wichert Akkerman <wichert@cistron.nl>'],
        self.get_test_loc('ics/strace-linux/ioctlent.sh'))
def test_ics_strace_strace_linux_hppa_syscallent_h(self):
    """Copyright statements expected in the hppa syscallent.h."""
    check_detection(
        [u'Copyright (c) 2001 Hewlett-Packard, Matthew Wilcox'],
        self.get_test_loc('ics/strace-strace-linux-hppa/syscallent.h'))
def test_ics_strace_strace_linux_ia64_syscallent_h(self):
    """Copyright statements expected in the ia64 syscallent.h."""
    check_detection(
        [u'Copyright (c) 1999, 2001 Hewlett-Packard Co David Mosberger-Tang <davidm@hpl.hp.com>'],
        self.get_test_loc('ics/strace-strace-linux-ia64/syscallent.h'))
def test_ics_strace_strace_linux_mips_ioctlent_sh(self):
    """Copyright statements expected in the mips ioctlent.sh."""
    check_detection(
        [
            u'Copyright (c) 1993, 1994, 1995 Rick Sladkey <jrs@world.std.com>',
            u'Copyright (c) 1995, 1996 Michael Elizabeth Chastain <mec@duracef.shout.net>',
        ],
        self.get_test_loc('ics/strace-strace-linux-mips/ioctlent.sh'))
def test_ics_strace_strace_linux_s390_syscallent_h(self):
    """Copyright statements expected in the s390 syscallent.h."""
    # Note: 'Coporation' reproduces the typo in the scanned file.
    check_detection(
        [u'Copyright (c) 2000 IBM Deutschland Entwicklung GmbH, IBM Coporation Authors'],
        self.get_test_loc('ics/strace-strace-linux-s390/syscallent.h'))
@expectedFailure
def test_ics_strace_strace_linux_s390_syscallent_h_extra_author(self):
    """Known failure: trailing 'Authors' should ideally be stripped."""
    check_detection(
        [u'Copyright (c) 2000 IBM Deutschland Entwicklung GmbH, IBM Coporation'],
        self.get_test_loc('ics/strace-strace-linux-s390/syscallent.h'))
def test_ics_strace_strace_linux_sh_syscallent_h(self):
    """Copyright statements expected in the sh syscallent.h."""
    check_detection(
        [
            u'Copyright (c) 1993 Branko Lankester <branko@hacktic.nl>',
            u'Copyright (c) 1993, 1994, 1995 Rick Sladkey <jrs@world.std.com>',
            u'Copyright (c) 2000 PocketPenguins Inc.',
        ],
        self.get_test_loc('ics/strace-strace-linux-sh/syscallent.h'))
def test_ics_strace_strace_linux_sparc_syscall_h(self):
    """Copyright statements expected in the sparc syscall.h."""
    check_detection(
        [
            u'Copyright (c) 1991, 1992 Paul Kranenburg <pk@cs.few.eur.nl>',
            u'Copyright (c) 1993, 1994, 1995, 1996 Rick Sladkey <jrs@world.std.com>',
        ],
        self.get_test_loc('ics/strace-strace-linux-sparc/syscall.h'))
def test_ics_svox_pico_androidmanifest_xml(self):
    """Copyright statements expected in the SVOX pico AndroidManifest.xml."""
    check_detection(
        [u'Copyright 2009, The Android Open Source Project'],
        self.get_test_loc('ics/svox-pico/AndroidManifest.xml'))
def test_ics_svox_pico_resources_tools_lingwarebuilding_readme_txt(self):
    """Copyright statements expected in the LingwareBuilding Readme.txt."""
    check_detection(
        [u'Copyright (c) 2008-2009 SVOX AG'],
        self.get_test_loc('ics/svox-pico_resources-tools-LingwareBuilding/Readme.txt'))
def test_ics_svox_pico_resources_tools_lingwarebuilding_picolingware_source_files_textana_en_gb_en_gb_lexpos_utf(self):
    """Copyright statements expected in en-GB_lexpos.utf."""
    check_detection(
        [u'Copyright (c) 2008-2009 SVOX AG'],
        self.get_test_loc('ics/svox-pico_resources-tools-LingwareBuilding-PicoLingware_source_files-textana-en-GB/en-GB_lexpos.utf'))
def test_ics_svox_pico_resources_tools_lingwarebuilding_picolingware_tools_windows_tools_buildbin_sh(self):
    """Copyright statements expected in buildbin.sh."""
    check_detection(
        [u'Copyright (c) 2009 SVOX AG.'],
        self.get_test_loc('ics/svox-pico_resources-tools-LingwareBuilding-PicoLingware_tools_windows-tools/buildbin.sh'))
def test_ics_svox_pico_compat_jni_com_android_tts_compat_synthproxy_cpp(self):
    """Copyright statements expected in com_android_tts_compat_SynthProxy.cpp."""
    check_detection(
        [u'Copyright (c) 2009-2010 Google Inc.'],
        self.get_test_loc('ics/svox-pico-compat-jni/com_android_tts_compat_SynthProxy.cpp'))
def test_ics_svox_pico_lib_notice(self):
    """Copyright statements expected in the svox-pico-lib NOTICE."""
    check_detection(
        [u'Copyright (c) 2008-2009 SVOX AG'],
        self.get_test_loc('ics/svox-pico-lib/NOTICE'))
def test_ics_svox_pico_lib_picoacph_c(self):
    """Copyright statements expected in picoacph.c (notice appears twice)."""
    check_detection(
        [
            u'Copyright (c) 2008-2009 SVOX AG',
            u'Copyright (c) 2008-2009 SVOX AG',
        ],
        self.get_test_loc('ics/svox-pico-lib/picoacph.c'))
def test_ics_svox_pico_lib_picofftsg_c(self):
    """Copyright statements expected in picofftsg.c."""
    check_detection(
        [
            u'Copyright (c) 2008-2009 SVOX AG',
            u'Copyright (c) 2008-2009 SVOX AG',
            u'(Copyright Takuya OOURA, 1996-2001)',
        ],
        self.get_test_loc('ics/svox-pico-lib/picofftsg.c'))
def test_ics_svox_pico_lib_picoos_c(self):
    """Copyright statements expected in picoos.c."""
    check_detection(
        [
            u'Copyright (c) 2008-2009 SVOX AG',
            u'Copyright (c) 2008-2009 SVOX AG',
            u'(c) SVOX AG',
        ],
        self.get_test_loc('ics/svox-pico-lib/picoos.c'))
def test_ics_svox_pico_res_xml_tts_engine_xml(self):
    """Copyright statements expected in tts_engine.xml."""
    check_detection(
        [u'Copyright (c) 2011 The Android Open Source Project'],
        self.get_test_loc('ics/svox-pico-res-xml/tts_engine.xml'))
def test_ics_svox_pico_res_xml_voices_list_xml(self):
    """Copyright statements expected in voices_list.xml."""
    check_detection(
        [u'Copyright (c) 2009 The Android Open Source Project'],
        self.get_test_loc('ics/svox-pico-res-xml/voices_list.xml'))
def test_ics_svox_pico_tts_com_svox_picottsengine_cpp(self):
    """Copyright statements expected in com_svox_picottsengine.cpp."""
    check_detection(
        [u'Copyright (c) 2008-2009 SVOX AG'],
        self.get_test_loc('ics/svox-pico-tts/com_svox_picottsengine.cpp'))
def test_ics_tagsoup_src_org_ccil_cowan_tagsoup_autodetector_java(self):
    """Copyright statements expected in TagSoup's AutoDetector.java."""
    check_detection(
        [u'Copyright 2002-2008 by John Cowan.'],
        self.get_test_loc('ics/tagsoup-src-org-ccil-cowan-tagsoup/AutoDetector.java'))
def test_ics_tcpdump_aclocal_m4_trail_name_m4_dnl_comment(self):
    """Copyright statements expected in tcpdump's aclocal.m4."""
    check_detection(
        [
            u'Copyright (c) 1995, 1996, 1997, 1998 The Regents of the University of California.',
            u'Copyright (c) 1999 WIDE Project.',
        ],
        self.get_test_loc('ics/tcpdump/aclocal.m4'))
def test_ics_tcpdump_addrtoname_c(self):
    """Copyright statements expected in tcpdump's addrtoname.c."""
    check_detection(
        [u'Copyright (c) 1990, 1991, 1992, 1993, 1994, 1995, 1996, 1997 The Regents of the University of California.'],
        self.get_test_loc('ics/tcpdump/addrtoname.c'))
def test_ics_tcpdump_addrtoname_h(self):
    """Copyright statements expected in tcpdump's addrtoname.h."""
    check_detection(
        [u'Copyright (c) 1990, 1992, 1993, 1994, 1995, 1996, 1997 The Regents of the University of California.'],
        self.get_test_loc('ics/tcpdump/addrtoname.h'))
def test_ics_tcpdump_af_c(self):
    """Copyright statements expected in tcpdump's af.c."""
    check_detection(
        [u'Copyright (c) 1998-2006 The TCPDUMP project'],
        self.get_test_loc('ics/tcpdump/af.c'))
def test_ics_tcpdump_aodv_h(self):
    """Copyright statements expected in tcpdump's aodv.h."""
    check_detection(
        [u'Copyright (c) 2003 Bruce M. Simpson <bms@spc.org>'],
        self.get_test_loc('ics/tcpdump/aodv.h'))
def test_ics_tcpdump_appletalk_h(self):
    """Copyright statements expected in tcpdump's appletalk.h."""
    check_detection(
        [u'Copyright (c) 1988, 1989, 1990, 1993, 1994, 1995, 1996 The Regents of the University of California.'],
        self.get_test_loc('ics/tcpdump/appletalk.h'))
def test_ics_tcpdump_atm_h(self):
    """Copyright statements expected in tcpdump's atm.h."""
    check_detection(
        [u'Copyright (c) 2002 Guy Harris.'],
        self.get_test_loc('ics/tcpdump/atm.h'))
def test_ics_tcpdump_bootp_h(self):
    """Copyright statements expected in tcpdump's bootp.h."""
    check_detection(
        [u'Copyright 1988 by Carnegie Mellon.'],
        self.get_test_loc('ics/tcpdump/bootp.h'))
def test_ics_tcpdump_chdlc_h(self):
    """Copyright statements expected in tcpdump's chdlc.h."""
    check_detection(
        [u'Copyright (c) 1990, 1991, 1993, 1994, 1995, 1996, 1997 The Regents of the University of California.'],
        self.get_test_loc('ics/tcpdump/chdlc.h'))
def test_ics_tcpdump_cpack_c(self):
    """Copyright statements expected in tcpdump's cpack.c."""
    check_detection(
        [u'Copyright (c) 2003, 2004 David Young.'],
        self.get_test_loc('ics/tcpdump/cpack.c'))
def test_ics_tcpdump_dccp_h(self):
    """Copyright statements expected in tcpdump's dccp.h."""
    check_detection(
        [
            u'Copyright (c) Arnaldo Carvalho de Melo 2004',
            u'Copyright (c) Ian McDonald 2005 <iam4@cs.waikato.ac.nz>',
            u'Copyright (c) Yoshifumi Nishida 2005',
        ],
        self.get_test_loc('ics/tcpdump/dccp.h'))
def test_ics_tcpdump_decnet_h(self):
    """Copyright statements expected in tcpdump's decnet.h."""
    check_detection(
        [u'Copyright (c) 1992, 1994, 1996 The Regents of the University of California.'],
        self.get_test_loc('ics/tcpdump/decnet.h'))
def test_ics_tcpdump_decode_prefix_h(self):
    """Copyright statements expected in tcpdump's decode_prefix.h."""
    check_detection(
        [u'Copyright (c) 1999 WIDE Project.'],
        self.get_test_loc('ics/tcpdump/decode_prefix.h'))
def test_ics_tcpdump_enc_h(self):
    """Copyright statements expected in tcpdump's enc.h."""
    check_detection(
        [
            u'Copyright (c) 1995, 1996, 1997, 1998 by John Ioannidis, Angelos D. Keromytis and Niels Provos.',
            u'Copyright (c) 2001, Angelos D. Keromytis.',
        ],
        self.get_test_loc('ics/tcpdump/enc.h'))
def test_ics_tcpdump_gmt2local_c(self):
    """Copyright statements expected in tcpdump's gmt2local.c."""
    check_detection(
        [u'Copyright (c) 1997 The Regents of the University of California.'],
        self.get_test_loc('ics/tcpdump/gmt2local.c'))
def test_ics_tcpdump_icmp6_h(self):
    """Copyright statements expected in tcpdump's icmp6.h."""
    check_detection(
        [
            u'Copyright (c) 1995, 1996, 1997, and 1998 WIDE Project.',
            u'Copyright (c) 1982, 1986, 1993 The Regents of the University of California.',
        ],
        self.get_test_loc('ics/tcpdump/icmp6.h'))
def test_ics_tcpdump_ieee802_11_h(self):
    """Copyright statements expected in tcpdump's ieee802_11.h."""
    check_detection(
        [u'Copyright (c) 2001 Fortress Technologies Charlie Lenahan'],
        self.get_test_loc('ics/tcpdump/ieee802_11.h'))
@expectedFailure
def test_ics_tcpdump_ieee802_11_h_trail_email(self):
    """Known failure: the trailing email is not captured with the name."""
    check_detection(
        [u'Copyright (c) 2001 Fortress Technologies Charlie Lenahan ( clenahan@fortresstech.com )'],
        self.get_test_loc('ics/tcpdump/ieee802_11.h'))
def test_ics_tcpdump_interface_h(self):
    """Copyright statements expected in tcpdump's interface.h."""
    check_detection(
        [u'Copyright (c) 1988-2002 The Regents of the University of California.'],
        self.get_test_loc('ics/tcpdump/interface.h'))
def test_ics_tcpdump_ipproto_h(self):
    """Copyright statements expected in tcpdump's ipproto.h."""
    check_detection(
        [u'Copyright (c) 1982, 1986, 1990, 1993 The Regents of the University of California.'],
        self.get_test_loc('ics/tcpdump/ipproto.h'))
def test_ics_tcpdump_l2tp_h(self):
    """Copyright statements expected in tcpdump's l2tp.h."""
    check_detection(
        [u'Copyright (c) 1991, 1993, 1994, 1995, 1996, 1997 The Regents of the University of California.'],
        self.get_test_loc('ics/tcpdump/l2tp.h'))
def test_ics_tcpdump_machdep_c(self):
    """Copyright statements expected in tcpdump's machdep.c."""
    check_detection(
        [u'Copyright (c) 1996, 1997 The Regents of the University of California.'],
        self.get_test_loc('ics/tcpdump/machdep.c'))
def test_ics_tcpdump_makefile_in(self):
    """Copyright statements expected in tcpdump's Makefile.in."""
    check_detection(
        [u'Copyright (c) 1988, 1989, 1990, 1991, 1992, 1993, 1994, 1995, 1996, 1997 The Regents of the University of California.'],
        self.get_test_loc('ics/tcpdump/Makefile.in'))
def test_ics_tcpdump_makemib(self):
    """Copyright statements expected in tcpdump's makemib script."""
    check_detection(
        [
            u'Copyright (c) 1990, 1996 John Robert LoVerso.',
            u'copyright (c) 1999 William C. Fenner.',
        ],
        self.get_test_loc('ics/tcpdump/makemib'))
def test_ics_tcpdump_mpls_h(self):
    """Copyright statements expected in tcpdump's mpls.h."""
    check_detection(
        [u'Copyright (c) 2001 WIDE Project.'],
        self.get_test_loc('ics/tcpdump/mpls.h'))
def test_ics_tcpdump_nameser_h(self):
    """Copyright statements expected in tcpdump's nameser.h."""
    check_detection(
        [
            u'Copyright (c) 1983, 1989, 1993 The Regents of the University of California.',
            u'Portions Copyright (c) 1993 by Digital Equipment Corporation.',
        ],
        self.get_test_loc('ics/tcpdump/nameser.h'))
def test_ics_tcpdump_netdissect_h(self):
    """Copyright statements expected in tcpdump's netdissect.h."""
    check_detection(
        [
            u'Copyright (c) 1988-1997 The Regents of the University of California.',
            u'Copyright (c) 1998-2004 Michael Richardson <mcr@tcpdump.org> The TCPDUMP project',
        ],
        self.get_test_loc('ics/tcpdump/netdissect.h'))
def test_ics_tcpdump_nfs_h(self):
    """Copyright statements expected in tcpdump's nfs.h."""
    check_detection(
        [u'Copyright (c) 1989, 1993 The Regents of the University of California.'],
        self.get_test_loc('ics/tcpdump/nfs.h'))
def test_ics_tcpdump_nfsfh_h_trail_name(self):
    """Copyright statements expected in tcpdump's nfsfh.h."""
    check_detection(
        [
            u'Copyright (c) 1993, 1994 Jeffrey C. Mogul, Digital Equipment Corporation, Western Research Laboratory.',
            u'Copyright (c) 2001 Compaq Computer Corporation.',
        ],
        self.get_test_loc('ics/tcpdump/nfsfh.h'))
def test_ics_tcpdump_parsenfsfh_c(self):
    """Copyright statements expected in tcpdump's parsenfsfh.c."""
    check_detection(
        [
            u'Copyright (c) 1993, 1994 Jeffrey C. Mogul, Digital Equipment Corporation, Western Research Laboratory.',
            u'Copyright (c) 2001 Compaq Computer Corporation.',
        ],
        self.get_test_loc('ics/tcpdump/parsenfsfh.c'))
def test_ics_tcpdump_pmap_prot_h(self):
    """Copyright statements expected in tcpdump's pmap_prot.h."""
    check_detection(
        [u'Copyright (c) 1984, Sun Microsystems, Inc.'],
        self.get_test_loc('ics/tcpdump/pmap_prot.h'))
def test_ics_tcpdump_print_ah_c(self):
    """Copyright statements expected in tcpdump's print-ah.c."""
    check_detection(
        [u'Copyright (c) 1988, 1989, 1990, 1991, 1992, 1993, 1994 The Regents of the University of California.'],
        self.get_test_loc('ics/tcpdump/print-ah.c'))
def test_ics_tcpdump_print_ap1394_c(self):
    """Copyright statements expected in tcpdump's print-ap1394.c."""
    check_detection(
        [u'Copyright (c) 1988, 1989, 1990, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 2000 The Regents of the University of California.'],
        self.get_test_loc('ics/tcpdump/print-ap1394.c'))
def test_ics_tcpdump_print_ascii_c(self):
    """Copyright statements expected in tcpdump's print-ascii.c."""
    check_detection(
        [u'Copyright (c) 1997, 1998 The NetBSD Foundation, Inc.'],
        self.get_test_loc('ics/tcpdump/print-ascii.c'))
def test_ics_tcpdump_print_atm_c(self):
    """Copyright statements expected in tcpdump's print-atm.c."""
    check_detection(
        [u'Copyright (c) 1994, 1995, 1996, 1997 The Regents of the University of California.'],
        self.get_test_loc('ics/tcpdump/print-atm.c'))
def test_ics_tcpdump_print_beep_c(self):
    """Copyright statements expected in tcpdump's print-beep.c."""
    check_detection(
        [u'Copyright (c) 2000, Richard Sharpe'],
        self.get_test_loc('ics/tcpdump/print-beep.c'))
def test_ics_tcpdump_print_bootp_c(self):
    """Copyright statements expected in tcpdump's print-bootp.c."""
    check_detection(
        [u'Copyright (c) 1990, 1991, 1993, 1994, 1995, 1996, 1997 The Regents of the University of California.'],
        self.get_test_loc('ics/tcpdump/print-bootp.c'))
def test_ics_tcpdump_print_cdp_c(self):
    """Copyright statements expected in tcpdump's print-cdp.c."""
    check_detection(
        [u'Copyright (c) 1992, 1993, 1994, 1995, 1996, 1997 The Regents of the University of California.'],
        self.get_test_loc('ics/tcpdump/print-cdp.c'))
def test_ics_tcpdump_print_cnfp_c(self):
    """Copyright statements expected in tcpdump's print-cnfp.c."""
    check_detection(
        [u'Copyright (c) 1998 Michael Shalayeff'],
        self.get_test_loc('ics/tcpdump/print-cnfp.c'))
def test_ics_tcpdump_print_dccp_c(self):
    """Copyright statements expected in tcpdump's print-dccp.c."""
    check_detection(
        [
            u'Copyright (c) Arnaldo Carvalho de Melo 2004',
            u'Copyright (c) Ian McDonald 2005',
            u'Copyright (c) Yoshifumi Nishida 2005',
        ],
        self.get_test_loc('ics/tcpdump/print-dccp.c'))
def test_ics_tcpdump_print_dhcp6_c(self):
    """Copyright statements expected in tcpdump's print-dhcp6.c."""
    check_detection(
        [u'Copyright (c) 1998 and 1999 WIDE Project.'],
        self.get_test_loc('ics/tcpdump/print-dhcp6.c'))
def test_ics_tcpdump_print_dvmrp_c(self):
    """Copyright statements expected in tcpdump's print-dvmrp.c."""
    check_detection(
        [u'Copyright (c) 1995, 1996 The Regents of the University of California.'],
        self.get_test_loc('ics/tcpdump/print-dvmrp.c'))
def test_ics_tcpdump_print_eap_c(self):
    """Copyright statements expected in tcpdump's print-eap.c."""
    check_detection(
        [u'Copyright (c) 2004 - Michael Richardson <mcr@xelerance.com>'],
        self.get_test_loc('ics/tcpdump/print-eap.c'))
def test_ics_tcpdump_print_egp_c(self):
    """Copyright statements expected in tcpdump's print-egp.c."""
    check_detection(
        [u'Copyright (c) 1991, 1992, 1993, 1994, 1995, 1996 The Regents of the University of California.'],
        self.get_test_loc('ics/tcpdump/print-egp.c'))
def test_ics_tcpdump_print_eigrp_c(self):
    """Copyright statements expected in tcpdump's print-eigrp.c."""
    check_detection(
        [u'Copyright (c) 1998-2004 Hannes Gredler <hannes@tcpdump.org> The TCPDUMP project'],
        self.get_test_loc('ics/tcpdump/print-eigrp.c'))
def test_ics_tcpdump_print_enc_c(self):
    """Copyright statements expected in tcpdump's print-enc.c."""
    check_detection(
        [u'Copyright (c) 1990, 1991, 1993, 1994, 1995, 1996 The Regents of the University of California.'],
        self.get_test_loc('ics/tcpdump/print-enc.c'))
def test_ics_tcpdump_print_fddi_c(self):
    """Copyright statements expected in tcpdump's print-fddi.c."""
    check_detection(
        [u'Copyright (c) 1991, 1992, 1993, 1994, 1995, 1996, 1997 The Regents of the University of California.'],
        self.get_test_loc('ics/tcpdump/print-fddi.c'))
def test_ics_tcpdump_print_frag6_c(self):
    """Copyright statements expected in tcpdump's print-frag6.c."""
    check_detection(
        [u'Copyright (c) 1988, 1989, 1990, 1991, 1993, 1994 The Regents of the University of California.'],
        self.get_test_loc('ics/tcpdump/print-frag6.c'))
def test_ics_tcpdump_print_gre_c(self):
    """Copyright statements expected in tcpdump's print-gre.c."""
    check_detection(
        [u'Copyright (c) 2002 Jason L. Wright (jason@thought.net)'],
        self.get_test_loc('ics/tcpdump/print-gre.c'))
def test_ics_tcpdump_print_hsrp_c(self):
    """Copyright statements expected in tcpdump's print-hsrp.c."""
    check_detection(
        [u'Copyright (c) 2001 Julian Cowley'],
        self.get_test_loc('ics/tcpdump/print-hsrp.c'))
def test_ics_tcpdump_print_ip6opts_c(self):
    """Copyright statements expected in tcpdump's print-ip6opts.c."""
    check_detection(
        [u'Copyright (c) 1998 WIDE Project.'],
        self.get_test_loc('ics/tcpdump/print-ip6opts.c'))
def test_ics_tcpdump_print_krb_c(self):
    """Copyright statements expected in tcpdump's print-krb.c."""
    check_detection(
        [u'Copyright (c) 1995, 1996, 1997 The Regents of the University of California.'],
        self.get_test_loc('ics/tcpdump/print-krb.c'))
def test_ics_tcpdump_print_lwres_c(self):
    """Copyright statements expected in tcpdump's print-lwres.c."""
    check_detection(
        [u'Copyright (c) 2001 WIDE Project.'],
        self.get_test_loc('ics/tcpdump/print-lwres.c'))
def test_ics_tcpdump_print_mobile_c(self):
    """Copyright statements expected in tcpdump's print-mobile.c."""
    check_detection(
        [u'(c) 1998 The NetBSD Foundation, Inc.'],
        self.get_test_loc('ics/tcpdump/print-mobile.c'))
def test_ics_tcpdump_print_mobility_c(self):
    """Copyright statements expected in tcpdump's print-mobility.c."""
    check_detection(
        [u'Copyright (c) 2002 WIDE Project.'],
        self.get_test_loc('ics/tcpdump/print-mobility.c'))
def test_ics_tcpdump_print_msdp_c(self):
    """Copyright statements expected in tcpdump's print-msdp.c."""
    check_detection(
        [u'Copyright (c) 2001 William C. Fenner.'],
        self.get_test_loc('ics/tcpdump/print-msdp.c'))
def test_ics_tcpdump_print_olsr_c(self):
    """Copyright statements expected in tcpdump's print-olsr.c."""
    check_detection(
        [u'Copyright (c) 1998-2007 The TCPDUMP project'],
        self.get_test_loc('ics/tcpdump/print-olsr.c'))
def test_ics_tcpdump_print_radius_c(self):
    """Copyright statements expected in tcpdump's print-radius.c."""
    check_detection(
        [u'Copyright (c) 2000 Alfredo Andres Omella.'],
        self.get_test_loc('ics/tcpdump/print-radius.c'))
def test_ics_tcpdump_print_rip_c(self):
    """Copyright statements expected in tcpdump's print-rip.c."""
    check_detection(
        [u'Copyright (c) 1989, 1990, 1991, 1993, 1994, 1996 The Regents of the University of California.'],
        self.get_test_loc('ics/tcpdump/print-rip.c'))
def test_ics_tcpdump_print_ripng_c(self):
    """Copyright statements expected in tcpdump's print-ripng.c."""
    check_detection(
        [u'Copyright (c) 1989, 1990, 1991, 1993, 1994 The Regents of the University of California.'],
        self.get_test_loc('ics/tcpdump/print-ripng.c'))
def test_ics_tcpdump_print_rx_c(self):
    """Copyright statements expected in tcpdump's print-rx.c."""
    check_detection(
        [u'Copyright (c) 2000 United States Government'],
        self.get_test_loc('ics/tcpdump/print-rx.c'))
@expectedFailure
def test_ics_tcpdump_print_rx_c_trail_name(self):
    """Known failure: the trailing holder name is not fully captured."""
    check_detection(
        [u'Copyright: (c) 2000 United States Government as represented by the Secretary of the Navy.'],
        self.get_test_loc('ics/tcpdump/print-rx.c'))
def test_ics_tcpdump_print_sctp_c_trail_name(self):
    """Copyright statements expected in tcpdump's print-sctp.c."""
    check_detection(
        [
            u'Copyright (c) 2001 NETLAB, Temple University',
            u'Copyright (c) 2001 Protocol Engineering Lab, University of Delaware',
        ],
        self.get_test_loc('ics/tcpdump/print-sctp.c'))
def test_ics_tcpdump_print_sl_c(self):
    """Copyright statements expected in tcpdump's print-sl.c."""
    check_detection(
        [u'Copyright (c) 1989, 1990, 1991, 1993, 1994, 1995, 1996, 1997 The Regents of the University of California.'],
        self.get_test_loc('ics/tcpdump/print-sl.c'))
def test_ics_tcpdump_print_slow_c(self):
    """Copyright statements expected in tcpdump's print-slow.c."""
    check_detection(
        [u'Copyright (c) 1998-2005 The TCPDUMP project'],
        self.get_test_loc('ics/tcpdump/print-slow.c'))
def test_ics_tcpdump_print_smb_c(self):
    """Copyright statements expected in tcpdump's print-smb.c."""
    check_detection(
        [u'Copyright (c) Andrew Tridgell 1995-1999'],
        self.get_test_loc('ics/tcpdump/print-smb.c'))
def test_ics_tcpdump_print_snmp_c(self):
    """Copyright statements expected in tcpdump's print-snmp.c."""
    check_detection(
        [
            u'Copyright (c) 1990, 1991, 1993, 1994, 1995, 1996, 1997 John Robert LoVerso.',
            u'J. Schoenwaelder, Copyright (c) 1999.',
            u'Copyright (c) 1990, 1991, 1993, 1994, 1995, 1996, 1997 this software was produced',
        ],
        self.get_test_loc('ics/tcpdump/print-snmp.c'))
@expectedFailure
def test_ics_tcpdump_print_snmp_c_trail_name_lead_name_trail_name_complex(self):
    """Known failure: the leading lab name is not attached to the notice."""
    check_detection(
        [
            u'Copyright (c) 1990, 1991, 1993, 1994, 1995, 1996, 1997 John Robert LoVerso.',
            u'J. Schoenwaelder, Copyright (c) 1999.',
            u'Los Alamos National Laboratory Copyright (c) 1990, 1991, 1993, 1994, 1995, 1996, 1997',
        ],
        self.get_test_loc('ics/tcpdump/print-snmp.c'))
def test_ics_tcpdump_print_stp_c(self):
    """Copyright statements expected in tcpdump's print-stp.c."""
    check_detection(
        [u'Copyright (c) 2000 Lennert Buytenhek'],
        self.get_test_loc('ics/tcpdump/print-stp.c'))
def test_ics_tcpdump_print_tcp_c(self):
    """Copyright statements expected in tcpdump's print-tcp.c."""
    check_detection(
        [
            u'Copyright (c) 1988, 1989, 1990, 1991, 1992, 1993, 1994, 1995, 1996, 1997 The Regents of the University of California.',
            u'Copyright (c) 1999-2004 The tcpdump.org project',
        ],
        self.get_test_loc('ics/tcpdump/print-tcp.c'))
def test_ics_tcpdump_print_telnet_c(self):
    """Copyright statements expected in tcpdump's print-telnet.c."""
    check_detection(
        [
            u'Copyright (c) 1997, 1998 The NetBSD Foundation, Inc.',
            u'Copyright (c) 1994, Simon J. Gerraty.',
        ],
        self.get_test_loc('ics/tcpdump/print-telnet.c'))
def test_ics_tcpdump_print_timed_c(self):
    """Copyright statements expected in tcpdump's print-timed.c."""
    check_detection(
        [u'Copyright (c) 2000 Ben Smithurst <ben@scientia.demon.co.uk>'],
        self.get_test_loc('ics/tcpdump/print-timed.c'))
def test_ics_tcpdump_print_token_c(self):
    """Copyright statements expected in tcpdump's print-token.c."""
    check_detection(
        [u'Copyright (c) 1988, 1989, 1990, 1991, 1992, 1993, 1994, 1995, 1996 The Regents of the University of California.'],
        self.get_test_loc('ics/tcpdump/print-token.c'))
def test_ics_tcpdump_print_vrrp_c(self):
    """Copyright statements expected in tcpdump's print-vrrp.c."""
    check_detection(
        [u'Copyright (c) 2000 William C. Fenner.'],
        self.get_test_loc('ics/tcpdump/print-vrrp.c'))
def test_ics_tcpdump_print_wb_c(self):
    """Copyright statements expected in tcpdump's print-wb.c."""
    check_detection(
        [u'Copyright (c) 1993, 1994, 1995, 1996 The Regents of the University of California.'],
        self.get_test_loc('ics/tcpdump/print-wb.c'))
def test_ics_tcpdump_print_zephyr_c(self):
    """Copyright statements expected in tcpdump's print-zephyr.c."""
    check_detection(
        [u'Copyright (c) 2001 Nickolai Zeldovich <kolya@MIT.EDU>'],
        self.get_test_loc('ics/tcpdump/print-zephyr.c'))
def test_ics_tcpdump_route6d_h(self):
    """Copyright statements expected in tcpdump's route6d.h."""
    check_detection(
        [u'Copyright (c) 1995, 1996, 1997 and 1998 WIDE Project.'],
        self.get_test_loc('ics/tcpdump/route6d.h'))
def test_ics_tcpdump_sctpconstants_h_trail_name(self):
    """Copyright statements expected in tcpdump's sctpConstants.h."""
    check_detection(
        [u'Implementation Copyright (c) 1999 Cisco And Motorola'],
        self.get_test_loc('ics/tcpdump/sctpConstants.h'))
def test_ics_tcpdump_slcompress_h(self):
    """Copyright statements expected in tcpdump's slcompress.h."""
    check_detection(
        [u'Copyright (c) 1989, 1990, 1992, 1993 Regents of the University of California.'],
        self.get_test_loc('ics/tcpdump/slcompress.h'))
def test_ics_tcpdump_slip_h(self):
    """Copyright statements expected in tcpdump's slip.h."""
    check_detection(
        [u'Copyright (c) 1990 Regents of the University of California.'],
        self.get_test_loc('ics/tcpdump/slip.h'))
def test_ics_tcpdump_strcasecmp_c(self):
    """Copyright statements expected in tcpdump's strcasecmp.c."""
    check_detection(
        [u'Copyright (c) 1987 Regents of the University of California.'],
        self.get_test_loc('ics/tcpdump/strcasecmp.c'))
def test_ics_tcpdump_tcpdump_1_trail_name(self):
    """Copyright statements expected in the tcpdump.1 man page."""
    check_detection(
        [u'Copyright (c) 1987, 1988, 1989, 1990, 1991, 1992, 1994, 1995, 1996, 1997 The Regents of the University of California.'],
        self.get_test_loc('ics/tcpdump/tcpdump.1'))
def test_ics_tcpdump_tcpdump_c(self):
test_file = self.get_test_loc('ics/tcpdump/tcpdump.c')
expected = [
u'Copyright (c) 1988, 1989, 1990, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 2000 The Regents of the University of California.',
u'Copyright (c) 2001 Seth Webster <swebster@sst.ll.mit.edu>',
u'Copyright (c) 1988, 1989, 1990, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 2000 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_tcpdump_telnet_h(self):
test_file = self.get_test_loc('ics/tcpdump/telnet.h')
expected = [
u'Copyright (c) 1983, 1993 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_tcpdump_token_h(self):
test_file = self.get_test_loc('ics/tcpdump/token.h')
expected = [
u'Copyright (c) 1998, Larry Lile',
]
check_detection(expected, test_file)
def test_ics_tcpdump_vfprintf_c(self):
test_file = self.get_test_loc('ics/tcpdump/vfprintf.c')
expected = [
u'Copyright (c) 1995 The Regents of the University of California.',
]
check_detection(expected, test_file)
def test_ics_tcpdump_missing_inet_aton_c_trail_place(self):
test_file = self.get_test_loc('ics/tcpdump-missing/inet_aton.c')
expected = [
u'Copyright (c) 1995, 1996, 1997 Kungliga Tekniska Hogskolan (Royal Institute of Technology, Stockholm, Sweden).',
]
check_detection(expected, test_file)
def test_ics_tcpdump_missing_inet_ntop_c_trail_place(self):
test_file = self.get_test_loc('ics/tcpdump-missing/inet_ntop.c')
expected = [
u'Copyright (c) 1999 Kungliga Tekniska Hogskolan (Royal Institute of Technology, Stockholm, Sweden).',
]
check_detection(expected, test_file)
# ICS tinyxml and tremolo samples.
def test_ics_tinyxml_android_mk(self):
    check_detection(
        [u'Copyright 2005 The Android Open Source Project'],
        self.get_test_loc('ics/tinyxml/Android.mk'))

def test_ics_tinyxml_tinyxml_cpp(self):
    check_detection(
        [u'copyright (c) 2000-2002 Lee Thomason (www.grinninglizard.com)'],
        self.get_test_loc('ics/tinyxml/tinyxml.cpp'))

def test_ics_tinyxml_tinyxml_h(self):
    check_detection(
        [u'copyright (c) 2000-2002 Lee Thomason (www.grinninglizard.com)'],
        self.get_test_loc('ics/tinyxml/tinyxml.h'))

def test_ics_tremolo_notice(self):
    check_detection(
        [
            u'Copyright (c) 2002-2009, Xiph.org Foundation',
            u'Copyright (c) 2010, Robin Watts for Pinknoise Productions Ltd',
        ],
        self.get_test_loc('ics/tremolo/NOTICE'))

def test_ics_tremolo_tremolo_asm_arm_h(self):
    check_detection(
        [
            u'Copyright (c) 2002-2009, Xiph.org Foundation',
            u'Copyright (c) 2010, Robin Watts for Pinknoise Productions Ltd',
        ],
        self.get_test_loc('ics/tremolo-Tremolo/asm_arm.h'))
# ICS webp and webrtc samples.
def test_ics_webp_examples_dwebp_c(self):
    check_detection(
        [u'Copyright 2010 Google Inc.'],
        self.get_test_loc('ics/webp-examples/dwebp.c'))

def test_ics_webp_include_webp_encode_h(self):
    check_detection(
        [u'Copyright 2011 Google Inc.'],
        self.get_test_loc('ics/webp-include-webp/encode.h'))

def test_ics_webp_src_dec_android_mk(self):
    check_detection(
        [u'Copyright 2010 The Android Open Source Project'],
        self.get_test_loc('ics/webp-src-dec/Android.mk'))

def test_ics_webp_src_enc_dsp_c(self):
    check_detection(
        [u'Copyright 2011 Google Inc.'],
        self.get_test_loc('ics/webp-src-enc/dsp.c'))

def test_ics_webrtc_android_webrtc_mk(self):
    check_detection(
        [u'Copyright (c) 2011 The WebRTC project'],
        self.get_test_loc('ics/webrtc/android-webrtc.mk'))

def test_ics_webrtc_notice(self):
    check_detection(
        [
            u'Copyright (c) 2011 The WebRTC project',
            u'Copyright (c) 2010 The Android Open Source Project',
            u'Copyright Takuya OOURA, 1996-2001',
            u'Copyright Takuya OOURA, 1996-2001',
            u'Copyright Steven J. Ross 2001 - 2009.',
        ],
        self.get_test_loc('ics/webrtc/NOTICE'))

def test_ics_webrtc_src_common_types_h(self):
    check_detection(
        [u'Copyright (c) 2011 The WebRTC project'],
        self.get_test_loc('ics/webrtc-src/common_types.h'))

def test_ics_webrtc_src_modules_audio_processing_aec_main_source_aec_rdft_c(self):
    check_detection(
        [u'Copyright Takuya OOURA, 1996-2001'],
        self.get_test_loc('ics/webrtc-src-modules-audio_processing-aec-main-source/aec_rdft.c'))

def test_ics_webrtc_src_system_wrappers_source_spreadsortlib_spreadsort_hpp(self):
    check_detection(
        [u'Copyright Steven J. Ross 2001 - 2009.'],
        self.get_test_loc('ics/webrtc-src-system_wrappers-source-spreadsortlib/spreadsort.hpp'))
# ICS wpa_supplicant samples (part 1).
def test_ics_wpa_supplicant_aes_c(self):
    check_detection(
        [u'Copyright (c) 2003-2005, Jouni Malinen <j@w1.fi>'],
        self.get_test_loc('ics/wpa_supplicant/aes.c'))

def test_ics_wpa_supplicant_aes_h(self):
    check_detection(
        [u'Copyright (c) 2003-2006, Jouni Malinen <j@w1.fi>'],
        self.get_test_loc('ics/wpa_supplicant/aes.h'))

def test_ics_wpa_supplicant_aes_wrap_c(self):
    check_detection(
        [u'Copyright (c) 2003-2007, Jouni Malinen <j@w1.fi>'],
        self.get_test_loc('ics/wpa_supplicant/aes_wrap.c'))

def test_ics_wpa_supplicant_asn1_c(self):
    check_detection(
        [u'Copyright (c) 2006, Jouni Malinen <j@w1.fi>'],
        self.get_test_loc('ics/wpa_supplicant/asn1.c'))

def test_ics_wpa_supplicant_base64_c(self):
    check_detection(
        [u'Copyright (c) 2005, Jouni Malinen <j@w1.fi>'],
        self.get_test_loc('ics/wpa_supplicant/base64.c'))

def test_ics_wpa_supplicant_build_config_h(self):
    check_detection(
        [u'Copyright (c) 2005-2006, Jouni Malinen <j@w1.fi>'],
        self.get_test_loc('ics/wpa_supplicant/build_config.h'))

def test_ics_wpa_supplicant_common_c(self):
    check_detection(
        [u'Copyright (c) 2002-2006, Jouni Malinen <j@w1.fi>'],
        self.get_test_loc('ics/wpa_supplicant/common.c'))

def test_ics_wpa_supplicant_config_h(self):
    check_detection(
        [u'Copyright (c) 2003-2005, Jouni Malinen <j@w1.fi>'],
        self.get_test_loc('ics/wpa_supplicant/config.h'))

def test_ics_wpa_supplicant_crypto_c(self):
    check_detection(
        [u'Copyright (c) 2004-2005, Jouni Malinen <j@w1.fi>'],
        self.get_test_loc('ics/wpa_supplicant/crypto.c'))

def test_ics_wpa_supplicant_ctrl_iface_c(self):
    check_detection(
        [u'Copyright (c) 2004-2006, Jouni Malinen <j@w1.fi>'],
        self.get_test_loc('ics/wpa_supplicant/ctrl_iface.c'))

def test_ics_wpa_supplicant_ctrl_iface_dbus_c(self):
    check_detection(
        [u'Copyright (c) 2006, Dan Williams <dcbw@redhat.com> and Red Hat, Inc.'],
        self.get_test_loc('ics/wpa_supplicant/ctrl_iface_dbus.c'))

def test_ics_wpa_supplicant_driver_atmel_c(self):
    check_detection(
        [
            u'Copyright (c) 2000-2005, ATMEL Corporation',
            u'Copyright (c) 2004-2007, Jouni Malinen <j@w1.fi>',
            u'Copyright 2000-2001 ATMEL Corporation.',
        ],
        self.get_test_loc('ics/wpa_supplicant/driver_atmel.c'))

def test_ics_wpa_supplicant_driver_broadcom_c(self):
    check_detection(
        [
            u'Copyright (c) 2004, Nikki Chumkov <nikki@gattaca.ru>',
            u'Copyright (c) 2004, Jouni Malinen <j@w1.fi>',
        ],
        self.get_test_loc('ics/wpa_supplicant/driver_broadcom.c'))

def test_ics_wpa_supplicant_driver_bsd_c(self):
    check_detection(
        [u'Copyright (c) 2004, Sam Leffler <sam@errno.com>'],
        self.get_test_loc('ics/wpa_supplicant/driver_bsd.c'))

def test_ics_wpa_supplicant_driver_ipw_c(self):
    check_detection(
        [
            u'Copyright (c) 2005 Zhu Yi <yi.zhu@intel.com>',
            u'Copyright (c) 2004 Lubomir Gelo <lgelo@cnc.sk>',
            u'Copyright (c) 2003-2004, Jouni Malinen <j@w1.fi>',
        ],
        self.get_test_loc('ics/wpa_supplicant/driver_ipw.c'))

def test_ics_wpa_supplicant_driver_madwifi_c(self):
    check_detection(
        [
            u'Copyright (c) 2004, Sam Leffler <sam@errno.com>',
            u'Copyright (c) 2004-2005, Jouni Malinen <j@w1.fi>',
        ],
        self.get_test_loc('ics/wpa_supplicant/driver_madwifi.c'))

def test_ics_wpa_supplicant_driver_ndiswrapper_c(self):
    check_detection(
        [
            u'Copyright (c) 2004-2006, Giridhar Pemmasani <giri@lmc.cs.sunysb.edu>',
            u'Copyright (c) 2004-2006, Jouni Malinen <j@w1.fi>',
        ],
        self.get_test_loc('ics/wpa_supplicant/driver_ndiswrapper.c'))
# ICS wpa_supplicant samples (part 2).
def test_ics_wpa_supplicant_driver_prism54_c(self):
    check_detection(
        [
            u'Copyright (c) 2003-2005, Jouni Malinen <j@w1.fi>',
            u'Copyright (c) 2004, Luis R. Rodriguez <mcgrof@ruslug.rutgers.edu>',
        ],
        self.get_test_loc('ics/wpa_supplicant/driver_prism54.c'))

def test_ics_wpa_supplicant_driver_wired_c(self):
    check_detection(
        [u'Copyright (c) 2005-2007, Jouni Malinen <j@w1.fi>'],
        self.get_test_loc('ics/wpa_supplicant/driver_wired.c'))

def test_ics_wpa_supplicant_eap_gpsk_c(self):
    check_detection(
        [u'Copyright (c) 2006-2007, Jouni Malinen <j@w1.fi>'],
        self.get_test_loc('ics/wpa_supplicant/eap_gpsk.c'))

def test_ics_wpa_supplicant_eap_psk_c(self):
    check_detection(
        [u'Copyright (c) 2004-2007, Jouni Malinen <j@w1.fi>'],
        self.get_test_loc('ics/wpa_supplicant/eap_psk.c'))

def test_ics_wpa_supplicant_eap_sim_common_c(self):
    check_detection(
        [u'Copyright (c) 2004-2008, Jouni Malinen <j@w1.fi>'],
        self.get_test_loc('ics/wpa_supplicant/eap_sim_common.c'))

def test_ics_wpa_supplicant_eapol_test_c(self):
    check_detection(
        [u'Copyright (c) 2003-2006, Jouni Malinen <j@w1.fi>'],
        self.get_test_loc('ics/wpa_supplicant/eapol_test.c'))

def test_ics_wpa_supplicant_eloop_c(self):
    check_detection(
        [u'Copyright (c) 2002-2005, Jouni Malinen <j@w1.fi>'],
        self.get_test_loc('ics/wpa_supplicant/eloop.c'))

def test_ics_wpa_supplicant_l2_packet_freebsd_c(self):
    check_detection(
        [
            u'Copyright (c) 2003-2005, Jouni Malinen <j@w1.fi>',
            u'Copyright (c) 2005, Sam Leffler <sam@errno.com>',
        ],
        self.get_test_loc('ics/wpa_supplicant/l2_packet_freebsd.c'))

def test_ics_wpa_supplicant_mlme_c(self):
    check_detection(
        [
            u'Copyright (c) 2003-2006, Jouni Malinen <j@w1.fi>',
            u'Copyright (c) 2004, Instant802 Networks, Inc.',
            u'Copyright (c) 2005-2006, Devicescape Software, Inc.',
        ],
        self.get_test_loc('ics/wpa_supplicant/mlme.c'))

def test_ics_wpa_supplicant_notice(self):
    check_detection(
        [u'Copyright (c) 2003-2008, Jouni Malinen <j@w1.fi>'],
        self.get_test_loc('ics/wpa_supplicant/NOTICE'))

def test_ics_wpa_supplicant_radius_c(self):
    check_detection(
        [u'Copyright (c) 2002-2008, Jouni Malinen <j@w1.fi>'],
        self.get_test_loc('ics/wpa_supplicant/radius.c'))

def test_ics_wpa_supplicant_tls_none_c(self):
    check_detection(
        [u'Copyright (c) 2004, Jouni Malinen <j@w1.fi>'],
        self.get_test_loc('ics/wpa_supplicant/tls_none.c'))

def test_ics_wpa_supplicant_wireless_copy_h(self):
    check_detection(
        [u'Copyright (c) 1997-2007 Jean Tourrilhes'],
        self.get_test_loc('ics/wpa_supplicant/wireless_copy.h'))

def test_ics_wpa_supplicant_wpa_cli_c(self):
    check_detection(
        [
            u'Copyright (c) 2004-2008, Jouni Malinen <j@w1.fi>',
            u'Copyright (c) 2004-2008, Jouni Malinen <j@w1.fi>',
        ],
        self.get_test_loc('ics/wpa_supplicant/wpa_cli.c'))

def test_ics_wpa_supplicant_wpa_supplicant_c(self):
    check_detection(
        [
            u'Copyright (c) 2003-2008, Jouni Malinen <j@w1.fi>',
            u'Copyright (c) 2003-2008, Jouni Malinen <j@w1.fi>',
        ],
        self.get_test_loc('ics/wpa_supplicant/wpa_supplicant.c'))

def test_ics_wpa_supplicant_wpa_gui_wpagui_ui_h(self):
    check_detection(
        [u'Copyright (c) 2003-2008, Jouni Malinen <j@w1.fi>'],
        self.get_test_loc('ics/wpa_supplicant-wpa_gui/wpagui.ui.h'))

def test_ics_wpa_supplicant_wpa_gui_qt4_wpagui_cpp(self):
    check_detection(
        [
            u'Copyright (c) 2005-2008, Jouni Malinen <j@w1.fi>',
            u'Copyright (c) 2003-2008, Jouni Malinen <j@w1.fi>',
        ],
        self.get_test_loc('ics/wpa_supplicant-wpa_gui-qt4/wpagui.cpp'))
# ICS xmlwriter and yaffs2 samples.
def test_ics_xmlwriter_src_org_jheer_xmlwriter_java(self):
    check_detection(
        [u'Copyright (c) 2004-2006 Regents of the University of California.'],
        self.get_test_loc('ics/xmlwriter-src-org-jheer/XMLWriter.java'))

def test_ics_yaffs2_yaffs2_devextras_h(self):
    check_detection(
        [u'Copyright (c) 2002 Aleph One Ltd. for Toby Churchill Ltd and Brightstar Engineering'],
        self.get_test_loc('ics/yaffs2-yaffs2/devextras.h'))

def test_ics_yaffs2_yaffs2_patch_ker_sh(self):
    check_detection(
        [u'Copyright (c) 2002 Aleph One Ltd.'],
        self.get_test_loc('ics/yaffs2-yaffs2/patch-ker.sh'))

def test_ics_yaffs2_yaffs2_yaffs_qsort_h(self):
    check_detection(
        [u'Copyright (c) 2000-2002 Silicon Graphics, Inc.'],
        self.get_test_loc('ics/yaffs2-yaffs2/yaffs_qsort.h'))

def test_ics_yaffs2_yaffs2_direct_makefile(self):
    check_detection(
        [u'Copyright (c) 2003 Aleph One Ltd.'],
        self.get_test_loc('ics/yaffs2-yaffs2-direct/Makefile'))
# ICS zlib core samples (part 1).
def test_ics_zlib_adler32_c(self):
    check_detection(
        [u'Copyright (c) 1995-2007 Mark Adler'],
        self.get_test_loc('ics/zlib/adler32.c'))

def test_ics_zlib_crc32_c(self):
    check_detection(
        [u'Copyright (c) 1995-2006, 2010 Mark Adler'],
        self.get_test_loc('ics/zlib/crc32.c'))

def test_ics_zlib_deflate_c(self):
    check_detection(
        [
            u'Copyright (c) 1995-2010 Jean-loup Gailly and Mark Adler',
            u'Copyright 1995-2010 Jean-loup Gailly and Mark Adler',
        ],
        self.get_test_loc('ics/zlib/deflate.c'))

def test_ics_zlib_deflate_h(self):
    check_detection(
        [u'Copyright (c) 1995-2010 Jean-loup Gailly'],
        self.get_test_loc('ics/zlib/deflate.h'))

def test_ics_zlib_example_c(self):
    check_detection(
        [u'Copyright (c) 1995-2006 Jean-loup Gailly.'],
        self.get_test_loc('ics/zlib/example.c'))

def test_ics_zlib_gzclose_c(self):
    check_detection(
        [u'Copyright (c) 2004, 2010 Mark Adler'],
        self.get_test_loc('ics/zlib/gzclose.c'))

def test_ics_zlib_gzguts_h(self):
    check_detection(
        [u'Copyright (c) 2004, 2005, 2010 Mark Adler'],
        self.get_test_loc('ics/zlib/gzguts.h'))

def test_ics_zlib_infback_c(self):
    check_detection(
        [u'Copyright (c) 1995-2009 Mark Adler'],
        self.get_test_loc('ics/zlib/infback.c'))

def test_ics_zlib_inffast_c(self):
    check_detection(
        [u'Copyright (c) 1995-2008, 2010 Mark Adler'],
        self.get_test_loc('ics/zlib/inffast.c'))

def test_ics_zlib_inffast_h(self):
    check_detection(
        [u'Copyright (c) 1995-2003, 2010 Mark Adler'],
        self.get_test_loc('ics/zlib/inffast.h'))

def test_ics_zlib_inflate_c(self):
    check_detection(
        [u'Copyright (c) 1995-2010 Mark Adler'],
        self.get_test_loc('ics/zlib/inflate.c'))

def test_ics_zlib_inftrees_c(self):
    check_detection(
        [
            u'Copyright (c) 1995-2010 Mark Adler',
            u'Copyright 1995-2010 Mark Adler',
        ],
        self.get_test_loc('ics/zlib/inftrees.c'))
# ICS zlib core samples (part 2).
def test_ics_zlib_inftrees_h(self):
    check_detection(
        [u'Copyright (c) 1995-2005, 2010 Mark Adler'],
        self.get_test_loc('ics/zlib/inftrees.h'))

def test_ics_zlib_makefile_in(self):
    check_detection(
        [u'Copyright (c) 1995-2010 Jean-loup Gailly.'],
        self.get_test_loc('ics/zlib/Makefile.in'))

def test_ics_zlib_minigzip_c(self):
    check_detection(
        [u'Copyright (c) 1995-2006, 2010 Jean-loup Gailly.'],
        self.get_test_loc('ics/zlib/minigzip.c'))

def test_ics_zlib_notice(self):
    check_detection(
        [u'(c) 1995-2004 Jean-loup Gailly and Mark Adler'],
        self.get_test_loc('ics/zlib/NOTICE'))

def test_ics_zlib_readme(self):
    check_detection(
        [u'(c) 1995-2010 Jean-loup Gailly and Mark Adler'],
        self.get_test_loc('ics/zlib/README'))

def test_ics_zlib_trees_c(self):
    check_detection(
        [u'Copyright (c) 1995-2010 Jean-loup Gailly'],
        self.get_test_loc('ics/zlib/trees.c'))

def test_ics_zlib_uncompr_c(self):
    check_detection(
        [u'Copyright (c) 1995-2003, 2010 Jean-loup Gailly.'],
        self.get_test_loc('ics/zlib/uncompr.c'))

def test_ics_zlib_zconf_h(self):
    check_detection(
        [u'Copyright (c) 1995-2010 Jean-loup Gailly.'],
        self.get_test_loc('ics/zlib/zconf.h'))

def test_ics_zlib_zlib_h(self):
    check_detection(
        [u'Copyright (c) 1995-2010 Jean-loup Gailly and Mark Adler'],
        self.get_test_loc('ics/zlib/zlib.h'))

def test_ics_zlib_zutil_c(self):
    check_detection(
        [u'Copyright (c) 1995-2005, 2010 Jean-loup Gailly.'],
        self.get_test_loc('ics/zlib/zutil.c'))
# ICS zlib amiga and contrib samples (part 1).
def test_ics_zlib_amiga_makefile_pup(self):
    check_detection(
        [u'Copyright (c) 1998 by Andreas R. Kleinert'],
        self.get_test_loc('ics/zlib-amiga/Makefile.pup'))

def test_ics_zlib_contrib_ada_buffer_demo_adb(self):
    check_detection(
        [u'Copyright (c) 2002-2004 Dmitriy Anisimkov'],
        self.get_test_loc('ics/zlib-contrib-ada/buffer_demo.adb'))

def test_ics_zlib_contrib_ada_mtest_adb(self):
    check_detection(
        [u'Copyright (c) 2002-2003 Dmitriy Anisimkov'],
        self.get_test_loc('ics/zlib-contrib-ada/mtest.adb'))

def test_ics_zlib_contrib_ada_zlib_ads(self):
    check_detection(
        [u'Copyright (c) 2002-2004 Dmitriy Anisimkov'],
        self.get_test_loc('ics/zlib-contrib-ada/zlib.ads'))

def test_ics_zlib_contrib_blast_blast_c(self):
    check_detection(
        [u'Copyright (c) 2003 Mark Adler'],
        self.get_test_loc('ics/zlib-contrib-blast/blast.c'))

def test_ics_zlib_contrib_blast_blast_h(self):
    check_detection(
        [u'Copyright (c) 2003 Mark Adler'],
        self.get_test_loc('ics/zlib-contrib-blast/blast.h'))

def test_ics_zlib_contrib_delphi_readme_txt(self):
    check_detection(
        [u'Copyright (c) 1997,99 Borland Corp.'],
        self.get_test_loc('ics/zlib-contrib-delphi/readme.txt'))

def test_ics_zlib_contrib_dotzlib_readme_txt(self):
    check_detection(
        [u'Copyright (c) Henrik Ravn 2004'],
        self.get_test_loc('ics/zlib-contrib-dotzlib/readme.txt'))

def test_ics_zlib_contrib_dotzlib_dotzlib_checksumimpl_cs(self):
    check_detection(
        [u'(c) Copyright Henrik Ravn 2004'],
        self.get_test_loc('ics/zlib-contrib-dotzlib-DotZLib/ChecksumImpl.cs'))

def test_ics_zlib_contrib_dotzlib_dotzlib_assemblyinfo_cs(self):
    check_detection(
        [u'(c) 2004 by Henrik Ravn'],
        self.get_test_loc('ics/zlib-contrib-dotzlib-DotZLib/AssemblyInfo.cs'))

def test_ics_zlib_contrib_infback9_infback9_c(self):
    check_detection(
        [u'Copyright (c) 1995-2008 Mark Adler'],
        self.get_test_loc('ics/zlib-contrib-infback9/infback9.c'))

def test_ics_zlib_contrib_infback9_infback9_h(self):
    check_detection(
        [u'Copyright (c) 2003 Mark Adler'],
        self.get_test_loc('ics/zlib-contrib-infback9/infback9.h'))

def test_ics_zlib_contrib_inflate86_inffas86_c(self):
    check_detection(
        [
            u'Copyright (c) 1995-2003 Mark Adler',
            u'Copyright (c) 2003 Chris Anderson <christop@charm.net>',
        ],
        self.get_test_loc('ics/zlib-contrib-inflate86/inffas86.c'))
# ICS zlib contrib samples (part 2).
def test_ics_zlib_contrib_masmx86_gvmat32c_c(self):
    check_detection(
        [u'Copyright (c) 1995-1996 Jean-loup Gailly and Gilles Vollant.'],
        self.get_test_loc('ics/zlib-contrib-masmx86/gvmat32c.c'))

def test_ics_zlib_contrib_minizip_crypt_h(self):
    check_detection(
        [u'Copyright (c) 1998-2005 Gilles Vollant'],
        self.get_test_loc('ics/zlib-contrib-minizip/crypt.h'))

def test_ics_zlib_contrib_minizip_ioapi_c(self):
    check_detection(
        [
            u'Copyright (c) 1998-2010 Gilles Vollant',
            u'Copyright (c) 2009-2010 Mathias Svensson http://result42.com',
        ],
        self.get_test_loc('ics/zlib-contrib-minizip/ioapi.c'))

def test_ics_zlib_contrib_minizip_miniunz_c(self):
    check_detection(
        [
            u'Copyright (c) 1998-2010 Gilles Vollant',
            u'Copyright (c) 2007-2008 Even Rouault',
            u'Copyright (c) 2009-2010 Mathias Svensson http://result42.com',
        ],
        self.get_test_loc('ics/zlib-contrib-minizip/miniunz.c'))

def test_ics_zlib_contrib_minizip_minizip64_info_txt(self):
    check_detection(
        [u'Copyright (c) 1998-2010 - by Gilles Vollant'],
        self.get_test_loc('ics/zlib-contrib-minizip/MiniZip64_info.txt'))

def test_ics_zlib_contrib_minizip_unzip_c(self):
    check_detection(
        [
            u'Copyright (c) 1998-2010 Gilles Vollant',
            u'Copyright (c) 2007-2008 Even Rouault',
            u'Copyright (c) 2009-2010 Mathias Svensson http://result42.com',
            u'Copyright (c) 1990-2000 Info-ZIP.',
            u'Copyright (c) 2007-2008 Even Rouault',
            u'Copyright (c) 1998 - 2010 Gilles Vollant, Even Rouault, Mathias Svensson',
            u'Copyright 1998-2004 Gilles Vollant',
        ],
        self.get_test_loc('ics/zlib-contrib-minizip/unzip.c'))

def test_ics_zlib_contrib_minizip_zip_c(self):
    check_detection(
        [
            u'Copyright (c) 1998-2010 Gilles Vollant',
            u'Copyright (c) 2009-2010 Mathias Svensson http://result42.com',
            u'Copyright 1998-2004 Gilles Vollant',
        ],
        self.get_test_loc('ics/zlib-contrib-minizip/zip.c'))

def test_ics_zlib_contrib_pascal_readme_txt(self):
    check_detection(
        [
            u'Copyright (c) 1995-2003 Jean-loup Gailly and Mark Adler.',
            u'Copyright (c) 1998 by Bob Dellaca.',
            u'Copyright (c) 2003 by Cosmin Truta.',
            u'Copyright (c) 1995-2003 by Jean-loup Gailly.',
            u'Copyright (c) 1998,1999,2000 by Jacques Nomssi Nzali.',
            u'Copyright (c) 2003 by Cosmin Truta.',
        ],
        self.get_test_loc('ics/zlib-contrib-pascal/readme.txt'))

def test_ics_zlib_contrib_puff_puff_c(self):
    check_detection(
        [u'Copyright (c) 2002-2010 Mark Adler'],
        self.get_test_loc('ics/zlib-contrib-puff/puff.c'))

def test_ics_zlib_contrib_puff_puff_h(self):
    check_detection(
        [u'Copyright (c) 2002-2010 Mark Adler'],
        self.get_test_loc('ics/zlib-contrib-puff/puff.h'))

def test_ics_zlib_contrib_vstudio_vc10_zlib_rc(self):
    check_detection(
        [u'(c) 1995-2010 Jean-loup Gailly & Mark Adler'],
        self.get_test_loc('ics/zlib-contrib-vstudio-vc10/zlib.rc'))

def test_ics_zlib_contrib_vstudio_vc7_zlib_rc(self):
    check_detection(
        [u'(c) 1995-2003 Jean-loup Gailly & Mark Adler'],
        self.get_test_loc('ics/zlib-contrib-vstudio-vc7/zlib.rc'))
# ICS zlib doc, examples, msdos, old and win32 samples.
def test_ics_zlib_doc_rfc1950_txt(self):
    check_detection(
        [u'Copyright (c) 1996 L. Peter Deutsch and Jean-Loup Gailly'],
        self.get_test_loc('ics/zlib-doc/rfc1950.txt'))

def test_ics_zlib_doc_rfc1951_txt(self):
    check_detection(
        [u'Copyright (c) 1996 L. Peter Deutsch'],
        self.get_test_loc('ics/zlib-doc/rfc1951.txt'))

def test_ics_zlib_examples_enough_c(self):
    check_detection(
        [u'Copyright (c) 2007, 2008 Mark Adler'],
        self.get_test_loc('ics/zlib-examples/enough.c'))

def test_ics_zlib_examples_gun_c(self):
    check_detection(
        [
            u'Copyright (c) 2003, 2005, 2008, 2010 Mark Adler',
            u'Copyright (c) 2003-2010 Mark Adler',
        ],
        self.get_test_loc('ics/zlib-examples/gun.c'))

def test_ics_zlib_examples_gzappend_c(self):
    check_detection(
        [
            u'Copyright (c) 2003 Mark Adler',
            u'Copyright (c) 2003 Mark Adler',
        ],
        self.get_test_loc('ics/zlib-examples/gzappend.c'))

def test_ics_zlib_examples_gzjoin_c(self):
    check_detection(
        [u'Copyright (c) 2004 Mark Adler'],
        self.get_test_loc('ics/zlib-examples/gzjoin.c'))

def test_ics_zlib_examples_gzlog_c(self):
    check_detection(
        [u'Copyright (c) 2004, 2008 Mark Adler'],
        self.get_test_loc('ics/zlib-examples/gzlog.c'))

def test_ics_zlib_examples_gzlog_h(self):
    check_detection(
        [u'Copyright (c) 2004, 2008 Mark Adler'],
        self.get_test_loc('ics/zlib-examples/gzlog.h'))

def test_ics_zlib_examples_zlib_how_html(self):
    check_detection(
        [
            u'Copyright (c) 2004, 2005 Mark Adler.',
            u'Copyright (c) 2004, 2005 by Mark Adler',
        ],
        self.get_test_loc('ics/zlib-examples/zlib_how.html'))

def test_ics_zlib_examples_zran_c(self):
    check_detection(
        [u'Copyright (c) 2005 Mark Adler'],
        self.get_test_loc('ics/zlib-examples/zran.c'))

def test_ics_zlib_msdos_makefile_dj2(self):
    check_detection(
        [u'Copyright (c) 1995-1998 Jean-loup Gailly.'],
        self.get_test_loc('ics/zlib-msdos/Makefile.dj2'))

def test_ics_zlib_old_zlib_html(self):
    check_detection(
        [u'Copyright (c) 1995-2002 Jean-loup Gailly and Mark Adler'],
        self.get_test_loc('ics/zlib-old/zlib.html'))

def test_ics_zlib_old_visualc6_readme_txt(self):
    check_detection(
        [
            u'Copyright (c) 2000-2004 Simon-Pierre Cadieux.',
            u'Copyright (c) 2004 Cosmin Truta.',
        ],
        self.get_test_loc('ics/zlib-old-visualc6/README.txt'))

def test_ics_zlib_win32_makefile_gcc(self):
    check_detection(
        [u'Copyright (c) 1995-2003 Jean-loup Gailly.'],
        self.get_test_loc('ics/zlib-win32/Makefile.gcc'))

def test_ics_zlib_win32_makefile_msc(self):
    check_detection(
        [u'copyright (c) 1995-2006 Jean-loup Gailly and Mark Adler'],
        self.get_test_loc('ics/zlib-win32/Makefile.msc'))

def test_ics_zlib_win32_zlib1_rc(self):
    check_detection(
        [u'(c) 1995-2006 Jean-loup Gailly & Mark Adler'],
        self.get_test_loc('ics/zlib-win32/zlib1.rc'))
|
yashdsaraf/scancode-toolkit
|
tests/cluecode/test_copyrights_ics.py
|
Python
|
apache-2.0
| 812,853
|
[
"BLAST",
"Brian",
"VisIt"
] |
34d3dc6dadde31b30d000791faa91e7847a687599d524299ba157e9bf45e3cb0
|
# Copyright 2015-2019 Cedric RICARD
#
# This file is part of CloudMailing.
#
# CloudMailing is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# CloudMailing is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with CloudMailing. If not, see <http://www.gnu.org/licenses/>.
import fnmatch
import io
import glob
import os
import random
import subprocess
import tempfile
from fabric.api import env, cd, run, put, settings, prefix, task, local, get
from fabric.contrib.project import rsync_project
from fabric.contrib import files
import sys
# Use a plain POSIX shell for remote commands (some targets lack bash).
env.shell = "/bin/sh -c"
# Optional per-installation settings: role definitions and default config.
# Missing local_settings.py is tolerated (warn only) so read-only tasks
# still work, but host-specific tasks will have no role/target data.
try:
    import local_settings
    env.roledefs = local_settings.roledefs
    default_cm_config = local_settings.default_cm_config
except ImportError:
    print("Can't find local_settings.py ; No roles definitions", file=sys.stderr)
    default_cm_config = {}
# Directory containing this fabfile.
FAB_PATH = os.path.dirname(os.path.abspath(__file__))
# Project root: two levels above the fabfile.
WORKSPACE = os.path.abspath(os.path.join(FAB_PATH, "..", ".."))
# Remote installation path used when a host has no explicit 'path' setting.
DEFAULT_TARGET_PATH = '/home/cm'
def get_host_conf():
    """Return the per-host settings dict for the current fabric host.

    Looks up ``env.host_string`` in ``local_settings.targets`` and returns
    ``{}`` when the host is unknown.  Also returns ``{}`` when
    ``local_settings`` failed to import at module load (the module-level
    try/except only prints a warning), instead of raising ``NameError``.
    """
    try:
        targets = local_settings.targets
    except NameError:
        # local_settings.py was absent at import time; no per-host config.
        return {}
    return targets.get(env.host_string, {})
@task
def display_host_conf():
    """Fabric task: print the resolved configuration for the current host."""
    conf = get_host_conf()
    print(conf)
def TARGET_PATH():
    """Return the remote install path: host conf 'path' or the default."""
    return get_host_conf().get('path', DEFAULT_TARGET_PATH)
@task
def get_system_name():
    """Return the remote kernel name (output of `uname`, e.g. 'Linux')."""
    return run("uname")
def get_cm_user_and_group() -> str:
    """Return the 'user:group' owner for deployed files ('cm:cm' by default)."""
    return get_host_conf().get('user', 'cm:cm')
def get_cm_username() -> str:
    """Return only the user part of the configured 'user:group' owner.

    Uses ``str.partition`` so a host conf 'user' value without a colon
    (just a user name) is returned as-is, where the previous two-way
    unpacking of ``split(':')`` raised ``ValueError``.  Also drops the
    unused ``group`` local.
    """
    return get_cm_user_and_group().partition(':')[0]
def update_files_rights(path):
    """Recursively chown `path` on the remote host to the configured owner.

    No-op when no 'user:group' owner is configured.
    """
    owner = get_cm_user_and_group()
    if owner:
        run("chown -R %s %s" % (owner, path))
@task
def cm_stop():
    """Stop every process of the host's supervisor group (default 'cm')."""
    run("supervisorctl stop %s:*" % get_host_conf().get('supervisor_group', 'cm'))
@task
def cm_start():
    """Start every process of the host's supervisor group (default 'cm')."""
    run("supervisorctl start %s:*" % get_host_conf().get('supervisor_group', 'cm'))
@task
def stop_master():
    """Stop only the CloudMailing master process via supervisord."""
    run("supervisorctl stop cm:cm_master")

@task
def start_master():
    """Start only the CloudMailing master process via supervisord."""
    run("supervisorctl start cm:cm_master")

@task
def stop_satellite():
    """Stop only the CloudMailing satellite process via supervisord."""
    run("supervisorctl stop cm:cm_satellite")

@task
def start_satellite():
    """Start only the CloudMailing satellite process via supervisord."""
    run("supervisorctl start cm:cm_satellite")
@task
def test():
    """Debug task: print the host configuration and probe path handling."""
    host_conf = get_host_conf()
    print(host_conf)
    with cd(TARGET_PATH()):
        # NOTE(review): os.path.exists runs locally, not on the remote host,
        # despite the cd() context — presumably intentional for debugging.
        print('exist:', os.path.exists('/Users'))
@task
def clean_compiled_files():
    """cleanup *.pyc / *.pyo files"""
    # Make sure the remote deployment folder exists before uploading the helper.
    run("mkdir -p %s" % (TARGET_PATH() + '/deployment'))
    put(os.path.join(WORKSPACE, 'deployment', "cm_compile.py"), TARGET_PATH() + '/deployment')
    with cd(TARGET_PATH()):
        # presumably '-c' selects the cleanup mode of cm_compile.py — confirm there
        run("python3 deployment/cm_compile.py -c")
def compile_python_files():
    """create *.pyc / *.pyo files"""
    # The helper script is expected to be on the target already (uploaded by
    # clean_compiled_files); the put() below was deliberately disabled.
    # put(os.path.join(WORKSPACE, 'deployment', "cm_compile.py"), TARGET_PATH() +'/deployment')
    with cd(TARGET_PATH()):
        # -O generates optimized bytecode on the remote host
        run("python3 -O deployment/cm_compile.py")
def get_lastmodified(path, match=('*.*',), excludes=()):
    """Return the newest mtime of files under `path` matching any glob in
    `match`, skipping directory names listed in `excludes`.

    Returns 0 when no file matches.
    """
    newest = 0
    for root, dirs, files in os.walk(path):
        for filename in files:
            if any(fnmatch.fnmatch(filename, pattern) for pattern in match):
                newest = max(newest, os.path.getmtime(os.path.join(root, filename)))
        # Prune excluded directories in place so os.walk does not descend.
        dirs[:] = [d for d in dirs if d not in excludes]
    return newest
@task
def compile_static_files():
    """Run the gulp build when web/ sources are newer than the static/ output."""
    web_dir = os.path.join(WORKSPACE, 'web')
    static_dir = os.path.join(WORKSPACE, 'static')
    newest_source = get_lastmodified(web_dir, excludes=('node_modules', 'report'))
    newest_static = get_lastmodified(static_dir, match=('*.js', '*.html', '*.css'))
    if newest_source > newest_static:
        print("Compiling static files...")
        subprocess.check_output(['npm', 'run', 'gulp', 'build'], cwd=web_dir)
@task
def sync_sources(test_only=False):
    """rsync the local workspace to the remote target path.

    :param test_only: when true, pass --dry-run so nothing is written.
    """
    rsync_project(
        TARGET_PATH(),
        local_dir=WORKSPACE + "/",
        delete=True,
        # default_opts='-rvz',  # '-pthrvz'
        # The filter file controls which sources are shipped.
        extra_opts='-ci --prune-empty-dirs --filter=". %s"' % os.path.join(FAB_PATH, "rsync_filter") + (test_only and " --dry-run" or ""),
        #extra_opts="-ci --dry-run",
    )
def get_version(repo_path):
    """Return a human-readable version string for the git checkout at *repo_path*.

    The string is the output of ``git describe`` with ``-dirty`` appended
    when the working tree has uncommitted changes (i.e. a non-empty
    ``git diff --shortstat``).

    :raises subprocess.CalledProcessError: if git fails (not a repo, no tag).
    """
    label = subprocess.check_output(["git", "describe"], cwd=repo_path).strip().decode()
    stats = subprocess.check_output(['git', 'diff', '--shortstat'], cwd=repo_path)
    # Fix: the old check indexed stats[-1] (a byte value under Python 3),
    # which only worked by accident. Any non-whitespace shortstat output
    # means there are uncommitted changes.
    dirty = bool(stats.strip())
    return label + ("-dirty" if dirty else "")
@task
def write_cm_version():
    """Derive the version from git and store it in cloud_mailing/version.properties."""
    version = get_version(WORKSPACE)
    print("CloudMailing version %s" % version)
    write_version(version, target_path=os.path.join(WORKSPACE, 'cloud_mailing'))
def write_version(version, target_path):
    """Write *version* into a ``version.properties`` file inside *target_path*."""
    properties_path = os.path.join(target_path, 'version.properties')
    with open(properties_path, 'wt') as properties:
        properties.write('VERSION=%s\n' % version)
@task
def inject_copyright():
    """Put or update copyright header in all python source files."""
    # Delegates to the license helper script; runs locally via os.system.
    os.system('python ' + os.path.join(WORKSPACE, 'deployment', 'license', 'update_copyright.py'))
@task
def deploy_sources(compile=True):
    """Full source deployment: clean, build assets, sync, recompile remotely.

    :param compile: when true (default) stale bytecode is removed before the
        rsync and sources are byte-compiled afterwards.
        NOTE(review): the parameter shadows the built-in ``compile``; kept
        as-is because callers pass it by keyword (see quick_deploy).
    """
    run("mkdir -p %s" % TARGET_PATH())
    if compile:
        clean_compiled_files()
    compile_static_files()
    write_cm_version()
    sync_sources()
    if compile:
        compile_python_files()
    update_files_rights(TARGET_PATH())
@task
def update_venv():
    """(Re)build the remote Python 3.7 virtualenv and install requirements.

    Removes legacy '.env_cm' / '.env_mf' environments, recreates '.env' when
    it is missing or not Python 3.7, then upgrades pip/incremental and
    installs requirements.txt.
    """
    # put(os.path.join(WORKSPACE, "requirements.txt"), TARGET_PATH())
    with cd(TARGET_PATH()):
        # Drop obsolete per-component virtualenvs from older layouts.
        if files.exists(".env_cm"):
            run("rm -r .env_cm")
        if files.exists(".env_mf"):
            run("rm -r .env_mf")
        with settings(warn_only=True):
            # Recreate the venv when it targets the wrong interpreter.
            if 'Python 3.7' not in run(".env/bin/python -V"):
                run("rm -r .env")
            if run("test -d .env").failed:
                run("python3.7 -m venv .env")
        with prefix('. .env/bin/activate'):
            run('pip install pip --upgrade')
            run('pip install incremental --upgrade')
            run('pip install -r requirements.txt --upgrade')
    update_files_rights(TARGET_PATH())
@task
def install_packages():
    """Install system packages required by CloudMailing (Linux/apt only)."""
    remote_system = get_system_name()
    if remote_system == "Linux":
        # run("apt-get install -y software-properties-common")
        # run("add-apt-repository -y ppa:fkrull/deadsnakes")
        run("apt-get update")
        run("apt-get install -y mongodb supervisor build-essential rsync python3-dev ")
# @task
# def init_db():
# run("createuser -U pgsql cm")
# run("createdb -O cm -U pgsql cm")
#
@task
def create_user():
    """
    create the 'cm' user and group on new system.

    Idempotent: returns early when the user already exists. Supports Linux
    (adduser) and FreeBSD (pw useradd); other systems only get a message.
    @return:
    """
    username, group = get_cm_user_and_group().split(':')
    # `id` prints 'uid=...' when the account exists.
    if 'uid=' in run("id %s" % username):
        print(("User '%s' already exists" % username))
        return
    remote_system = get_system_name()
    if remote_system == "Linux":
        run("adduser --home %(TARGET_PATH)s --shell /bin/tcsh --disabled-password --disabled-login --gecos '' %(username)s" % {
            'TARGET_PATH': TARGET_PATH(), 'username': username})
    elif remote_system == "FreeBSD":
        run("pw useradd %(username)s -d %(TARGET_PATH)s -m -s /bin/tcsh -w no" % {'TARGET_PATH': TARGET_PATH(), 'username': username})
    else:
        print("create_user: Unsupported remote system '%s'" % remote_system)
@task
def create_initial_config():
    """
    create a new config file for CloudMailing.

    Builds cloud-mailing.ini from the per-host settings in local_settings
    (serial, test target, master/satellite role, extra sections) and uploads
    it to <TARGET_PATH>/config/.
    @return:
    """
    config_filename = os.path.join(TARGET_PATH(), 'config', 'cloud-mailing.ini')
    from configparser import RawConfigParser
    config = RawConfigParser()
    host_conf = local_settings.targets.get(env.host_string, {})
    serial = host_conf.get('serial')
    if serial:
        config.add_section("ID")
        config.set('ID', 'SERIAL', serial)
    test_target = default_cm_config.get('test_target')
    if test_target:
        config.add_section("MAILING")
        config.set('MAILING', 'test_target_ip', test_target['ip'])
        config.set('MAILING', 'test_target_port', test_target['port'])
    remote_master_conf = host_conf.get('remote_master')
    if remote_master_conf:
        # satellite only: point this node at its remote master
        if not config.has_section('MAILING'):
            config.add_section("MAILING")
        config.set('MAILING', 'master_ip', remote_master_conf['master_ip'])
        config.set('MAILING', 'master_port', remote_master_conf.get('master_port', 33620))
        config.set('MAILING', 'shared_key', host_conf['shared_key'])
    else:
        # master + (eventually) satellite: generate a random API key
        config.add_section("CM_MASTER")
        config.set('CM_MASTER', 'API_KEY', "".join([random.choice("abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)") for i in range(50)]))
    # Merge any free-form extra sections declared for this host.
    other_config = host_conf.get('config')
    if other_config:
        for section, content in list(other_config.items()):
            if not config.has_section(section):
                config.add_section(section)
            for key, value in list(content.items()):
                config.set(section, key, value)
    # Write locally to a temp file, then push it to the remote host.
    with tempfile.NamedTemporaryFile('w+t') as tmp:
        config.write(tmp)
        tmp.flush()
        run("mkdir -p %s/config" % TARGET_PATH())
        put(tmp.name, config_filename)
        update_files_rights(config_filename)
@task
def create_supervisord_config():
    """
    create the supervisord config files for CloudMailing jobs

    A satellite-only host gets just the satellite program; a master host gets
    master + smtpd + satellite. The file is pushed to the distro-specific
    supervisor conf.d directory and supervisor is reloaded.
    @return:
    """
    host_conf = get_host_conf()
    satellite_only = host_conf.get('remote_master') is not None
    username, group = get_cm_user_and_group().split(':')
    group_name = host_conf.get("supervisor_group", "cm")
    conf_filename = host_conf.get("supervisor_filename", "cloud_mailing.conf")
    if satellite_only:
        config = """[group:%(group_name)s]
programs=%(group_name)s_satellite
""" % {'group_name': group_name}
    else:
        config = """[group:%(group_name)s]
programs=%(group_name)s_master,%(group_name)s_satellite,%(group_name)s_smtpd
[program:%(group_name)s_master]
command=%(TARGET_PATH)s/.env/bin/python -O bin/cm_master.py
directory=%(TARGET_PATH)s
numprocs=1
stdout_logfile=/var/log/supervisor.%(group_name)s_master.log
autostart=true
autorestart=true
user=%(user)s
priority=10
[program:%(group_name)s_smtpd]
command=%(TARGET_PATH)s/.env/bin/python -O bin/cm_smtpd.py -u %(user)s -g %(group)s
directory=%(TARGET_PATH)s
numprocs=1
stdout_logfile=/var/log/cm_smtpd.supervisor.log
autostart=true
autorestart=true
;user=%(user)s
priority=30
""" % {'TARGET_PATH': TARGET_PATH(), 'group_name': group_name, 'user': username, 'group': group}
    # The satellite program is present in both configurations.
    config += """
[program:%(group_name)s_satellite]
command=%(TARGET_PATH)s/.env/bin/python -O bin/cm_satellite.py
directory=%(TARGET_PATH)s
numprocs=1
stdout_logfile=/var/log/supervisor.%(group_name)s_satellite.log
autostart=true
autorestart=true
user=%(user)s
priority=20
""" % {'TARGET_PATH': TARGET_PATH(), 'group_name': group_name, 'user': username, 'group': group}
    with tempfile.NamedTemporaryFile('w+t') as tmp:
        tmp.write(config)
        tmp.flush()
        remote_system = get_system_name()
        if remote_system == "Linux":
            put(tmp.name, "/etc/supervisor/conf.d/" + conf_filename)
        elif remote_system == "FreeBSD":
            put(tmp.name, "/usr/local/etc/supervisord.d/" + conf_filename)
        # Make supervisor pick up the new/changed program definitions.
        run("supervisorctl reread")
        run("supervisorctl update")
@task()
def first_setup():
    """Provision a brand-new host: user, config, supervisor, sources, venv, start."""
    host_conf = get_host_conf()
    print(host_conf)
    # satellite_only = host_conf.get('remote_master') is not None
    # init_db()
    # install_packages()
    create_user()
    create_initial_config()
    create_supervisord_config()
    deploy_sources()
    update_venv()
    cm_start()
@task()
def diff():
    """Show what a deployment would change (rsync --dry-run), without writing."""
    sync_sources(test_only=True)
@task(default=True)
def deploy():
    """Default task: stop services, deploy sources, refresh venv, restart."""
    cm_stop()
    deploy_sources()
    update_venv()
    cm_start()
@task()
def quick_deploy():
    """ Only use for minor changes and quick deployment """
    # Skips bytecode cleanup/compilation; note sources are synced BEFORE the
    # services are stopped (unlike deploy()).
    deploy_sources(compile=False)
    cm_stop()
    cm_start()
@task
def make_docker():
    """Prepare assets and compiled bytecode for a docker image build (local only)."""
    compile_static_files()
    write_cm_version()
    subprocess.check_output(['python', '-O', 'deployment/cm_compile.py'], cwd=os.path.join(WORKSPACE, 'cloud_mailing'))
    # subprocess.check_output("docker", cwd=WORKSPACE)
@task
def test_env():
    """Debug task: print the fabric env and the matching local_settings entry."""
    print("running", env.host_string, env.host, local_settings.targets.get(env.host_string, {}))
|
ricard33/cloud-mailing
|
deployment/fab/fabfile.py
|
Python
|
agpl-3.0
| 12,937
|
[
"GULP"
] |
ad87f34a91abed148d79c5f8836a52ad0d280f19ca6396c173bea6d2c0b58800
|
#!/usr/bin/env python
# VTK demo: build a ruled stream surface from streamlines traced through a
# PLOT3D combustor dataset and render it together with the dataset outline.
import vtk
from vtk.util.misc import vtkGetDataRoot
# create planes
# Create the RenderWindow, Renderer
#
ren = vtk.vtkRenderer()
renWin = vtk.vtkRenderWindow()
renWin.AddRenderer( ren )
iren = vtk.vtkRenderWindowInteractor()
iren.SetRenderWindow(renWin)
# create pipeline
#
# Read the PLOT3D structured grid (geometry + solution) shipped with VTK data.
pl3d = vtk.vtkPLOT3DReader()
pl3d.SetXYZFileName( vtkGetDataRoot() + '/Data/combxyz.bin' )
pl3d.SetQFileName( vtkGetDataRoot() + '/Data/combq.bin' )
pl3d.SetScalarFunctionNumber( 100 )
pl3d.SetVectorFunctionNumber( 202 )
pl3d.Update()
# Wireframe outline of the dataset bounds, for spatial context.
outline = vtk.vtkStructuredGridOutlineFilter()
outline.SetInputConnection(pl3d.GetOutputPort())
outlineMapper = vtk.vtkPolyDataMapper()
outlineMapper.SetInputConnection(outline.GetOutputPort())
outlineActor = vtk.vtkActor()
outlineActor.SetMapper(outlineMapper)
# Seed line of 11 points; one streamline is traced from each seed.
seeds = vtk.vtkLineSource()
seeds.SetPoint1(15, -5, 32)
seeds.SetPoint2(15, 5, 32)
seeds.SetResolution(10)
# Integrate streamlines backward through the vector field with RK4.
integ = vtk.vtkRungeKutta4()
sl = vtk.vtkStreamLine()
sl.SetIntegrator(integ)
sl.SetInputConnection(pl3d.GetOutputPort())
sl.SetSource(seeds.GetOutput())
sl.SetMaximumPropagationTime(0.1)
sl.SetIntegrationStepLength(0.1)
sl.SetIntegrationDirectionToBackward()
sl.SetStepLength(0.001)
# Stitch adjacent streamlines into a ruled surface (resampled to 100 pts).
scalarSurface = vtk.vtkRuledSurfaceFilter ()
scalarSurface.SetInputConnection(sl.GetOutputPort())
scalarSurface.SetOffset(0)
scalarSurface.SetOnRatio(2)
scalarSurface.PassLinesOn()
scalarSurface.SetRuledModeToResample()
scalarSurface.SetResolution(100,1)
scalarSurface.SetDistanceFactor(30)
mapper = vtk.vtkPolyDataMapper()
mapper.SetInputConnection(scalarSurface.GetOutputPort())
actor = vtk.vtkActor()
actor.SetMapper(mapper)
# Also show the seed line itself.
mmapper = vtk.vtkPolyDataMapper()
mmapper.SetInputConnection(seeds.GetOutputPort())
mactor = vtk.vtkActor()
mactor.SetMapper(mmapper)
ren.AddActor(mactor)
ren.AddActor(actor)
ren.AddActor(outlineActor)
# Fixed camera so the regression image is reproducible.
cam=ren.GetActiveCamera()
cam.SetClippingRange( 3.95297, 50 )
cam.SetFocalPoint( 8.88908, 0.595038, 29.3342 )
cam.SetPosition( -12.3332, 31.7479, 41.2387 )
cam.SetViewUp( 0.060772, -0.319905, 0.945498 )
renWin.Render()
|
naucoin/VTKSlicerWidgets
|
Graphics/Testing/Python/streamSurface2.py
|
Python
|
bsd-3-clause
| 2,074
|
[
"VTK"
] |
4adde1d41fcc6a285ac1bc690071d2b2346e5459a2dbecc498ef969b5efaeac0
|
#!/usr/bin/env python
import os
import sys
import time
import platform
import argparse
from datetime import datetime
from space_checker_utils import wget_wrapper
import ConfigParser
def create_directory(directory):
    """Create parent directories as necessary.

    :param directory: (~str) Path of directory to be made.
    :return: True - if the directory was created or already exists empty,
             False - if it exists non-empty or cannot be created at all.
    """
    try:
        os.makedirs(directory)
        return True
    except OSError:
        # makedirs failed: either the directory already exists, or it could
        # not be created (permissions, a path component is a file, ...).
        try:
            # An existing *empty* directory still counts as success.
            return not os.listdir(directory)
        except OSError:
            # Fix: the old code let this listdir raise when the directory was
            # never created (e.g. permission error), crashing the installer.
            return False
def run_command(command):
    """Execute the provided shell command.

    :param command: (~str) Linux shell command.
    :return: False when wget_wrapper vetoes the command (not enough disk
        space); otherwise the os.system() exit status.
        NOTE(review): os.system returns 0 on success, so callers treating
        the return as "True means executed" get inverted truthiness — verify
        before relying on it.
    """
    Colorizer.normal("[*] Running following command")
    Colorizer.info("%s" % command)
    # If command is `wget`, then before execution, `wget_wrapper` checks whether there is enough disk space available
    if not wget_wrapper(command):
        return False
    return os.system(command)
def owtf_last_commit():
    """Prints the local git repo's last commit hash.

    Returns the hash string, or a placeholder when root_dir is not a git
    checkout. NOTE(review): the `git log` runs in the CURRENT working
    directory (os.popen), while the .git existence check uses the
    module-global root_dir — these can disagree; confirm cwd == root_dir.
    """
    if os.path.exists(os.path.join(root_dir, '.git')):
        command = 'git log -n 1 --pretty=format:"%H"'
        commit_hash = os.popen(command).read()
        return commit_hash
    else:
        return "*Not a git repository.*"
def check_sudo():
    """Checks if the user has sudo access.

    Exits the process when `sudo -v` fails. NOTE(review): sys.exit() with no
    argument exits with status 0, so callers/scripts cannot detect this
    failure from the exit code.
    """
    sudo = os.system("sudo -v")
    if not sudo:
        # Exit status 0 -> sudo available.
        return
    else:
        Colorizer.warning("[!] Your user does not have sudo privileges. Some OWTF components require sudo permissions to install")
        sys.exit()
def install_in_directory(directory, command):
    """Execute a certain command while staying inside one directory.

    :param directory: (~str) Path of directory in which installation command has to be executed.
    :param command: (~str) Linux shell command (most likely `wget` here)
    :return: run_command()'s result if the directory was created, True when
        the directory already exists (installation skipped).
    """
    if create_directory(directory):
        Colorizer.info("[*] Switching to %s" % directory)
        # NOTE(review): os.chdir changes the process-wide cwd and is never
        # restored — later relative paths depend on this side effect.
        os.chdir(directory)
        return run_command(command)
    else:
        Colorizer.warning("[!] Directory %s already exists, so skipping installation for this" % directory)
        return True
def install_using_pip(requirements_file):
    """Install pip libraries as mentioned in a requirements file.

    :param requirements_file: (~str) Path to requirements file - in which libraries are listed.
    :return: run_command()'s result (shell exit status).
    """
    # Instead of using file directly with pip which can crash because of single library
    # -E preserves the caller's environment (proxies etc.) under sudo.
    return run_command("sudo -E pip2 install --upgrade -r %s" % requirements_file)
def install_restricted_from_cfg(config_file):
    """Install restricted tools and dependencies which are distro independent.

    Each section of the cfg provides a 'directory' and a 'command'; the
    %(RootDir)s / %(Pid)s placeholders are filled from the module globals.

    :param config_file: (~str) Path to configuration file having information about restricted content.
    """
    cp = ConfigParser.ConfigParser({"RootDir": root_dir, "Pid": pid})
    cp.read(config_file)
    for section in cp.sections():
        Colorizer.info("[*] Installing %s" % section)
        install_in_directory(os.path.expanduser(cp.get(section, "directory")), cp.get(section, "command"))
def is_compatible():
    """Return True when apt-get is available (Debian-like distro)."""
    # `which` exits with status 1 (in the high byte of os.system's return)
    # when apt-get is not on PATH.
    exit_status = os.system("which apt-get >> /dev/null 2>&1")
    return (exit_status >> 8) != 1
def finish(error_code):
    """Print the closing message: failure help for error_code 1, success otherwise."""
    if error_code == 1:
        Colorizer.danger("\n[!] The installation was not successful.")
        Colorizer.normal("[*] Visit https://github.com/owtf/owtf for help ")
    else:
        Colorizer.success("[*] Finished!")
        Colorizer.info("[*] Start OWTF by running './owtf.py' in parent directory")
def install(cmd_arguments):
    """Perform installation of OWTF Framework. Wraps around all helper methods made in this module.

    :param cmd_arguments: (~list) argv-style arguments parsed by the module
        level `parser` (--no-user-input, --core-only).

    Flow: detect (or ask for) the distro, install distro-independent
    restricted tools, run the distro install command, then upgrade pip
    tooling and install the OWTF python requirements.
    """
    args = parser.parse_args(cmd_arguments)
    # User asked to select distro (in case it can't be automatically detected) and distro related stuff is installed
    cp = ConfigParser.ConfigParser({"RootDir": root_dir, "Pid": pid})
    cp.read(distros_cfg)
    # Try get the distro automatically
    distro, version, arch = platform.linux_distribution()
    distro_num = 0
    if "kali" in distro.lower():
        distro_num = 1
    elif "samurai" in distro.lower():
        distro_num = 2
    elif is_compatible():
        distro_num = 3
    # Loop until proper input is received
    while True:
        if distro_num != 0:
            Colorizer.info("[*] %s has been automatically detected... " % distro)
            Colorizer.normal("[*] Continuing in auto-mode")
            break
        if args.no_user_input:
            distro_num = 0
            break
        print("")
        for i, item in enumerate(cp.sections()):
            Colorizer.warning("(%d) %s" % (i + 1, item))
        Colorizer.warning("(0) My distro is not listed :( %s" % distro)
        distro_num = raw_input("Select a number based on your distribution : ")
        try:
            # Checking if valid input is received
            distro_num = int(distro_num)
            break
        except ValueError:
            print('')
            Colorizer.warning("[!] Invalid Number specified")
            continue
    # First all distro independent stuff is installed
    install_restricted_from_cfg(restricted_cfg)
    if distro_num != 0:
        # NOTE(review): assumes distro_num <= len(cp.sections()); an out of
        # range user entry raises IndexError here.
        run_command(cp.get(cp.sections()[int(distro_num)-1], "install"))
    else:
        Colorizer.normal("[*] Skipping distro related installation :(")
    # Return if option to install only owtf dependencies is given, as there are optional tools further
    if args.core_only:
        return
    Colorizer.normal("[*] Upgrading pip to the latest version ...")
    # Upgrade pip before install required libraries
    run_command("sudo pip2 install --upgrade pip")
    Colorizer.normal("Upgrading setuptools to the latest version ...")
    # Upgrade setuptools
    run_command("sudo pip2 install --upgrade setuptools")
    Colorizer.normal("Upgrading cffi to the latest version ...")
    # Mitigate cffi errors by upgrading it first
    run_command("sudo pip2 install --upgrade cffi")
    # BUG FIX: distro_num is an int at this point; the original compared it
    # to the string '1', so the Kali-specific branch could never run.
    if distro_num == 1:
        # check kali major release number 0.x, 1.x, 2.x
        kali_version = os.popen("cat /etc/issue", "r").read().split(" ")[2][0]
        if kali_version == '1':
            if args.no_user_input:
                fixsetuptools = 'n'
            else:
                fixsetuptools = raw_input("Delete /usr/lib/python2.7/dist-packages/setuptools.egg-info? (y/n)\n(recommended, solves some issues in Kali 1.xx)")
            if fixsetuptools == 'y':
                Colorizer.normal("[*] Backing up the original symlink...")
                ts = time.time()
                human_timestamp = datetime.fromtimestamp(ts).strftime('%Y-%m-%d-%H:%M:%S')
                symlink_orig_path = "/usr/lib/python2.7/dist-packages/setuptools.egg-info"
                run_command("mv %s %s-BACKUP-%s" % (symlink_orig_path, symlink_orig_path, human_timestamp))
                Colorizer.info("[*] The original symlink exists at %s-BACKUP-%s" % (symlink_orig_path, human_timestamp))
                install_using_pip(owtf_pip)
            else:
                Colorizer.warning("[!] Moving on with the installation but you were warned: there may be some errors!")
                install_using_pip(owtf_pip)
    # Initialise and start the PostgreSQL database used by OWTF.
    run_command("sudo sh %s init" % (os.path.join(scripts_path, "db_setup.sh")))
    run_command("sudo sh %s" % (os.path.join(scripts_path, "db_run.sh")))
class Colorizer:
    """Helper class for colorized strings.

    Different statements will have different colors:
        - `normal`, denoting ongoing procedure (WHITE)
        - `info`, any file path, commit hash or any other info (CYAN)
        - `warning`, any potential hindrance in installation (YELLOW)
        - `success`, successful completion (GREEN)
        - `danger`, abrupt failure, desired file/dir not found etc. (RED)
    """
    BOLD = '\033[1m'
    RED = BOLD + '\033[91m'
    GREEN = BOLD + '\033[92m'
    YELLOW = BOLD + '\033[93m'
    BLUE = BOLD + '\033[34m'
    PURPLE = BOLD + '\033[95m'
    CYAN = BOLD + '\033[36m'
    WHITE = BOLD + '\033[37m'
    END = '\033[0m\033[0m'

    def __init__(self):
        pass

    @classmethod
    def _emit(cls, color, string):
        # Single choke point: wrap the message in the color code plus reset.
        print(color + string + cls.END)

    @classmethod
    def normal(cls, string):
        cls._emit(cls.WHITE, string)

    @classmethod
    def info(cls, string):
        cls._emit(cls.CYAN, string)

    @classmethod
    def warning(cls, string):
        cls._emit(cls.YELLOW, string)

    @classmethod
    def success(cls, string):
        cls._emit(cls.GREEN, string)

    @classmethod
    def danger(cls, string):
        cls._emit(cls.RED, string)
if __name__ == "__main__":
    # Repository root = parent of the directory containing this script.
    root_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
    pid = os.getpid()
    # Path to custom scripts for tasks such as setting up/ running PostgreSQL db, run arachni, nikto, wapiti etc.
    scripts_path = os.path.join(root_dir, "scripts")
    # OWTF python libraries
    owtf_pip = os.path.join(root_dir, "install", "owtf.pip")
    # Restricted tools and dictionaries which are distro independent
    restricted_cfg = os.path.join(root_dir, "install", "distro-independent.cfg")
    # Various distros and install scripts
    distros_cfg = os.path.join(root_dir, "install", "linux-distributions.cfg")
    parser = argparse.ArgumentParser()
    parser.add_argument('--no-user-input', help='run script with default options for user input', action="store_true")
    parser.add_argument('--core-only', help='install only owtf dependencies, skip optional tools', action="store_true")
    Colorizer.normal("[*] Great that you are installing OWTF :D")
    Colorizer.warning("[!] There will be lot of output, please be patient")
    Colorizer.info("[*] Last commit hash: %s" % owtf_last_commit())
    check_sudo()
    # NOTE(review): install() has no return statement, so installer_status_code
    # is always None and finish() always takes its success branch — confirm
    # this is intended.
    installer_status_code = install(sys.argv[1:])
    finish(installer_status_code)
|
DePierre/owtf
|
install/install.py
|
Python
|
bsd-3-clause
| 10,142
|
[
"VisIt"
] |
2d02eaaa92666fc776e64807508f8789c0236bb9bf6251e8d0a7b62cde62ef8f
|
# !usr/bin/env python
# -*- coding: utf-8 -*-
#
# Licensed under a 3-clause BSD license.
#
# @Author: Brian Cherinka
# @Date: 2017-08-04 15:36:16
# @Last modified by: Brian Cherinka
# @Last Modified time: 2017-08-04 16:02:22
from __future__ import print_function, division, absolute_import
from sciserver import authentication, config
# Integration smoke test: log each test account into SciServer and collect
# one Keystone token per account.
userNames = ['matlab', 'recount']
userPasswords = ['matlab', 'recount']
userTokens = []
for i in range(len(userNames)):
    # login() establishes the session; the token is fetched separately.
    authentication.login(userNames[i], userPasswords[i])
    token = authentication.getKeystoneToken()
    userTokens.append(token)
|
havok2063/SciScript-Python
|
python/sciserver/integrationtest.py
|
Python
|
apache-2.0
| 596
|
[
"Brian"
] |
6986b8123d176e8f62a7c91f137ba100310f53975fbc59c2538dc88eb8f35384
|
from setuptools import setup, find_packages
import re

# Read the package version straight out of pymaid/__init__.py so that it is
# defined in exactly one place.
VERSIONFILE = "pymaid/__init__.py"
VSRE = r"^__version__ = ['\"]([^'\"]*)['\"]"
# Fix: use context managers — the original left three file handles open.
with open(VERSIONFILE, "rt") as version_file:
    verstrline = version_file.read()
mo = re.search(VSRE, verstrline, re.M)
if mo:
    verstr = mo.group(1)
else:
    raise RuntimeError("Unable to find version string in %s." % (VERSIONFILE,))

# Runtime dependencies come from requirements.txt; comment lines are skipped.
with open('requirements.txt') as f:
    requirements = f.read().splitlines()
requirements = [l for l in requirements if not l.startswith('#')]

with open('README.md') as readme:
    long_description = readme.read()

setup(
    name='python-catmaid',
    version=verstr,
    packages=find_packages(),
    license='GNU GPL V3',
    description='Python interface to CATMAID servers',
    long_description=long_description,
    long_description_content_type='text/markdown',
    url='https://github.com/schlegelp/pymaid',
    project_urls={
        "Documentation": "http://pymaid.readthedocs.io",
        "Source": "https://github.com/schlegelp/pymaid",
        "Changelog": "https://pymaid.readthedocs.io/en/latest/source/whats_new.html",
    },
    author='Philipp Schlegel',
    author_email='pms70@cam.ac.uk',
    keywords='CATMAID interface neuron navis',
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Intended Audience :: Science/Research',
        'Topic :: Scientific/Engineering :: Bio-Informatics',
        'License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
        'Programming Language :: Python :: 3.8',
        'Programming Language :: Python :: 3.9',
    ],
    install_requires=requirements,
    extras_require={'extras': ['fuzzywuzzy[speedup]~=0.17.0',
                               'ujson~=1.35']},
    python_requires='>=3.6',
    zip_safe=False
)
|
schlegelp/pymaid
|
setup.py
|
Python
|
gpl-3.0
| 1,845
|
[
"NEURON"
] |
698ae89b5e6d52536b902fddb9bdf69d13947acc398b028b6afaa915772c9088
|
#
# Copyright 2020 Johannes Hoermann (U. Freiburg)
#
# matscipy - Materials science with Python at the atomic-scale
# https://github.com/libAtoms/matscipy
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""
Electrochemistry module utility functions
Copyright 2019, 2020 IMTEK Simulation
University of Freiburg
Authors:
Johannes Hoermann <johannes.hoermann@imtek-uni-freiburg.de>
Lukas Elflein <elfleinl@cs.uni-freiburg.de>
"""
import matplotlib.pyplot as plt
def get_centers(bins):
    """Return the midpoints of consecutive bin edges.

    Example:
        >>> get_centers(bins=np.array([0.0, 1.0, 2.0]))
        array([ 0.5,  1.5])
    """
    edges = bins.astype(float)
    left, right = edges[:-1], edges[1:]
    return 0.5 * (left + right)
def plot_dist(histogram, name, reference_distribution=None):
    """Plot histogram with an optional reference distribution.

    Saves the figure to '<name>.png' in the current directory.

    :param histogram: (counts, bin_edges) pair as returned by np.histogram
    :param name: plot title, x-label suffix and output file stem
    :param reference_distribution: optional callable evaluated at the bin
        centers; its values are normalized to sum to 1 before plotting.
    """
    hist, bins = histogram
    width = 1 * (bins[1] - bins[0])
    centers = get_centers(bins)
    fi, ax = plt.subplots()
    ax.bar( centers, hist, align='center', width=width, label='Empirical distribution',
            edgecolor="none")
    if reference_distribution is not None:
        ref = reference_distribution(centers)
        # Normalize so the reference is comparable to the (count) histogram.
        ref /= sum(ref)
        ax.plot(centers, ref, color='red', label='Target distribution')
    plt.title(name)
    plt.legend()
    plt.xlabel('Distance ' + name)
    plt.savefig(name + '.png')
|
libAtoms/matscipy
|
matscipy/electrochemistry/utility.py
|
Python
|
lgpl-2.1
| 1,955
|
[
"Matscipy"
] |
4fb9e74cca79b302c327c56a65f550ade6850795e3228e9e40456851f6643073
|
#!/usr/bin/env python
import os, sys, numpy as np, cPickle as pickle, copy
import sim, pickleTraj, parse_potential as pp
# user input
NB = None
NW = None
AATraj = None
Prefix = None
FF_File = None
# System conditions
TempSet = 300
Name_W = 'W' ; Name_B = 'B'
Mass_W = 18.01 ; Mass_B = 78.11
# Pair potential settings
Dia_W = 2.8 ; Dia_B = 5.3
SPCutScale = 2.5
NSPKnots = 30
# Local density potential settings
LDCutWW = 3.5
LDCutBB = 7.5
LDCutBW = LDCutWB = 0.5 * (LDCutBB + LDCutWW)
RhoMin = 0
RhoMax = 20
LD_Delta = 1.0
NLDKnots = 30
# MD settings
MinSteps = 10000 # since test particle is placed at origin --> greater repulsion
EquilSteps = 1000000
ProdSteps = 2000000
StepFreq = 500
AutoSubmit = False
# Lammps settings
LammpsExec = 'lmp_mpich2'
sim.export.lammps.InnerCutoff = 0.02
# TI settings
lambda_factors = np.linspace(0.0, 1.0, 11)
# make TI Sys
def makeTISys(TPType = 'W', lambda_factor = 1.0):
    """Build a `sim` System for thermodynamic integration (Python 2 code).

    A benzene(B)/water(W) coarse-grained system plus one test particle X of
    type *TPType*; the test particle's interactions are scaled by
    *lambda_factor*. Reads knot values for all potentials from the global
    FF_File and initial coordinates from the global AATraj when set.
    """
    print 'Making TI Sys object at lambda = %g' % lambda_factor
    global NB, NW
    global LDCutBB, LDCutBW, LDCutWB, LDCutWW
    global AATraj, Prefix
    # system chemistry
    if Prefix is None: Prefix = 'NB%dNW%d' % (NB, NW)
    AtomTypeW = sim.chem.AtomType(Name_W, Mass = Mass_W, Charge = 0.0, Color = (0,0,1))
    AtomTypeB = sim.chem.AtomType(Name_B, Mass = Mass_B, Charge = 0.0, Color = (1,1,0))
    # insert test particle
    if TPType == 'B': AtomTypeX = sim.chem.AtomType('X', Mass = Mass_B, Charge = 0.0)
    elif TPType == 'W': AtomTypeX = sim.chem.AtomType('X', Mass = Mass_W, Charge = 0.0)
    MolTypeW = sim.chem.MolType(Name_W, [AtomTypeW])
    MolTypeB = sim.chem.MolType(Name_B, [AtomTypeB])
    MolTypeX = sim.chem.MolType('X', [AtomTypeX])
    World = sim.chem.World([MolTypeB, MolTypeW, MolTypeX], Dim = 3, Units = sim.units.AtomicUnits)
    Sys = sim.system.System(World, Name = Prefix)
    # add 1 less particle of whichever is greater
    # NOTE(review): no branch for NB == NW — thisNB/thisNW would be
    # undefined and raise NameError; confirm equal counts never occur.
    if NB > NW:
        thisNB = NB - 1
        thisNW = NW
    elif NB < NW:
        thisNB = NB
        thisNW = NW - 1
    for i in range(thisNB): Sys += MolTypeB.New()
    for i in range(thisNW): Sys += MolTypeW.New()
    Sys += MolTypeX.New()
    # pair potential cutoffs
    SPCutWW = SPCutScale * Dia_W
    SPCutBB = SPCutScale * Dia_B
    SPCutBW = SPCutScale * 0.5 * (Dia_B + Dia_W)
    # atom selection filters for system
    FilterWW = sim.atomselect.PolyFilter([AtomTypeW, AtomTypeW])
    FilterBB = sim.atomselect.PolyFilter([AtomTypeB, AtomTypeB])
    FilterBW = sim.atomselect.PolyFilter([AtomTypeW, AtomTypeB])
    FilterWW_ordered = sim.atomselect.PolyFilter([AtomTypeW, AtomTypeW], Ordered = True)
    FilterBB_ordered = sim.atomselect.PolyFilter([AtomTypeB, AtomTypeB], Ordered = True)
    FilterBW_ordered = sim.atomselect.PolyFilter([AtomTypeB, AtomTypeW], Ordered = True)
    FilterWB_ordered = sim.atomselect.PolyFilter([AtomTypeW, AtomTypeB], Ordered = True)
    # atom selection filters for test particle
    # no X-X filters required since only 1 particle is inserted
    FilterXW = sim.atomselect.PolyFilter([AtomTypeX, AtomTypeW])
    FilterXB = sim.atomselect.PolyFilter([AtomTypeX, AtomTypeB])
    FilterXW_ordered = sim.atomselect.PolyFilter([AtomTypeX, AtomTypeW], Ordered = True)
    FilterWX_ordered = sim.atomselect.PolyFilter([AtomTypeW, AtomTypeX], Ordered = True)
    FilterXB_ordered = sim.atomselect.PolyFilter([AtomTypeX, AtomTypeB], Ordered = True)
    FilterBX_ordered = sim.atomselect.PolyFilter([AtomTypeB, AtomTypeX], Ordered = True)
    # system forcefield: pair splines + local-density potentials, created
    # only for the species actually present
    SP_BB = SP_WW = SP_BW = LD_BB = LD_WW = LD_BW = LD_WB = None
    SP = sim.potential.PairSpline
    LD = sim.potential.LocalDensity
    if NB > 0:
        SP_BB = SP(Sys, Cut = SPCutBB, NKnot = NSPKnots, Filter = FilterBB, Label = "SP_BB")
    if NW > 0:
        SP_WW = SP(Sys, Cut = SPCutWW, NKnot = NSPKnots, Filter = FilterWW, Label = "SP_WW")
    if NB > 0 and NW > 0:
        SP_BW = SP(Sys, Cut = SPCutBW, NKnot = NSPKnots, Filter = FilterBW, Label = "SP_BW")
    if NW > 0:
        LD_WW = LD(Sys, Cut = LDCutWW, LowerCut = LDCutWW - LD_Delta, NKnot = NLDKnots, RhoMin = RhoMin, RhoMax = RhoMax, Label = "LD_WW", Filter = FilterWW_ordered)
    if NB > 0:
        LD_BB = LD(Sys, Cut = LDCutBB, LowerCut = LDCutBB - LD_Delta, NKnot = NLDKnots, RhoMin = RhoMin, RhoMax = RhoMax, Label = "LD_BB", Filter = FilterBB_ordered)
    if NB > 0 and NW > 0:
        LD_BW = LD(Sys, Cut = LDCutBW, LowerCut = LDCutBW - LD_Delta, NKnot = NLDKnots, RhoMin = RhoMin, RhoMax = RhoMax, Label = "LD_BW", Filter = FilterBW_ordered)
        LD_WB = LD(Sys, Cut = LDCutWB, LowerCut = LDCutWB - LD_Delta, NKnot = NLDKnots, RhoMin = RhoMin, RhoMax = RhoMax, Label = "LD_WB", Filter = FilterWB_ordered)
    # test particle forcefield: X inherits the cutoffs of its real species
    if TPType == 'B':
        SPCutXB = SPCutBB
        SPCutXW = SPCutBW
        LDCutXB = LDCutBX = LDCutBB
        LDCutXW = LDCutWX = LDCutBW
    elif TPType == 'W':
        SPCutXB = SPCutBW
        SPCutXW = SPCutWW
        LDCutXB = LDCutBX = LDCutBW
        LDCutXW = LDCutWX = LDCutWW
    SP_XB = SP_XW = LD_XB = LD_BX = LD_XW = LD_WX = None
    if NB > 0:
        SP_XB = SP(Sys, Cut = SPCutXB, NKnot = NSPKnots, Filter = FilterXB, Label = "SP_XB")
        LD_XB = LD(Sys, Cut = LDCutXB, LowerCut = LDCutXB - LD_Delta, NKnot = NLDKnots, RhoMin = RhoMin, RhoMax = RhoMax, Label = "LD_XB", Filter = FilterXB_ordered)
        LD_BX = LD(Sys, Cut = LDCutBX, LowerCut = LDCutBX - LD_Delta, NKnot = NLDKnots, RhoMin = RhoMin, RhoMax = RhoMax, Label = "LD_BX", Filter = FilterBX_ordered)
    if NW > 0:
        SP_XW = SP(Sys, Cut = SPCutXW, NKnot = NSPKnots, Filter = FilterXW, Label = "SP_XW")
        LD_XW = LD(Sys, Cut = LDCutXW, LowerCut = LDCutXW - LD_Delta, NKnot = NLDKnots, RhoMin = RhoMin, RhoMax = RhoMax, Label = "LD_XW", Filter = FilterXW_ordered)
        LD_WX = LD(Sys, Cut = LDCutWX, LowerCut = LDCutWX - LD_Delta, NKnot = NLDKnots, RhoMin = RhoMin, RhoMax = RhoMax, Label = "LD_WX", Filter = FilterWX_ordered)
    # load forcefield parameters for system
    for P in [SP_BB, SP_WW, SP_BW, LD_BB, LD_WW, LD_BW, LD_WB]:
        if not P is None:
            Knots = pp.parseParamDict(FF_File, P.Name)['Knots']
            P.SetParam(Knots = Knots)
    # load forcefied parameters for test particle: each X potential reuses
    # the corresponding real-species knots, scaled by lambda_factor
    if TPType == 'B':
        pmap = {SP_XB: 'SP_BB', SP_XW: 'SP_BW',
                LD_XB: 'LD_BB', LD_BX: 'LD_BB', LD_XW: 'LD_BW', LD_WX: 'LD_WB'}
    elif TPType == 'W':
        pmap = {SP_XB: 'SP_BW', SP_XW: 'SP_WW',
                LD_XB: 'LD_WB', LD_BX: 'LD_BW', LD_XW: 'LD_WW', LD_WX: 'LD_WW'}
    for P in [SP_XB, SP_XW, LD_XB, LD_BX, LD_XW, LD_WX]:
        if (not P is None):
            Knots = pp.parseParamDict(FF_File, pmap[P])['Knots']
            P.SetParam(Knots = lambda_factor * Knots)
    # add only relevant potentials to the forcefield
    for P in [SP_BB, SP_WW, SP_BW, LD_BB, LD_WW, LD_BW, LD_WB, SP_XB, SP_XW, LD_XB, LD_BX, LD_XW, LD_WX]:
        if not P is None: Sys.ForceField.append(P)
    for P in Sys.ForceField: P.Arg.SetupHist(NBin = 10000, ReportNBin = 100)
    # system setup
    Sys.Load()
    if not AATraj is None:
        # Start from the mapped all-atom trajectory's first frame.
        Trj = pickleTraj(AATraj)
        Sys.BoxL = Trj.FrameData['BoxL']
        Sys.Arrays.Pos = Trj[0]
        # place test particle at origin
        #Sys.Arrays.Pos[-1, :] = [0.0, 0.0, 0.0]
    else:
        Sys.BoxL = 0.0
        sim.system.init.positions.CubicLatticeFill(Sys, L = 1000., Random = 0.1)
    sim.system.init.velocities.Canonical(Sys, Temp = TempSet)
    Int = Sys.Int
    # integrator setup: velocity-Verlet with a Langevin thermostat
    Int.Method = Int.Methods.VVIntegrate
    Int.Method.Thermostat = Int.Method.ThermostatLangevin
    Int.Method.LangevinGamma = 0.01
    Sys.TempSet = TempSet
    return Sys
def write2Lammps(Sys, lambda_index = 0):
    """Export *Sys* to a LAMMPS run for one lambda window (Python 2 code).

    Adds a per-step compute of the test particle's potential energy
    (written to <Prefix>_lambda<i>_tpe.dat), writes a SGE jobscript, and
    optionally qsubs it when the global AutoSubmit is true.
    """
    global Prefix
    global MinSteps, EquilSteps, ProdSteps, StepFreq
    global AutoSubmit
    MDPrefix = '%s_lambda%d' % (Prefix, lambda_index)
    #add compute for inserted particle energy
    s_before = '''
group X id %(XID)d
compute pertpe all pe/atom
compute tpe X reduce sum c_pertpe
fix write2file all ave/time %(STEPFREQ)d 1 %(STEPFREQ)d c_tpe file %(DELTAUFILE)s
'''
    # The test particle is the last atom, hence id NB+NW.
    d_before = {'XID': NB+NW, 'STEPFREQ': StepFreq, 'DELTAUFILE': MDPrefix + '_tpe.dat'}
    s_after = '''
unfix write2file
uncompute tpe
uncompute pertpe
'''
    # write lammps input script
    LammpsFiles, TrajFile = sim.export.lammps_tsanyal.MakeLammpsMD(Sys, NStepsMin = MinSteps, NStepsEquil = EquilSteps,
                                                                   NStepsProd = ProdSteps, WriteFreq = StepFreq,
                                                                   Prefix = MDPrefix, TrajFile = ".lammpstrj.gz",
                                                                   LammpsCommandsBefore = s_before % d_before,
                                                                   LammpsCommandsAfter = s_after)
    # write jobscript and submit
    s_job = '''
#!/bin/bash
#
#$ -V
#$ -cwd
#$ -j y
#$ -S /bin/bash
#$ -N %(JOBNAME)s
date
%(LAMMPSEXEC)s -in %(INFILE)s -log %(LOGFILE)s
'''
    d = {'JOBNAME': MDPrefix, 'LAMMPSEXEC': LammpsExec, 'INFILE': LammpsFiles[0], 'LOGFILE': MDPrefix + '.log'}
    JobFile = MDPrefix + '.sh'
    # Python 2 built-in `file`; equivalent to open().
    file(JobFile, 'w').write(s_job % d)
    if AutoSubmit: os.system('qsub %s' % JobFile) # need to submit from head node
def compute_Mu():
global Prefix, lambda_factors
# initialize arrays
Nlambda = len(lambda_factors)
dudl = []
var_dudl = []
mu= 0.0
err = 0.0
# reject data recorded during equilbration
start = int(ProdSteps / StepFreq)
for i, lambda_factor in enumerate(lambda_factors):
if lambda_factor == 0.0:
dudl.append(0.0)
var_dudl.append(0.0)
else:
DeltaUFile = '%s_lambda%d_tpe.dat' % (Prefix, i)
u_lambda = np.loadtxt(DeltaUFile)[-start:, 1]
dudl.append(np.mean(u_lambda))
var_dudl.append(np.std(u_lambda, ddof = 1))
# calculate mu and err using trapezoidal rule
dudl = np.array(dudl)
var_dudl = np.array(var_dudl)
print dudl, var_dudl
for i in range(Nlambda):
if i == 0 or i == Nlambda - 1:
mu += 0.5 * dudl[i]
err += 0.25 * var_dudl[i]
else:
mu += dudl[i]
err += var_dudl[i]
err /= np.sqrt(err)
ret = (mu, err)
OutPickle = Prefix + '_mu.pickle'
pickle.dump(ret, open(OutPickle, 'w'))
print 'mu = %g kcal/mol' % mu
print 'err = %g kcal/mol' % mu
def main(Mode = 'test'):
    """Dispatch on the run mode.

    'test'/'lammps' build a system per lambda window and write LAMMPS decks
    (jobs are actually queued only in 'lammps' mode); 'mu' post-processes
    the recorded energies into a chemical potential.
    """
    global NB, NW, AATraj, TPType, FF_File, Prefix
    global AutoSubmit
    if Mode in ('test', 'lammps'):
        AutoSubmit = (Mode == 'lammps')
        for idx, lam in enumerate(lambda_factors):
            system = makeTISys(TPType = TPType, lambda_factor = lam)
            write2Lammps(system, lambda_index = idx)
    if Mode == 'mu':
        compute_Mu()
######## MAIN ########
# Command line: NB NW TPType FFType Mode [refNB]
#   NB/NW   -- number of benzene/water molecules
#   TPType  -- test particle type
#   FFType  -- force-field variant selecting the ff_ref file
#   Mode    -- 'test', 'lammps' or 'mu' (see main())
#   refNB   -- reference NB for the force-field file (default 250)
if __name__ == '__main__':
    NB = int(sys.argv[1])
    NW = int(sys.argv[2])
    TPType = sys.argv[3]
    FFType = sys.argv[4]
    Mode = sys.argv[5]
    if len(sys.argv) > 6: refNB = int(sys.argv[6])
    else: refNB = 250
    # mapped all-atom trajectory and CG force-field file for this composition
    AATraj = '/home/cask0/home/tsanyal/benwat/data/gromacs/NB%dNW%d/NB%dNW%d_prod_mapped.lammpstrj.gz' % (NB, NW, NB, NW)
    FF_File = '/home/cask0/home/tsanyal/benwat/data/cgff/ff_ref/NB%d/NB%dNW%d_%s_ff.dat' % (refNB, refNB, 500-refNB, FFType)
    Prefix = 'ti_%s' % FFType
    main(Mode = Mode)
|
tanmoy7989/benwat
|
ti.py
|
Python
|
gpl-2.0
| 11,514
|
[
"Gromacs",
"LAMMPS"
] |
2c2c182afcc6ae2724125a853e21bad9d366bda14e7da99e757b77d7cda5281e
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at https://mozilla.org/MPL/2.0/.
from pathlib import Path
import pytest
import os.path as osp
from sisl import geom
from sisl import Geometry, Atom
from sisl.io import fdfSileSiesta, SileError
from sisl.messages import SislWarning
from sisl.unit.siesta import unit_convert
import numpy as np
pytestmark = [pytest.mark.io, pytest.mark.siesta, pytest.mark.fdf,
pytest.mark.filterwarnings("ignore", message="*number of supercells")
]
_dir = osp.join('sisl', 'io', 'siesta')
def test_fdf1(sisl_tmp, sisl_system):
    """Write a geometry to fdf and re-read keys repeatedly from one handle."""
    fname = sisl_tmp('gr.fdf', _dir)
    sisl_system.g.write(fdfSileSiesta(fname, 'w'))
    fdf = fdfSileSiesta(fname)
    str(fdf)
    with fdf:
        fdf.readline()
        # repeated reads of the same key must keep working within the handle
        for _ in range(3):
            assert fdf.get('LatticeConstant') > 0.
        fdf.read_supercell()
        fdf.read_geometry()
def test_fdf2(sisl_tmp, sisl_system):
    """Round-trip a geometry through fdf and compare against the reference."""
    fname = sisl_tmp('gr.fdf', _dir)
    ref = sisl_system.g
    ref.write(fdfSileSiesta(fname, 'w'))
    read = fdfSileSiesta(fname).read_geometry()
    # Assert they are the same
    assert np.allclose(read.cell, ref.cell)
    assert np.allclose(read.xyz, ref.xyz)
    for ia in read:
        assert read.atoms[ia].Z == ref.atoms[ia].Z
        assert read.atoms[ia].tag == ref.atoms[ia].tag
def test_fdf_units(sisl_tmp, sisl_system):
    """The geometry round-trip must be invariant under every output unit."""
    fname = sisl_tmp('gr.fdf', _dir)
    writer = fdfSileSiesta(fname, 'w')
    ref = sisl_system.g
    for unit in ('bohr', 'ang', 'fractional', 'frac'):
        writer.write_geometry(ref, unit=unit)
        back = fdfSileSiesta(fname).read_geometry()
        assert np.allclose(ref.cell, back.cell)
        assert np.allclose(ref.xyz, back.xyz)
        for ia in ref:
            assert ref.atoms[ia].Z == back.atoms[ia].Z
            assert ref.atoms[ia].tag == back.atoms[ia].tag
def test_supercell(sisl_tmp):
    """Read lattices defined in Ang, Bohr and via Latticeparameters."""
    f = sisl_tmp('file.fdf', _dir)
    # explicit lattice vectors, lattice constant in Ang
    lines = [
        'Latticeconstant 1. Ang',
        '%block Latticevectors',
        ' 1. 1. 1.',
        ' 0. 0. 1.',
        ' 1. 0. 1.',
        '%endblock',
    ]
    with open(f, 'w') as fh:
        fh.write('\n'.join(lines))
    cell = np.array([[1.]*3, [0, 0, 1], [1, 0, 1]])
    sc = fdfSileSiesta(f).read_supercell()
    assert np.allclose(sc.cell, cell)
    # same vectors with the lattice constant in Bohr -> cell scales by the
    # Bohr -> Ang conversion factor
    lines = [
        'Latticeconstant 1. Bohr',
        '%block Latticevectors',
        ' 1. 1. 1.',
        ' 0. 0. 1.',
        ' 1. 0. 1.',
        '%endblock',
    ]
    with open(f, 'w') as fh:
        fh.write('\n'.join(lines))
    sc = fdfSileSiesta(f).read_supercell()
    assert np.allclose(sc.cell, cell * unit_convert('Bohr', 'Ang'))
    # cubic cell through Latticeparameters (lengths + angles), constant 2 Ang
    cell = np.diag([2.] * 3)
    lines = [
        'Latticeconstant 2. Ang',
        '%block Latticeparameters',
        ' 1. 1. 1. 90. 90. 90.',
        '%endblock',
    ]
    with open(f, 'w') as fh:
        fh.write('\n'.join(lines))
    sc = fdfSileSiesta(f).read_supercell()
    assert np.allclose(sc.cell, cell)
def test_supercell_fail(sisl_tmp):
    """A lattice-vector block without a lattice constant raises SileError."""
    fname = sisl_tmp('file.fdf', _dir)
    content = '\n'.join([
        '%block Latticevectors',
        ' 1. 1. 1.',
        ' 0. 0. 1.',
        ' 1. 0. 1.',
        '%endblock',
    ])
    with open(fname, 'w') as fh:
        fh.write(content)
    with pytest.raises(SileError):
        fdfSileSiesta(fname).read_supercell()
def test_geometry(sisl_tmp):
    """Read geometries with and without an explicit NumberOfAtoms key."""
    f = sisl_tmp('file.fdf', _dir)
    sc_lines = [
        'Latticeconstant 1. Ang',
        '%block latticeparameters',
        ' 1. 1. 1. 90. 90. 90.',
        '%endblock',
    ]
    lines = [
        'NumberOfAtoms 2',
        '%block chemicalSpeciesLabel',
        ' 1 6 C',
        ' 2 12 H',
        '%endblock',
        'AtomicCoordinatesFormat Ang',
        '%block atomiccoordinatesandatomicspecies',
        ' 1. 1. 1. 1',
        ' 0. 0. 1. 1',
        ' 1. 0. 1. 2',
        '%endblock',
    ]
    with open(f, 'w') as fh:
        fh.write('\n'.join(sc_lines) + '\n')
        fh.write('\n'.join(lines))
    # NumberOfAtoms 2 limits the read to the first two rows (both species 1 -> Z=6)
    fdf = fdfSileSiesta(f, base=sisl_tmp.getbase())
    g = fdf.read_geometry()
    assert g.na == 2
    assert np.allclose(g.xyz, [[1.] * 3,
                               [0, 0, 1]])
    assert g.atoms[0].Z == 6
    assert g.atoms[1].Z == 6
    # default read # of atoms from list
    with open(f, 'w') as fh:
        fh.write('\n'.join(sc_lines) + '\n')
        # drop the NumberOfAtoms line -> all coordinate rows are read
        fh.write('\n'.join(lines[1:]))
    fdf = fdfSileSiesta(f, base=sisl_tmp.getbase())
    g = fdf.read_geometry()
    assert g.na == 3
    assert np.allclose(g.xyz, [[1.] * 3,
                               [0, 0, 1],
                               [1, 0, 1]])
    assert g.atoms[0].Z == 6
    assert g.atoms[1].Z == 6
    assert g.atoms[2].Z == 12
def test_re_read(sisl_tmp):
    """The first occurrence of a flag wins, also on repeated reads."""
    fname = sisl_tmp('file.fdf', _dir)
    lines = ['Flag1 date\n', 'Flag1 not-date\n', 'Flag1 not-date-2\n', 'Flag3 true\n']
    with open(fname, 'w') as fh:
        fh.writelines(lines)
    fdf = fdfSileSiesta(fname)
    for _ in range(10):
        assert fdf.get('Flag1') == 'date'
    assert fdf.get('Flag3')
def test_get_set(sisl_tmp):
    """set() rewrites a key in the file and get() immediately reflects it."""
    fname = sisl_tmp('file.fdf', _dir)
    with open(fname, 'w') as fh:
        fh.write('Flag1 date\n')
    fdf = fdfSileSiesta(fname)
    assert fdf.get('Flag1') == 'date'
    # same set/get sequence as before, expressed as a loop
    for value in ('not-date', 'date', 'date-date'):
        fdf.set('Flag1', value)
        assert fdf.get('Flag1') == value
    fdf.set('Flag1', 'date-date', keep=False)
def test_get_block(sisl_tmp):
    """Blocks are returned as a list of their content lines."""
    fname = sisl_tmp('file.fdf', _dir)
    with open(fname, 'w') as fh:
        fh.write('%block MyBlock\n date\n%endblock\n')
    fdf = fdfSileSiesta(fname)
    block = fdf.get('MyBlock')
    assert isinstance(block, list)
    assert block[0] == 'date'
    assert 'block' in fdf.print("MyBlock", block)
def test_include(sisl_tmp):
    """Exercise %include chains, '<' redirection, units and case-insensitivity."""
    f = sisl_tmp('file.fdf', _dir)
    with open(f, 'w') as fh:
        fh.write('Flag1 date\n')
        fh.write('# Flag2 comment\n')
        fh.write('Flag2 date2\n')
        fh.write('# Flag3 is read through < from file hello\n')
        fh.write('Flag3 Sub < hello\n')
        fh.write('FakeInt 1\n')
        fh.write('Test 1. eV\n')
        fh.write(' %INCLUDE file2.fdf\n')
        fh.write('TestRy 1. Ry\n')
        fh.write('%block Hello < hello\n')
        fh.write('TestLast 1. eV\n')
    hello = sisl_tmp('hello', _dir)
    with open(hello, 'w') as fh:
        fh.write('Flag4 hello\n')
        fh.write('# Comments should be discarded\n')
        fh.write('Flag3 test\n')
        fh.write('Sub sub-test\n')
    file2 = sisl_tmp('file2.fdf', _dir)
    with open(file2, 'w') as fh:
        fh.write('Flag4 non\n')
        fh.write('FakeReal 2.\n')
        fh.write(' %incLude file3.fdf')
    file3 = sisl_tmp('file3.fdf', _dir)
    with open(file3, 'w') as fh:
        fh.write('Sub level\n')
        fh.write('Third level\n')
        fh.write('MyList [1 , 2 , 3]\n')
    fdf = fdfSileSiesta(f, base=sisl_tmp.getbase())
    assert fdf.includes() == [Path(hello), Path(file2), Path(file3)]
    assert fdf.get('Flag1') == 'date'
    assert fdf.get('Flag2') == 'date2'
    assert fdf.get('Flag3') == 'test'
    assert fdf.get('Flag4') == 'non'
    assert fdf.get('FLAG4') == 'non'
    assert fdf.get('Fakeint') == 1
    assert fdf.get('Fakeint', '0') == '1'
    assert fdf.get('Fakereal') == 2.
    assert fdf.get('Fakereal', 0.) == 2.
    assert fdf.get('test', 'eV') == pytest.approx(1.)
    assert fdf.get('test', with_unit=True)[0] == pytest.approx(1.)
    assert fdf.get('test', with_unit=True)[1] == 'eV'
    assert fdf.get('test', unit='Ry') == pytest.approx(unit_convert('eV', 'Ry'))
    assert fdf.get('testRy') == pytest.approx(unit_convert('Ry', 'eV'))
    assert fdf.get('testRy', with_unit=True)[0] == pytest.approx(1.)
    assert fdf.get('testRy', with_unit=True)[1] == 'Ry'
    assert fdf.get('testRy', unit='Ry') == pytest.approx(1.)
    assert fdf.get('Sub') == 'sub-test'
    assert fdf.get('Third') == 'level'
    assert fdf.get('test-last', with_unit=True)[0] == pytest.approx(1.)
    assert fdf.get('test-last', with_unit=True)[1] == 'eV'
    # Currently lists are not implemented
    #assert np.allclose(fdf.get('MyList'), np.arange(3) + 1)
    #assert np.allclose(fdf.get('MyList', []), np.arange(3) + 1)
    # Read a block
    # BUG FIX: the original `open(...)` here leaked the file handle
    with open(sisl_tmp('hello', _dir)) as fh:
        ll = fh.readlines()
    ll.pop(1)
    assert fdf.get('Hello') == [l.replace('\n', '').strip() for l in ll]
def test_xv_preference(sisl_tmp):
    """A siesta.XV next to the fdf takes precedence unless order=['fdf']."""
    g = geom.graphene()
    g.write(sisl_tmp('file.fdf', _dir))
    # shift the first x-coordinate AFTER writing the fdf, then write the XV;
    # the two files now differ by 1 Ang in that coordinate
    g.xyz[0, 0] += 1.
    g.write(sisl_tmp('siesta.XV', _dir))
    sc = fdfSileSiesta(sisl_tmp('file.fdf', _dir)).read_supercell(True)
    g2 = fdfSileSiesta(sisl_tmp('file.fdf', _dir)).read_geometry(True)
    # default read picks up the XV -> matches the shifted geometry
    assert np.allclose(sc.cell, g.cell)
    assert np.allclose(g.cell, g2.cell)
    assert np.allclose(g.xyz, g2.xyz)
    g2 = fdfSileSiesta(sisl_tmp('file.fdf', _dir)).read_geometry(order=['fdf'])
    assert np.allclose(g.cell, g2.cell)
    # fdf holds the pre-shift coordinates; shifting them reproduces g
    g2.xyz[0, 0] += 1.
    assert np.allclose(g.xyz, g2.xyz)
def test_geom_order(sisl_tmp):
    """read_geometry honors the order= priority between fdf, XV and nc."""
    gfdf = geom.graphene()
    # shifted copies so each source is distinguishable by its coordinates
    gxv = gfdf.copy()
    gxv.xyz[0, 0] += 0.5
    gnc = gfdf.copy()
    gnc.xyz[0, 0] += 0.5
    gfdf.write(sisl_tmp('siesta.fdf', _dir))
    # Create fdf-file
    fdf = fdfSileSiesta(sisl_tmp('siesta.fdf', _dir))
    # no nc file exists yet -> restricting the order to 'nc' yields nothing
    assert fdf.read_geometry(True, order=['nc']) is None
    gxv.write(sisl_tmp('siesta.XV', _dir))
    gnc.write(sisl_tmp('siesta.nc', _dir))
    # Should read from XV
    g = fdf.read_geometry(True)
    assert np.allclose(g.xyz, gxv.xyz)
    g = fdf.read_geometry(order=['nc', 'fdf'])
    assert np.allclose(g.xyz, gnc.xyz)
    g = fdf.read_geometry(order=['fdf', 'nc'])
    assert np.allclose(g.xyz, gfdf.xyz)
    g = fdf.read_geometry(order=['xv', 'nc'])
    assert np.allclose(g.xyz, gxv.xyz)
def test_geom_constraints(sisl_tmp):
    """Smoke test: CONSTRAIN named-data must not break fdf writing."""
    g = geom.graphene().tile(2, 0).tile(2, 1)
    g['CONSTRAIN'] = 0
    g['CONSTRAIN-x'] = 2
    g['CONSTRAIN-y'] = [1, 3, 4, 5]
    g['CONSTRAIN-z'] = range(len(g))
    g.write(sisl_tmp('siesta.fdf', _dir))
def test_h2_dynamical_matrix(sisl_files):
    """Phonon frequencies of H2 from a stored dynamical matrix.

    All combinations of the correction flags must at least run; the exact
    frequencies are asserted only for the fully-corrected combination.
    """
    si = fdfSileSiesta(sisl_files(_dir, 'H2_dynamical_matrix.fdf'))
    trans_inv = [True, False]
    sum0 = trans_inv[:]
    hermitian = trans_inv[:]
    # conversion factor eV -> cm^-1
    eV2cm = 8065.54429
    hw_true = [-88.392650, -88.392650, -0.000038, -0.000001, 0.000025, 3797.431825]
    from itertools import product
    for ti, s0, herm in product(trans_inv, sum0, hermitian):
        dyn = si.read_dynamical_matrix(trans_inv=ti, sum0=s0, hermitian=herm)
        hw = dyn.eigenvalue().hw
        if ti and s0 and herm:
            assert np.allclose(hw * eV2cm, hw_true, atol=1e-4)
def test_dry_read(sisl_tmp):
    """Dry-run every read_* method on a bare fdf file."""
    # This test runs the read-functions. They aren't expected to actually read anything,
    # it is only a dry-run.
    file = sisl_tmp('siesta.fdf', _dir)
    geom.graphene().write(file)
    fdf = fdfSileSiesta(file)
    read_methods = set(m for m in dir(fdf) if m.startswith("read_"))
    output = dict(output=True)
    # per-method keyword arguments, keyed by the name without "read_"
    kwargs = {
        "supercell": output,
        "geometry": output,
        "grid": dict(name="rho"),
    }
    # the nsc read warns because no supplementary output files exist
    with pytest.warns(SislWarning):
        assert np.allclose(fdf.read_supercell_nsc(), (1, 1, 1))
    read_methods.remove("read_supercell_nsc")
    geom_methods = set(f"read_{x}" for x in ("basis", "supercell", "geometry"))
    read_methods -= geom_methods
    for methodname in read_methods:
        # methodname[5:] strips the "read_" prefix to match the kwargs keys
        kwarg = kwargs.get(methodname[5:], dict())
        assert getattr(fdf, methodname)(**kwarg) is None
    for methodname in geom_methods:
        # Also run these, but dont assert None due to the graphene values being present
        # in the fdf. The read functions will still go dry-running through eg. nc-files.
        kwarg = kwargs.get(methodname[5:], dict())
        getattr(fdf, methodname)(**kwarg)
def test_fdf_argumentparser(sisl_tmp):
    """Creating the ArgumentParser from an fdf file should not raise."""
    fname = sisl_tmp('file.fdf', _dir)
    content = 'Flag1 date\nFlag1 not-date\nFlag1 not-date-2\nFlag3 true\n'
    with open(fname, 'w') as fh:
        fh.write(content)
    fdfSileSiesta(fname).ArgumentParser()
def test_fdf_fe_basis(sisl_files):
    """fe.fdf resolves to a single Fe atom carrying 15 orbitals."""
    # local renamed so it no longer shadows the imported `geom` module
    fe = fdfSileSiesta(sisl_files(_dir, 'fe.fdf')).read_geometry()
    assert fe.no == 15
    assert fe.na == 1
def test_fdf_pao_basis():
    """Parse PAO.Basis blocks, with and without polarization shells."""
    fdf = fdfSileSiesta
    # plain three-species block: Mg (1 shell), C and O (2 shells each)
    block = """
Mg 1 # Species label, number of l-shells
 n=3 0 1 # n, l, Nzeta
 6.620
 1.000
C 2 # Species label, number of l-shells
 n=2 0 1 # n, l, Nzeta
 4.192
 1.000
 n=2 1 1 # n, l, Nzeta
 4.870
 1.000
O 2 # Species label, number of l-shells
 n=2 0 1 # n, l, Nzeta
 3.305
 1.000
 n=2 1 1 # n, l, Nzeta
 3.937
 1.000
""".splitlines()
    atom_orbs = fdf._parse_pao_basis(block)
    assert len(atom_orbs) == 3
    # orbital counts follow 2l+1 per zeta: Mg s; C/O s + p
    assert len(atom_orbs["Mg"]) == 1
    assert len(atom_orbs["C"]) == 4
    assert len(atom_orbs["O"]) == 4
    # specie-restricted parsing must agree with the full parse
    for i, (tag, orbs) in enumerate(atom_orbs.items()):
        specie_orbs = fdf._parse_pao_basis(block, specie=tag)
        assert specie_orbs == orbs
    # double-zeta block with polarization (P) shells; parsed from the raw string
    block = """
Fe_SOC 2 # Species label, number of l-shells
 n=4 0 2 P 1 # n, l, Nzeta, Polarization, NzetaPol
   7.329      6.153
   1.000      1.000
 n=3 2 2 # n, l, Nzeta
   4.336      2.207
   1.000      1.000
Pt_SOC 2 # Species label, number of l-shells
 n=6 0 2 P 1 # n, l, Nzeta, Polarization, NzetaPol
   7.158      6.009
   1.000      1.000
 n=5 2 2 # n, l, Nzeta
   5.044      3.022
   1.000      1.000
"""
    atom_orbs = fdf._parse_pao_basis(block)
    assert len(atom_orbs) == 2
    assert len(atom_orbs["Fe_SOC"]) == 5 + 10
    assert len(atom_orbs["Pt_SOC"]) == 5 + 10
    for i, (tag, orbs) in enumerate(atom_orbs.items()):
        specie_orbs = fdf._parse_pao_basis(block, specie=tag)
        assert specie_orbs == orbs
def test_fdf_gz(sisl_files):
    """get() must work through gzipped files and nested %include chains."""
    def check(fdf, expected):
        for key, value in expected.items():
            assert fdf.get(key) == value

    base = osp.join(_dir, 'fdf')
    # gzipped top-level file pulling in plain and gzipped includes
    fdf = fdfSileSiesta(sisl_files(base, 'main.fdf.gz'))
    check(fdf, {"Main.Foo": "hello", "Main.Bar": "world",
                "Lvl2.Foo": "world", "Lvl2.Bar": "hello",
                "Lvl3.Foo": "world3", "Lvl3.Bar": "hello3"})
    # plain file including a gzipped one
    fdf = fdfSileSiesta(sisl_files(base, 'level2.fdf'))
    check(fdf, {"Lvl2.Foo": "world", "Lvl2.Bar": "hello",
                "Lvl3.Foo": "world3", "Lvl3.Bar": "hello3"})
|
zerothi/sisl
|
sisl/io/siesta/tests/test_fdf.py
|
Python
|
mpl-2.0
| 15,227
|
[
"SIESTA"
] |
eb70974b781b0e153f52083b814769df693bbc30741bc320070197a796976a04
|
from django.db import models
from edc_base.audit_trail import AuditTrail
from edc_base.model.models import BaseUuidModel
from edc_constants.choices import ARV_STATUS_WITH_NEVER
from edc_constants.choices import YES_NO
from edc_constants.constants import NOT_APPLICABLE
from edc_sync.models import SyncModelMixin
from edc_visit_tracking.models.crf_inline_model_mixin import CrfInlineModelMixin
from ..managers import MaternalArvPostModManager
from ..maternal_choices import REASON_FOR_HAART, ARV_DRUG_LIST, DOSE_STATUS, ARV_MODIFICATION_REASON
from .maternal_crf_model import MaternalCrfModel
class MaternalArvPost (MaternalCrfModel):
    """ A model completed by the user on the mother's ARVs administered post-partum. """

    # Screening question; answering 'No' ends the form (see help_text)
    on_arv_since = models.CharField(
        max_length=25,
        choices=YES_NO,
        verbose_name=("Was the mother supposed to be on triple ARVs any time since the last"
                      " attended scheduled visit?"),
        help_text="If 'NO' End. Otherwise continue go to section one",)

    # Reason the mother is on triple ARVs (HAART)
    on_arv_reason = models.CharField(
        verbose_name="Reason for triple ARVs ",
        max_length=25,
        choices=REASON_FOR_HAART,
        default=NOT_APPLICABLE,
        help_text="",)

    # Free-text detail when on_arv_reason is 'other'
    on_arv_reason_other = models.TextField(
        max_length=35,
        verbose_name="if other, specify",
        blank=True,
        null=True,)

    # ARV treatment/prophylaxis status at or since the last visit
    arv_status = models.CharField(
        verbose_name=("What is the status of the participant's antiretroviral"
                      " treatment / prophylaxis at this visit or since the last visit? "),
        max_length=25,
        choices=ARV_STATUS_WITH_NEVER,
        default=NOT_APPLICABLE,)

    history = AuditTrail()

    def visit(self):
        # Accessor used by edc visit-tracking to locate the related visit
        return self.maternal_visit

    def __unicode__(self):
        # Python 2 string representation (project runs on a py2-era Django stack)
        return unicode(self.maternal_visit)

    class Meta:
        app_label = 'mb_maternal'
        verbose_name = "Maternal ARV Post"
        verbose_name_plural = "Maternal ARV Post"
class MaternalArvPostMod(CrfInlineModelMixin, SyncModelMixin, BaseUuidModel):
    """ Maternal ARV modifications post-partum.

    if art_status never, no_mod or N/A then this is not required"""

    # Inline parent: one MaternalArvPost may have several modification rows
    maternal_arv_post = models.ForeignKey(MaternalArvPost)

    # Which ARV drug was modified
    arv_code = models.CharField(
        verbose_name="ARV Code",
        max_length=25,
        choices=ARV_DRUG_LIST)

    dose_status = models.CharField(
        max_length=25,
        choices=DOSE_STATUS,
        verbose_name="Dose Status")

    modification_date = models.DateField(
        verbose_name="Date ARV Modified")

    modification_code = models.CharField(
        max_length=50,
        choices=ARV_MODIFICATION_REASON,
        verbose_name="Reason for Modification")

    objects = MaternalArvPostModManager()

    history = AuditTrail()

    class Meta:
        app_label = 'mb_maternal'
        verbose_name = 'Maternal ARVs Post: Mods'
        verbose_name_plural = 'Maternal ARVs Post: Mods'
        # one modification row per drug per date within a parent CRF
        unique_together = ('maternal_arv_post', 'arv_code', 'modification_date')
class MaternalArvPostAdh(MaternalCrfModel):
    """Maternal ARV adherence post-partum"""

    # Number of individual doses missed since the last attended visit
    missed_doses = models.IntegerField(
        verbose_name=("Since the last attended last scheduled visit, how many doses of"
                      " triple ARVs were missed? "))

    # Number of whole days without triple ARVs
    missed_days = models.IntegerField(
        verbose_name=("Since the last attended scheduled visit, how many entire days"
                      " were triple ARVS not taken?"),
        default=0)

    # Days missed before a provider-ordered discontinuation, if any
    missed_days_discnt = models.IntegerField(
        verbose_name=("If triple ARVs discontinued by health provider, how many days were triple ARVs missed"
                      " prior to discontinuation?"),
        default=0)

    comment = models.TextField(
        max_length=250,
        verbose_name="Comment",
        blank=True,
        null=True)

    history = AuditTrail()

    class Meta:
        app_label = 'mb_maternal'
        verbose_name = "Maternal ARVs Post: Adherence"
        verbose_name_plural = "Maternal ARVs Post: Adherence"
|
botswana-harvard/microbiome
|
microbiome/apps/mb_maternal/models/maternal_arv_post.py
|
Python
|
gpl-2.0
| 4,064
|
[
"VisIt"
] |
213edcbc400d51edac2724fa4d4b589d16e6e52fe9f9eeb64c8b2e506cd0c115
|
from __future__ import absolute_import
import re
import json
import time
import logging
import random
import six
from django.conf import settings
from django.core.cache import cache
from six.moves.urllib.parse import parse_qsl
from sentry import http
from sentry.utils.strings import count_sprintf_parameters
logger = logging.getLogger(__name__)
SOFT_TIMEOUT = 600
SOFT_TIMEOUT_FUZZINESS = 10
HARD_TIMEOUT = 7200
REACT_MAPPING_URL = ('https://raw.githubusercontent.com/facebook/'
'react/master/scripts/error-codes/codes.json')
error_processors = {}
def is_expired(ts, soft_timeout=None, fuzziness=None):
    """Return True when the cache timestamp *ts* has passed the soft timeout.

    *ts* is the ``time.time()`` value stored alongside a cached mapping.  The
    horizon is fuzzed by up to *fuzziness* seconds so that entries written
    together do not all refresh at once.  ``soft_timeout``/``fuzziness``
    default to the module-level SOFT_TIMEOUT / SOFT_TIMEOUT_FUZZINESS and
    exist mainly to make the check testable.
    """
    if soft_timeout is None:
        soft_timeout = SOFT_TIMEOUT
    if fuzziness is None:
        fuzziness = SOFT_TIMEOUT_FUZZINESS
    # BUG FIX: the original comparison was inverted (`ts > ...`), which
    # classified fresh entries as expired (forcing a refetch) and stale
    # entries as usable.
    return ts < (time.time() - soft_timeout - random.random() * fuzziness)
class Processor(object):
    """Per-vendor rewriter for minified JS error messages.

    Holds the vendor name, the URL of the vendor's error-code mapping, a
    compiled regex that recognizes the vendor's minified message and the
    callback that performs the actual rewrite.
    """

    def __init__(self, vendor, mapping_url, regex, func):
        self.vendor = vendor
        self.mapping_url = mapping_url
        self.regex = re.compile(regex)
        self.func = func

    def load_mapping(self):
        """Return the vendor's error-code mapping via a two-tier cache.

        The cached value is stored together with its write timestamp for up
        to HARD_TIMEOUT; within the soft window it is served directly,
        otherwise the mapping is re-fetched.  On fetch failure a stale
        cached copy is returned when available.
        """
        key = 'javascript.errormapping:%s' % self.vendor
        mapping = cache.get(key)
        cached_rv = None
        if mapping is not None:
            ts, cached_rv = json.loads(mapping)
            # NOTE(review): verify that is_expired()'s freshness semantics
            # match this guard -- the guard expects True only for stale ts.
            if not is_expired(ts):
                return cached_rv
        try:
            http_session = http.build_session()
            response = http_session.get(self.mapping_url,
                                        allow_redirects=True,
                                        timeout=settings.SENTRY_SOURCE_FETCH_TIMEOUT,
                                        )
            # Make sure we only get a 2xx to prevent caching bad data
            response.raise_for_status()
            data = response.json()
            cache.set(key, json.dumps([time.time(), data]), HARD_TIMEOUT)
        except Exception:
            # fall back to a stale copy rather than failing outright
            if cached_rv is None:
                raise
            return cached_rv
        return data

    def try_process(self, exc):
        """Attempt to rewrite *exc* in place; return True when handled."""
        if not exc['value']:
            return False
        match = self.regex.search(exc['value'])
        if match is None:
            return False
        mapping = self.load_mapping()
        return self.func(exc, match, mapping)
def minified_error(vendor, mapping_url, regex):
    """Decorator factory registering the decorated function as the error
    processor for *vendor* in the module-level ``error_processors`` registry.

    ``regex`` must capture the pieces the processor callback needs from the
    minified message.
    """
    def decorator(func):
        error_processors[vendor] = Processor(vendor, mapping_url, regex, func)
        # BUG FIX: return the function so the decorated module-level name
        # stays callable; the original returned None, rebinding the name.
        return func
    return decorator
@minified_error(
    vendor='react',
    mapping_url=REACT_MAPPING_URL,
    regex=r'Minified React error #(\d+); visit https?://[^?]+\?(\S+)'
)
def process_react_exception(exc, match, mapping):
    """Expand a minified React error into its full message.

    The error id selects a sprintf-style template from the mapping and the
    query string of the linked URL supplies its arguments.
    """
    error_id, query = match.groups()
    fmt = mapping.get(error_id)
    if fmt is None:
        return False
    expected = count_sprintf_parameters(fmt)
    supplied = [value for key, value in parse_qsl(query, keep_blank_values=True)
                if key == 'args[]']
    # Due to truncated error messages we sometimes might not be able to
    # get all arguments. In that case we fill up missing parameters for
    # the format string with <redacted>.
    supplied += ['<redacted>'] * (expected - len(supplied))
    exc['value'] = fmt % tuple(supplied[:expected])
    return True
def rewrite_exception(data):
    """Rewrite an exception in an event if needed.  Updates the exception
    in place and returns `True` if a modification was performed or `False`
    otherwise.
    """
    exceptions = data.get('sentry.interfaces.Exception')
    if not exceptions:
        return False
    modified = False
    for exc in exceptions['values']:
        # first processor that handles the exception wins; a failing
        # processor is logged and the next one is tried
        for proc in six.itervalues(error_processors):
            try:
                if not proc.try_process(exc):
                    continue
                modified = True
            except Exception as e:
                logger.error('Failed to run processor "%s": %s',
                             proc.vendor, e, exc_info=True)
                continue
            break
    return modified
|
alexm92/sentry
|
src/sentry/lang/javascript/errormapping.py
|
Python
|
bsd-3-clause
| 3,763
|
[
"VisIt"
] |
c5adc601d03d7ff7fafcdfaf5de401eab9e1d95f8f2eb468022ae1c6a3845cdf
|
#!/usr/bin/env python
#
# threat_note v3.0 #
# Developed By: Brian Warehime #
# Defense Point Security (defpoint.com) #
# October 26, 2015 #
#
import argparse
import csv
import hashlib
import io
import random
import re
import time
from flask import flash
from flask import Flask
from flask import make_response
from flask import redirect
from flask import render_template
from flask import request
from flask import url_for
from flask.ext.login import current_user
from flask.ext.login import login_required
from flask.ext.login import login_user
from flask.ext.login import LoginManager
from flask.ext.login import logout_user
from flask.ext.wtf import Form
from libs import circl
from libs import cuckoo
from libs import database
from libs import farsight
from libs import helpers
from libs import opendns
from libs import passivetotal
from libs import shodan
from libs import virustotal
from libs import whoisinfo
from libs.API import tn_api
from libs.database import db_session
from libs.database import init_db
from libs.models import Indicator
from libs.models import Setting
from libs.models import User
from werkzeug.datastructures import ImmutableMultiDict
from wtforms import PasswordField
from wtforms import StringField
from wtforms.validators import DataRequired
#
# Configuration #
#
app = Flask(__name__)
# NOTE(review): hard-coded SECRET_KEY and debug=True are unsafe for
# production -- move to external configuration before deployment.
app.config['SECRET_KEY'] = 'yek_terces'
app.debug = True
app.template_debug = True
# Flask-Login session management; unauthenticated users go to /login
lm = LoginManager()
lm.init_app(app)
lm.login_view = 'login'
# Setup Database if Necessary
init_db()
app.register_blueprint(tn_api)
class LoginForm(Form):
    """WTForms login form; authenticates against the User table."""

    user = StringField('user', validators=[DataRequired()])
    password = PasswordField('password', validators=[DataRequired()])

    def get_user(self):
        # Case-insensitive username match via .lower().
        # NOTE(review): passwords are stored as unsalted MD5 digests --
        # weak by modern standards; consider a salted KDF.
        return db_session.query(User).filter_by(user=self.user.data.lower(), password=hashlib.md5(
            self.password.data.encode('utf-8')).hexdigest()).first()
class RegisterForm(Form):
    """WTForms registration form; `key` is the new account's password/API key."""

    user = StringField('user', validators=[DataRequired()])
    key = PasswordField('key', validators=[DataRequired()])
    email = StringField('email')
#
# Creating routes #
#
@lm.user_loader
def load_user(id):
    """Flask-Login callback: resolve a session user id to a User row."""
    # `id` shadows the builtin, but is the conventional Flask-Login name
    return db_session.query(User).filter_by(_id=id).first()
@app.route('/register', methods=['GET', 'POST'])
def register():
    """Create a new account and log it in; seeds the Setting row on first use."""
    form = RegisterForm()
    if form.validate_on_submit():
        user = db_session.query(User).filter_by(user=form.user.data.lower()).first()
        if user:
            flash('User exists.')
        else:
            user = User(form.user.data.lower(), form.key.data, form.email.data)
            db_session.add(user)
            # Set up the settings table when the first user is registered.
            if not Setting.query.filter_by(_id=1).first():
                # positional defaults: 14 feature toggles 'off' + 12 empty API keys
                settings = Setting('off', 'off', 'off', 'off', 'off', 'off', 'off', 'off', 'off', 'off', 'off', 'off', 'off', 'off', '', '', '',
                                   '', '', '', '', '', '', '', '', '')
                db_session.add(settings)
            # Commit all database changes once they have been completed
            db_session.commit()
            login_user(user)
        # runs after either branch: redirect only when a login succeeded
        if current_user.is_authenticated:
            return redirect(url_for('home'))
    return render_template('register.html', form=form, title='Register')
@app.route('/login', methods=['GET', 'POST'])
def login():
    """Authenticate via LoginForm and start a Flask-Login session."""
    form = LoginForm()
    if form.validate_on_submit():
        user = form.get_user()
        if not user:
            flash('Invalid User or Key.')
        else:
            login_user(user)
        # runs after either branch: redirect only when a login succeeded
        if current_user.is_authenticated:
            return redirect(url_for('home'))
    return render_template('login.html', form=form, title='Login')
@app.route('/logout')
def logout():
    """End the Flask-Login session and return to the login page."""
    logout_user()
    return redirect(url_for('login'))
@app.route('/', methods=['GET'])
@login_required
def home():
    """Dashboard: recent indicators, campaign/type percentage charts, tag cloud."""
    try:
        counts = Indicator.query.distinct(Indicator._id).count()
        types = Indicator.query.group_by(Indicator.type).all()
        # five most recent indicators
        network = Indicator.query.order_by(Indicator._id.desc()).limit(5).all()
        campaigns = Indicator.query.group_by(Indicator.campaign).all()
        taglist = Indicator.query.distinct(Indicator.tags).all()
        # Generate Tag Cloud
        tags = set()
        # NOTE: `object` shadows the builtin; kept byte-identical here
        for object in taglist:
            if object.tags == "":
                pass
            else:
                for tag in object.tags.split(","):
                    tags.add(tag.strip())
        dictcount = {}
        dictlist = []
        typecount = {}
        typelist = []
        # Generate Campaign Statistics Graph
        for object in campaigns:
            c = Indicator.query.filter_by(campaign=object.campaign).count()
            if object.campaign == '':
                # empty campaign string is displayed as 'Unknown'
                dictcount["category"] = "Unknown"
                tempx = (float(c) / float(counts)) * 100
                dictcount["value"] = round(tempx, 2)
            else:
                dictcount["category"] = object.campaign
                tempx = (float(c) / float(counts)) * 100
                dictcount["value"] = round(tempx, 2)
            # .copy() because the same dict object is reused every iteration
            dictlist.append(dictcount.copy())
        # Generate Indicator Type Graph
        for t in types:
            c = Indicator.query.filter_by(type=t.type).count()
            typecount["category"] = t.type
            tempx = float(c) / float(counts)
            newtemp = tempx * 100
            typecount["value"] = round(newtemp, 2)
            typelist.append(typecount.copy())
        favs = []
        # Add Import from Cuckoo button to Dashboard page
        settings = Setting.query.filter_by(_id=1).first()
        if 'on' in settings.cuckoo:
            importsetting = True
        else:
            importsetting = False
        return render_template('dashboard.html', networks=dictlist, network=network, favs=favs, typelist=typelist,
                               taglist=tags, importsetting=importsetting)
    except Exception as e:
        return render_template('error.html', error=e)
@app.route('/about', methods=['GET'])
@login_required
def about():
    """Render the static about page."""
    return render_template('about.html')
@app.route('/tags', methods=['GET'])
@login_required
def tags():
    """List every tag together with the indicators carrying it."""
    try:
        # Grab tags
        taglist = dict()
        rows = Indicator.query.distinct(Indicator.tags).all()
        if rows:
            for row in rows:
                if row.tags:
                    for tag in row.tags.split(','):
                        taglist[tag.strip()] = list()
            # Match indicators to tags
            del rows, row
            # py2 dict iteration; LIKE '%tag%' may also match tags that merely
            # contain this tag as a substring
            for tag, indicators in taglist.iteritems():
                rows = Indicator.query.filter(Indicator.tags.like('%' + tag + '%')).all()
                tmp = {}
                for row in rows:
                    tmp[row.object] = row.type
                indicators.append(tmp)
        return render_template('tags.html', tags=taglist)
    except Exception as e:
        return render_template('error.html', error=e)
@app.route('/networks', methods=['GET'])
@login_required
def networks():
    """List all network-type indicators (IPs, domains, networks)."""
    try:
        # Grab only network indicators
        network = Indicator.query.filter(Indicator.type.in_(('IPv4', 'IPv6', 'Domain', 'Network'))).all()
        return render_template('networks.html', network=network)
    except Exception as e:
        return render_template('error.html', error=e)
@app.route('/threatactors', methods=['GET'])
@login_required
def threatactors():
    """List all threat-actor indicators."""
    try:
        # Grab threat actors
        threatactors = Indicator.query.filter(Indicator.type == 'Threat Actor').all()
        return render_template('threatactors.html', network=threatactors)
    except Exception as e:
        return render_template('error.html', error=e)
@app.route('/victims', methods=['GET'])
@login_required
def victims():
    """List indicators classified as victims in the diamond model."""
    try:
        # Grab victims
        victims = Indicator.query.filter(Indicator.diamondmodel == ('Victim')).all()
        return render_template('victims.html', network=victims)
    except Exception as e:
        return render_template('error.html', error=e)
@app.route('/files', methods=['GET'])
@login_required
def files():
    """List all file/hash indicators."""
    try:
        # Grab files/hashes
        files = Indicator.query.filter(Indicator.type == ('Hash')).all()
        return render_template('files.html', network=files)
    except Exception as e:
        return render_template('error.html', error=e)
@app.route('/campaigns', methods=['GET'])
@login_required
def campaigns():
    """List campaigns and the indicators attributed to each."""
    try:
        # Grab campaigns
        campaignents = dict()
        rows = Indicator.query.group_by(Indicator.campaign).all()
        for c in rows:
            # empty campaign string is displayed as 'Unknown'
            if c.campaign == '':
                name = 'Unknown'
            else:
                name = c.campaign
            campaignents[name] = list()
        # Match indicators to campaigns
        for camp, indicators in campaignents.iteritems():
            if camp == 'Unknown':
                # map the display name back to the stored empty string
                camp = ''
            rows = Indicator.query.filter(Indicator.campaign == camp).all()
            tmp = {}
            for i in rows:
                tmp[i.object] = i.type
            indicators.append(tmp)
        return render_template('campaigns.html', campaignents=campaignents)
    except Exception as e:
        return render_template('error.html', error=e)
@app.route('/settings', methods=['GET'])
@login_required
def settings():
    """Render the settings page with the API config row and the current user."""
    try:
        settings = Setting.query.filter_by(_id=1).first()
        # BUG FIX: `.first` (no parentheses) handed the bound method to the
        # template instead of the User row; call it.
        # NOTE(review): comparing the column to the current_user proxy relies
        # on its string coercion -- confirm against User.__repr__/__eq__.
        user = User.query.filter(User.user == current_user).first()
        return render_template('settings.html', records=settings, suser=user)
    except Exception as e:
        return render_template('error.html', error=e)
@app.route('/campaign/<uid>/info', methods=['GET'])
@login_required
def campaignsummary(uid):
    """Redirect an indicator to the summary view matching its type."""
    try:
        # local renamed from `http` to avoid shadowing network helpers
        indicator = Indicator.query.filter_by(object=uid).first()
        kind = str(indicator.type)
        # network-ish indicators get the whois/object summary, hashes the
        # file view; anything else is treated as a threat actor
        if kind in ("IPv4", "IPv6", "Domain", "Network"):
            return redirect(url_for('objectsummary', uid=indicator.object))
        if kind == "Hash":
            return redirect(url_for('filesobject', uid=indicator.object))
        return redirect(url_for('threatactorobject', uid=indicator.object))
    except Exception as e:
        return render_template('error.html', error=e)
@app.route('/newobject', methods=['GET'])
@login_required
def newobj():
    """Render the new-indicator form pre-filled with today's date."""
    try:
        currentdate = time.strftime("%Y-%m-%d")
        return render_template('newobject.html', currentdate=currentdate)
    except Exception as e:
        return render_template('error.html', error=e)
@app.route('/insert/object/', methods=['POST'])
@login_required
def newobject():
    """Create one or more indicators from the new-object form.

    Two submission shapes are handled:
    * a Cuckoo import ('type' contains 'cuckoo'): pulls host, DNS and hash
      indicators out of the selected analysis task, then redirects home;
    * a manual entry ('inputtype' present): validates and inserts the
      submitted indicator(s) (comma-separated values give bulk add), then
      renders the listing page matching the indicator type.
    Renders the error page on any exception.
    """
    try:
        something = request.form
        imd = ImmutableMultiDict(something)
        records = helpers.convert(imd)
        # Import indicators from Cuckoo for the selected analysis task
        if 'type' in records and 'cuckoo' in records['type']:
            host_data, dns_data, sha1, firstseen = cuckoo.report_data(records['cuckoo_task_id'])
            if host_data and dns_data and sha1 and firstseen:
                # Import IP Indicators from Cuckoo Task
                for ip in host_data:
                    ip = ip['ip']
                    ind = Indicator.query.filter_by(object=ip).first()
                    if ind is None:
                        indicator = Indicator(ip.strip(), 'IPv4', firstseen, '', 'Infrastructure', records['campaign'],
                                              'Low', '', records['tags'], '')
                        db_session.add(indicator)
                        db_session.commit()
                # Import Domain Indicators from Cuckoo Task
                for dns in dns_data:
                    ind = Indicator.query.filter_by(object=dns['request']).first()
                    if ind is None:
                        indicator = Indicator(dns['request'], 'Domain', firstseen, '', 'Infrastructure',
                                              records['campaign'], 'Low', '', records['tags'], '')
                        db_session.add(indicator)
                        db_session.commit()
                # Import File/Hash Indicators from Cuckoo Task
                ind = Indicator.query.filter_by(object=sha1).first()
                if ind is None:
                    indicator = Indicator(sha1, 'Hash', firstseen, '', 'Capability',
                                          records['campaign'], 'Low', '', records['tags'], '')
                    db_session.add(indicator)
                    db_session.commit()
                # Redirect to Dashboard after successful import
                return redirect(url_for('home'))
            else:
                # NOTE(review): errormessage is assigned but never shown --
                # the redirect below drops it.
                errormessage = 'Task is not a file analysis'
                return redirect(url_for('import_indicators'))
        if 'inputtype' in records:
            # Makes sure if you submit an IPv4 indicator, it's an actual IP
            # address.
            # NOTE(review): the pattern is unanchored and is applied to the
            # raw, pre-split field, so bulk entries are only validated by
            # their first value -- confirm intended behavior.
            ipregex = re.match(
                r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}', records['inputobject'])
            # Convert the inputobject of IP or Domain to a list for Bulk Add functionality.
            records['inputobject'] = records['inputobject'].split(',')
            for newobject in records['inputobject']:
                if records['inputtype'] == "IPv4":
                    if ipregex:
                        # NOTE(review): 'object' shadows the builtin name.
                        object = Indicator.query.filter_by(object=newobject).first()
                        if object is None:
                            ipv4_indicator = Indicator(newobject.strip(), records['inputtype'],
                                                       records['inputfirstseen'], records['inputlastseen'],
                                                       records['diamondmodel'], records['inputcampaign'],
                                                       records['confidence'], records['comments'], records['tags'], None)
                            db_session.add(ipv4_indicator)
                            db_session.commit()
                            network = Indicator.query.filter(Indicator.type.in_(
                                ('IPv4', 'IPv6', 'Domain', 'Network'))).all()
                        else:
                            errormessage = "Entry already exists in database."
                            return render_template('newobject.html', errormessage=errormessage,
                                                   inputtype=records['inputtype'], inputobject=newobject,
                                                   inputfirstseen=records['inputfirstseen'],
                                                   inputlastseen=records['inputlastseen'],
                                                   inputcampaign=records['inputcampaign'],
                                                   comments=records['comments'],
                                                   diamondmodel=records['diamondmodel'],
                                                   tags=records['tags'])
                    else:
                        errormessage = "Not a valid IP Address."
                        return render_template('newobject.html', errormessage=errormessage,
                                               inputtype=records['inputtype'],
                                               inputobject=newobject, inputfirstseen=records['inputfirstseen'],
                                               inputlastseen=records['inputlastseen'],
                                               confidence=records['confidence'], inputcampaign=records['inputcampaign'],
                                               comments=records['comments'], diamondmodel=records['diamondmodel'],
                                               tags=records['tags'])
                else:
                    object = Indicator.query.filter_by(object=newobject).first()
                    if object is None:
                        indicator = Indicator(newobject.strip(), records['inputtype'], records['inputfirstseen'],
                                              records['inputlastseen'], records['diamondmodel'], records['inputcampaign'],
                                              records['confidence'], records['comments'], records['tags'], None)
                        db_session.add(indicator)
                        db_session.commit()
                    else:
                        errormessage = "Entry already exists in database."
                        return render_template('newobject.html', errormessage=errormessage,
                                               inputtype=records['inputtype'], inputobject=newobject,
                                               inputfirstseen=records['inputfirstseen'],
                                               inputlastseen=records['inputlastseen'],
                                               inputcampaign=records['inputcampaign'],
                                               comments=records['comments'],
                                               diamondmodel=records['diamondmodel'],
                                               tags=records['tags'])
            # TODO: Change 'network' to 'object' in HTML templates to standardize on verbiage
            if records['inputtype'] == "IPv4" or records['inputtype'] == "Domain" or records['inputtype'] == "Network"\
                    or records['inputtype'] == "IPv6":
                network = Indicator.query.filter(Indicator.type.in_(('IPv4', 'IPv6', 'Domain', 'Network'))).all()
                return render_template('networks.html', network=network)
            elif records['diamondmodel'] == "Victim":
                victims = Indicator.query.filter(Indicator.diamondmodel == ('Victim')).all()
                return render_template('victims.html', network=victims)
            elif records['inputtype'] == "Hash":
                files = Indicator.query.filter(Indicator.type == ('Hash')).all()
                return render_template('files.html', network=files)
            else:
                threatactors = Indicator.query.filter(Indicator.type == ('Threat Actors')).all()
                return render_template('threatactors.html', network=threatactors)
    except Exception as e:
        return render_template('error.html', error=e)
@app.route('/edit/<uid>', methods=['POST', 'GET'])
@login_required
def editobject(uid):
    """Render the edit form for the indicator identified by uid."""
    try:
        entry = Indicator.query.filter_by(object=uid).first()
        return render_template('neweditobject.html', entry=helpers.row_to_dict(entry))
    except Exception as e:
        return render_template('error.html', error=e)
@app.route('/editcampaign/<uid>', methods=['POST', 'GET'])
@login_required
def editcampaign(uid):
    """Placeholder route; campaign editing is not implemented yet."""
    message = 'Not Implemented'
    return render_template('error.html', error=message)
@app.route('/delete/network/<uid>', methods=['GET'])
@login_required
def deletenetworkobject(uid):
    """Delete a network indicator, then re-render the network listing."""
    try:
        Indicator.query.filter_by(object=uid).delete()
        db_session.commit()
        remaining = Indicator.query.filter(
            Indicator.type.in_(('IPv4', 'IPv6', 'Domain', 'Network'))).all()
        return render_template('networks.html', network=remaining)
    except Exception as e:
        return render_template('error.html', error=e)
@app.route('/delete/threatactor/<uid>', methods=['GET'])
@login_required
def deletethreatactorobject(uid):
    """Delete a threat-actor indicator, then re-render the listing."""
    try:
        Indicator.query.filter_by(object=uid).delete()
        db_session.commit()
        # NOTE(review): the insert route filters on 'Threat Actors'
        # (plural); confirm which spelling the data actually stores.
        remaining = Indicator.query.filter_by(type='Threat Actor')
        return render_template('threatactors.html', network=remaining)
    except Exception as e:
        return render_template('error.html', error=e)
@app.route('/delete/victims/<uid>', methods=['GET'])
@login_required
def deletevictimobject(uid):
    """Delete a victim indicator, then re-render the victims listing."""
    try:
        Indicator.query.filter_by(object=uid).delete()
        db_session.commit()
        remaining = Indicator.query.filter_by(diamondmodel='Victim')
        return render_template('victims.html', network=remaining)
    except Exception as e:
        return render_template('error.html', error=e)
@app.route('/delete/files/<uid>', methods=['GET'])
@login_required
def deletefilesobject(uid):
    """Delete a file (hash) indicator, then re-render the files listing.

    Returns the rendered files page, or the error page on failure.
    """
    try:
        Indicator.query.filter_by(object=uid).delete()
        db_session.commit()
        files = Indicator.query.filter_by(type='Hash')
        # BUG FIX: previously rendered 'victims.html'; the remaining file
        # indicators belong on the files listing (matches the insert route).
        return render_template('files.html', network=files)
    except Exception as e:
        return render_template('error.html', error=e)
@app.route('/update/settings/', methods=['POST'])
@login_required
def updatesettings():
    """Persist the settings form: toggle each integration on/off and store
    the associated credentials and API keys.

    BUG FIX: the on/off checks used ``is not ''`` (identity comparison with a
    string literal), which is implementation-dependent and wrong for equality
    testing; replaced with ``!= ''`` throughout.
    """
    try:
        something = request.form
        imd = ImmutableMultiDict(something)
        newdict = helpers.convert(imd)
        # Query the first set of settings, could query custom settings for individual users
        settings = Setting.query.filter_by(_id=1).first()
        # Make sure we're updating the settings instead of overwriting them
        if 'threatcrowd' in newdict.keys():
            settings.threatcrowd = 'on'
        else:
            settings.threatcrowd = 'off'
        # PassiveTotal features also require both credentials to be present.
        for pt_type in ['pt_pdns', 'pt_whois', 'pt_pssl', 'pt_host_attr']:
            auth = [newdict['pt_username'], newdict['pt_api_key']]
            if pt_type in newdict.keys() and ('' not in auth):
                setattr(settings, pt_type, 'on')
            else:
                setattr(settings, pt_type, 'off')
        if 'cuckoo' in newdict.keys():
            settings.cuckoo = 'on'
        else:
            settings.cuckoo = 'off'
        # Each integration below is only enabled when its key/credential is set.
        if 'vtinfo' in newdict.keys() and newdict['apikey'] != '':
            settings.vtinfo = 'on'
        else:
            settings.vtinfo = 'off'
        if 'vtfile' in newdict.keys() and newdict['apikey'] != '':
            settings.vtfile = 'on'
        else:
            settings.vtfile = 'off'
        if 'circlinfo' in newdict.keys() and newdict['circlusername'] != '':
            settings.circlinfo = 'on'
        else:
            settings.circlinfo = 'off'
        if 'circlssl' in newdict.keys() and newdict['circlusername'] != '':
            settings.circlssl = 'on'
        else:
            settings.circlssl = 'off'
        if 'whoisinfo' in newdict.keys():
            settings.whoisinfo = 'on'
        else:
            settings.whoisinfo = 'off'
        if 'farsightinfo' in newdict.keys() and newdict['farsightkey'] != '':
            settings.farsightinfo = 'on'
        else:
            settings.farsightinfo = 'off'
        if 'shodaninfo' in newdict.keys() and newdict['shodankey'] != '':
            settings.shodaninfo = 'on'
        else:
            settings.shodaninfo = 'off'
        if 'odnsinfo' in newdict.keys() and newdict['odnskey'] != '':
            settings.odnsinfo = 'on'
        else:
            settings.odnsinfo = 'off'
        # Persist the raw credential/key fields as submitted.
        settings.farsightkey = newdict['farsightkey']
        settings.apikey = newdict['apikey']
        settings.odnskey = newdict['odnskey']
        settings.httpproxy = newdict['httpproxy']
        settings.httpsproxy = newdict['httpsproxy']
        settings.cuckoohost = newdict['cuckoohost']
        settings.cuckooapiport = newdict['cuckooapiport']
        settings.circlusername = newdict['circlusername']
        settings.circlpassword = newdict['circlpassword']
        settings.pt_username = newdict['pt_username']
        settings.pt_api_key = newdict['pt_api_key']
        settings.shodankey = newdict['shodankey']
        db_session.commit()
        settings = Setting.query.first()
        return render_template('settings.html', records=settings)
    except Exception as e:
        return render_template('error.html', error=e)
@app.route('/update/object/', methods=['POST'])
@login_required
def updateobject():
    """Apply the submitted form fields to an existing indicator, then
    redirect to the detail page matching the indicator's type.

    NOTE(review): if records['type'] matches none of the branches below the
    view returns None, which Flask rejects with a 500 error.
    """
    try:
        # Updates entry information
        something = request.form
        imd = ImmutableMultiDict(something)
        records = helpers.convert(imd)
        # taglist = records['tags'].split(",") - Unused
        # indicator = Indicator.query.filter_by(object=records['object']).first() - Unused
        try:
            Indicator.query.filter_by(object=records['object']).update(records)
        except Exception as e:
            # SQLAlchemy does not outright support altering tables.
            # NOTE(review): this fallback only prints; the ALTER TABLE work
            # is commented out below, so unknown columns are silently lost.
            for k, v in records.iteritems():
                if Indicator.query.group_by(k).first() is None:
                    print 'ALTER Table'
                    # db_session.engine.execute("ALTER TABLE indicators ADD COLUMN " + k + " TEXT DEFAULT ''")
        db_session.commit()
        # db_session.execute('ALTER TABLE indicators ADD COLUMN')
        # con = helpers.db_connection()
        # with con:
        #     cur = con.cursor()
        #     cur.execute(
        #         "ALTER TABLE indicators ADD COLUMN " + t + " TEXT DEFAULT ''")
        #     cur.execute("UPDATE indicators SET " + t + "= '" + records[
        #         t] + "' WHERE id = '" + records['id'] + "'")
        if records['type'] == "IPv4" or records['type'] == "IPv6" or records['type'] == "Domain" or \
                records['type'] == "Network":
            return redirect(url_for('objectsummary', uid=str(records['object'])))
        elif records['type'] == "Hash":
            return redirect(url_for('filesobject', uid=str(records['object'])))
        elif records['type'] == "Entity":
            return redirect(url_for('victimobject', uid=str(records['object'])))
        elif records['type'] == "Threat Actor":
            return redirect(url_for('threatactorobject', uid=str(records['object'])))
    except Exception as e:
        return render_template('error.html', error=e)
@app.route('/insert/newfield/', methods=['POST'])
@login_required
def insertnewfield():
    """Fold the submitted new field name/value pair into the entry dict and
    re-render the edit form with the combined fields."""
    try:
        records = helpers.convert(ImmutableMultiDict(request.form))
        newdict = {}
        for key in records:
            if key == "inputnewfieldname":
                # The named field carries the value from its paired input.
                newdict[records[key]] = records['inputnewfieldvalue']
            elif key != "inputnewfieldvalue":
                newdict[key] = records[key]
        return render_template('neweditobject.html', entry=newdict)
    except Exception as e:
        return render_template('error.html', error=e)
@app.route('/network/<uid>/info', methods=['GET'])
@login_required
def objectsummary(uid):
    """Render the detail page for a network indicator (IPv4/IPv6/Domain),
    enriching it with whichever third-party lookups are enabled in settings.

    BUG FIX: the relationship loop reassigned ``row``, clobbering the
    indicator under inspection -- every enrichment lookup below then ran
    against the LAST related indicator instead of the requested one. The
    loop now uses its own variable.
    """
    try:
        row = Indicator.query.filter_by(object=uid).first()
        newdict = helpers.row_to_dict(row)
        settings = Setting.query.filter_by(_id=1).first()
        taglist = row.tags.split(",")
        temprel = {}
        if row.relationships:
            rellist = row.relationships.split(",")
            for rel in rellist:
                rel_row = Indicator.query.filter_by(object=rel).first()
                temprel[rel_row.object] = rel_row.type
        reldata = len(temprel)
        # Default every enrichment payload to empty; filled in only when the
        # corresponding integration is switched on.
        jsonvt = ""
        whoisdata = ""
        odnsdata = ""
        circldata = ""
        circlssl = ""
        pt_pdns_data = ""
        pt_whois_data = ""
        pt_pssl_data = ""
        pt_host_attr_data = ""
        farsightdata = ""
        shodandata = ""
        # Run ipwhois or domainwhois based on the type of indicator
        if str(row.type) == "IPv4" or str(row.type) == "IPv6":
            if settings.vtinfo == "on":
                jsonvt = virustotal.vt_ipv4_lookup(str(row.object))
            if settings.whoisinfo == "on":
                whoisdata = whoisinfo.ipwhois(str(row.object))
            if settings.odnsinfo == "on":
                odnsdata = opendns.ip_investigate(str(row.object))
            if settings.circlinfo == "on":
                circldata = circl.circlquery(str(row.object))
            if settings.circlssl == "on":
                circlssl = circl.circlssl(str(row.object))
            if settings.pt_pdns == "on":
                pt_pdns_data = passivetotal.pt_lookup('dns', str(row.object))
            if settings.pt_whois == "on":
                pt_whois_data = passivetotal.pt_lookup('whois', str(row.object))
            if settings.pt_pssl == "on":
                pt_pssl_data = passivetotal.pt_lookup('ssl', str(row.object))
            if settings.pt_host_attr == "on":
                pt_host_attr_data = passivetotal.pt_lookup('attributes', str(row.object))
            if settings.farsightinfo == "on":
                farsightdata = farsight.farsightip(str(row.object))
            if settings.shodaninfo == "on":
                shodandata = shodan.shodan(str(row.object))
        elif str(row.type) == "Domain":
            if settings.whoisinfo == "on":
                whoisdata = whoisinfo.domainwhois(str(row.object))
            if settings.vtinfo == "on":
                jsonvt = virustotal.vt_domain_lookup(str(row.object))
            if settings.odnsinfo == "on":
                odnsdata = opendns.domains_investigate(str(row.object))
            if settings.circlinfo == "on":
                circldata = circl.circlquery(str(row.object))
            if settings.pt_pdns == "on":
                pt_pdns_data = passivetotal.pt_lookup('dns', str(row.object))
            if settings.pt_whois == "on":
                pt_whois_data = passivetotal.pt_lookup('whois', str(row.object))
            if settings.pt_pssl == "on":
                pt_pssl_data = passivetotal.pt_lookup('ssl', str(row.object))
            if settings.pt_host_attr == "on":
                pt_host_attr_data = passivetotal.pt_lookup('attributes', str(row.object))
            if settings.farsightinfo == "on":
                farsightdata = farsight.farsightdomain(str(row.object))
            if settings.shodaninfo == "on":
                shodandata = shodan.shodan(str(row.object))
        # Page header: whois location when available, generic label otherwise.
        if settings.whoisinfo == "on":
            if str(row.type) == "Domain":
                address = str(whoisdata['city']) + ", " + str(whoisdata['country'])
            else:
                address = str(whoisdata['nets'][0]['city']) + ", " + str(
                    whoisdata['nets'][0]['country'])
        else:
            address = "Information about " + str(row.object)
        return render_template('networkobject.html', records=newdict, jsonvt=jsonvt, whoisdata=whoisdata,
                               odnsdata=odnsdata, settingsvars=settings, address=address,
                               temprel=temprel, circldata=circldata, circlssl=circlssl, reldata=reldata,
                               taglist=taglist, farsightdata=farsightdata, shodandata=shodandata,
                               pt_pdns_data=pt_pdns_data, pt_whois_data=pt_whois_data, pt_pssl_data=pt_pssl_data,
                               pt_host_attr_data=pt_host_attr_data)
    except Exception as e:
        return render_template('error.html', error=e)
@app.route('/threatactors/<uid>/info', methods=['GET'])
@login_required
def threatactorobject(uid):
    """Render the detail page for a threat-actor indicator, including its
    related indicators."""
    try:
        row = Indicator.query.filter(Indicator.object == uid).first()
        newdict = helpers.row_to_dict(row)
        temprel = {}
        if row.relationships:
            rellist = row.relationships.split(",")
            for rel in rellist:
                # BUG FIX: the query was missing .first(), so 'reltype' was a
                # Query object and reltype.object raised AttributeError
                # (surfacing as the generic error page).
                reltype = Indicator.query.filter(Indicator.object == rel).first()
                temprel[reltype.object] = reltype.type
        reldata = len(temprel)
        return render_template('threatactorobject.html', records=newdict, temprel=temprel, reldata=reldata)
    except Exception as e:
        return render_template('error.html', error=e)
@app.route('/relationships/<uid>', methods=['GET'])
@login_required
def relationships(uid):
    """Render the add-relationship form for the indicator identified by uid."""
    try:
        row = Indicator.query.filter_by(object=uid).first()
        indicators = Indicator.query.all()
        # NOTE(review): temprel is populated but never handed to the
        # template; the lookups are kept to preserve existing behavior.
        if row.relationships:
            temprel = {}
            for rel in row.relationships.split(","):
                reltype = Indicator.query.filter_by(object=rel).first()
                temprel[reltype.object] = reltype.type
        return render_template('addrelationship.html', records=row, indicators=indicators)
    except Exception as e:
        return render_template('error.html', error=e)
@app.route('/addrelationship', methods=['GET', 'POST'])
@login_required
def addrelationship():
    """Link two indicators bidirectionally, then redirect to the detail page
    matching the submitted indicator type."""
    try:
        records = helpers.convert(ImmutableMultiDict(request.form))

        def link(owner_uid, other_uid):
            # Append other_uid to owner's comma-separated relationship list.
            entry = Indicator.query.filter_by(object=owner_uid).first()
            if entry.relationships:
                entry.relationships = str(entry.relationships) + ",{}".format(other_uid)
            else:
                entry.relationships = str(other_uid)
            db_session.commit()

        # Record both directions so either indicator can find the other.
        link(records['id'], records['indicator'])
        link(records['indicator'], records['id'])
        kind = records['type']
        if kind in ("IPv4", "IPv6", "Domain", "Network"):
            return redirect(url_for('objectsummary', uid=str(records['id'])))
        elif kind == "Hash":
            return redirect(url_for('filesobject', uid=str(records['id'])))
        elif kind == "Entity":
            return redirect(url_for('victimobject', uid=str(records['id'])))
        elif kind == "Threat Actor":
            return redirect(url_for('threatactorobject', uid=str(records['id'])))
    except Exception as e:
        return render_template('error.html', error=e)
@app.route('/apikey', methods=['POST'])
@login_required
def apiroll():
    """Regenerate the current user's API key and return to the profile page.

    Leftover debug 'print' statements were removed (they wrote to stdout on
    every key roll).
    """
    try:
        user = User.query.filter_by(user=current_user.user.lower()).first()
        # NOTE(review): MD5-of-random() is kept for key-format compatibility,
        # but a CSPRNG (e.g. os.urandom / uuid4) would be a stronger source
        # for new API keys than random.random().
        user.apikey = hashlib.md5("{}{}".format(user, str(random.random())).encode('utf-8')).hexdigest()
        db_session.commit()
        return redirect(url_for('profile'))
    except Exception as e:
        return render_template('error.html', error=e)
@app.route('/profile', methods=['GET', 'POST'])
@login_required
def profile():
    """Render the profile page; on POST, change the current user's password.

    The change requires the current password plus a matching confirmation
    of the new one; each outcome re-renders the page with a status message.
    """
    try:
        user = User.query.filter_by(user=current_user.user.lower()).first()
        imd = ImmutableMultiDict(request.form)
        records = helpers.convert(imd)
        if 'currentpw' in records:
            # NOTE(review): passwords are stored as unsalted MD5 hashes; MD5
            # is unsuitable for password storage -- consider a salted KDF
            # (bcrypt/scrypt/PBKDF2) with a migration path.
            if hashlib.md5(records['currentpw'].encode('utf-8')).hexdigest() == user.password:
                if records['newpw'] == records['newpwvalidation']:
                    user.password = hashlib.md5(records['newpw'].encode('utf-8')).hexdigest()
                    db_session.commit()
                    errormessage = "Password updated successfully."
                    return render_template('profile.html', errormessage=errormessage)
                else:
                    errormessage = "New passwords don't match."
                    return render_template('profile.html', errormessage=errormessage)
            else:
                errormessage = "Current password is incorrect."
                return render_template('profile.html', errormessage=errormessage)
        return render_template('profile.html')
    except Exception as e:
        return render_template('error.html', error=e)
@app.route('/victims/<uid>/info', methods=['GET'])
@login_required
def victimobject(uid):
    """Render the detail page for a victim indicator, enriching it with
    whichever third-party lookups are enabled in settings.

    BUG FIX: the relationship lookup was missing .first(), so 'reltype' was
    a Query object and reltype.object raised AttributeError (surfacing as
    the generic error page) for any victim with relationships.
    """
    try:
        http = Indicator.query.filter(Indicator.object == uid).first()
        newdict = helpers.row_to_dict(http)
        settings = Setting.query.filter_by(_id=1).first()
        taglist = http.tags.split(",")
        temprel = {}
        if http.relationships:
            rellist = http.relationships.split(",")
            for rel in rellist:
                reltype = Indicator.query.filter(Indicator.object == rel).first()
                temprel[reltype.object] = reltype.type
        reldata = len(temprel)
        # Default every enrichment payload to empty; filled in only when the
        # corresponding integration is switched on.
        jsonvt = ""
        whoisdata = ""
        odnsdata = ""
        circldata = ""
        circlssl = ""
        pt_pdns_data = ""
        pt_whois_data = ""
        pt_pssl_data = ""
        pt_host_attr_data = ""
        farsightdata = ""
        # shodaninfo = ""
        # Run ipwhois or domainwhois based on the type of indicator
        if str(http.type) == "IPv4" or str(http.type) == "IPv6":
            if settings.vtinfo == "on":
                jsonvt = virustotal.vt_ipv4_lookup(str(http.object))
            if settings.whoisinfo == "on":
                whoisdata = whoisinfo.ipwhois(str(http.object))
            if settings.odnsinfo == "on":
                odnsdata = opendns.ip_investigate(str(http.object))
            if settings.circlinfo == "on":
                circldata = circl.circlquery(str(http.object))
            if settings.circlssl == "on":
                circlssl = circl.circlssl(str(http.object))
            if settings.pt_pdns == "on":
                pt_pdns_data = passivetotal.pt_lookup('dns', str(http.object))
            if settings.pt_whois == "on":
                pt_whois_data = passivetotal.pt_lookup('whois', str(http.object))
            if settings.pt_pssl == "on":
                pt_pssl_data = passivetotal.pt_lookup('ssl', str(http.object))
            if settings.pt_host_attr == "on":
                pt_host_attr_data = passivetotal.pt_lookup('attributes', str(http.object))
            if settings.farsightinfo == "on":
                farsightdata = farsight.farsightip(str(http.object))
        elif str(http.type) == "Domain":
            if settings.whoisinfo == "on":
                whoisdata = whoisinfo.domainwhois(str(http.object))
            if settings.vtinfo == "on":
                jsonvt = virustotal.vt_domain_lookup(str(http.object))
            if settings.odnsinfo == "on":
                odnsdata = opendns.domains_investigate(
                    str(http.object))
            if settings.circlinfo == "on":
                circldata = circl.circlquery(str(http.object))
            if settings.pt_pdns == "on":
                pt_pdns_data = passivetotal.pt_lookup('dns', str(http.object))
            if settings.pt_whois == "on":
                pt_whois_data = passivetotal.pt_lookup('whois', str(http.object))
            if settings.pt_pssl == "on":
                pt_pssl_data = passivetotal.pt_lookup('ssl', str(http.object))
            if settings.pt_host_attr == "on":
                pt_host_attr_data = passivetotal.pt_lookup('attributes', str(http.object))
        # Page header: whois location when available, generic label otherwise.
        if settings.whoisinfo == "on":
            if str(http.type) == "Domain":
                address = str(whoisdata['city']) + ", " + str(
                    whoisdata['country'])
            else:
                address = str(whoisdata['nets'][0]['city']) + ", " + str(
                    whoisdata['nets'][0]['country'])
        else:
            address = "Information about " + str(http.object)
        return render_template('victimobject.html', records=newdict, jsonvt=jsonvt, whoisdata=whoisdata,
                               odnsdata=odnsdata, circldata=circldata, circlssl=circlssl, settingsvars=settings,
                               address=address, temprel=temprel, reldata=reldata, taglist=taglist, farsightdata=farsightdata,
                               pt_pdns_data=pt_pdns_data, pt_whois_data=pt_whois_data, pt_pssl_data=pt_pssl_data,
                               pt_host_attr_data=pt_host_attr_data)
    except Exception as e:
        return render_template('error.html', error=e)
@app.route('/files/<uid>/info', methods=['GET'])
@login_required
def filesobject(uid):
    """Render the detail page for a file (hash) indicator."""
    try:
        row = Indicator.query.filter(Indicator.object == uid).first()
        newdict = helpers.row_to_dict(row)
        settings = Setting.query.filter_by(_id=1).first()
        taglist = row.tags.split(",")
        temprel = {}
        if row.relationships:
            for rel in row.relationships.split(","):
                related = Indicator.query.filter(Indicator.object == rel).first()
                temprel[related.object] = related.type
        reldata = len(temprel)
        # Only hit VirusTotal when the file-lookup feature is switched on.
        if settings.vtfile == "on":
            jsonvt = virustotal.vt_hash_lookup(str(row.object))
        else:
            jsonvt = ""
        return render_template('fileobject.html', records=newdict, settingsvars=settings, address=row.object,
                               temprel=temprel, reldata=reldata, jsonvt=jsonvt, taglist=taglist)
    except Exception as e:
        return render_template('error.html', error=e)
@app.route('/import', methods=['GET', 'POST'])
@login_required
def import_indicators():
    """Render the import page listing the available Cuckoo analysis tasks."""
    tasks = cuckoo.get_tasks()
    return render_template('import.html', cuckoo_tasks=tasks)
@app.route('/download/<uid>', methods=['GET'])
@login_required
def download(uid):
    """Stream a CSV of the indicators in campaign uid.

    'Unknown' maps to the empty campaign value; if the campaign lookup finds
    nothing, falls back to a tag substring match. Empty fields are rendered
    as '-'.
    """
    if uid == 'Unknown':
        uid = ""
    rows = Indicator.query.filter_by(campaign=uid).all()
    # Lazy hack. This takes care of downloading indicators by Tags, could be put into its own app.route
    if not rows:
        rows = Indicator.query.filter(Indicator.tags.like('%' + uid + '%')).all()
    indlist = []
    for i in rows:
        indicator = helpers.row_to_dict(i)
        for key, value in indicator.iteritems():
            if value is None or value == "":
                indicator[key] = '-'
        indlist.append(indicator)
    out_file = io.BytesIO()
    # BUG FIX: guard the empty case -- indlist[0] raised IndexError (HTTP
    # 500) when neither the campaign nor the tag matched anything; now an
    # empty CSV attachment is returned instead.
    if indlist:
        fieldnames = indlist[0].keys()
        w = csv.DictWriter(out_file, fieldnames=fieldnames)
        w.writeheader()
        w.writerows(indlist)
    response = make_response(out_file.getvalue())
    response.headers[
        "Content-Disposition"] = "attachment; filename=" + uid + "-campaign.csv"
    response.headers["Content-type"] = "text/csv"
    return response
@app.teardown_appcontext
def shutdown_session(exception=None):
    """Remove the scoped SQLAlchemy session at the end of each request."""
    db_session.remove()
if __name__ == '__main__':
    # Command-line entry point for running the development server.
    parser = argparse.ArgumentParser()
    parser.add_argument('-H', '--host', default="127.0.0.1", help="Specify the host IP address")
    # BUG FIX: without type=int a port supplied on the command line arrived
    # as a string, which the WSGI server rejects; the default stays 8888.
    parser.add_argument('-p', '--port', type=int, default=8888, help="Specify port to listen on")
    parser.add_argument('-d', '--debug', default=False, help="Run in debug mode", action="store_true")
    parser.add_argument('-db', '--database', help="Path to sqlite database - Not Implemented")
    args = parser.parse_args()
    if args.database:
        # TODO
        database.db_file = args.database
    init_db()
    app.run(host=args.host, port=args.port, debug=args.debug)
|
defpoint/threat_note
|
threat_note/threat_note.py
|
Python
|
apache-2.0
| 42,723
|
[
"Brian"
] |
f93118cbfc3f325a39adbfa88517ba55cdf223c6c3bbffd8950c6236f2722214
|
import os, sys, getopt
# Prefer PyQt4; fall back to PySide, aliasing the small API differences so
# the rest of the module can use one spelling for signals/slots/QString.
try:
    from PyQt4 import QtCore, QtGui
    QtCore.Signal = QtCore.pyqtSignal
    QtCore.Slot = QtCore.pyqtSlot
except ImportError:
    try:
        from PySide import QtCore, QtGui
        QtCore.QString = str
    except ImportError:
        raise ImportError("Cannot load either PyQt or PySide")
import vtk
import time
from ExodusResult import ExodusResult
import glob, math
from ContourChoices import *
from vtk.qt4.QVTKRenderWindowInteractor import QVTKRenderWindowInteractor
# Absolute directory containing this script.
pathname = os.path.dirname(os.path.realpath(sys.argv[0]))
pathname = os.path.abspath(pathname)
# PyQt provides QString.fromUtf8; under PySide (QString is str) fall back
# to the identity function.
try:
    _fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
    _fromUtf8 = lambda s: s
class ExodusResultRenderWidget(QtGui.QWidget):
    def __init__(self, input_file_widget, execution_widget, qt_app, application):
        """Build the visualization tab: a splitter with controls on the left
        and a VTK render view on the right, wired to the execution widget's
        run/timestep signals."""
        QtGui.QWidget.__init__(self)
        self.input_file_widget = input_file_widget
        self.qt_app = qt_app
        self.application = application
        # Clipping plane; origin far to -x so nothing is clipped initially.
        self.plane = vtk.vtkPlane()
        self.plane.SetOrigin(-1000, 0, 0)
        self.plane.SetNormal(1, 0, 0)
        self.exodus_result = None
        # The multiple (from adaptivity)
        self.exodus_results = []
        self.timestep_to_exodus_result = {}
        self.file_name = None
        self.setupLuts()
        # The multiple (from adaptivity) file names we know of
        self.file_names = []
        self.current_max_timestep = 0
        # Whether or not there is new data to read
        self.new_stuff_to_read = False
        # Poll for new data every 100ms while a run is active.
        self.timer = QtCore.QTimer()
        self.timer.stop()
        self.timer.setInterval(100)
        self.timer.timeout.connect(self._updateData)
        # React to solver lifecycle events from the execution widget.
        self.execution_widget = execution_widget
        self.execution_widget.run_started.connect(self._runStarted)
        self.execution_widget.run_stopped.connect(self._runStopped)
        self.execution_widget.timestep_begin.connect(self._timestepBegin)
        self.execution_widget.timestep_end.connect(self._timestepEnd)
        self.left_layout = QtGui.QVBoxLayout()
        # self.left_layout.setContentsMargins(0,0,0,0)
        self.left_layout.setSizeConstraint(QtGui.QLayout.SetMinimumSize)
        self.left_widget = QtGui.QWidget()
        self.left_widget.setLayout(self.left_layout)
        self.right_layout = QtGui.QVBoxLayout()
        self.right_layout.setContentsMargins(0,0,0,0)
        self.right_widget = QtGui.QWidget()
        self.right_widget.setLayout(self.right_layout)
        # Controls (left) | render view (right), with the view stretching.
        self.splitter = QtGui.QSplitter()
        self.splitter.setContentsMargins(0,0,0,0)
        self.splitter.addWidget(self.left_widget)
        self.splitter.addWidget(self.right_widget)
        self.splitter.setStretchFactor(0, 0.1)
        self.splitter.setStretchFactor(1, 1)
        # build the render viewport, default background to the gradient look
        self.vtkwidget = QVTKRenderWindowInteractor(self.right_widget)
        self.renderer = vtk.vtkRenderer()
        self._renderViewBackgroundChanged(0)
        self.renderer.ResetCamera()
        # add view to the right pane of the splitter
        self.right_layout.addWidget(self.vtkwidget)
        self.right_layout.setStretchFactor(self.vtkwidget, 100)
        # show view and set interaction mode
        self.vtkwidget.show()
        self.vtkwidget.GetRenderWindow().AddRenderer(self.renderer)
        self.interactor = self.vtkwidget.GetRenderWindow().GetInteractor()
        self.interactor.SetInteractorStyle(vtk.vtkInteractorStyleTrackballCamera())
        # assemble the overall layout
        self.main_layout = QtGui.QVBoxLayout()
        self.main_layout.setContentsMargins(0,0,0,0)
        self.main_layout.addWidget(self.splitter)
        self.setLayout(self.main_layout)
        self.show()
        self.interactor.Initialize()
        # View state defaults (displacement/scale factors, selection).
        self.first = True
        self.exodus_result = None
        self.has_displacements = False
        self.current_displacement_magnitude = 1.0
        self.current_scale_x_magnitude = 1.0
        self.current_scale_y_magnitude = 1.0
        self.current_scale_z_magnitude = 1.0
        self.current_variable = None
        self.current_component = None
        # Holds a mapping of variable name to contour choices so they can be restored when variables are selected
        self.contour_choices = {}
        # If we are currently restoring contours then don't save the intermediate ones
        self.currently_restoring_contours = False
        self.setupControls()
        self.modifyUI()
''' This will be called after the interface is completely setup to allow an application to modify this tab '''
def modifyUI(self):
pass
''' Return the name to use for this tab '''
def name(self):
return 'Visualize'
def setupControls(self):
# add to left splitter pane
self.controls_layout = QtGui.QVBoxLayout()
self.left_layout.addLayout(self.controls_layout)
#
# Select Output group
#
self.output_control_group_box = QtGui.QGroupBox("Select Output") # adds a box for storing widget
self.output_control_group_box.setFlat(True)
self.output_control_layout = QtGui.QVBoxLayout() # creates a layout
self.output_control_layout.setContentsMargins(0,0,0,0)
self.output_control = QtGui.QComboBox() # adds the actual dropdown menu
# Set-up the control
self.output_control.setToolTip('Select output file to view') # sets menu tooltip
self.updateOutputControl() # populate the list of outputs
self.output_control.activated[str].connect(self._outputChanged) # set the callback function
# Add the control to the GUI
self.output_control_layout.addWidget(self.output_control) # add the dropdown widget to the layout
self.output_control_group_box.setLayout(self.output_control_layout) # add the layout to the box
self.controls_layout.addWidget(self.output_control_group_box) # add the box to the gui control layout
#
# Show Blocks group
#
self.block_view_group_box = QtGui.QGroupBox('Show Blocks')
self.block_view_layout = QtGui.QVBoxLayout()
self.block_view_layout.setContentsMargins(0,0,0,0)
self.block_view_list = QtGui.QListView()
self.block_view_model = QtGui.QStandardItemModel()
self.block_view_model.itemChanged.connect(self._blockViewItemChanged)
self.block_view_list.setModel(self.block_view_model)
self.block_view_layout.addWidget(self.block_view_list)
self.block_view_group_box.setLayout(self.block_view_layout)
self.controls_layout.addWidget(self.block_view_group_box)
# self.automatic_update_checkbox = QtGui.QCheckBox("Automatically Update")
# self.automatic_update_checkbox.setToolTip('Toggle automattically reading new timesteps as they finish computing')
# self.automatic_update_checkbox.setCheckState(QtCore.Qt.Checked)
self.automatically_update = True
# self.automatic_update_checkbox.stateChanged[int].connect(self._automaticUpdateChanged)
# self.left_controls_layout.addWidget(self.automatic_update_checkbox)
#
# Contour group
#
self.contour_groupbox = QtGui.QGroupBox("Contour")
self.contour_groupbox.setFlat(True)
self.contour_groupbox.setSizePolicy(QtGui.QSizePolicy.Expanding,QtGui.QSizePolicy.Fixed)
self.contour_layout = QtGui.QVBoxLayout()
self.contour_layout.setContentsMargins(0,0,0,0)
self.contour_groupbox.setLayout(self.contour_layout)
self.variable_contour_layout = QtGui.QHBoxLayout()
self.contour_layout.addLayout(self.variable_contour_layout)
self.contour_label = QtGui.QLabel("Contour:")
self.variable_contour = QtGui.QComboBox()
self.variable_contour_is_nodal = {}
self.variable_contour.setToolTip('Which variable to color by')
self.variable_contour.currentIndexChanged[int].connect(self._contourVariableSelected)
self.variable_contour_layout.addWidget(self.variable_contour)
self.variable_contour_layout.setStretchFactor(self.variable_contour, 1)
#self.component_label = QtGui.QLabel("Component:")
self.variable_component = QtGui.QComboBox()
self.variable_component.setToolTip('If the variable is a vector this selects what component of that vector (or the Magnitude) to color by')
self.variable_component.currentIndexChanged[str].connect(self._variableComponentSelected)
self.variable_contour_layout.addWidget(self.variable_component, alignment=QtCore.Qt.AlignHCenter)
# min line
self.min_layout = QtGui.QHBoxLayout()
self.min_layout.setContentsMargins(0,0,0,0)
self.min_widget = QtGui.QWidget()
self.min_widget.setLayout(self.min_layout)
self.min_radio_layout = QtGui.QVBoxLayout()
self.min_current_radio = QtGui.QRadioButton('Current')
self.min_current_radio.setChecked(QtCore.Qt.Checked)
self.min_current_radio.toggled.connect(self._updateContours)
self.min_global_radio = QtGui.QRadioButton('Global')
self.min_global_radio.toggled.connect(self._updateContours)
self.min_radio_layout.addWidget(self.min_current_radio)
self.min_custom_layout = QtGui.QHBoxLayout()
self.min_custom_layout.setContentsMargins(0,0,0,0)
self.min_custom_layout.setSpacing(5)
self.min_custom_radio = QtGui.QRadioButton()
self.min_custom_radio.toggled.connect(self._updateContours)
self.min_custom_text = QtGui.QLineEdit()
self.min_custom_text.returnPressed.connect(self._updateContours)
self.min_custom_text.setDisabled(True)
self.min_custom_layout.addWidget(self.min_custom_radio)
self.min_custom_layout.addWidget(self.min_custom_text)
self.min_custom_layout.setStretchFactor(self.min_custom_text, 1)
self.min_layout.addWidget(QtGui.QLabel("Min"))
self.min_layout.addSpacing(10)
self.min_layout.addStretch(0.1)
self.min_layout.addLayout(self.min_radio_layout)
self.min_layout.addLayout(self.min_custom_layout)
self.min_layout.setStretchFactor(self.min_custom_layout, 1)
# add min line
self.contour_layout.addWidget(self.min_widget)
# max line
self.max_layout = QtGui.QHBoxLayout()
self.max_layout.setContentsMargins(0,0,0,0)
self.max_widget = QtGui.QWidget()
self.max_widget.setLayout(self.max_layout)
self.max_radio_layout = QtGui.QVBoxLayout()
self.max_current_radio = QtGui.QRadioButton('Current')
self.max_current_radio.setChecked(QtCore.Qt.Checked)
self.max_current_radio.toggled.connect(self._updateContours)
self.max_global_radio = QtGui.QRadioButton('Global')
self.max_global_radio.toggled.connect(self._updateContours)
self.max_radio_layout.addWidget(self.max_current_radio)
self.max_custom_layout = QtGui.QHBoxLayout()
self.max_custom_layout.setContentsMargins(0,0,0,0)
self.max_custom_layout.setSpacing(5)
self.max_custom_radio = QtGui.QRadioButton()
self.max_custom_radio.toggled.connect(self._updateContours)
self.max_custom_text = QtGui.QLineEdit()
self.max_custom_text.returnPressed.connect(self._updateContours)
self.max_custom_text.setDisabled(True)
self.max_custom_layout.addWidget(self.max_custom_radio)
self.max_custom_layout.addWidget(self.max_custom_text)
self.max_custom_layout.setStretchFactor(self.max_custom_text, 1)
self.max_layout.addWidget(QtGui.QLabel("Max"))
self.max_layout.addSpacing(10)
self.max_layout.addStretch(0.1)
self.max_layout.addLayout(self.max_radio_layout)
self.max_layout.addLayout(self.max_custom_layout)
self.max_layout.setStretchFactor(self.max_custom_layout, 1)
# add max line
self.contour_layout.addWidget(self.max_widget)
self.color_scheme_label = QtGui.QLabel("Color Scheme:")
self.color_scheme_component = QtGui.QComboBox()
self.color_scheme_component.addItem('HSV (Cool to Warm)')
self.color_scheme_component.addItem('Diverging (Blue to Red)')
self.color_scheme_component.addItem('Shock')
self.color_scheme_component.setToolTip('The color scheme used by the render view')
self.color_scheme_component.currentIndexChanged[str].connect(self._colorSchemeSelected)
# add color scheme selector
self.contour_layout.addWidget(self.color_scheme_component)
self.controls_layout.addWidget(self.contour_groupbox)
self.controls_layout.addSpacing(10)
#
# View group
#
self.toggle_groupbox = QtGui.QGroupBox("View")
self.toggle_groupbox.setFlat(True)
self.toggle_groupbox.setSizePolicy(QtGui.QSizePolicy.Expanding,QtGui.QSizePolicy.Fixed)
self.toggle_layout = QtGui.QHBoxLayout()
self.toggle_layout.setContentsMargins(0,0,0,0)
self.toggle_groupbox.setLayout(self.toggle_layout)
# Create the View Mesh toggle
self.draw_edges_checkbox = QtGui.QCheckBox("View Mesh")
self.draw_edges_checkbox.setToolTip('Show mesh elements')
self.draw_edges_checkbox.stateChanged[int].connect(self._drawEdgesChanged)
self.toggle_layout.addWidget(self.draw_edges_checkbox, alignment=QtCore.Qt.AlignHCenter)
self.toggle_layout.addStretch()
# Add a button for toggling the scalebar legend
self.hide_scalebar_checkbox = QtGui.QCheckBox("Scalebar")
self.hide_scalebar_checkbox.setToolTip('Toggle visibility of colorbar')
self.hide_scalebar_checkbox.setCheckState(QtCore.Qt.Checked)
self.hide_scalebar_checkbox.stateChanged[int].connect(self._hideScalebarChanged)
self.toggle_layout.addWidget(self.hide_scalebar_checkbox, alignment=QtCore.Qt.AlignHCenter)
self.toggle_layout.addStretch()
# Render view background selector
self.viewport_background = QtGui.QComboBox()
self.viewport_background.addItem('Gradient')
self.viewport_background.addItem('Black')
self.viewport_background.addItem('White')
self.viewport_background.setToolTip('Render view background')
self.viewport_background.currentIndexChanged[int].connect(self._renderViewBackgroundChanged)
self.toggle_layout.addWidget(self.viewport_background, alignment=QtCore.Qt.AlignRight)
self.controls_layout.addWidget(self.toggle_groupbox)
self.controls_layout.addSpacing(10)
#
# mesh display options displace/scale/clip
#
self.mesh_groupbox = QtGui.QGroupBox("Mesh")
self.mesh_groupbox.setFlat(True)
self.mesh_groupbox.setSizePolicy(QtGui.QSizePolicy.Expanding,QtGui.QSizePolicy.Fixed)
self.mesh_layout = QtGui.QVBoxLayout()
self.mesh_layout.setContentsMargins(0,0,0,0)
self.mesh_groupbox.setLayout(self.mesh_layout)
self.displace_layout = QtGui.QHBoxLayout()
self.displace_layout.setContentsMargins(0,0,0,0)
self.displace_checkbox = QtGui.QCheckBox("Displace")
self.displace_checkbox.setChecked(True)
self.displace_checkbox.toggled[bool].connect(self._displaceToggled)
self.displace_magnitude_label = QtGui.QLabel("Multiplier: ")
self.displace_magnitude_text = QtGui.QDoubleSpinBox()
self.displace_magnitude_text.setValue(self.current_displacement_magnitude)
self.displace_magnitude_text.setMinimumWidth(10)
self.displace_magnitude_text.valueChanged.connect(self._displaceMagnitudeChanged)
self.displace_layout.addWidget(self.displace_checkbox)
self.displace_layout.addSpacing(10)
self.displace_layout.addStretch(0.1)
self.displace_layout.addWidget(self.displace_magnitude_label)
self.displace_layout.addWidget(self.displace_magnitude_text)
self.displace_layout.setStretchFactor(self.displace_magnitude_text, 1)
# add displace line
self.mesh_layout.addLayout(self.displace_layout)
# Scale line
self.scale_layout = QtGui.QHBoxLayout()
self.scale_layout.setSpacing(0)
self.scale_layout.setContentsMargins(0,0,0,0)
self.scale_checkbox = QtGui.QCheckBox("Scale")
self.scale_checkbox.setChecked(False)
self.scale_checkbox.toggled[bool].connect(self._scaleToggled)
self.scale_layout.addWidget(self.scale_checkbox)
self.scale_layout.addSpacing(10)
self.scale_layout.addStretch(0.2)
self.scale_x_label = QtGui.QLabel(" x:")
self.scale_x_text = QtGui.QDoubleSpinBox()
self.scale_x_text.setValue(self.current_scale_x_magnitude)
self.scale_x_text.setSingleStep(0.1)
self.scale_x_text.setMinimumWidth(10)
self.scale_layout.addWidget(self.scale_x_label)
self.scale_layout.addWidget(self.scale_x_text)
self.scale_layout.setStretchFactor(self.scale_x_text, 1)
self.scale_layout.addSpacing(5)
self.scale_y_label = QtGui.QLabel(" y:")
self.scale_y_text = QtGui.QDoubleSpinBox()
self.scale_y_text.setValue(self.current_scale_y_magnitude)
self.scale_y_text.setSingleStep(0.1)
self.scale_y_text.setMinimumWidth(10)
self.scale_layout.addWidget(self.scale_y_label)
self.scale_layout.addWidget(self.scale_y_text)
self.scale_layout.setStretchFactor(self.scale_y_text, 1)
self.scale_layout.addSpacing(5)
self.scale_z_label = QtGui.QLabel(" z:")
self.scale_z_text = QtGui.QDoubleSpinBox()
self.scale_z_text.setValue(self.current_scale_z_magnitude)
self.scale_z_text.setSingleStep(0.1)
self.scale_z_text.setMinimumWidth(10)
self.scale_layout.addWidget(self.scale_z_label)
self.scale_layout.addWidget(self.scale_z_text)
self.scale_layout.setStretchFactor(self.scale_z_text, 1)
# Connect signals and slots for scale spinners
self.scale_x_text.valueChanged.connect(self._scaleMagnitudeChanged)
self.scale_y_text.valueChanged.connect(self._scaleMagnitudeChanged)
self.scale_z_text.valueChanged.connect(self._scaleMagnitudeChanged)
# add scale line
self.mesh_layout.addLayout(self.scale_layout)
# Clip line
self.clip_layout = QtGui.QHBoxLayout()
self.clip_layout.setContentsMargins(0,0,0,0)
self.clip_checkbox = QtGui.QCheckBox("Clip")
self.clip_checkbox.setToolTip('Toggle clipping mode where the solution can be sliced open')
self.clip_checkbox.setChecked(False)
self.clip_checkbox.toggled[bool].connect(self._clippingToggled)
self.clip_layout.addWidget(self.clip_checkbox)
self.clip_layout.addStretch(0.1)
self.clip_plane_combobox = QtGui.QComboBox()
self.clip_plane_combobox.setToolTip('Direction of the normal for the clip plane')
self.clip_plane_combobox.addItem('x')
self.clip_plane_combobox.addItem('y')
self.clip_plane_combobox.addItem('z')
self.clip_plane_combobox.currentIndexChanged[str].connect(self._clipNormalChanged)
self.clip_layout.addWidget(self.clip_plane_combobox)
self.clip_plane_slider = QtGui.QSlider(QtCore.Qt.Horizontal)
self.clip_plane_slider.setToolTip('Slide to change plane position')
self.clip_plane_slider.setRange(0, 100)
self.clip_plane_slider.setSliderPosition(50)
self.clip_plane_slider.sliderReleased.connect(self._clipSliderReleased)
self.clip_plane_slider.sliderMoved[int].connect(self._clipSliderMoved)
self.clip_layout.addWidget(self.clip_plane_slider, 1)
# add clip line
self.mesh_layout.addLayout(self.clip_layout)
# add mesh group
self.controls_layout.addWidget(self.mesh_groupbox)
#
# Open/Save/Reset button row
#
self.view_layout = QtGui.QHBoxLayout()
self.open_button = QtGui.QPushButton('Open')
self.open_button.setMaximumWidth(100)
self.open_button.setToolTip('Open an existing result')
self.open_button.clicked.connect(self._clickedOpen)
self.view_layout.addWidget(self.open_button, alignment=QtCore.Qt.AlignHCenter)
self.save_button = QtGui.QPushButton('Save')
self.save_button.setMaximumWidth(100)
self.save_button.setToolTip('Save the current view to a file')
self.save_button.clicked.connect(self._saveView)
self.view_layout.addWidget(self.save_button, alignment=QtCore.Qt.AlignHCenter)
self.view_layout.addStretch()
self.reset_button = QtGui.QPushButton('Reset')
self.reset_button.setMaximumWidth(100)
self.reset_button.setToolTip('Recenter the camera on the current result')
self.reset_button.clicked.connect(self._resetView)
self.view_layout.addWidget(self.reset_button, alignment=QtCore.Qt.AlignHCenter)
self.controls_layout.addLayout(self.view_layout)
#
# Time controls
#
self.beginning_button = QtGui.QToolButton()
self.beginning_button.setToolTip('Go to first timestep')
self.beginning_button.setIcon(QtGui.QIcon(pathname + '/resources/from_paraview/pqVcrFirst32.png'))
self.beginning_button.clicked.connect(self._beginningClicked)
self.back_button = QtGui.QToolButton()
self.back_button.setToolTip('Previous timestep')
self.back_button.setIcon(QtGui.QIcon(pathname + '/resources/from_paraview/pqVcrBack32.png'))
self.back_button.clicked.connect(self._backClicked)
self.play_button = QtGui.QToolButton()
self.play_button.setToolTip('Play through the currently computed timesteps')
self.play_button.setIcon(QtGui.QIcon(pathname + '/resources/from_paraview/pqVcrPlay32.png'))
self.play_button.clicked.connect(self._playClicked)
self.pause_button = QtGui.QToolButton()
self.pause_button.setToolTip('If playing this will pause playback')
self.pause_button.setDisabled(True)
self.pause_button.setIcon(QtGui.QIcon(pathname + '/resources/from_paraview/pqVcrPause32.png'))
self.pause_button.clicked.connect(self._pauseClicked)
self.forward_button = QtGui.QToolButton()
self.forward_button.setToolTip('Next timestep')
self.forward_button.setIcon(QtGui.QIcon(pathname + '/resources/from_paraview/pqVcrForward32.png'))
self.forward_button.clicked.connect(self._forwardClicked)
self.last_button = QtGui.QToolButton()
self.last_button.setToolTip('Go to last timestep')
self.last_button.setIcon(QtGui.QIcon(pathname + '/resources/from_paraview/pqVcrLast32.png'))
self.last_button.clicked.connect(self._lastClicked)
self.loop_button = QtGui.QToolButton()
self.loop_button.setToolTip('Toggle replaying all timesteps when the end is reached during playback. Note that as new timesteps finish they will automatically be picked up and added to the end of the sequence.')
self.loop_button.setCheckable(True)
self.loop_button.setIcon(QtGui.QIcon(pathname + '/resources/from_paraview/pqVcrLoop24.png'))
self.loop_button.toggled.connect(self._loopClicked)
self.currently_looping = False
self.time_slider_label = QtGui.QLabel("Timestep:")
self.time_slider = QtGui.QSlider(QtCore.Qt.Horizontal)
self.time_slider.setToolTip('Slide to select a timestep to display')
self.time_slider.setMinimumWidth(50)
self.time_slider.valueChanged.connect(self._timeSliderChanged)
self.time_slider.sliderReleased.connect(self._timeSliderReleased)
self.time_slider_textbox = QtGui.QLineEdit()
self.time_slider_textbox.setToolTip('Enter a number and press Enter to go to that timestep')
self.time_slider_textbox.setMaximumWidth(40)
self.time_slider_textbox.setMinimumWidth(40)
self.time_slider_textbox.returnPressed.connect(self._sliderTextboxReturn)
self.time_layout = QtGui.QHBoxLayout()
self.time_layout.addWidget(self.beginning_button)
self.time_layout.addWidget(self.back_button)
self.time_layout.addWidget(self.play_button)
self.time_layout.addWidget(self.pause_button)
self.time_layout.addWidget(self.forward_button)
self.time_layout.addWidget(self.last_button)
self.time_layout.addWidget(self.loop_button)
self.time_layout.addWidget(self.time_slider_label, alignment=QtCore.Qt.AlignRight)
self.time_layout.addWidget(self.time_slider)
self.time_layout.addWidget(self.time_slider_textbox, alignment=QtCore.Qt.AlignLeft)
self.time_layout.addSpacing(10)
# add time controls in the right pane under the render view
self.right_layout.addLayout(self.time_layout)
def _updateControls(self):
    """Rebuild the contour-variable combobox and block list from the current result.

    Repopulates the variable dropdown with nodal and elemental variables,
    rebuilds the "Show Blocks" checkbox model, attempts to restore the
    previously selected contour variable, and resizes the time slider range.
    """
    # Remember the current selection so it can be restored after repopulating.
    self.old_contour = self.variable_contour.currentText()
    self.variable_contour.clear()
    self.has_displacements = False
    # Nodal variables first, tagged with the nodal-data icon.
    for variable in self.exodus_result.current_nodal_variables:
        if 'ObjectId' not in variable:
            self.variable_contour.addItem(variable)
            item_num = self.variable_contour.count()-1
            self.variable_contour.setItemIcon(item_num,QtGui.QIcon(pathname + '/resources/from_paraview/pqNodalData16.png'))
            self.variable_contour_is_nodal[item_num] = True
            if 'disp' in variable:
                self.has_displacements = True
    # Then elemental variables, tagged with the elemental-data icon.
    for variable in self.exodus_result.current_elemental_variables:
        if 'ObjectId' not in variable:
            self.variable_contour.addItem(variable)
            item_num = self.variable_contour.count()-1
            self.variable_contour.setItemIcon(item_num,QtGui.QIcon(pathname + '/resources/from_paraview/pqElemData16.png'))
            self.variable_contour_is_nodal[item_num] = False
            if 'disp' in variable:
                self.has_displacements = True
    if self.has_displacements:
        self.displace_checkbox.setDisabled(False)
    # Rebuild the "Show Blocks" checkbox list; every block starts checked.
    self.block_view_model.clear()
    for block in self.exodus_result.blocks:
        block_display_name = str(block)
        if block in self.exodus_result.block_to_name:
            block_display_name += ' : ' + self.exodus_result.block_to_name[block]
        item = QtGui.QStandardItem(str(block_display_name))
        item.exodus_block = block
        item.setFlags(QtCore.Qt.ItemIsSelectable | QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsUserCheckable)
        item.setCheckState(QtCore.Qt.Checked)
        self.block_view_model.appendRow(item)
    # Try to restore back to the view of the variable we were looking at
    found_index = self.variable_contour.findText(self.old_contour)
    if found_index != -1:
        self.variable_contour.setCurrentIndex(found_index)
    else: # If this variable doesn't exist then we are probably running a new simulation... try to reset the camera
        self._resetView()
    self.time_slider.setMinimum(0)
    self.time_slider.setMaximum(self.current_max_timestep)
##
# Updates the list of available output file names.
# The previously selected entry is restored when it still exists.
def updateOutputControl(self):
    # Get the currently selected item so the selection can be restored below.
    name = self.output_control.currentText()
    # Clear the existing list
    self.output_control.clear()
    # Repopulate: display the block names, store the file names as item data.
    data = self.input_file_widget.getOutputFileAndBlockNames()
    if data is not None:  # identity test; "!= None" is unidiomatic
        # data[0] holds file names, data[1] holds the matching display names.
        for file_name, block_name in zip(data[0], data[1]):
            self.output_control.addItem(block_name, file_name)
    # Restore the selected name, if it is still available
    idx = self.output_control.findText(name)
    if idx != -1:
        self.output_control.setCurrentIndex(idx)
##
# Executes when the user selects an item from the output selection dropdown box.
def _outputChanged(self):
    selected = self.output_control.currentIndex()
    stored_name = self.output_control.itemData(selected)
    # Under PyQt4 the stored data comes back wrapped in a QVariant.
    if hasattr(QtCore, 'QVariant') and isinstance(stored_name, QtCore.QVariant):
        stored_name = str(stored_name.toString())
    self._openFile(stored_name, False)
def _lutFromColorTransferFunction(self, ctf):
    """Sample *ctf* at 256 evenly spaced points and return a vtkLookupTable."""
    lut = vtk.vtkLookupTable()
    lut.SetNumberOfColors(256)
    for i in xrange(256):
        r, g, b = ctf.GetColor(float(i) / 255.0)
        lut.SetTableValue(i, r, g, b, 1.0)
    lut.Build()
    return lut

def setupLuts(self):
    """Build the selectable color lookup tables.

    Populates self.luts in the same order as the color-scheme combobox
    (HSV, diverging, shock) and selects the first one as current.  The two
    transfer-function based tables previously duplicated the sampling loop;
    it now lives in _lutFromColorTransferFunction.
    """
    self.luts = []
    # HSV (blue to red) - the default scheme
    lut = vtk.vtkLookupTable()
    lut.SetHueRange(0.667, 0.0)
    lut.SetNumberOfColors(256)
    lut.Build()
    self.luts.append(lut)
    # Diverging (cool to warm) color scheme
    ctf = vtk.vtkColorTransferFunction()
    ctf.SetColorSpaceToDiverging()
    ctf.AddRGBPoint(0.0, 0.230, 0.299, 0.754)
    ctf.AddRGBPoint(1.0, 0.706, 0.016, 0.150)
    self.luts.append(self._lutFromColorTransferFunction(ctf))
    # Shock: a piecewise scheme tuned to a sample pressure range.
    # Renamed the bounds from min/max so the builtins are not shadowed.
    ctf = vtk.vtkColorTransferFunction()
    lo = 93698.4
    hi = 230532
    ctf.AddRGBPoint(self._normalize(lo, hi, 93698.4), 0.0, 0.0, 1.0)
    ctf.AddRGBPoint(self._normalize(lo, hi, 115592.0), 0.0, 0.905882, 1.0)
    ctf.AddRGBPoint(self._normalize(lo, hi, 138853.0), 0.0941176, 0.733333, 0.027451)
    ctf.AddRGBPoint(self._normalize(lo, hi, 159378.0), 1.0, 0.913725, 0.00784314)
    ctf.AddRGBPoint(self._normalize(lo, hi, 181272.0), 1.0, 0.180392, 0.239216)
    ctf.AddRGBPoint(self._normalize(lo, hi, 203165.0), 1.0, 0.701961, 0.960784)
    ctf.AddRGBPoint(self._normalize(lo, hi, 230532.0), 1.0, 1.0, 1.0)
    self.luts.append(self._lutFromColorTransferFunction(ctf))
    self.current_lut = self.luts[0]
def _normalize(self, min, max, value):
return (value - min) / (max - min)
def _blockViewItemChanged(self, item):
    """Show or hide a mesh block when its checkbox in the block list changes.

    Both branches of the original code were identical apart from the
    show/hide call, so the common refresh sequence is shared here.
    """
    if item.checkState() == QtCore.Qt.Checked:
        self.exodus_result.showBlock(item.exodus_block)
    else:
        self.exodus_result.hideBlock(item.exodus_block)
    # Re-read and re-render with the new block visibility.
    self.exodus_result.reader.Update()
    self.exodus_result.geom.Update()
    self.current_bounds = self.exodus_result.actor.GetBounds()
    self._updateContours()
def _displaceToggled(self, value):
    """Re-render the current timestep so the displacement toggle takes effect."""
    self._timeSliderReleased()
def _scaleToggled(self, value):
    """Re-render the current timestep so the scale toggle takes effect."""
    self._timeSliderReleased()
def _displaceMagnitudeChanged(self):
    """Store the displacement multiplier from the spinner and re-render."""
    self.current_displacement_magnitude = self.displace_magnitude_text.value()
    self._timeSliderReleased()
def _scaleMagnitudeChanged(self):
    """Pull the x/y/z scale spinner values into state and re-render."""
    self.current_scale_x_magnitude = self.scale_x_text.value()
    self.current_scale_z_magnitude = self.scale_z_text.value()
    self.current_scale_y_magnitude = self.scale_y_text.value()
    self._timeSliderReleased()
def _drawEdgesChanged(self, value):
    """Toggle mesh edge display on both the normal and clipped actors."""
    show_edges = (value == QtCore.Qt.Checked)
    for prop in (self.exodus_result.actor.GetProperty(),
                 self.exodus_result.clip_actor.GetProperty()):
        if show_edges:
            prop.EdgeVisibilityOn()
        else:
            prop.EdgeVisibilityOff()
    self.vtkwidget.repaint()
##
# A method for toggling visibility of the scale bar legend, it is controlled
# by the 'Scalebar' toggle on the Visualize tab
# @param value The integer value from the checkbox (Checked = show)
def _hideScalebarChanged(self, value):
    scalar_bar = self.exodus_result.scalar_bar
    # Show when checked, hide when unchecked.
    if value == QtCore.Qt.Checked:
        scalar_bar.VisibilityOn()
    else:
        scalar_bar.VisibilityOff()
    # Update the GUI
    self.vtkwidget.repaint()
##
# Set render view text to print style (black, no shadow)
def _setRenderViewTextBlack(self):
    if self.exodus_result is None:
        return
    scalar_bar = self.exodus_result.scalar_bar
    # Apply the same styling to both the title and label text properties.
    for prop, setter in ((scalar_bar.GetTitleTextProperty(), scalar_bar.SetTitleTextProperty),
                         (scalar_bar.GetLabelTextProperty(), scalar_bar.SetLabelTextProperty)):
        prop.SetColor(0, 0, 0)
        prop.ShadowOff()
        setter(prop)
##
# Set render view text to white/shadow style
def _setRenderViewTextWhite(self):
    if self.exodus_result is None:
        return
    scalar_bar = self.exodus_result.scalar_bar
    # Apply the same styling to both the title and label text properties.
    for prop, setter in ((scalar_bar.GetTitleTextProperty(), scalar_bar.SetTitleTextProperty),
                         (scalar_bar.GetLabelTextProperty(), scalar_bar.SetLabelTextProperty)):
        prop.SetColor(1, 1, 1)
        prop.ShadowOn()
        setter(prop)
##
# A method for selecting the render view background, it is controlled
# by the viewport_background combobox on the Visualize tab
# @param value The combobox value (0=Gradient, 1=Black, 2=White)
def _renderViewBackgroundChanged(self, value):
    # Gradient
    if value == 0:
        # The original first set the background to black and immediately
        # overwrote it with grey; the redundant call has been removed.
        self.renderer.SetBackground(0.2, 0.2, 0.2)
        self.renderer.SetBackground2(1, 1, 1)
        self.renderer.SetGradientBackground(1)
        self._setRenderViewTextWhite()
    # Black
    elif value == 1:
        self.renderer.SetBackground(0, 0, 0)
        self.renderer.SetGradientBackground(0)
        self._setRenderViewTextWhite()
    # White
    elif value == 2:
        self.renderer.SetBackground(1, 1, 1)
        self.renderer.SetGradientBackground(0)
        self._setRenderViewTextBlack()
    # Update the GUI
    self.vtkwidget.repaint()
def _fillComponentCombo(self, variable_name, components):
    """Populate the component dropdown (Magnitude/X/Y[/Z]) for a variable.

    The dropdown is enabled only for vector variables in 2D/3D results;
    Z is offered only for 3D results.
    """
    self.variable_component.clear()
    self.variable_component.addItem('Magnitude')
    is_vector = components[variable_name] > 1
    dim = self.exodus_result.current_dim
    if is_vector and dim >= 2:
        self.variable_component.setDisabled(False)
        self.variable_component.addItem('X')
        self.variable_component.addItem('Y')
    else:
        self.variable_component.setDisabled(True)
    if is_vector and dim == 3:
        self.variable_component.addItem('Z')
def _contourVariableSelected(self, index):
    """Handle a new contour-variable selection.

    Records the selection, fills the component dropdown, and restores the
    contour (min/max) choices previously saved for this variable.
    """
    value_string = str(self.variable_contour.itemText(index))
    self.current_variable = value_string
    self.current_variable_index = index
    # Whether data is read from point (nodal) or cell (elemental) arrays.
    if index in self.variable_contour_is_nodal:
        self.current_variable_is_nodal = self.variable_contour_is_nodal[index]
    else:
        self.current_variable_is_nodal = True
    # Suppress saving contour choices while they are being restored.
    self.currently_restoring_contours = True
    # Maybe results haven't been written yet...
    # NOTE(review): this early return leaves currently_restoring_contours
    # True, so subsequent _updateContours calls skip saving choices until
    # another selection happens -- confirm whether that is intentional.
    if not self.exodus_result.data.GetPointData().GetVectors(value_string) and not self.exodus_result.data.GetCellData().GetVectors(value_string):
        return
    if value_string in self.exodus_result.current_nodal_components:
        self._fillComponentCombo(value_string, self.exodus_result.current_nodal_components)
    elif value_string in self.exodus_result.current_elemental_components:
        self._fillComponentCombo(value_string, self.exodus_result.current_elemental_components)
    if self.current_variable not in self.contour_choices:
        self.contour_choices[self.current_variable] = ContourChoices()
    self.contour_choices[self.current_variable].restore(self)
    self.currently_restoring_contours = False
def _variableComponentSelected(self, value):
value_string = str(value)
self.current_component = value_string
if value_string == 'Magnitude':
self.component_index = -1
elif value_string == 'X':
self.component_index = 0
elif value_string == 'Y':
self.component_index = 1
elif value_string == 'Z':
self.component_index = 2
self._updateContours()
def _updateContours(self):
    """Apply the current variable/component/range/color choices to the mappers.

    Fixes: the two bare "except:" clauses are narrowed to the errors float()
    actually raises, and the locals previously named min/max no longer shadow
    the builtins.
    """
    self.exodus_result.setColorScheme(self.current_lut)
    # Select magnitude vs. an individual vector component on the lookup table.
    if self.component_index == -1:
        self.exodus_result.lut.SetVectorModeToMagnitude()
    elif self.component_index == 0:
        self.exodus_result.lut.SetVectorModeToComponent()
        self.exodus_result.lut.SetVectorComponent(0)
    elif self.component_index == 1:
        self.exodus_result.lut.SetVectorModeToComponent()
        self.exodus_result.lut.SetVectorComponent(1)
    elif self.component_index == 2:
        self.exodus_result.lut.SetVectorModeToComponent()
        self.exodus_result.lut.SetVectorComponent(2)
    if self.clip_checkbox.isChecked():
        self.exodus_result.clipper.Modified()
        self.exodus_result.clipper.Update()
        self.exodus_result.clip_geom.Update()
        self.exodus_result.clip_mapper.Update()
    # Pick nodal (point) or elemental (cell) data for the current variable.
    data = None
    if self.current_variable_is_nodal and self.current_variable in self.exodus_result.current_nodal_components:
        data = self.exodus_result.data.GetPointData().GetVectors(self.current_variable)
        self.exodus_result.mapper.SetScalarModeToUsePointFieldData()
        self.exodus_result.clip_mapper.SetScalarModeToUsePointFieldData()
    elif self.current_variable in self.exodus_result.current_elemental_components:
        data = self.exodus_result.data.GetCellData().GetVectors(self.current_variable)
        self.exodus_result.mapper.SetScalarModeToUseCellFieldData()
        self.exodus_result.clip_mapper.SetScalarModeToUseCellFieldData()
    self.exodus_result.mapper.SelectColorArray(self.current_variable)
    self.exodus_result.clip_mapper.SelectColorArray(self.current_variable)
    if data:
        self.current_range = data.GetRange(self.component_index)
        # In "Current" mode mirror the data range into the text boxes.
        if self.min_current_radio.isChecked():
            self.min_custom_text.setText(str(self.current_range[0]))
            self.min_custom_text.setCursorPosition(0)
        if self.max_current_radio.isChecked():
            self.max_custom_text.setText(str(self.current_range[1]))
            self.max_custom_text.setCursorPosition(0)
    # The custom text boxes are editable only in "custom" mode.
    if self.min_custom_radio.isChecked():
        self.min_custom_text.setDisabled(False)
    else:
        self.min_custom_text.setDisabled(True)
    if self.max_custom_radio.isChecked():
        self.max_custom_text.setDisabled(False)
    else:
        self.max_custom_text.setDisabled(True)
    min_value = 0.0
    try:
        min_value = float(self.min_custom_text.displayText())
    except (ValueError, TypeError):
        min_value = 0.0
    max_value = 0.0
    try:
        max_value = float(self.max_custom_text.displayText())
    except (ValueError, TypeError):
        max_value = 0.0
    if self.current_variable not in self.contour_choices:
        self.contour_choices[self.current_variable] = ContourChoices()
    if not self.currently_restoring_contours:
        self.contour_choices[self.current_variable].save(self)
    the_range = (min_value, max_value)
    # Only apply the range when it is not inverted.
    if min_value <= max_value:
        self.exodus_result.mapper.SetScalarRange(the_range)
        self.exodus_result.clip_mapper.SetScalarRange(the_range)
    self.exodus_result.scalar_bar.SetTitle(self.current_variable)
    self.renderer.AddActor2D(self.exodus_result.scalar_bar)
    self.vtkwidget.repaint()
def _colorSchemeSelected(self, value):
    """Switch the active lookup table and re-apply the contours."""
    scheme_index = self.color_scheme_component.currentIndex()
    self.current_lut = self.luts[scheme_index]
    self._updateContours()
def _openFile(self, file_name, reset=True):
    """Load *file_name* as the current result and jump to its last timestep."""
    self._clear()
    self.base_stamp = os.path.getmtime(file_name)
    self.file_name = str(file_name)
    self.new_stuff_to_read = True
    # Read twice so adaptivity files written after the base file are picked up.
    self._updateData()
    self._updateData()
    # Go to the last timestep
    self._lastClicked()
    if reset:
        # Recenter the camera on the freshly loaded result.
        self._resetView()
def _clickedOpen(self):
    """Prompt for an Exodus result file and open it."""
    file_name = QtGui.QFileDialog.getOpenFileName(self, "Open Result", "~/", "Input Files (*.e)")
    # PyQt4 returns a QString; normalize to str.
    if isinstance(file_name, QtCore.QString):
        file_name = str(file_name)
    if not isinstance(file_name, basestring): # This happens when using pyside
        # PySide returns a (name, selected_filter) tuple.
        file_name = file_name[0]
    if file_name:
        self._openFile(file_name)
def _resetView(self):
    """Recenter the camera on the result, looking down +z with y pointing up."""
    self.renderer.ResetCamera()
    camera = self.renderer.GetActiveCamera()
    fp = camera.GetFocalPoint()
    p = camera.GetPosition()
    # Preserve the camera distance but move it onto the +z axis through the focal point.
    dist = math.sqrt(sum((pc - fc) ** 2 for pc, fc in zip(p, fp)))
    camera.SetPosition(fp[0], fp[1], fp[2] + dist)
    camera.SetViewUp(0.0, 1.0, 0.0)
    self.vtkwidget.repaint()
def _saveView(self):
    """Save the current render view to a PNG file chosen by the user."""
    file_name = QtGui.QFileDialog.getSaveFileName(self, "Image File Name", "~/", "Image Files (*.png)")
    # PyQt4 returns a QString; normalize to str.
    if isinstance(file_name, QtCore.QString):
        file_name = str(file_name)
    if not isinstance(file_name, basestring): # This happens when using pyside
        # PySide returns a (name, selected_filter) tuple.
        file_name = file_name[0]
    if file_name != '':
        # Capture the render window contents and write them as PNG.
        w2i = vtk.vtkWindowToImageFilter()
        writer = vtk.vtkPNGWriter()
        w2i.SetInput(self.vtkwidget.GetRenderWindow())
        w2i.Update()
        writer.SetInputConnection(w2i.GetOutputPort())
        writer.SetFileName(str(file_name))
        # Force a render first so the captured image is up to date.
        self.vtkwidget.GetRenderWindow().Render()
        writer.Write()
def _automaticUpdateChanged(self, value):
    """Track whether new timesteps should be read automatically."""
    self.automatically_update = (value == QtCore.Qt.Checked)
def _beginningClicked(self):
    """Jump to the first timestep."""
    self.time_slider.setSliderPosition(0)
    self._timeSliderReleased()
def _backClicked(self):
    """Step one timestep backwards."""
    previous = self.time_slider.sliderPosition() - 1
    self.time_slider.setSliderPosition(previous)
    self._timeSliderReleased()
def _playClicked(self):
    """Play through the currently computed timesteps.

    Runs a blocking loop that pumps the Qt event queue between frames so the
    GUI stays responsive; _pauseClicked clears self.currently_playing to
    break out, and self.currently_looping makes playback restart at the end.
    """
    self.play_button.setDisabled(True)
    self.pause_button.setDisabled(False)
    self.currently_playing = True
    first = True
    # Run once, and keep repeating while looping is on and not paused.
    while((first or self.currently_looping) and self.currently_playing):
        first = False
        # If the slider is at the end then start over
        self.qt_app.processEvents()
        time.sleep(0.02)
        self.qt_app.processEvents()
        if self.time_slider.sliderPosition() == self.time_slider.maximum():
            self.time_slider.setSliderPosition(0)
        # Advance one timestep at a time until the end of the slider.
        while self.time_slider.sliderPosition() < self.time_slider.maximum():
            self.time_slider.setSliderPosition(self.time_slider.sliderPosition()+1)
            self.qt_app.processEvents()
            self._timeSliderReleased()
            time.sleep(0.02)
            self.qt_app.processEvents()
            if not self.currently_playing:
                break
    self.play_button.setDisabled(False)
    self.pause_button.setDisabled(True)
def _pauseClicked(self):
    """Stop playback; _playClicked's loop exits when it sees the cleared flag."""
    self.currently_playing = False
    self.play_button.setDisabled(False)
    self.pause_button.setDisabled(True)
def _forwardClicked(self):
    """Step one timestep forwards."""
    following = self.time_slider.sliderPosition() + 1
    self.time_slider.setSliderPosition(following)
    self._timeSliderReleased()
def _lastClicked(self):
    """Jump to the last computed timestep."""
    end = self.time_slider.maximum()
    self.time_slider.setSliderPosition(end)
    self._timeSliderReleased()
def _loopClicked(self, state):
if state:
self.currently_looping = True
else:
self.currently_looping = False
def _timeSliderChanged(self):
    """Mirror the slider position into the timestep text box."""
    position = self.time_slider.sliderPosition()
    self.time_slider_textbox.setText(str(position))
def _timeSliderReleased(self):
    """Display the timestep currently selected on the slider / text box.

    Swaps in the ExodusResult that owns the selected global timestep,
    re-applies the clip/edges/block-visibility/displace/scale settings to
    it, reads the corresponding local timestep and refreshes the contours.
    """
    textbox_string = self.time_slider_textbox.text()
    if textbox_string == '':
        textbox_string = str(self.exodus_result.min_timestep)
    if int(textbox_string) in self.timestep_to_exodus_result:
        # Remove the previous result's actors before swapping results.
        for actor in self.exodus_result.current_actors:
            self.renderer.RemoveActor(actor)
        self.exodus_result = self.timestep_to_exodus_result[int(textbox_string)]
        # Add the clipped or normal actor and carry over the edge setting.
        if self.clip_checkbox.isChecked():
            self.renderer.AddActor(self.exodus_result.clip_actor)
            if self.draw_edges_checkbox.checkState() == QtCore.Qt.Checked:
                self.exodus_result.clip_actor.GetProperty().EdgeVisibilityOn()
            else:
                self.exodus_result.clip_actor.GetProperty().EdgeVisibilityOff()
        else:
            self.renderer.AddActor(self.exodus_result.actor)
            if self.draw_edges_checkbox.checkState() == QtCore.Qt.Checked:
                self.exodus_result.actor.GetProperty().EdgeVisibilityOn()
            else:
                self.exodus_result.actor.GetProperty().EdgeVisibilityOff()
        # Re-apply per-block visibility from the "Show Blocks" list.
        num_block_view_items = self.block_view_model.rowCount()
        for i in xrange(num_block_view_items):
            item = self.block_view_model.item(i)
            if item.checkState() == QtCore.Qt.Checked:
                self.exodus_result.showBlock(item.exodus_block)
            else:
                self.exodus_result.hideBlock(item.exodus_block)
        # Displacement settings.
        if self.has_displacements and self.displace_checkbox.isChecked():
            self.exodus_result.reader.SetApplyDisplacements(1)
            self.exodus_result.reader.SetDisplacementMagnitude(float(self.current_displacement_magnitude))
        else:
            self.exodus_result.reader.SetApplyDisplacements(0)
        # Scale settings.
        if self.scale_checkbox.isChecked():
            self.exodus_result.actor.SetScale(self.current_scale_x_magnitude, self.current_scale_y_magnitude, self.current_scale_z_magnitude)
        else:
            self.exodus_result.actor.SetScale(1.0, 1.0, 1.0)
        # Read the local timestep that maps to the selected global timestep.
        if self.exodus_results and self.exodus_result.reader:
            self.exodus_result.reader.SetTimeStep(self.timestep_to_timestep[int(textbox_string)])
            self.exodus_result.reader.Update()
            self.exodus_result.geom.Update()
            self.current_bounds = self.exodus_result.actor.GetBounds()
        self._updateContours()
def _sliderTextboxReturn(self):
self.time_slider.setSliderPosition(int(self.time_slider_textbox.text()))
self._timeSliderReleased()
    def _associateResultsWithTimesteps(self):
        """Build lookup tables mapping a global timestep index across all
        open Exodus result files.

        Populates:
          timestep_to_exodus_result: global timestep -> owning ExodusResult
          timestep_to_timestep:      global timestep -> reader-local timestep
          current_max_timestep:      highest global timestep assigned
        """
        self.timestep_to_exodus_result = {}
        self.timestep_to_timestep = {}
        self.current_max_timestep = -1
        for result in self.exodus_results:
            # VTK 6 renamed UpdateTimeInformation() to UpdateInformation().
            if vtk.VTK_MAJOR_VERSION <= 5:
                result.reader.UpdateTimeInformation()
            else:
                result.reader.UpdateInformation()
            # NOTE(review): these shadow the min/max builtins locally.
            min = result.reader.GetTimeStepRange()[0]
            max = result.reader.GetTimeStepRange()[1]
            for timestep in xrange(min, max+1):
                self.current_max_timestep += 1
                self.timestep_to_exodus_result[self.current_max_timestep] = result
                self.timestep_to_timestep[self.current_max_timestep] = timestep
def _updateData(self):
# Check to see if there are new exodus files with adapted timesteps in them.
if self.file_name and self.exodus_result:
for file_name in sorted(glob.glob(self.file_name + '-s*')):
file_stamp = os.path.getmtime(file_name)
if int(file_stamp) >= int(self.base_stamp) and int(file_stamp) <= int(time.time() - 1) and file_name not in self.file_names:
self.file_names.append(file_name)
exodus_result = ExodusResult(self, self.plane)
exodus_result.setFileName(file_name, self.current_lut)
self.exodus_results.append(exodus_result)
self.new_stuff_to_read = True
if not self.exodus_result:
# If the file_name is not set in the object, set if from the dropdown selection, otherwise use the stored name
if not self.file_name: # Might have been set by opening a file or from drop-down
idx = self.output_control.currentIndex()
file_name = self.output_control.itemData(idx)
if hasattr(QtCore, 'QVariant') and isinstance(file_name, QtCore.QVariant):
file_name = str(file_name.toString())
else:
file_name = self.file_name
if os.path.exists(file_name):
file_stamp = os.path.getmtime(file_name)
if int(file_stamp) >= int(self.base_stamp) and int(file_stamp) <= int(time.time() - 1) and file_name not in self.file_names:
self.file_name = file_name
self.exodus_result = ExodusResult(self, self.plane)
self.exodus_result.setFileName(file_name, self.current_lut)
self.exodus_results.append(self.exodus_result)
self.current_max_timestep = self.exodus_result.max_timestep
self.renderer.AddActor(self.exodus_result.actor)
self._drawEdgesChanged(self.draw_edges_checkbox.checkState())
if self.first:
self.first = False
self.renderer.ResetCamera()
# Avoid z-buffer fighting
vtk.vtkPolyDataMapper().SetResolveCoincidentTopologyToPolygonOffset()
if self.clip_checkbox.isChecked():
_clippingToggled(True)
self.vtkwidget.repaint()
self._updateControls()
self.time_slider.setSliderPosition(self.current_max_timestep)
if self.new_stuff_to_read and self.exodus_result and self.automatically_update:
self._associateResultsWithTimesteps()
# self.exodus_result.reader.UpdateTimeInformation()
# range = self.exodus_result.reader.GetTimeStepRange()
# self.exodus_result.min_timestep = range[0]
# self.exodus_result.max_timestep = range[1]
self.time_slider.setMinimum(0)
# Only automatically move forward if they're on the current step
if self.time_slider.sliderPosition() == self.time_slider.maximum():
self.time_slider.setMaximum(self.current_max_timestep)
self.time_slider.setSliderPosition(self.current_max_timestep)
self._timeSliderReleased()
if self.clip_checkbox.isChecked():
self._clipSliderReleased()
self.vtkwidget.repaint()
else:
self.time_slider.setMaximum(self.current_max_timestep)
self.new_stuff_to_read = False
    def _timestepBegin(self):
        """Timestep-begin callback: flag that new output may be on disk."""
        self.new_stuff_to_read = True
    def _timestepEnd(self):
        """Timestep-end callback: no per-timestep work needed here."""
        pass
    def _clear(self):
        """Remove all Exodus actors from the renderer and reset per-run state."""
        self.application.addExodusResultActors(self.renderer)
        self.file_name = None
        self.file_names = []
        if not self.exodus_result:
            return
        for actor in self.exodus_result.current_actors:
            self.renderer.RemoveActor(actor)
        # Empty the list in place rather than rebinding it.
        del self.exodus_result.current_actors[:]
        self.exodus_result = None
        self.exodus_results = []
        self.timestep_to_exodus_result = {}
    def _runStarted(self):
        """Run-started callback: stamp the start time, clear old results, poll."""
        # Set the base time; output files older than this are ignored.
        self.base_stamp = time.time()
        self._clear()
        self.timer.start()
    def _finalRead(self):
        """Perform one last data refresh after the run has finished."""
        self.new_stuff_to_read = True # Set this to true so we get one more update
        # Do it twice in case of adapted results
        self._updateData()
        self._updateData()
def _runStopped(self):
self.timer.stop()
self.run_stopped_timer = QtCore.QTimer()
self.run_stopped_timer.setInterval(1000) # Wait a second before updating the plots one last time
self.run_stopped_timer.setSingleShot(True)
self.run_stopped_timer.timeout.connect(self._finalRead)
self.run_stopped_timer.start()
    def _clippingToggled(self, value):
        """Swap between the clipped and unclipped actor of the current result.

        value: truthy to show clip_actor (resetting the clip slider to its
        midpoint), falsy to restore the full actor.
        """
        if value:
            self.renderer.RemoveActor(self.exodus_result.current_actor)
            self.renderer.AddActor(self.exodus_result.clip_actor)
            self.exodus_result.current_actor = self.exodus_result.clip_actor
            # Center the clip plane when clipping is enabled.
            self.clip_plane_slider.setSliderPosition(50)
            self._clipSliderMoved(50)
            self._clipSliderReleased()
        else:
            self.renderer.RemoveActor(self.exodus_result.current_actor)
            self.renderer.AddActor(self.exodus_result.actor)
            self.exodus_result.current_actor = self.exodus_result.actor
        self.vtkwidget.repaint()
    def _clipNormalChanged(self, value):
        """Align the clip plane normal with axis `value` ('x', 'y', else z)."""
        # Anchor the plane at the minimum corner of the current bounds.
        self.plane.SetOrigin(self.current_bounds[0],
                             self.current_bounds[2],
                             self.current_bounds[4])
        if value == 'x':
            self.plane.SetNormal(1, 0, 0)
        elif value == 'y':
            self.plane.SetNormal(0, 1, 0)
        else:
            self.plane.SetNormal(0, 0, 1)
        # Re-center the slider on the new axis and redraw.
        self.clip_plane_slider.setSliderPosition(50)
        self._clipSliderMoved(50)
        self.vtkwidget.repaint()
    def _clipSliderReleased(self):
        """Finalize a clip-slider drag: recompute contours and repaint."""
        self._updateContours()
        self.vtkwidget.repaint()
def _clipSliderMoved(self, value):
direction = str(self.clip_plane_combobox.currentText())
min = 0
max = 0
if direction == 'x':
min = self.current_bounds[0]
max = self.current_bounds[1]
elif direction == 'y':
min = self.current_bounds[2]
max = self.current_bounds[3]
elif direction == 'z':
min = self.current_bounds[4]
max = self.current_bounds[5]
step_size = (max - min)/100.0
steps = value
distance = float(steps)*step_size
position = min + distance
old = self.plane.GetOrigin()
self.plane.SetOrigin(position if direction == 'x' else old[0],
position if direction == 'y' else old[1],
position if direction == 'z' else old[2])
self._updateContours()
self.vtkwidget.repaint()
|
katyhuff/moose
|
gui/vtk/ExodusResultRenderWidget.py
|
Python
|
lgpl-2.1
| 50,695
|
[
"VTK"
] |
a37c17114577108c0b4832cc0d407613b4ebe6545c3d81948f7bfe9cf2ecf23b
|
# Copyright (C) 2002 Greg Landrum and Rational Discovery LLC
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""piddleQt
This module implements the PIDDLE/Sping API for a Qt canvas
Bits have been shamelessly cobbled from piddleSVG.py
Greg Landrum (Landrum@RationalDiscovery.com) 29 October, 2002
"""
"""
Functionality implemented:
x drawLine
x drawPolygon
x drawString
x drawImage
Known problems:
"""
from rdkit.sping import pid
from qt import *
from qtcanvas import *
from math import *
import copy
def _ColorToQt(color):
  """Map a sping.pid color to a QColor; pid.transparent maps to None."""
  if color == pid.transparent:
    return None
  channels = [int(component * 255) for component in (color.red, color.green, color.blue)]
  return QColor(*channels)
class QCanvasRotText(QCanvasText):
  """ used to draw (UGLY) rotated text
  """

  def __init__(self, txt, canvas, angle=0):
    QCanvasText.__init__(self, txt, canvas)
    # Rotation angle in degrees (sign flipped before use in draw()).
    self._angle = angle

  def draw(self, qP):
    """Paint the text rotated about its own anchor point.

    The painter is rotated, then translated so that the item's anchor
    (x, y) ends up at its rotated position in painter coordinates.
    """
    qP.save()
    x = self.x()
    y = self.y()
    theta = -self._angle
    qP.rotate(theta)
    qP.translate(-x, -y)
    # Compute where the anchor lands after rotating by -theta (radians).
    thetaR = theta * pi / 180.
    newX = cos(-thetaR) * x - sin(-thetaR) * y
    newY = sin(-thetaR) * x + cos(-thetaR) * y
    qP.translate(newX, newY)
    QCanvasText.draw(self, qP)
    qP.restore()
class QtCanvas(pid.Canvas):
  """PIDDLE/Sping canvas that renders onto a QCanvas.

  Every drawing call allocates QCanvas* item(s); references are kept in
  self.objs (sometimes as tuples, to keep pixmap data alive) so that
  clear()/_initOutput() can hide them and Python doesn't garbage collect
  them while they are on screen.
  """

  def __init__(self, destCanvas, size=(300, 300), name='QtCanvas'):
    self.size = size
    pid.Canvas.__init__(self, size, name)
    self._canvas = destCanvas
    self._brush = QBrush()
    self._pen = QPen()
    self._font = QApplication.font()
    self.objs = []
    self._initOutput()
    self.nObjs = 0

  def _initOutput(self):
    # Hide items from any previous drawing pass and reset bookkeeping.
    for obj in self.objs:
      if type(obj) == tuple:
        obj[0].hide()
      else:
        obj.hide()
    self.objs = []
    self.nObjs = 0

  def _adjustFont(self, font):
    # Copy a sping font description onto the shared QFont.
    if font.face:
      self._font.setFamily(font.face)
    self._font.setBold(font.bold)
    self._font.setItalic(font.italic)
    self._font.setPointSize(font.size)
    self._font.setUnderline(font.underline)

  # public functions
  def clear(self):
    self._initOutput()

  def flush(self):
    self._canvas.update()

  def save(self, file=None, format=None):
    # No direct file output is implemented here; just trigger a repaint.
    self._canvas.update()

  #------------- drawing methods --------------
  def drawLine(self, x1, y1, x2, y2, color=None, width=None, dash=None, **kwargs):
    "Draw a straight line between x1,y1 and x2,y2."
    # set color... bail out if the effective color is transparent.
    if color:
      if color == pid.transparent:
        return
    elif self.defaultLineColor == pid.transparent:
      return
    else:
      color = self.defaultLineColor
    qColor = _ColorToQt(color)

    if width:
      w = width
    else:
      w = self.defaultLineWidth
    self._pen.setColor(qColor)
    self._pen.setWidth(int(w))
    if dash is not None:
      self._pen.setStyle(Qt.DashLine)
    else:
      self._pen.setStyle(Qt.SolidLine)
    l = QCanvasLine(self._canvas)
    l.setPen(self._pen)
    l.setPoints(x1, y1, x2, y2)
    l.setVisible(1)
    l.setZ(self.nObjs)
    if dash is not None:
      # Restore the shared pen to solid for subsequent calls.
      self._pen.setStyle(Qt.SolidLine)
    self.nObjs += 1
    self.objs.append(l)

  def drawPolygon(self, pointlist, edgeColor=None, edgeWidth=None, fillColor=pid.transparent,
                  closed=0, dash=None, **kwargs):
    """drawPolygon(pointlist) -- draws a polygon

    pointlist: a list of (x,y) tuples defining vertices
    """
    # Flatten [(x, y), ...] into [x, y, x, y, ...] for QPointArray.
    pts = []
    for point in pointlist:
      pts += list(point)
    ptArr = QPointArray()
    ptArr.setPoints(pts)

    # set color for fill...
    filling = 0
    if fillColor:
      if fillColor != pid.transparent:
        filling = 1
        qColor = _ColorToQt(fillColor)
        self._brush.setColor(qColor)
    if filling:
      self._brush.setStyle(Qt.SolidPattern)
    else:
      self._brush.setStyle(Qt.NoBrush)

    # set color for edge...
    if not edgeColor:
      edgeColor = self.defaultLineColor
    qColor = _ColorToQt(edgeColor)
    if qColor:
      self._pen.setColor(qColor)

    # set edge width...
    if edgeWidth is None:
      edgeWidth = self.defaultLineWidth
    # int() for consistency with drawLine's setWidth(int(w)).
    self._pen.setWidth(int(edgeWidth))
    self._pen.setJoinStyle(Qt.RoundJoin)

    if dash is not None:
      self._pen.setStyle(Qt.DashLine)
    else:
      self._pen.setStyle(Qt.SolidLine)
    poly = QCanvasPolygon(self._canvas)
    poly.setPen(self._pen)
    poly.setBrush(self._brush)
    poly.setPoints(ptArr)
    poly.setVisible(1)
    poly.setZ(self.nObjs)
    self.nObjs += 1
    self.objs.append(poly)

    # qt is moronic and doesn't draw the outlines of polygons
    if edgeColor != pid.transparent:
      for i in range(len(pointlist) - 1):
        l = QCanvasLine(self._canvas)
        l.setPoints(pointlist[i][0], pointlist[i][1], pointlist[i + 1][0], pointlist[i + 1][1])
        l.setPen(self._pen)
        l.setVisible(1)
        l.setZ(self.nObjs)
        self.objs.append(l)
      if closed:
        l = QCanvasLine(self._canvas)
        l.setPoints(pointlist[0][0], pointlist[0][1], pointlist[-1][0], pointlist[-1][1])
        l.setPen(self._pen)
        l.setVisible(1)
        l.setZ(self.nObjs)
        self.objs.append(l)
      if dash is not None:
        self._pen.setStyle(Qt.SolidLine)
    self.nObjs += 1

  def drawString(self, s, x, y, font=None, color=None, angle=0, **kwargs):
    """Draw string s anchored near (x, y), optionally rotated by angle degrees."""
    # set color... bail out if the effective color is transparent.
    if color:
      if color == pid.transparent:
        return
    elif self.defaultLineColor == pid.transparent:
      return
    else:
      color = self.defaultLineColor
    if font is None:
      font = self.defaultFont

    qColor = _ColorToQt(color)

    if font is not None:
      self._adjustFont(font)
    if angle != 0:
      txt = QCanvasRotText(s, self._canvas, angle=angle)
    else:
      txt = QCanvasText(s, self._canvas)

    txt.setTextFlags(Qt.AlignLeft | Qt.AlignVCenter)
    if self._font:
      txt.setFont(self._font)
    txt.setColor(qColor)
    txt.setVisible(1)
    txt.setX(x)
    # Shift up by the font size so (x, y) acts like a baseline anchor.
    y -= font.size
    txt.setY(y)
    txt.setZ(self.nObjs)
    self.nObjs += 1
    self.objs.append(txt)

  def drawImage(self, image, x1, y1, x2=None, y2=None, **kwargs):
    """Draw a PIL image with its top-left corner at (x1, y1).

    x2/y2 are accepted for API compatibility but not used here.
    """
    # BUG FIX: PNG data is binary, so it must be buffered in BytesIO.
    # The previous StringIO-based code fails on Python 3, where PIL's
    # save() writes bytes and loadFromData() expects bytes.
    from io import BytesIO
    bio = BytesIO()
    image.save(bio, format='png')
    base = QPixmap()
    base.loadFromData(bio.getvalue())
    pm = QCanvasPixmap(base, QPoint(0, 0))
    pma = QCanvasPixmapArray()
    pma.setImage(0, pm)
    img = QCanvasSprite(pma, self._canvas)
    img.setVisible(1)
    img.setX(x1)
    img.setY(y1)
    # Keep references to every Qt object so nothing is garbage collected.
    self.objs.append((img, base, pm, pma))

  def stringWidth(self, s, font=None):
    """Return the logical width of s if drawn in font (default: self.defaultFont)."""
    if not font:
      font = self.defaultFont
    if font:
      self._adjustFont(font)
    t = QCanvasText(s, self._canvas)
    t.setFont(self._font)
    rect = t.boundingRect()
    return rect.width()

  def fontAscent(self, font=None):
    """Approximate the ascent of font via a bounding-rect measurement."""
    if not font:
      font = self.defaultFont
    if font:
      self._adjustFont(font)
    t = QCanvasText('B', self._canvas)
    t.setFont(self._font)
    rect = t.boundingRect()
    # FIX: this is a hack, but I can't immediately figure out how to solve the
    # problem that the bounding rectangle includes the descent:
    return 1.0 * rect.height()

  def fontDescent(self, font=None):
    """Approximate the descent of font as height('y') - height('B')."""
    if not font:
      font = self.defaultFont
    if font:
      self._adjustFont(font)
    t = QCanvasText('B', self._canvas)
    t.setFont(self._font)
    rect1 = t.boundingRect()
    t = QCanvasText('y', self._canvas)
    t.setFont(self._font)
    rect2 = t.boundingRect()
    return 1. * (rect2.height() - rect1.height())
def test(canvas):
  """Exercise the basic (non-dashed) drawing API on `canvas`.

  NOTE(review): color names (Color, black, red, ...) are star-imported
  from rdkit.sping.pid in the __main__ block; this function is only
  runnable in that context.
  """
  #... for testing...
  canvas.defaultLineColor = Color(0.7, 0.7, 1.0) # light blue
  canvas.drawLines(map(lambda i: (i * 10, 0, i * 10, 300), range(30)))
  canvas.drawLines(map(lambda i: (0, i * 10, 300, i * 10), range(30)))
  canvas.defaultLineColor = black
  canvas.drawLine(10, 200, 20, 190, color=red)
  canvas.drawEllipse(130, 30, 200, 100, fillColor=yellow, edgeWidth=4)
  canvas.drawArc(130, 30, 200, 100, 45, 50, fillColor=blue, edgeColor=navy, edgeWidth=4)
  canvas.defaultLineWidth = 4
  canvas.drawRoundRect(30, 30, 100, 100, fillColor=blue, edgeColor=maroon)
  canvas.drawCurve(20, 20, 100, 50, 50, 100, 160, 160)
  #canvas.drawString("This is a test!", 30,130, Font(face="times",size=16,bold=1),
  #                  color=green, angle=-45)
  #canvas.drawString("This is a test!", 30,130, color=red, angle=-45)
  polypoints = [(160, 120), (130, 190), (210, 145), (110, 145), (190, 190)]
  canvas.drawPolygon(polypoints, fillColor=lime, edgeColor=red, edgeWidth=3, closed=1)
  canvas.drawRect(200, 200, 260, 260, edgeColor=yellow, edgeWidth=5)
  canvas.drawLine(200, 260, 260, 260, color=green, width=5)
  canvas.drawLine(260, 200, 260, 260, color=red, width=5)
  canvas.flush()
def dashtest(canvas):
  """Exercise the drawing API with dashed lines (dash=(3, 3)) on `canvas`.

  NOTE(review): color names come from the rdkit.sping.pid star-import in
  the __main__ block; this function is only runnable in that context.
  """
  #... for testing...
  canvas.defaultLineColor = Color(0.7, 0.7, 1.0) # light blue
  canvas.drawLines(map(lambda i: (i * 10, 0, i * 10, 300), range(30)), dash=(3, 3))
  canvas.drawLines(map(lambda i: (0, i * 10, 300, i * 10), range(30)), dash=(3, 3))
  canvas.defaultLineColor = black
  canvas.drawLine(10, 200, 20, 190, color=red, dash=(3, 3))
  canvas.drawEllipse(130, 30, 200, 100, fillColor=yellow, edgeWidth=4, dash=(3, 3))
  canvas.drawArc(130, 30, 200, 100, 45, 50, fillColor=blue, edgeColor=navy, edgeWidth=4, dash=(3,
                                                                                              3))
  canvas.defaultLineWidth = 4
  canvas.drawRoundRect(30, 30, 100, 100, fillColor=blue, edgeColor=maroon, dash=(3, 3))
  canvas.drawCurve(20, 20, 100, 50, 50, 100, 160, 160, dash=(3, 3))
  canvas.drawString("This is a test!", 30, 130, Font(face="times", size=16, bold=1), color=green,
                    angle=-45)
  canvas.drawString("This is a test!", 30, 130, color=red, angle=-45)
  polypoints = [(160, 120), (130, 190), (210, 145), (110, 145), (190, 190)]
  canvas.drawPolygon(polypoints, fillColor=lime, edgeColor=red, edgeWidth=3, closed=1, dash=(3, 3))
  canvas.drawRect(200, 200, 260, 260, edgeColor=yellow, edgeWidth=5, dash=(3, 3))
  canvas.drawLine(200, 260, 260, 260, color=green, width=5, dash=(3, 3))
  canvas.drawLine(260, 200, 260, 260, color=red, width=5, dash=(3, 3))
  canvas.flush()
if __name__ == '__main__':
  # Manual smoke test: open a QCanvasView and run the dashed drawing demo.
  import sys
  from rdkit.sping.pid import *
  app = QApplication(sys.argv)
  w = QCanvasView()
  qCanv = QCanvas(300, 300)
  w.setCanvas(qCanv)
  canv = QtCanvas(qCanv)
  dashtest(canv)
  w.show()
  w.adjustSize()
  app.setMainWidget(w)
  app.exec_loop()
|
ptosco/rdkit
|
rdkit/sping/Qt/pidQt.py
|
Python
|
bsd-3-clause
| 11,286
|
[
"RDKit"
] |
bca7440dcca96da5a2b75024a25b2ee71e906eccf6af62759364a4ba29563521
|
import os
import numpy as np
import time
import desispec
import astropy.io.fits as fits
from astropy.table import Table
from astropy.convolution import convolve, Box1DKernel
import json
import glob
import yaml
from pkg_resources import resource_filename
from scipy.optimize import minimize
from scipy.interpolate import RectBivariateSpline,interp1d
from scipy.signal import fftconvolve
from desiutil.log import get_logger
from desiutil.dust import dust_transmission
from desispec.io import findfile,read_frame,read_fiberflat,read_sky,read_flux_calibration,iotime,read_xytraceset
from desispec.io.spectra import Spectra
from desispec.calibfinder import findcalibfile
from astropy import constants as const
from desimodel.io import load_desiparams
from desimodel.io import load_platescale
from desimodel.fastfiberacceptance import FastFiberAcceptance
from desispec.fiberfluxcorr import flat_to_psf_flux_correction
class Config(object):
    """Load a YAML file and expose its top-level keys as attributes."""

    def __init__(self, cpath):
        with open(cpath) as stream:
            payload = yaml.safe_load(stream)
        for name, value in payload.items():
            setattr(self, name, value)
class gfa_template_ensemble(object):
    '''
    TSNR template for the GFA guide-camera passbands: the dflux template is
    simply the GFA passband transmission sampled on the per-camera
    spectrograph wavelength grids.
    '''
    def __init__(self):
        log = get_logger()

        log.info('Computing GFA passband TSNR template.')

        self.tracer = 'GPBDARK'

        # https://desi.lbl.gov/DocDB/cgi-bin/private/ShowDocument?docid=1297
        self.pb_fname = resource_filename('desispec', 'data/gfa/gfa-mean-desi-1297.csv')

        log.info('Retrieved {}.'.format(self.pb_fname))

        # passband: wave [3000., 11000.]
        self.pb = Table.read(self.pb_fname, names=['wave', 'trans'])
        # Zero transmission outside the tabulated passband range.
        self.pb_interp = interp1d(self.pb['wave'], self.pb['trans'], kind='linear', copy=True, bounds_error=False, fill_value=0.0, assume_sorted=False)

        self.wmin = 3600
        self.wmax = 9824
        self.wdelta = 0.8
        self.wave = np.round(np.arange(self.wmin, self.wmax + self.wdelta, self.wdelta), 1)
        # Per-camera slices of the full brz wavelength grid (cameras overlap).
        self.cslice = {"b": slice(0, 2751), "r": slice(2700, 5026), "z": slice(4900, 7781)}

    def compute(self):
        """Evaluate the passband on each camera's wavelength grid."""
        log = get_logger()

        self.ensemble_dflux = {}

        for band in ['b', 'r', 'z']:
            band_wave = self.wave[self.cslice[band]]
            self.ensemble_dflux[band] = self.pb_interp(band_wave).reshape(1, len(band_wave))

        log.info('GPB passband TSNR template computation done.')

    def plot(self):
        """Quick-look plot of the per-camera dflux templates."""
        import pylab as pl

        for band in ['b', 'r', 'z']:
            band_wave = self.wave[self.cslice[band]]
            pl.plot(band_wave, self.ensemble_dflux[band][0], label=band)

        pl.xlabel('Wavelength [Angstroms]')
        pl.ylabel('GPBDARK TSNR DFLUX TEMPLATE')
        pl.show()

    def write(self,dirname):
        """Write tsnr-ensemble-gpb{dark,bright,backup}.fits files to dirname.

        The same dflux template is written under all three GPB tracer names.
        """
        log = get_logger()

        for tracer in ['gpbdark', 'gpbbright', 'gpbbackup']:
            hdr = fits.Header()
            hdr['TRACER'] = tracer

            hdu_list = [fits.PrimaryHDU(header=hdr)]

            for band in ['b', 'r', 'z']:
                hdu_list.append(fits.ImageHDU(self.wave[self.cslice[band]], name='WAVE_{}'.format(band.upper())))
                hdu_list.append(fits.ImageHDU(self.ensemble_dflux[band], name='DFLUX_{}'.format(band.upper())))

            hdu_list = fits.HDUList(hdu_list)

            hdu_list.writeto(dirname + '/tsnr-ensemble-{}.fits'.format(tracer), overwrite=True)

            log.info('Successfully written GFA TSNR template to ' + dirname + '/tsnr-ensemble-{}.fits'.format(tracer))
            log.info('Should now be copied to $DESIMODEL/data/tsnr/.')
class template_ensemble(object):
    '''
    Generate an ensemble of templates to sample tSNR for a range of points in
    (z, m, OII, etc.) space.

    If conditioned, uses deepfield redshifts and (currently r) magnitudes
    to condition simulated templates.
    '''
    def read_config(self,filename) :
        """Load the tracer's tsnr-config yaml file into self.config."""
        log = get_logger()
        log.info("Reading config {}".format(filename))
        self.config = Config(filename)

    def __init__(self,tracer, config_filename=None) :
        self.tracer = tracer.lower() # support ELG or elg, etc.

        # AR/DK DESI spectra wavelengths
        # TODO: where are brz extraction wavelengths defined? https://github.com/desihub/desispec/issues/1006.
        self.wmin = 3600
        self.wmax = 9824
        self.wdelta = 0.8
        self.wave = np.round(np.arange(self.wmin, self.wmax + self.wdelta, self.wdelta), 1)
        # Per-camera slices of the full brz wavelength grid (cameras overlap).
        self.cslice = {"b": slice(0, 2751), "r": slice(2700, 5026), "z": slice(4900, 7781)}

        if config_filename is None :
            config_filename = resource_filename('desispec', 'data/tsnr/tsnr-config-{}.yaml'.format(self.tracer))

        self.read_config(config_filename)

        # Fixed seed for reproducible template generation.
        self.seed = 1

    def effmag(self,m1,m2) :
        """
        returns an effective mag which is the magnitude that gives the same average flux^2
        for the mag range specified [m1,m2] assuming a flat magnitude distribution
        """
        return -0.5*2.5*np.log10( (10**(-0.8*m1)-10**(-0.8*m2))/(0.8*np.log(10.))/(m2-m1) )

    def generate_templates(self, nmodel, redshifts=None,
                           mags=None,single_mag=True):
        '''
        Dedicated wrapper for desisim.templates.GALAXY.make_templates call,
        stipulating templates in a redshift range suggested by the FDR.
        Further, assume fluxes close to the expected (within ~0.5 mags.)
        in the appropriate band.

        Class init will write ensemble stack to disk at outdir, for a given
        tracer [bgs, lrg, elg, qso], having generated nmodel templates.
        Optionally, provide redshifts and mags. to condition appropriately
        at cost of runtime.
        '''
        # Only import desisim if code is run, not at module import
        # to minimize desispec -> desisim -> desispec dependency loop
        import desisim.templates

        log = get_logger()

        # https://arxiv.org/pdf/1611.00036.pdf
        #
        normfilter_south=self.config.filter
        zrange = (self.config.zlo, self.config.zhi)

        # Variance normalized as for psf, so we need an additional linear
        # flux loss so account for the relative factors.
        psf_loss = -self.config.psf_fiberloss / 2.5
        psf_loss = 10.**psf_loss
        rel_loss = -(self.config.wgt_fiberloss - self.config.psf_fiberloss) / 2.5
        rel_loss = 10.**rel_loss

        magrange = (self.config.med_mag, self.config.limit_mag)

        log.info('{} nmodel: {:d}'.format(self.tracer, nmodel))
        log.info('{} filter: {}'.format(self.tracer, self.config.filter))
        log.info('{} zrange: {} - {}'.format(self.tracer, zrange[0], zrange[1]))

        # NOTE THE NORMALIZATION OF MAGNITUDES DOES NOT HAVE ANY EFFECT
        # AT THE END OF THE DAY, BECAUSE BOTH TSNR2 VALUES AND EFFTIME
        # ARE RECALIBRATED TO VALUES OBTAINED EARLY IN THE SURVEY
        # (using table sv1-exposures.csv in py/desispec/data/tsnr/)
        # TO AVOID ANY ARTIFICIAL DRIFT IN THE NORMALIZATION OF THOSE
        # QUANTITIES.
        # See the scale factor applied to the flux in the routine get_ensemble
        # and the efftime normalization in the routine tsnr2_to_efftime

        # Calibration vector assumes PSF mtype.
        log.info('psf fiberloss: {:.3f}'.format(psf_loss))
        log.info('Relative fiberloss to psf morphtype: {:.3f}'.format(rel_loss))
        log.info('Generating templates ...')

        if self.tracer == 'bgs':
            # Cut on mag.
            # https://github.com/desihub/desitarget/blob/dd353c6c8dd8b8737e45771ab903ac30584db6db/py/desitarget/cuts.py#L1312
            magrange = (self.config.med_mag, self.config.limit_mag)
            if single_mag and mags is None : mags=np.repeat( self.effmag(magrange[0],magrange[1]) , nmodel)
            maker = desisim.templates.BGS(wave=self.wave, normfilter_south=normfilter_south)
            flux, wave, meta, objmeta = maker.make_templates(nmodel=nmodel, redshift=redshifts, mag=mags, south=True, zrange=zrange, magrange=magrange, seed=self.seed)
            # Additional factor rel. to psf.; TSNR put onto instrumental
            # e/A given calibration vector that includes psf-like loss.
            flux *= rel_loss

        elif self.tracer == 'lrg':
            # Cut on fib. mag. with desisim.templates setting FIBERFLUX to FLUX.
            # https://github.com/desihub/desitarget/blob/dd353c6c8dd8b8737e45771ab903ac30584db6db/py/desitarget/cuts.py#L447
            magrange = (self.config.med_fibmag, self.config.limit_fibmag)
            # consistent with tsnr on disk
            #magrange = (self.config.med_mag, self.config.limit_mag)
            if single_mag and mags is None : mags=np.repeat( self.effmag(magrange[0],magrange[1]) , nmodel)
            maker = desisim.templates.LRG(wave=self.wave, normfilter_south=normfilter_south)
            flux, wave, meta, objmeta = maker.make_templates(nmodel=nmodel, redshift=redshifts, mag=mags, south=True, zrange=zrange, magrange=magrange, seed=self.seed)
            # Take factor rel. to psf.; TSNR put onto instrumental
            # e/A given calibration vector that includes psf-like loss.
            # Note: Oppostive to other tracers as templates normalized to fibermag.
            flux /= psf_loss
            #flux *= rel_loss

        elif self.tracer == 'elg':
            # Cut on mag.
            # https://github.com/desihub/desitarget/blob/dd353c6c8dd8b8737e45771ab903ac30584db6db/py/desitarget/cuts.py#L517
            magrange = (self.config.med_mag, self.config.limit_mag)
            if single_mag and mags is None : mags=np.repeat( self.effmag(magrange[0],magrange[1]) , nmodel)
            maker = desisim.templates.ELG(wave=self.wave, normfilter_south=normfilter_south)
            flux, wave, meta, objmeta = maker.make_templates(nmodel=nmodel, redshift=redshifts, mag=mags, south=True, zrange=zrange, magrange=magrange, seed=self.seed)
            # Additional factor rel. to psf.; TSNR put onto instrumental
            # e/A given calibration vector that includes psf-like loss.
            flux *= rel_loss

        elif self.tracer == 'qso':
            # Cut on mag.
            # https://github.com/desihub/desitarget/blob/dd353c6c8dd8b8737e45771ab903ac30584db6db/py/desitarget/cuts.py#L1422
            magrange = (self.config.med_mag, self.config.limit_mag)
            if single_mag and mags is None : mags=np.repeat( self.effmag(magrange[0],magrange[1]) , nmodel)
            maker = desisim.templates.QSO(wave=self.wave, normfilter_south=normfilter_south)
            flux, wave, meta, objmeta = maker.make_templates(nmodel=nmodel, redshift=redshifts, mag=mags, south=True, zrange=zrange, magrange=magrange, seed=self.seed)
            # Additional factor rel. to psf.; TSNR put onto instrumental
            # e/A given calibration vector that includes psf-like loss.
            flux *= rel_loss

        else:
            raise ValueError('{} is not an available tracer.'.format(self.tracer))

        if single_mag :
            log.info('{} single effective mag: {}'.format(self.tracer, mags[0]))
        else :
            log.info('{} magrange: {} - {}'.format(self.tracer, magrange[0], magrange[1]))

        log.info(" Done generating templates")

        return wave, flux, meta, objmeta

    def compute(self, nmodel=5, smooth=100., nz_table_filename=None, single_mag=True, convolve_to_nz=True):
        """
        Compute a template ensemble for template S/N measurements (tSNR)

        Options:
            nmodel: number of template models to generate
            smooth: smoothing scale for dF=<F - smooth(F)>
            nz_table_filename: path to ASCII file with columns zmin,zmax,n
            single_mag: generate all templates at same average magnitude to limit MC noise
            convolve_to_nz: if True, each template dF^2 is convolved to match the n(z) (redshift distribution)
        """
        log = get_logger()

        if nz_table_filename is None :
            nz_table_filename = os.environ['DESIMODEL'] + '/data/targets/nz_{}.dat'.format(self.tracer)

        _, flux, meta, objmeta = self.generate_templates(nmodel=nmodel,single_mag=single_mag)

        # keep a copy of the templates meta data
        self.meta = meta
        for k in objmeta.dtype.names :
            if k not in self.meta.dtype.names :
                self.meta[k] = objmeta[k]

        self.ensemble_flux = {}
        self.ensemble_dflux = {}
        self.ensemble_meta = meta
        self.ensemble_objmeta = objmeta
        self.ensemble_dflux_stack = {}
        self.smooth = smooth

        ##
        # dF = F - smooth(F): keep only the high-frequency residual.
        smoothing = np.ceil(smooth / self.wdelta).astype(int)
        log.info('Applying {:.3f} AA smoothing ({:d} pixels)'.format(smooth, smoothing))
        dflux = flux.copy()
        for i in range(flux.shape[0]):
            sflux = convolve(flux[i], Box1DKernel(smoothing), boundary='extend')
            dflux[i] -= sflux

        log.info("Read N(z) in {}".format(nz_table_filename))
        zmin, zmax, numz = np.loadtxt(nz_table_filename, unpack=True, usecols = (0,1,2))
        # trim leading/trailing zero-count bins (keeping one margin bin).
        b=max(0,np.where(numz>0)[0][0]-1)
        e=min(numz.size,np.where(numz>0)[0][-1]+2)
        zmin=zmin[b:e]
        zmax=zmax[b:e]
        numz=numz[b:e]
        self.nz = Table()
        self.nz["zmin"]=zmin
        self.nz["zmax"]=zmax
        self.nz["n"]=numz
        zmid=(self.nz["zmin"]+self.nz["zmax"])/2.

        if convolve_to_nz :
            # redshifting is a simple shift in log(wave)
            # so we directy convolve with fft in a linear log(wave) grid
            # map to log scale for fast convolution with dndz
            lwave = np.log(self.wave)
            loggrid_lwave = np.linspace(lwave[0],lwave[-1],lwave.size) # linear grid of log(wave)
            loggrid_dflux = np.zeros(dflux.shape)
            loggrid_step = loggrid_lwave[1]-loggrid_lwave[0]

            loggrid_lzmin = np.log(1+zmid[0])
            number_z_bins=int((np.log(1+zmid[-1])-loggrid_lzmin)//loggrid_step)+1
            if number_z_bins%2==0 : number_z_bins += 1 # need odd number
            loggrid_lz=loggrid_lzmin+loggrid_step*np.arange(number_z_bins)

            loggrid_nz=np.interp(loggrid_lz,np.log(1+zmid),numz)
            # truncate at zrange
            loggrid_nz[(loggrid_lz<np.log(1+self.config.zlo))|(loggrid_lz>np.log(1+self.config.zhi))] = 0.
            loggrid_nz /= np.sum(loggrid_nz)
            central_lz = loggrid_lz[loggrid_lz.size//2]

            zconv_dflux = np.zeros(dflux.shape)
            for i in range(dflux.shape[0]):
                lwave_dflux = np.interp(loggrid_lwave,lwave,dflux[i])
                # center the n(z) kernel on this template's own redshift
                zi=float(self.meta['REDSHIFT'][i])
                kern = np.interp(loggrid_lz+(np.log(1+zi)-central_lz),loggrid_lz,loggrid_nz,left=0,right=0)
                if np.sum(kern)==0 : continue
                kern/=np.sum(kern)
                # convolve dF^2 (not dF) so the variance matches n(z)
                lwave_convolved_dflux2 = fftconvolve(lwave_dflux**2,kern,mode="same")
                zconv_dflux2 = np.interp(lwave,loggrid_lwave,lwave_convolved_dflux2,left=0,right=0)
                zconv_dflux[i] = np.sqrt(zconv_dflux2*(zconv_dflux2>0))

            dflux = zconv_dflux

        # Generate template (d)fluxes for brz bands.
        for band in ['b', 'r', 'z']:
            band_wave = self.wave[self.cslice[band]]
            in_band = np.isin(self.wave, band_wave)
            self.ensemble_flux[band] = flux[:, in_band]
            self.ensemble_dflux[band] = dflux[:, in_band]

        zs = meta['REDSHIFT'].data

        # Stack ensemble: RMS of dF across templates, per band.
        for band in ['b', 'r', 'z']:
            self.ensemble_dflux_stack[band] = np.sqrt(np.average(self.ensemble_dflux[band]**2., axis=0).reshape(1, len(self.ensemble_dflux[band].T)))

    def write(self,filename) :
        """Write the stacked dflux templates plus metadata to a FITS file."""
        log = get_logger()

        hdr = fits.Header()
        hdr['TRACER'] = self.tracer
        hdr['FILTER'] = self.config.filter
        hdr['ZLO'] = self.config.zlo
        hdr['ZHI'] = self.config.zhi
        hdr['MEDMAG'] = self.config.med_mag
        hdr['LIMMAG'] = self.config.limit_mag
        hdr['PSFFLOSS'] = self.config.psf_fiberloss
        hdr['WGTFLOSS'] = self.config.wgt_fiberloss
        hdr['SMOOTH'] = self.smooth
        hdr['SEED'] = self.seed

        hdu_list = [fits.PrimaryHDU(header=hdr)]

        for band in ['b', 'r', 'z']:
            hdu_list.append(fits.ImageHDU(self.wave[self.cslice[band]], name='WAVE_{}'.format(band.upper())))
            hdu_list.append(fits.ImageHDU(self.ensemble_dflux_stack[band], name='DFLUX_{}'.format(band.upper())))

        hdu_list = fits.HDUList(hdu_list)

        self.meta.meta={"EXTNAME":"TEMPLATES_META"}
        hdu_list.append(fits.convenience.table_to_hdu(self.meta))
        self.nz.meta={"EXTNAME":"NZ"}
        hdu_list.append(fits.convenience.table_to_hdu(self.nz))

        hdu_list.writeto(filename, overwrite=True)

        log.info('Successfully written to '+filename)
def get_ensemble(dirpath=None, bands=["b","r","z"], smooth=0):
    '''
    Read the tsnr-ensemble-*.fits dflux templates from disk, one per tracer.

    Args:
        dirpath: path to the dir. with ensemble dflux files. default is $DESIMODEL/data/tsnr
        bands: bands to expect, typically [BRZ] - case ignored.

    Options:
        smooth: Further convolve the residual ensemble flux.

    Returns:
        Dictionary with keys labelling each tracer (bgs, lrg, etc.) for which each value
        is a Spectra class instance with wave, flux for BRZ arms.  Note flux is the high
        frequency residual for the ensemble.  See doc. 4723.
    '''
    t0 = time.time()
    log=get_logger()

    if dirpath is None :
        dirpath = os.path.join(os.environ["DESIMODEL"],"data/tsnr")

    paths = glob.glob(dirpath + '/tsnr-ensemble-*.fits')

    wave = {}
    flux = {}
    ivar = {}
    mask = {}
    res = {}

    ensembles = {}

    for path in paths:
        # tracer name is embedded in the filename: tsnr-ensemble-<tracer>.fits
        tracer = path.split('/')[-1].split('-')[2].replace('.fits','')
        dat = fits.open(path)

        # FLUXSCAL rescales templates to the early-survey TSNR normalization.
        if 'FLUXSCAL' in dat[0].header :
            scale_factor = dat[0].header['FLUXSCAL']
            log.info("for {} apply scale factor = {:4.3f}".format(path,scale_factor))
        else :
            scale_factor = 1.

        for band in bands:
            wave[band] = dat['WAVE_{}'.format(band.upper())].data
            flux[band] = scale_factor*dat['DFLUX_{}'.format(band.upper())].data
            # Effectively infinite ivar: templates are noiseless.
            ivar[band] = 1.e99 * np.ones_like(flux[band])

            # 125: 100. A in 0.8 pixel.
            if smooth > 0:
                flux[band] = convolve(flux[band][0,:], Box1DKernel(smooth), boundary='extend')
                flux[band] = flux[band].reshape(1, len(flux[band]))

        ensembles[tracer] = Spectra(bands, wave, flux, ivar)
        ensembles[tracer].meta = dat[0].header

    duration = time.time() - t0
    log=get_logger()
    log.info(iotime.format('read',"tsnr ensemble", duration))

    return ensembles
def read_nea(path):
    '''
    Read a master noise equivalent area [sq. pixel] file.

    input:
        path: path to a master nea file for a given camera, e.g. b0.

    returns:
        nea: 2D spline object to be evaluated at (fiber, wavelength)
        angperpix: 2D spline object to be evaluated at (fiber, wavelength),
                   yielding angstrom per pixel.
    '''
    # Pull the tabulated grids out of the FITS file; memmap disabled so the
    # arrays remain valid after the file is closed.
    with fits.open(path, memmap=False) as fx:
        wavelength_grid = fx['WAVELENGTH'].data
        angperpix_grid = fx['ANGPERPIX'].data
        nea_grid = fx['NEA'].data

    fiber_indices = np.arange(len(nea_grid))

    # Wrap each grid in a smooth 2D interpolator over (fiber, wavelength).
    nea_spline = RectBivariateSpline(fiber_indices, wavelength_grid, nea_grid)
    angperpix_spline = RectBivariateSpline(fiber_indices, wavelength_grid, angperpix_grid)

    return nea_spline, angperpix_spline
def fb_rdnoise(fibers, frame, tset):
    '''
    Approximate the readnoise for a given fiber (on a given camera) for the
    wavelengths present in frame.wave.

    input:
        fibers: e.g. np.arange(500) to index fiber.
        frame:  frame instance for the given camera.
        tset:   xytraceset object with fiber traces coordinates.

    returns:
        rdnoise: (nfiber x nwave) array with the estimated readnosie.  Same
                 units as OBSRDNA, e.g. ang per pix.

    Cleanup: removed unused locals (ccdsizes / xtrans / ytrans / twave) that
    were computed but never read.
    '''
    rdnoise = np.zeros_like(frame.flux)

    # Amplifier ids, their CCD sections and their measured readnoise.
    amp_ids = desispec.preproc.get_amp_ids(frame.meta)
    amp_sec = { amp : desispec.preproc.parse_sec_keyword(frame.meta['CCDSEC'+amp]) for amp in amp_ids }
    amp_rdnoise = { amp : frame.meta['OBSRDN'+amp] for amp in amp_ids }

    for ifiber in fibers:
        # CCD (x, y) trace position of this fiber at each frame wavelength.
        x = tset.x_vs_wave(fiber=ifiber, wavelength=frame.wave)
        y = tset.y_vs_wave(fiber=ifiber, wavelength=frame.wave)
        for amp in amp_ids :
            sec = amp_sec[amp]
            # Wavelengths whose trace position falls inside this amp section.
            ii=(x>=sec[1].start)&(x<sec[1].stop)&(y>=sec[0].start)&(y<sec[0].stop)
            if np.sum(ii)>0 :
                rdnoise[ifiber, ii] = amp_rdnoise[amp]

    return rdnoise
def surveyspeed_fiberfrac(tracer, exposure_seeing_fwhm):
    """Nominal fiber acceptance fraction vs. seeing for a given tracer.

    Cubic polynomial fits in log(seeing) as defined at
    https://desi.lbl.gov/trac/wiki/SurveyOps/SurveySpeed; assumes zero fiber
    offset.  (Cleanup: the log of the seeing was previously recomputed three
    times per branch; it is now evaluated once.)

    Args:
        tracer: str program tracer, e.g. 'bgs', 'elg', 'psf'.
        exposure_seeing_fwhm: seeing FWHM [arcsec], float or array.

    Returns:
        fiberfrac (same shape as exposure_seeing_fwhm); 1.0 for tracers
        without a defined relation.
    """
    # Cubic coefficients (a, b, c, d) of a*L^3 + b*L^2 + c*L + d, L = ln(fwhm).
    if tracer in ('bgs', 'gpbbright'):
        coeffs = (0.0341, -0.3611, -0.7175, -1.5643)
    elif tracer == 'elg':
        coeffs = (0.0231, -0.4250, -0.8253, -0.7761)
    elif tracer in ('psf', 'backup', 'gpbbackup'):
        coeffs = (0.0989, -0.5588, -0.9708, -0.4473)
    else:
        # No fiberfrac dependence defined for this tracer.
        return 1.0

    a, b, c, d = coeffs
    lnfwhm = np.log(exposure_seeing_fwhm)

    return np.exp(a * lnfwhm**3 + b * lnfwhm**2 + c * lnfwhm + d)
def var_tracer(tracer, frame, angperspecbin, fiberflat, fluxcalib, exposure_seeing_fwhm=1.1):
    '''
    Source Poisson term to the model ivar, following conventions defined at:
        https://desi.lbl.gov/trac/wiki/SurveyOps/SurveySpeed.

    See also:
        https://github.com/desihub/desispec/blob/master/py/desispec/efftime.py

    Args:
        tracer: [bgs, backup] string, defines program.
        frame: desispec.frame instance
        angperspecbin: float, angstroms per bin in spectral reductions
        fiberflat: desispec instance
        fluxcalib: desispec instance
        exposure_seeing_fwhm: float, seeing FWHM [arcsec] for the fiberfrac term.

    Returns:
        nominal flux for the tracer: currently in [nanomaggie] (or 0.0 when no
        Poisson term applies) -- see the NOTE below.

    NOTE(review): the unconditional ``return nominal`` mid-function makes the
    fiberfrac weighting and the nanomaggie -> e/specbin conversion below it
    unreachable; presumably a deliberate disabling of the Poisson term --
    confirm before re-enabling the dead code, as the units of the return value
    change if it runs.
    '''
    log = get_logger()
    fiberfrac = surveyspeed_fiberfrac(tracer, exposure_seeing_fwhm)
    if tracer in ['bgs', 'gpbbright']:
        # Note: neglects transparency & EBV corrections.
        nominal = 15.8 # r=19.5 [nanomaggie].
    elif tracer in ['backup', 'gpbbackup']:
        nominal = 27.5 # r=18.9 [nanomaggie].
    else:
        # No source poisson term otherwise.
        nominal = 0.0 # [nanomaggie].
    # Early exit: everything below is unreachable (see NOTE in docstring).
    return nominal # [e/specbin].
    nominal *= fiberfrac
    log.info('TSNR MODEL VAR: include {} poisson source var of {:.6f} [nMg]'.format(tracer, nominal))
    # nanomaggie -> Mg -> ergs/s/cm2/Hz -> ergs/s/cm2/A conversion chain.
    nominal *= 1.e-9 # [Mg].
    nominal /= (1.e23 / const.c.value / 1.e10 / 3631.)
    nominal /= (frame.wave)**2. # [ergs/s/cm2/A].
    nominal *= 1.e17 # [1.e-17 ergs/s/cm2/A].
    # Convert to electrons per A, then per spectral bin, in fiberflat-ed units.
    nominal = fluxcalib.calib * nominal # [e/A]
    nominal *= angperspecbin # [e/specbin].
    nominal *= fiberflat.fiberflat
    log.info('TSNR MODEL VAR: include {} poisson source var of {:.6e} [e/specbin]'.format(tracer, np.median(nominal)))
    return nominal
def var_model(rdnoise_sigma, npix_1d, angperpix, angperspecbin, fiberflat, skymodel, alpha=1.0, components=False):
    '''
    Evaluate a model for the 1D spectral flux variance, e.g. quadrature sum of readnoise and sky components.

    input:
        rdnoise_sigma:  readnoise per CCD pixel.
        npix_1d:        equivalent to (1D) nea.
        angperpix:      Angstroms per pixel.
        angperspecbin:  Angstroms per bin.
        fiberflat:      fiberflat instance
        skymodel:       Sky instance.
        alpha:          empirical weighting of the rdnoise term to e.g. better fit sky fibers per exp. cam.
        components:     if True, return tuple of individual contributions to the variance.  Else return variance.

    returns:
        nfiber x nwave array of the expected variance.
    '''
    # The extraction uses spectral bins of width angperspecbin, so the
    # effective number of CCD pixels feeding one spectral bin is the 1D NEA
    # rescaled by the bin-to-pixel width ratio.
    pixels_per_specbin = npix_1d * (angperspecbin / angperpix)

    # The extracted flux per specbin is then converted to a flux per A, so the
    # variance is divided by the square of that conversion factor.
    readnoise_term = rdnoise_sigma ** 2 * pixels_per_specbin / angperspecbin ** 2

    # Sky Poisson term, in fiberflat-ed units.
    sky_term = fiberflat.fiberflat * np.abs(skymodel.flux)

    # alpha (~1.1 when fit on zeros) absorbs the breakdown of the precomputed
    # NEA when Poisson noise is not negligible; it grows with spectral flux
    # when fit on sky fibers.
    if components:
        return (alpha * readnoise_term, sky_term)

    return alpha * readnoise_term + sky_term
def gen_mask(frame, skymodel, hw=5.):
    """
    Generate a mask for the alpha computation, masking out bright sky lines.

    Args:
        frame : uncalibrated Frame object for one camera
        skymodel : SkyModel object
        hw : (optional) float, half width of mask around sky lines in A

    Returns an array of same shape as frame, here mask=1 is good, 0 is bad
    """
    log = get_logger()

    # Start from the frame mask: any flagged pixel is excluded.
    maskfactor = np.where(frame.mask > 0, 0.0, 1.0)

    # https://github.com/desihub/desispec/blob/294cfb66428aa8be3797fd046adbd0a2267c4409/py/desispec/sky.py#L1267
    skyline = np.array([5199.4,5578.4,5656.4,5891.4,5897.4,6302.4,6308.4,6365.4,6500.4,6546.4,
                        6555.4,6618.4,6663.4,6679.4,6690.4,6765.4,6831.4,6836.4,6865.4,6925.4,
                        6951.4,6980.4,7242.4,7247.4,7278.4,7286.4,7305.4,7318.4,7331.4,7343.4,
                        7360.4,7371.4,7394.4,7404.4,7440.4,7526.4,7714.4,7719.4,7752.4,7762.4,
                        7782.4,7796.4,7810.4,7823.4,7843.4,7855.4,7862.4,7873.4,7881.4,7892.4,
                        7915.4,7923.4,7933.4,7951.4,7966.4,7982.4,7995.4,8016.4,8028.4,8064.4,
                        8280.4,8284.4,8290.4,8298.4,8301.4,8313.4,8346.4,8355.4,8367.4,8384.4,
                        8401.4,8417.4,8432.4,8454.4,8467.4,8495.4,8507.4,8627.4,8630.4,8634.4,
                        8638.4,8652.4,8657.4,8662.4,8667.4,8672.4,8677.4,8683.4,8763.4,8770.4,
                        8780.4,8793.4,8829.4,8835.4,8838.4,8852.4,8870.4,8888.4,8905.4,8922.4,
                        8945.4,8960.4,8990.4,9003.4,9040.4,9052.4,9105.4,9227.4,9309.4,9315.4,
                        9320.4,9326.4,9340.4,9378.4,9389.4,9404.4,9422.4,9442.4,9461.4,9479.4,
                        9505.4,9521.4,9555.4,9570.4,9610.4,9623.4,9671.4,9684.4,9693.4,9702.4,
                        9714.4,9722.4,9740.4,9748.4,9793.4,9802.4,9814.4,9820.4])

    # Exclude pixels where either the sky model or the frame has no weight.
    maskfactor *= (skymodel.ivar > 0.0)
    maskfactor *= (frame.ivar > 0.0)

    if hw > 0.0:
        log.info('TSNR Masking bright lines in alpha calc. (half width: {:.3f})'.format(hw))

        wave = frame.wave
        for line in skyline:
            # Skip lines outside this camera's wavelength coverage.
            if not (wave[0] < line < wave[-1]):
                continue
            nearline = (wave >= line - hw) & (wave <= line + hw)
            maskfactor[:, nearline] = 0.0

    # Mask collimator, [4300-4500A]
    incollimator = (frame.wave >= 4300.) & (frame.wave <= 4500.)
    maskfactor[:, incollimator] = 0.0

    return maskfactor
def calc_alpha(frame, fibermap, rdnoise_sigma, npix_1d, angperpix, angperspecbin, fiberflat, skymodel):
    '''
    Model Var = alpha * rdnoise component + sky.

    Calculate the best-fit alpha using the sky fibers available to the frame.

    input:
        frame: desispec frame instance (should be uncalibrated, i.e. e/A).
        fibermap: desispec fibermap instance.
        rdnoise_sigma: e.g. RDNOISE value per Quadrant (float).
        npix_1d: equivalent to 1D nea [pixels], calculated using read_nea().
        angperpix: angstroms per pixel (float),
        fiberflat: desispec fiberflat instance.
        skymodel: desispec Sky instance.

    returns:
        alpha: nuisance parameter to reweight rdnoise vs sky contribution to variance (float), obtained
               as the best fit to the uncalibrated sky fibers VAR.
    '''
    log = get_logger()

    skyfibers = np.where(fibermap['OBJTYPE'] == 'SKY')[0]

    # Variance components with unit weighting; alpha scales only the
    # readnoise term during the fit.
    rd_var, sky_var = var_model(rdnoise_sigma, npix_1d, angperpix, angperspecbin,
                                fiberflat, skymodel, alpha=1.0, components=True)

    # Restrict everything to the sky fibers, with bright lines masked.
    skymask = gen_mask(frame, skymodel)[skyfibers, :]
    measured_ivar = frame.ivar[skyfibers, :]
    rd_var_sky = rd_var[skyfibers, :]
    sky_var_sky = sky_var[skyfibers, :]

    def _mismatch(alpha):
        # L1 distance between measured and modelled ivar on unmasked pixels.
        model_ivar = 1. / (alpha * rd_var_sky + sky_var_sky)
        return np.sum(np.abs(measured_ivar - model_ivar) * skymask)

    fit = minimize(_mismatch, x0=[1.])
    alpha = fit.x[0]

    #- From JG PR #1164:
    # Noisy values of alpha can occur for observations dominated by sky noise
    # where it is not possible to calibrated the read noise. For those
    # exposures, the precise value of alpha does not impact the SNR estimation.
    if alpha < 0.8:
        log.warning(f'tSNR forcing best fit alpha = {alpha:.4f} to 0.8')
        alpha = 0.8

    return alpha
#- Cache files from desimodel to avoid reading them N>>1 times
# Lazily-initialized module-level caches: keyed by camera (NEA/angperpix
# splines) and by band (TSNR ensembles); populated on first use in calc_tsnr2().
_camera_nea_angperpix = None
_band_ensemble = None
def calc_tsnr_fiberfracs(fibermap, etc_fiberfracs, no_offsets=False):
    '''
    Nominal fiberfracs for effective depths. See:
        https://desi.lbl.gov/trac/wiki/SurveyOps/SurveySpeed

    Args:
        fibermap: desispec instance.
        etc_fiberfracs: propragated etc fiberfracs per tracer, json or fits
                        header derived (dictionary).  NOTE: mutated in place
                        when keys are missing (defaults filled in).
        no_offsets: ignore throughput loss due to fiber offsets.

    Returns:
        dict of the nominal fiberloss of given type and seeing (per-fiber
        arrays keyed by tracer, plus 'exposure_seeing_fwhm').
    '''
    log = get_logger()

    # Without fiber positions no offset correction is possible: return unity.
    for k in ["FIBER_X","FIBER_Y"] :
        if k not in fibermap.dtype.names :
            log.warning("no column '{}' in fibermap, cannot do the tsnr fiberfrac correction, returning 1".format(k))
            return np.ones(len(fibermap))

    fa = FastFiberAcceptance()

    # Empty dictionaries evaluate to False.
    ratio_fiberfrac_lrg_bgs = (0.2502/0.19365) # fa.value("BULGE",1.1/2.35*107./1.52,hlradii=1.0)/fa.value("BULGE",1.1/2.35*107./1.52,hlradii=1.0) (only 3% variation of this ratio with seeing from 0.8 to 1.5 arcsec)

    if not etc_fiberfracs:
        log.warning('Failed to inherit from etc json. Assuming median values from tsnr_refset_etc.csv.')
        # mean values in the calibration table desispec/data/tsnr/tsnr_refset_etc.csv
        # and scale factor to match the TSNR and exptime values of SV1 where we did not have the ETC info.
        etc_fiberfracs['psf'] = 0.537283
        etc_fiberfracs['elg'] = 0.973*0.391030
        etc_fiberfracs['bgs'] = 0.985*0.174810
        etc_fiberfracs['lrg'] = 0.965*0.174810*ratio_fiberfrac_lrg_bgs
    else :
        if 'lrg' not in etc_fiberfracs :
            etc_fiberfracs['lrg'] = etc_fiberfracs['bgs']*ratio_fiberfrac_lrg_bgs

    # Compute the effective seeing; requires updated desimodel.
    exposure_seeing_fwhm = fa.psf_seeing_fwhm(etc_fiberfracs['psf']) # [microns]

    fiber_params = load_desiparams()['fibers']
    fiber_dia = fiber_params['diameter_um'] # 107 um.
    fiber_dia_asec = fiber_params['diameter_arcsec'] # 1.52 ''

    # Focal-plane-average inverse platescale, used to convert um <-> arcsec.
    avg_inv_platescale = fiber_dia_asec / fiber_dia # ['' / um]

    exposure_seeing_fwhm *= avg_inv_platescale # ['']

    log.info('Computed effective seeing of {:.6f} arcseconds (focalplane avg.) for a ETC PSF fiberfrac of {:.6f}'.format(exposure_seeing_fwhm, etc_fiberfracs['psf']))

    # compute the seeing and plate scale correction
    x_mm = fibermap["FIBER_X"]
    y_mm = fibermap["FIBER_Y"]

    # Unpositioned fibers are moved to the origin rather than propagating NaN.
    bad = np.isnan(x_mm)|np.isnan(y_mm)
    x_mm[bad]=0.
    y_mm[bad]=0.

    if "DELTA_X" in fibermap.dtype.names :
        dx_mm = fibermap["DELTA_X"] # mm
    else :
        log.warning("no column 'DELTA_X' in fibermap, assume = zero")
        dx_mm = np.zeros(len(fibermap))

    if "DELTA_Y" in fibermap.dtype.names :
        dy_mm = fibermap["DELTA_Y"] # mm
    else :
        log.warning("no column 'DELTA_Y' in fibermap, assume = zero")
        dy_mm = np.zeros(len(fibermap))

    bad = np.isnan(dx_mm)|np.isnan(dy_mm)
    dx_mm[bad]=0.
    dy_mm[bad]=0.

    ps = load_platescale()
    # Geometric mean of radial & azimuthal platescale at each fiber radius.
    isotropic_platescale = np.interp(x_mm**2+y_mm**2,ps['radius']**2,np.sqrt(ps['radial_platescale']*ps['az_platescale'])) # um/arcsec

    # we could include here a wavelength dependence on seeing, or non-Gaussian.
    # Gaussian sigma of the seeing in focal-plane microns: focal-plane average
    # vs per-fiber isotropic platescale versions.
    avg_sigmas_um = (exposure_seeing_fwhm/2.35) / avg_inv_platescale # um
    iso_sigmas_um = (exposure_seeing_fwhm/2.35) * isotropic_platescale # um

    offsets_um = np.sqrt(dx_mm**2+dy_mm**2)*1000. # um

    nfibers = len(fibermap)

    log.info('Median fiber offset {:.6f} [{:.6f} to {:.6f}]'.format(np.median(offsets_um), offsets_um.min(), offsets_um.max()))
    log.info('Median avg psf microns {:.6f} [{:.6f} to {:.6f}]'.format(np.median(avg_sigmas_um), avg_sigmas_um.min(), avg_sigmas_um.max()))
    log.info('Median iso psf microns {:.6f} [{:.6f} to {:.6f}]'.format(np.median(iso_sigmas_um), iso_sigmas_um.min(), iso_sigmas_um.max()))

    ##
    ##  FIX ME: desispec flux calibration, in the absence of seeing values in the header, assumes 1.1'' as a default;
    ##
    ##  https://github.com/desihub/desispec/blob/05cd4cdf501b9afb7376bb0ea205517246b58769/py/desispec/scripts/fluxcalibration.py#L56
    ##
    ##  We do the same here until that is corrected:  https://github.com/desihub/desispec/issues/1267
    ##
    ##
    tsnr_fiberfracs = {}
    tsnr_fiberfracs['exposure_seeing_fwhm'] = exposure_seeing_fwhm

    if no_offsets:
        offsets_um = np.zeros_like(offsets_um)
        log.info('Zeroing fiber offsets for tsnr fiberfracs.')

    # Note: https://github.com/desihub/desispec/blob/05cd4cdf501b9afb7376bb0ea205517246b58769/py/desispec/fiberfluxcorr.py#L74
    nominal_seeing_fwhm = 1.1
    iso_sigmas_um_1p1 = (nominal_seeing_fwhm / 2.35) * isotropic_platescale # um
    avg_sigmas_um_1p1 = (nominal_seeing_fwhm / 2.35) / avg_inv_platescale # um

    # obs. psf [e/A] = true psf * ebv loss * true_flux_calib * (fiberflat / plate scale**2).
    # true_flux_calib = (super terrestrial transparency for psf | relative fiber frac-offset correction in seeing and ebv=0.0) x (rel. fiber frac-offset correction for psf in seeing);
    # flux calib = (super terrestrial transparency for psf | relative fiber frac-offset correction in seeing 1.1 and ebv=0.0) x (rel. fiber frac-offset correction for psf in 1.1'');
    #
    # true_flux_calib /approx (rel. fiber frac-offset correction for psf in seeing) * flux_calib / (rel. fiber frac-offset correction for psf in 1.1'')
    # if (super terrestrial transparency for psf | relative fiber frac-offset correction in seeing and ebv=0.0) \approx (super terrestrial transparency for psf | relative fiber frac-offset correction in seeing 1.1 and ebv=0.0)
    # Note: we can afford to remove ps**2. factor from fiberflat as our fiber frac-offset correction is in physical units, derived from the plate scale, and hence accouting for changing angular size of fiber.
    # ETC predicts psf fiber frac. for focal plane avg. fiber (== avg. platescale) with zero offset. psf loss given actual plate scale and offset:
    # Flux calibration vector is exact in the case of 1.1'' seeing fwhm.
    psf_loss = etc_fiberfracs['psf'] * (fa.value("POINT",iso_sigmas_um,offsets=offsets_um) / fa.value("POINT", avg_sigmas_um,offsets=np.zeros_like(avg_sigmas_um)))
    mean_psf_loss = np.mean(psf_loss)
    rel_psf_loss = psf_loss / mean_psf_loss

    # Same quantity evaluated at the nominal 1.1'' seeing assumed by fluxcalib.
    psf_loss_1p1 = etc_fiberfracs['psf'] * (fa.value("POINT",iso_sigmas_um_1p1,offsets=offsets_um) / fa.value("POINT", avg_sigmas_um_1p1,offsets=np.zeros_like(avg_sigmas_um)))
    mean_psf_loss_1p1 = np.mean(psf_loss_1p1)
    rel_psf_loss_1p1 = psf_loss_1p1 / mean_psf_loss_1p1

    # 'fiber frac' = true_flux_calib * (fiberflat / plate scale**2) in [flux calib * fiberflat] units.
    # = (rel. fiber frac-offset correction for psf in seeing) / (rel. fiber frac-offset correction for psf in 1.1'') / plate scale**2 [flux calib * fiberflat]
    #
    notnull = rel_psf_loss_1p1 > 0.0

    # Point-source-like tracers share the PSF correction.
    for tracer in ['psf', 'mws', 'qso', 'lya', 'gpbbackup']:
        tsnr_fiberfracs[tracer] = np.ones_like(rel_psf_loss)
        # Flux calibration vector should be zero due to rel psf loss in 1.1''
        tsnr_fiberfracs[tracer][notnull] *= (rel_psf_loss[notnull] / rel_psf_loss_1p1[notnull]) # [flux_calib * fiberflat]

    # BULGE == DEV
    # Extended tracers: morphology type and half-light radius per tracer.
    for tracer, mtype, hl in zip(['elg', 'lrg', 'bgs'], ['DISK', 'BULGE', 'BULGE'], [0.45, 1.0, 1.50]):
        tsnr_fiberfracs[tracer] = etc_fiberfracs[tracer] / fa.value(mtype, avg_sigmas_um,offsets=np.zeros_like(avg_sigmas_um),hlradii=hl * np.ones_like(avg_sigmas_um))
        tsnr_fiberfracs[tracer] *= fa.value(mtype, iso_sigmas_um,offsets=offsets_um,hlradii=hl * np.ones_like(iso_sigmas_um))

        # mean_psf_loss derived from flux calib.
        tsnr_fiberfracs[tracer][notnull] /= mean_psf_loss

        # flux calib contains rel_psf_loss_1p1.
        tsnr_fiberfracs[tracer][notnull] /= rel_psf_loss_1p1[notnull]

        # Flux calibration vector should be zero due to rel. psf loss == 0.0 in 1.1''
        tsnr_fiberfracs[tracer][~notnull] = 1.0

    # Assume same as elg.
    tsnr_fiberfracs['gpbdark'] = tsnr_fiberfracs['elg']
    tsnr_fiberfracs['gpbbright'] = tsnr_fiberfracs['bgs']

    for tracer in ['qso', 'elg', 'lrg', 'bgs', 'gpbdark', 'gpbbright', 'gpbbackup']:
        log.info("Computed median nominal {} fiber frac of {:.6f} ([{:.6f},{:.6f}]) for a seeing fwhm of: {:.6f} arcseconds.".format(tracer, np.median(tsnr_fiberfracs[tracer]),\
                                                                                                                                    tsnr_fiberfracs[tracer].min(),\
                                                                                                                                    tsnr_fiberfracs[tracer].max(),\
                                                                                                                                    exposure_seeing_fwhm))
    return tsnr_fiberfracs
def calc_tsnr2_cframe(cframe):
    """
    Given cframe, calc_tsnr2 guessing frame,fiberflat,skymodel,fluxcalib to use

    Args:
        cframe: input cframe Frame object

    Returns (results, alpha) from calc_tsnr2

    Raises:
        ValueError if a required sibling file (frame/sky/fluxcalib) is missing.
    """
    log = get_logger()
    dirname, filename = os.path.split(cframe.filename)

    # Sibling products live next to the cframe, with the prefix swapped.
    framefile = os.path.join(dirname, filename.replace('cframe', 'frame'))
    skyfile = os.path.join(dirname, filename.replace('cframe', 'sky'))
    fluxcalibfile = os.path.join(dirname, filename.replace('cframe', 'fluxcalib'))

    for testfile in (framefile, skyfile, fluxcalibfile):
        if not os.path.exists(testfile):
            # Bug fix: the f-prefix was missing, so the message contained the
            # literal text "{testfile}" instead of the path.
            msg = f'missing {testfile}; unable to calculate TSNR2'
            log.error(msg)
            raise ValueError(msg)

    night = cframe.meta['NIGHT']
    expid = cframe.meta['EXPID']
    camera = cframe.meta['CAMERA']
    fiberflatfile = desispec.io.findfile('fiberflatnight', night, camera=camera)
    if not os.path.exists(fiberflatfile):
        # Fall back to the default calibration fiberflat for this camera.
        ffname = os.path.basename(fiberflatfile)
        log.warning(f'{ffname} not found; using default calibs')
        fiberflatfile = findcalibfile([cframe.meta,], 'FIBERFLAT')

    frame = read_frame(framefile)
    fiberflat = read_fiberflat(fiberflatfile)
    skymodel = read_sky(skyfile)
    fluxcalib = read_flux_calibration(fluxcalibfile)

    return calc_tsnr2(frame, fiberflat, skymodel, fluxcalib)
def calc_tsnr2(frame, fiberflat, skymodel, fluxcalib, alpha_only=False, include_poisson=True, include_fiberfracs=True):
    '''
    Compute template SNR^2 values for a given frame

    Args:
        frame : uncalibrated Frame object for one camera
        fiberflat : FiberFlat object
        skymodel : SkyModel object
        fluxcalib : FluxCalib object

    Options:
        alpha_only : if True, only fit alpha and return ({}, alpha).
        include_poisson : include the source Poisson term in the variance model.
        include_fiberfracs : scale templates by the nominal tracer fiberfracs.

    returns (tsnr2, alpha):
        `tsnr2` dictionary, with keys labeling tracer (bgs,elg,etc.), of values
        holding nfiber length array of the tsnr^2 values for this camera, and
        `alpha`, the relative weighting btwn rdnoise & sky terms to model var.

    Note: Assumes DESIMODEL is set and up to date.

    Fix: the bare ``except:`` around the etc-json parsing (which would also
    swallow e.g. KeyboardInterrupt) is narrowed to the expected lookup errors.
    '''
    global _camera_nea_angperpix
    global _band_ensemble

    t0=time.time()
    log=get_logger()

    if not (frame.meta["BUNIT"]=="count/Angstrom" or frame.meta["BUNIT"]=="electron/Angstrom" ) :
        log.error("requires an uncalibrated frame")
        raise RuntimeError("requires an uncalibrated frame")

    camera=frame.meta["CAMERA"].strip().lower()
    band=camera[0]

    psfpath=findcalibfile([frame.meta],"PSF")
    tset=read_xytraceset(psfpath)

    expid=frame.meta["EXPID"]
    night=frame.meta["NIGHT"]

    etc_fiberfracs={}

    etcpath=findfile('etc', night=night, expid=expid)

    ## https://github.com/desihub/desietc/blob/main/header.md
    if 'ETCFRACP' in frame.meta:
        ## Transparency-weighted average of FFRAC over the exposure calculated for a PSF source profile. Calculated as (ETCTHRUP/ETCTRANS)*0.56198 where the constant is the nominal PSF FFRAC.
        ## Note: unnormalized equivalent to ETCTHRUP, etc.
        etc_fiberfracs['psf'] = frame.meta['ETCFRACP']

        ## PSF -> ELG
        etc_fiberfracs['elg'] = frame.meta['ETCFRACE']

        ## PSF -> BGS
        etc_fiberfracs['bgs'] = frame.meta['ETCFRACB']

        log.info('Retrieved etc data from frame hdr.')

    elif os.path.exists(etcpath):
        with open(etcpath) as f:
            etcdata = json.load(f)

        try:
            for tracer in ['psf', 'elg', 'bgs']:
                etc_fiberfracs[tracer]=etcdata['expinfo']['ffrac_{}'.format(tracer)]

            log.info('Retrieved etc data from {}'.format(etcpath))

        except (KeyError, TypeError):
            log.warning('Failed to find etc expinfo/ffrac for all tracers.')

            # Reset, will default to nominal values on empty dict.
            etc_fiberfracs={}

    tsnr_fiberfracs = calc_tsnr_fiberfracs(frame.fibermap, etc_fiberfracs, no_offsets=False)

    # Returns bivariate spline to be evaluated at (fiber, wave).
    if "DESIMODEL" not in os.environ :
        msg = "requires $DESIMODEL to get the NEA and the SNR templates"
        log.error(msg)
        raise RuntimeError(msg)

    # Module-level caches: read NEA and ensemble files at most once per
    # camera / band per process.
    if _camera_nea_angperpix is None:
        _camera_nea_angperpix = dict()

    if camera in _camera_nea_angperpix:
        nea, angperpix = _camera_nea_angperpix[camera]
    else:
        neafilename=os.path.join(os.environ["DESIMODEL"],
                                 f"data/specpsf/nea/masternea_{camera}.fits")
        log.info("read NEA file {}".format(neafilename))
        nea, angperpix = read_nea(neafilename)
        _camera_nea_angperpix[camera] = nea, angperpix

    if _band_ensemble is None:
        _band_ensemble = dict()

    if band in _band_ensemble:
        ensemble = _band_ensemble[band]
    else:
        ensembledir=os.path.join(os.environ["DESIMODEL"],"data/tsnr")
        log.info("read TSNR ensemble files in {}".format(ensembledir))
        ensemble = get_ensemble(ensembledir, bands=[band,])
        _band_ensemble[band] = ensemble

    nspec, nwave = fluxcalib.calib.shape

    fibers = np.arange(nspec)
    rdnoise = fb_rdnoise(fibers, frame, tset)

    ebv = frame.fibermap['EBV']

    if np.sum(ebv!=0)>0 :
        log.info("TSNR MEDIAN EBV = {:.3f}".format(np.median(ebv[ebv!=0])))
    else :
        log.info("TSNR MEDIAN EBV = 0")

    # Evaluate the splines on this frame's (fiber, wavelength) grid.
    npix = nea(fibers, frame.wave)
    angperpix = angperpix(fibers, frame.wave)
    angperspecbin = np.mean(np.gradient(frame.wave))

    for label, x in zip(['RDNOISE', 'NEA', 'ANGPERPIX', 'ANGPERSPECBIN'], [rdnoise, npix, angperpix, angperspecbin]):
        log.info('{} \t {:.3f} +- {:.3f}'.format(label.ljust(10), np.median(x), np.std(x)))

    # Relative weighting between rdnoise & sky terms to model var.
    alpha = calc_alpha(frame, fibermap=frame.fibermap,
                       rdnoise_sigma=rdnoise, npix_1d=npix,
                       angperpix=angperpix, angperspecbin=angperspecbin,
                       fiberflat=fiberflat, skymodel=skymodel)

    log.info(f"TSNR ALPHA = {alpha:.6f}")

    if alpha_only:
        return {}, alpha

    # Exclude masked / zero-ivar pixels from the TSNR sums.
    maskfactor = np.ones_like(frame.mask, dtype=float)
    maskfactor[frame.mask > 0] = 0.0
    maskfactor *= (frame.ivar > 0.0)

    tsnrs = {}

    for tracer in ensemble.keys():
        wave = ensemble[tracer].wave[band]
        dflux = ensemble[tracer].flux[band]

        if len(frame.wave) != len(wave) or not np.allclose(frame.wave, wave):
            log.warning(f'Resampling {tracer} ensemble wavelength to match input {camera} frame')
            tmp = np.zeros([dflux.shape[0], len(frame.wave)])
            for i in range(dflux.shape[0]):
                tmp[i] = np.interp(frame.wave, wave, dflux[i],
                                   left=dflux[i,0], right=dflux[i,-1])
            dflux = tmp
            wave = frame.wave

        denom = var_model(rdnoise, npix, angperpix, angperspecbin, fiberflat, skymodel, alpha=alpha)

        if include_poisson:
            # TODO: Fix default seeing-fiberfrac relation.
            denom += var_tracer(tracer, frame, angperspecbin, fiberflat, fluxcalib)

        # Work in uncalibrated flux units (electrons per angstrom); flux_calib includes exptime. tau.
        # Broadcast.
        dflux = dflux * fluxcalib.calib # [e/A]

        # Wavelength dependent fiber flat; Multiply or divide - check with Julien.
        result = dflux * fiberflat.fiberflat

        # Apply dust transmission.
        result *= dust_transmission(frame.wave, ebv[:,None])

        if include_fiberfracs:
            if (tracer in tsnr_fiberfracs):
                result *= tsnr_fiberfracs[tracer][:,None]
            else:
                log.critical('Missing {} tracer in tsnr fiberfracs.'.format(tracer))

        result = result**2.
        result /= denom

        # Eqn. (1) of https://desi.lbl.gov/DocDB/cgi-bin/private/RetrieveFile?docid=4723;filename=sky-monitor-mc-study-v1.pdf;version=2
        tsnrs[tracer] = np.sum(result * maskfactor, axis=1)

    results=dict()
    for tracer in tsnrs.keys():
        key = 'TSNR2_{}_{}'.format(tracer.upper(), band.upper())
        results[key]=tsnrs[tracer]
        log.info('{} = {:.6f}'.format(key, np.median(tsnrs[tracer])))

    log.info('computation time = {:4.2f} sec'.format(time.time()-t0))

    return results, alpha
def tsnr2_to_efftime(tsnr2, target_type):
    """ Converts TSNR2 values to effective exposure time.

    Args:
        tsnr2: TSNR**2 values, float or numpy array
        target_type: str, "ELG","BGS","LYA", or other depending on content of data/tsnr/tsnr-efftime.yaml

    Returns: exptime in seconds, same type and shape if applicable as input tsnr2
    """
    tracer = target_type.lower()
    ensemble_meta = get_ensemble()[tracer].meta
    log = get_logger()

    # The conversion slope is stored in the ensemble file header.
    if "SNR2TIME" not in ensemble_meta.keys():
        message = "did not find key SNR2TIME in tsnr_ensemble fits file header, the tsnr files must be deprecated, please update DESIMODEL."
        log.error(message)
        return np.zeros_like(tsnr2)

    slope = ensemble_meta["SNR2TIME"]
    log.info("for tracer {} SNR2TIME={:f}".format(tracer, slope))

    return slope * tsnr2
|
desihub/desispec
|
py/desispec/tsnr.py
|
Python
|
bsd-3-clause
| 48,790
|
[
"Galaxy",
"Gaussian"
] |
40201fe37be48faf4a829d9a06269e62bd747f1dd374b0b505f942119d9760a0
|
#!/usr/bin/python
__author__ = ('David Dunn')
__version__ = '0.1'
import cv2, os, math, sys
import numpy as np
# All measurements are in meters
#pixelDiameter = .000042333 # .0042333cm = 1/600 in (600dpi)
#pixelDiameter = .000333333333 # (3 dpmm)
#pixelDiameter = .0001 # (10 dpmm)
def strToImg(text, scale=1.0,thick=1,color=(255,255,255),backCol=(0,0,0)):
    # Render *text* into a fresh black canvas just large enough to hold it.
    (text_w, text_h), baseLine = cv2.getTextSize(text, cv2.FONT_HERSHEY_PLAIN, scale, int(scale*2.5))
    canvas = np.zeros((text_h*2, text_w+1, 3))
    origin = (0, text_h)
    drawStr(canvas, origin, text, scale, thick, color, backCol)
    return canvas
# Draw a string on an image (from cv2 example common.py)
def drawStr(dst, point, text, scale=1.0,thick=1,color=(255,255,255),backCol=(0,0,0)):
    # Two passes: a thicker pass offset by (1, 1) in backCol acts as an
    # outline/shadow, then the foreground text is drawn on top.
    x, y = point
    for org, col, t in (((x+1, y+1), backCol, thick*2), ((x, y), color, thick)):
        cv2.putText(dst, text, org, cv2.FONT_HERSHEY_PLAIN, scale, col, thickness=t, lineType=cv2.LINE_AA)
# Convert a string to a number - only good for 1 number per string (otherwise use regex)
def strToInt(x):
    # Keep only the digit characters of x and parse them as one integer.
    digits = filter(str.isdigit, x)
    return int("".join(digits))
def imgToHex(img, fill=2):
    # Hex-dump a 3D image: one text line per image row, lines separated by
    # newlines, each value zero-padded to *fill* uppercase hex digits.
    lines = []
    for plane in img:
        for row in plane:
            lines.append(''.join(np.base_repr(value, 16).zfill(fill) for value in row))
    return '\n'.join(lines)
def fileListToHex(dir, lst):
    """Concatenate the 4-bit hex dumps of the image files in *lst* (under *dir*).

    Each image is read, converted BGR -> RGB, quantized to 4 bits per channel
    and dumped via imgToHex with one hex digit per value.

    Fix: the quantization used ``img/16`` which is floor division only under
    Python 2; ``//`` keeps integer semantics (np.base_repr rejects floats
    under Python 3).
    """
    chunks = []
    for fname in lst:
        bgr = cv2.imread(os.path.join(dir, fname))
        rgb = cv2.cvtColor(bgr, cv2.COLOR_BGR2RGB)
        chunks.append('%s\n' % imgToHex(rgb // 16, 1))
    return ''.join(chunks)
def bitmapToImage(myMap, tiles):
    """Stitch a 2D map of tile keys into one image.

    Args:
        myMap: 2D iterable of keys (rows of the layout).
        tiles: mapping/sequence from key -> numpy tile; tiles must share
               compatible shapes so rows stack cleanly.

    Returns:
        The assembled numpy array, or None for an empty map (preserving the
        previous behavior).

    Improvement: the original grew the image with repeated hstack/vstack
    (quadratic copying); each row is now built once and stacked in one call.
    """
    if not len(myMap):
        return None
    rows = [np.hstack([tiles[key] for key in row]) for row in myMap]
    return np.vstack(rows)
# Get the scale factor for normalization for numpy dtypes
def getBitDepthScaleFactor(typeName):
    """Return the normalization factor for a numpy dtype name.

    'uintN' -> 2**N - 1, 'intN' -> 2**(N-1) - 1, anything else (floats) -> 1.

    Fix: the original called ``strToNum()``, which is not defined anywhere in
    this module (NameError at runtime); the digit extraction is inlined.
    """
    def _bits(name):
        # Pull the bit count out of e.g. 'uint16' -> 16.
        return int(''.join(ch for ch in name if ch.isdigit()))

    if typeName[:4] == 'uint':
        return 2 ** _bits(typeName) - 1
    elif typeName[:3] == 'int':
        return 2 ** (_bits(typeName) - 1) - 1
    else:
        return 1.
# Definition to create circle.
def circle(nx,ny,d,smooth=False,n=1):
    # Boolean disk of diameter d on an (nx, ny) grid.  The +1 center offsets
    # and the unused smooth/n parameters are kept for compatibility.
    yy, xx = np.ogrid[-nx/2:(nx/2), -ny/2:(ny/2)]
    radius_sq = (d / 2.) ** 2
    return (xx + 1) ** 2 + (yy + 1) ** 2 <= radius_sq
# Get the image in the proper format
def getImageFloat(imgName):
    # Load an image and normalize to float32 in [0, 1] based on its dtype.
    raw = cv2.imread(imgName)
    norm = float(getBitDepthScaleFactor(raw.dtype.name))
    return np.float32(raw) / norm
# Get the image in the proper format
def getImageAlpha(imgName):
    # Load an image *with* its alpha channel and normalize to float32 [0, 1].
    raw = cv2.imread(imgName, cv2.IMREAD_UNCHANGED)
    norm = float(getBitDepthScaleFactor(raw.dtype.name))
    return np.float32(raw) / norm
# Composite top image over bot image using top's alpha chanel (and bot's alpha chanel) optional
def over(topC, topA, botC, botA=None, premultiply=True):
    # Porter-Duff "over" compositing.  premultiply=False means the colors are
    # straight (un-premultiplied) and are multiplied by their alpha first.
    if not premultiply:
        topC = cv2.multiply(topC, np.dstack((topA, topA, topA)))
        if botA is not None:
            botC = cv2.multiply(botC, np.dstack((botA, botA, botA)))

    if botA is None:
        # Treat the bottom layer as fully opaque.
        botA = np.ones_like(topA)
        outA = botA
    else:
        outA = cv2.add(topA, cv2.multiply(botA, cv2.subtract(1., topA)))

    inv_topA = cv2.subtract(np.ones_like(botC), np.dstack((topA, topA, topA)))
    outC = cv2.add(topC, cv2.multiply(botC, inv_topA))
    return outC, outA
# Calculate the circle of confusion diameter given the focal image depth, blur image depth and aperture diameter
def getCoC(aperture, focalDist, blurDist):
    # Thin-lens circle of confusion for a point at blurDist when the eye is
    # focused at focalDist.
    defocus = abs(blurDist - focalDist)
    return aperture * defocus / blurDist
# Crop or expand (with ones padding) the image to the given dimensions
def cropImg(img, refDim):
    # Center-crop img to refDim when it is at least that large in both
    # dimensions; otherwise center-paste it into a ones-valued canvas.
    # NOTE(review): the padding branch allocates a 3-channel canvas
    # (np.ones([..., 3])), so grayscale input would fail there -- confirm
    # callers always pass color images.
    size = img.shape[:2]
    if size[0] >= refDim[0] and size[1] >= refDim[1]:
        # Pure crop: refDim rows/cols centered on the image center.
        return img[round(size[0]/2)-round(refDim[0]/2):round(size[0]/2)+refDim[0]-round(refDim[0]/2),round(size[1]/2)-round(refDim[1]/2):round(size[1]/2)+refDim[1]-round(refDim[1]/2)]
    else:
        toRet = np.ones([refDim[0],refDim[1],3], img.dtype.name)
        # Overlap extents and their (rounded) half-sizes, so the copied region
        # stays centered in both source and destination even when only one
        # axis is smaller than refDim.
        xMin = min(refDim[0],size[0])
        yMin = min(refDim[1],size[1])
        xMinH = min(round(refDim[0]/2),round(size[0]/2))
        yMinH = min(round(refDim[1]/2),round(size[1]/2))
        toRet[round(refDim[0]/2)-xMinH:round(refDim[0]/2)+xMin-xMinH,round(refDim[1]/2)-yMinH:round(refDim[1]/2)+yMin-yMinH] = img[round(size[0]/2)-xMinH:round(size[0]/2)+xMin-xMinH,round(size[1]/2)-yMinH:round(size[1]/2)+yMin-yMinH]
        return toRet
# Scale the image as it would appear if moved from the refDist to the imgDist, extend the image by buffer when we scale before we crop
def scaleImgDist(refDist, imgDist, img, refDim, bufferFactor=1):
    # Apparent size scales inversely with distance.
    scaleFactor = 1.*refDist**1/imgDist**1
    # Cap the magnification to avoid absurd resize targets.
    if scaleFactor > 2000: scaleFactor = 2000.
    #interp = cv2.INTER_LINEAR
    interp = cv2.INTER_NEAREST
    if scaleFactor < 1.0:
        # Shrinking: INTER_AREA reduces aliasing.
        interp = cv2.INTER_AREA
        newImg = img
        #newImg = np.concatenate((img, img, img), axis=0)
        #newImg = np.concatenate((newImg, newImg, newImg), axis=1)
        # Pre-crop to the region that will fill refDim after resizing
        # (bufferFactor leaves extra margin).
        newImg = cropImg(newImg, tuple([int(round(i/scaleFactor*bufferFactor)) for i in img.shape[:2]]))
    else:
        newImg = cropImg(img, tuple([int(round(i/scaleFactor*bufferFactor)) for i in img.shape[:2]]))
    newImg = cv2.resize(newImg, tuple([int(round(i*scaleFactor)) for i in newImg.shape[:2]]), 0, 0,interp)
    #print newImg.shape
    #print refDim
    # Final center-crop (or pad) to the requested output dimensions.
    newImg = cropImg(newImg, tuple([int(round(i)) for i in refDim]))
    #print newImg.shape
    return newImg
def singleAxisDistort(img, func, axis=1):
    """Rescale each line of *img* along one axis by a per-line factor.

    *func* maps the signed distance of a line from the image center to a
    scale factor; each line is resized and centered in the output.

    Args:
        img: input numpy image (H x W x C).
        func: callable offset -> scale, e.g. lambda x: 1 + 9.2e-8 * x**2.
        axis: 1 distorts along columns (default), 0 along rows.

    Fix: the slice bounds used ``/ 2``, which yields float indices and a
    TypeError under Python 3; replaced with integer ``// 2``.
    """
    temp = np.swapaxes(img, 0, 1) if axis == 1 else img
    size = temp.shape
    mid = size[0] / 2

    toRet = None
    for idx, row in enumerate(temp):
        scale = func(idx - mid)
        if toRet is None:
            # Allocate the output once, sized by the first line's scale.
            toRet = np.zeros((size[0], int(math.ceil(size[1]*scale)), size[2]), dtype=img.dtype)
        scaled = cv2.resize(row, (0, 0), fx=1, fy=scale)
        # Center the resized line inside the output row.
        top = (toRet.shape[1] - scaled.shape[0]) // 2
        bot = top + scaled.shape[0]
        toRet[idx, top:bot] = np.copy(scaled)

    return np.swapaxes(toRet, 0, 1) if axis == 1 else toRet
'''
func = lambda x: 1+.000000092*x**2
simg = singleAxisDistort(img, func)
'''
# Generate the kernel for the PSF of the eye
def getPSF(focalDist, blurDist, aperture=None, pixelDiameter=.00033, **kwargs):
    """Build the defocus point-spread-function kernel of the eye.

    The kernel is a normalized tophat (disk) whose diameter is the circle of
    confusion at the focal plane, in pixels -- NOT a Gaussian.

    Args:
        focalDist: distance to the focal plane [m].
        blurDist: distance to the blur plane [m].
        aperture: pupil diameter [m]; defaults to 3.5mm (human pupil is
                  between 1.5 and 8 mm).
        pixelDiameter: physical pixel size at the focal plane [m].
        **kwargs: accepted and ignored, kept for interface compatibility.

    Fix: the kernel size used ``/2*2+1``, which produces a float under
    Python 3; ``//`` restores the intended odd integer.
    """
    if aperture is None:
        aperture = .0035 # Human pupil is between 1.5 and 8 mm

    # Circle-of-confusion diameter on the focal plane - but we are convolving,
    # so it should be the same on both planes once they are scaled.
    CoC = getCoC(aperture, focalDist, blurDist)
    d = CoC/pixelDiameter # diameter in focal-plane pixels

    # Smallest odd kernel size that contains the disk, so it stays centered.
    ksize = int(round(d))//2*2 + 1
    tophat = circle(ksize, ksize, d)

    tophat = tophat[:,~np.all(tophat == 0, axis=0)] # remove zero columns
    tophat = tophat[~np.all(tophat == 0, axis=1)] # remove zero rows

    if len(tophat) == 0:
        # Degenerate (sub-pixel) disk: fall back to an identity kernel.
        tophat = np.ones([1, 1], tophat.dtype)

    # Normalize so the kernel sums to one.
    kernel = 1./np.sum(tophat)*tophat
    return kernel
# Generate a linear motion blur kernel
def getMotionKernel(ksize, sigma=3):
    # Horizontal box blur (1/ksize per tap) weighted vertically by a
    # Gaussian profile.
    gauss_col = cv2.getGaussianKernel(ksize, sigma)
    box_row = np.full((1, ksize), 1.0 / ksize)
    return gauss_col * box_row
# Generate the image as seen through the mask blurred by the kernel - (mask * PSF) + image
def generateImage(img, mask, kernel):
    """Return *img* attenuated by *mask* after the mask is blurred by
    *kernel*: filter2D(mask, kernel), cropped to img's shape, then
    multiplied element-wise with img.

    Non-float inputs are converted to float32 in [0, 1] using
    getBitDepthScaleFactor() (defined elsewhere in this module).
    """
    if img.dtype.name[:5] != 'float':
        scale = float(getBitDepthScaleFactor(img.dtype.name))
        img = np.float32(img)/scale
    if mask.dtype.name[:5] != 'float':
        scale = float(getBitDepthScaleFactor(mask.dtype.name))
        mask = np.float32(mask)/scale
    if kernel.dtype.name[:5] != 'float':
        scale = float(getBitDepthScaleFactor(kernel.dtype.name))
        kernel = np.float32(kernel)/scale
    # Blur the mask by the PSF, fit it to the image, then attenuate.
    dst = cv2.filter2D(mask,-1,kernel)
    dst = cropImg(dst, img.shape)
    final = cv2.multiply(img, dst)
    return final
# Render a pre-corrected image so that when viewed through the mask with the given parameters, the original image will be visible
def preCorrectImage(img, mask, focalDist, blurDist, aperture, nsr, maskImg=None):
    """Pre-correct *img* for viewing through *mask*.

    The mask is Wiener-deconvolved by the eye's PSF (getPSF) and the
    image is divided by the result, so the optical blur of viewing
    through the mask reconstructs the original image.

    nsr     -- noise-to-signal ratio for the Wiener deconvolution
    maskImg -- unused; kept for interface compatibility
    """
    psf = getPSF(focalDist,blurDist,aperture)
    deConMask = deconvolveWiener(mask, psf, nsr)
    deConMask = cropImg(deConMask, img.shape)
    # (A dead `preCorImg = deConMask` that was immediately overwritten
    # has been removed.)
    preCorImg = cv2.divide(img, deConMask)
    return preCorImg
# Convolve an image with a PSF kernel (output depth matches the input).
def convolve(img,psf):
    """Return *img* filtered by *psf* via cv2.filter2D."""
    filtered = cv2.filter2D(img, -1, psf)
    return filtered
# Deblur image using Wiener filter
# G(f) = {H^*(f)} / {|H(f)|^2 + 1/NSR}
def deconvolveWiener(img,psf,nsr):
    """Wiener-deconvolve *img* by *psf* with noise-to-signal ratio *nsr*.

    The image is mirror-padded by half its size to reduce border
    ringing, each channel is deconvolved in the frequency domain, and
    the result is cropped back to the original resolution and clipped
    to [0, 1].  Uses getBitDepthScaleFactor() and cropImg() defined
    elsewhere in this module.
    """
    if img.dtype.name[:5] != 'float': # convert to float if not already
        scale = float(getBitDepthScaleFactor(img.dtype.name))
        img = np.float32(img)/scale
    if psf.dtype.name[:5] != 'float': # convert to float if not already
        scale = float(getBitDepthScaleFactor(psf.dtype.name))
        psf = np.float32(psf)/scale
    #img = np.float32(img)/255.
    if len(img.shape) < 3:
        # Promote grayscale to a single-channel 3D array.
        img = img[...,np.newaxis]
    origRes = img.shape
    img = cv2.copyMakeBorder(img, int(round(img.shape[0]/2.)),int(round(img.shape[0]/2.)),int(round(img.shape[1]/2.)),int(round(img.shape[1]/2.)),cv2.BORDER_REFLECT_101) # extend border
    if len(img.shape) < 3:
        # copyMakeBorder drops the channel axis for single-channel input.
        img = img[...,np.newaxis]
    # Zero-pad the PSF to the padded image size before transforming.
    psf_pad = np.zeros_like(img)
    kh, kw = psf.shape
    psf_pad[:kh, :kw,0] = psf #np.float32(psf)/255.
    PSF = cv2.dft(psf_pad[:,:,0], flags=cv2.DFT_COMPLEX_OUTPUT, nonzeroRows = kh)
    PSF2 = (PSF**2).sum(-1)
    # Wiener filter: conj(H) / (|H|^2 + 1/NSR), applied per frequency bin.
    iPSF = PSF / (PSF2 + 1./nsr)[...,np.newaxis]
    final = np.zeros_like(img)
    for i in range(origRes[2]): # do each channel seperately
        IMG = cv2.dft(img[:,:,i], flags=cv2.DFT_COMPLEX_OUTPUT)
        RES = cv2.mulSpectrums(IMG, iPSF , 0)
        final[:,:,i] = cv2.idft(RES, flags=cv2.DFT_SCALE | cv2.DFT_REAL_OUTPUT )
    # Undo the kernel's positional offset, then crop off the padding.
    final = np.roll(final, -kh//2, 0)
    final = np.roll(final, -kw//2, 1)
    final = cropImg(final, origRes)
    return final.clip(0.,1.)
if __name__ == '__main__':
    # Demo: build a cross test pattern, present it at two simulated
    # distances, and show the optical blur of viewing the near image
    # out of focus.  Press ESC to quit.
    cross = np.array(((0,0,0,0,255,0,0,0,0), # Create some white crosses for content
    (0,0,0,0,255,0,0,0,0),
    (0,0,0,0,255,0,0,0,0),
    (0,0,0,0,255,0,0,0,0),
    (255,255,255,255,255,255,255,255,255),
    (0,0,0,0,255,0,0,0,0),
    (0,0,0,0,255,0,0,0,0),
    (0,0,0,0,255,0,0,0,0),
    (0,0,0,0,255,0,0,0,0),
    ),dtype=np.uint8)
    cross = np.dstack((cross,cross,cross))
    spacing = 12
    # Tile a 3x3 grid of crosses separated by `spacing` cross-widths of black.
    crosses = np.hstack((cross, np.zeros((cross.shape[0],cross.shape[1]*spacing,3),dtype=np.uint8),cross, np.zeros((cross.shape[0],cross.shape[1]*spacing,3),dtype=np.uint8),cross))
    crosses = np.vstack((crosses,np.zeros((crosses.shape[0]*spacing,crosses.shape[1],3),dtype=np.uint8),crosses,np.zeros((crosses.shape[0]*spacing,crosses.shape[1],3),dtype=np.uint8),crosses))
    cv2.namedWindow('HMD') # Create the windows
    cv2.namedWindow('Near')
    cv2.namedWindow('Far')
    # Displays are an easy way to store the resolution and calculate the pixel size needed for calculating PSF
    display = Display()
    # Manipulate the images
    crosses = cv2.resize(crosses, None, fx=2, fy=2, interpolation=cv2.INTER_LINEAR) # double size for demo
    near = cropImg(crosses, display.resolution) # will shrink or grow image to resolution
    far = scaleImgDist(.5, 1, crosses, display.resolution, 5) # will scale an image from one distance to another
    hmd = convolve(near, getPSF(.5, 1, aperture=.004, pixelDiameter=sum(display.pixelSize())/2)) # simulated optical blur
    while(True):
        cv2.imshow('HMD', hmd) # Display the images
        cv2.imshow('Near', near)
        cv2.imshow('Far', far)
        ch = cv2.waitKey() & 0xFF
        if ch == 27: # escape
            break
    cv2.destroyAllWindows()
|
qenops/dGraph
|
util/imageManip.py
|
Python
|
apache-2.0
| 12,240
|
[
"Gaussian"
] |
70dd7e50bbad7a20a6f79dd88cbb824a942ef6e790b2790464e308e55a646f23
|
# -*- coding: utf-8 -*-
import skimage.io
import skimage.feature
import skimage.color
import skimage.transform
import skimage.util
import skimage.segmentation
import numpy
# "Selective Search for Object Recognition" by J.R.R. Uijlings et al.
#
# - Modified version with LBP extractor for texture vectorization
def _generate_segments(im_orig, scale, sigma, min_size):
    """
    segment smallest regions by the algorithm of Felzenswalb and
    Huttenlocher
    Returns im_orig with the Felzenszwalb segment label appended as a
    4th channel, so each pixel becomes [r, g, b, region_label].
    """
    # open the Image
    im_mask = skimage.segmentation.felzenszwalb(
        skimage.util.img_as_float(im_orig), scale=scale, sigma=sigma,
        min_size=min_size)
    # merge mask channel to the image as a 4th channel
    im_orig = numpy.append(
        im_orig, numpy.zeros(im_orig.shape[:2])[:, :, numpy.newaxis], axis=2)
    im_orig[:, :, 3] = im_mask
    return im_orig
def _sim_colour(r1, r2):
"""
calculate the sum of histogram intersection of colour
"""
return sum([min(a, b) for a, b in zip(r1["hist_c"], r2["hist_c"])])
def _sim_texture(r1, r2):
"""
calculate the sum of histogram intersection of texture
"""
return sum([min(a, b) for a, b in zip(r1["hist_t"], r2["hist_t"])])
def _sim_size(r1, r2, imsize):
"""
calculate the size similarity over the image
"""
return 1.0 - (r1["size"] + r2["size"]) / imsize
def _sim_fill(r1, r2, imsize):
"""
calculate the fill similarity over the image
"""
bbsize = (
(max(r1["max_x"], r2["max_x"]) - min(r1["min_x"], r2["min_x"]))
* (max(r1["max_y"], r2["max_y"]) - min(r1["min_y"], r2["min_y"]))
)
return 1.0 - (bbsize - r1["size"] - r2["size"]) / imsize
def _calc_sim(r1, r2, imsize):
    """Combined similarity: colour + texture + size + fill components."""
    parts = (
        _sim_colour(r1, r2),
        _sim_texture(r1, r2),
        _sim_size(r1, r2, imsize),
        _sim_fill(r1, r2, imsize),
    )
    return sum(parts)
def _calc_colour_hist(img):
"""
calculate colour histogram for each region
the size of output histogram will be BINS * COLOUR_CHANNELS(3)
number of bins is 25 as same as [uijlings_ijcv2013_draft.pdf]
extract HSV
"""
BINS = 25
hist = numpy.array([])
for colour_channel in (0, 1, 2):
# extracting one colour channel
c = img[:, colour_channel]
# calculate histogram for each colour and join to the result
hist = numpy.concatenate(
[hist] + [numpy.histogram(c, BINS, (0.0, 255.0))[0]])
# L1 normalize
hist = hist / len(img)
return hist
def _calc_texture_gradient(img):
    """
    calculate texture gradient for entire image
    The original SelectiveSearch algorithm proposed Gaussian derivative
    for 8 orientations, but we use LBP instead.
    output will be [height(*)][width(*)]
    """
    # One LBP map (8 neighbours, radius 1) per colour channel; any extra
    # channels of *img* (e.g. the label channel) are left as zeros.
    ret = numpy.zeros((img.shape[0], img.shape[1], img.shape[2]))
    for colour_channel in (0, 1, 2):
        ret[:, :, colour_channel] = skimage.feature.local_binary_pattern(
            img[:, :, colour_channel], 8, 1.0)
    return ret
def _calc_texture_hist(img):
"""
calculate texture histogram for each region
calculate the histogram of gradient for each colours
the size of output histogram will be
BINS * ORIENTATIONS * COLOUR_CHANNELS(3)
"""
BINS = 10
hist = numpy.array([])
for colour_channel in (0, 1, 2):
# mask by the colour channel
fd = img[:, colour_channel]
# calculate histogram for each orientation and concatenate them all
# and join to the result
hist = numpy.concatenate(
[hist] + [numpy.histogram(fd, BINS, (0.0, 1.0))[0]])
# L1 Normalize
hist = hist / len(img)
return hist
def _extract_regions(img):
    """Build the initial region dict from a segmented [r,g,b,label] image.

    Returns {label: {min_x, min_y, max_x, max_y, size, hist_c, hist_t,
    labels}} with one entry per Felzenszwalb segment label.
    """
    R = {}
    # get hsv image
    hsv = skimage.color.rgb2hsv(img[:, :, :3])
    # pass 1: count pixel positions
    for y, i in enumerate(img):
        for x, (r, g, b, l) in enumerate(i):
            # initialize a new region
            if l not in R:
                R[l] = {
                    "min_x": 0xffff, "min_y": 0xffff,
                    "max_x": 0, "max_y": 0, "labels": [l]}
            # bounding box
            if R[l]["min_x"] > x:
                R[l]["min_x"] = x
            if R[l]["min_y"] > y:
                R[l]["min_y"] = y
            if R[l]["max_x"] < x:
                R[l]["max_x"] = x
            if R[l]["max_y"] < y:
                R[l]["max_y"] = y
    # pass 2: calculate texture gradient
    tex_grad = _calc_texture_gradient(img)
    # pass 3: calculate colour histogram of each region
    for k, v in R.items():
        # colour histogram
        masked_pixels = hsv[:, :, :][img[:, :, 3] == k]
        # NOTE(review): len(masked_pixels / 4) divides the pixel VALUES by 4
        # before taking len(), so this equals len(masked_pixels); if the
        # intent was len(masked_pixels) / 4 this is a latent bug -- confirm
        # against upstream before changing.
        R[k]["size"] = len(masked_pixels / 4)
        R[k]["hist_c"] = _calc_colour_hist(masked_pixels)
        # texture histogram
        R[k]["hist_t"] = _calc_texture_hist(tex_grad[:, :][img[:, :, 3] == k])
    return R
def _extract_neighbours(regions):
def intersect(a, b):
if (a["min_x"] < b["min_x"] < a["max_x"]
and a["min_y"] < b["min_y"] < a["max_y"]) or (
a["min_x"] < b["max_x"] < a["max_x"]
and a["min_y"] < b["max_y"] < a["max_y"]) or (
a["min_x"] < b["min_x"] < a["max_x"]
and a["min_y"] < b["max_y"] < a["max_y"]) or (
a["min_x"] < b["min_x"] < a["max_x"]
and a["min_y"] < b["max_y"] < a["max_y"]):
return True
return False
R = list(regions.items())
neighbours = []
for cur, a in enumerate(R[:-1]):
for b in R[int(cur) + 1:]:
if intersect(a[1], b[1]):
neighbours.append((a, b))
return neighbours
def _merge_regions(r1, r2):
new_size = r1["size"] + r2["size"]
rt = {
"min_x": min(r1["min_x"], r2["min_x"]),
"min_y": min(r1["min_y"], r2["min_y"]),
"max_x": max(r1["max_x"], r2["max_x"]),
"max_y": max(r1["max_y"], r2["max_y"]),
"size": new_size,
"hist_c": (
r1["hist_c"] * r1["size"] + r2["hist_c"] * r2["size"]) / new_size,
"hist_t": (
r1["hist_t"] * r1["size"] + r2["hist_t"] * r2["size"]) / new_size,
"labels": r1["labels"] + r2["labels"]
}
return rt
def selective_search(
    im_orig, scale=1.0, sigma=0.8, min_size=50):
    '''Selective Search
    Parameters
    ----------
    im_orig : ndarray
        Input image
    scale : int
        Free parameter. Higher means larger clusters in felzenszwalb segmentation. Inverse relation with num pixels.
    sigma : float
        Width of Gaussian kernel for felzenszwalb segmentation.
    min_size : int
        Minimum component size for felzenszwalb segmentation.
    Returns
    -------
    img : ndarray
        image with region label
        region label is stored in the 4th value of each pixel [r,g,b,(region)]
    regions : array of dict
        [
            {
                'rect': (left, top, width, height),
                'labels': [...]
            },
            ...
        ]
    '''
    assert im_orig.shape[2] == 3, "3ch image is expected"
    # load image and get smallest regions
    # region label is stored in the 4th value of each pixel [r,g,b,(region)]
    img = _generate_segments(im_orig, scale, sigma, min_size)
    if img is None:
        return None, {}
    imsize = img.shape[0] * img.shape[1]
    R = _extract_regions(img)
    # extract neighbouring information
    neighbours = _extract_neighbours(R)
    # calculate initial similarities
    S = {}
    for (ai, ar), (bi, br) in neighbours:
        S[(ai, bi)] = _calc_sim(ar, br, imsize)
    # hierarchal search
    while S != {}:
        # get highest similarity
        i, j = sorted(list(S.items()), key=lambda tup: tup[1])[-1][0]
        # merge corresponding regions
        # new region labels are floats strictly above all existing labels
        t = max(R.keys()) + 1.0
        R[t] = _merge_regions(R[i], R[j])
        # mark similarities for regions to be removed
        key_to_delete = []
        for k, v in S.items():
            if (i in k) or (j in k):
                key_to_delete.append(k)
        # remove old similarities of related regions
        for k in key_to_delete:
            del S[k]
        # calculate similarity set with the new region
        for k in filter(lambda a: a != (i, j), key_to_delete):
            n = k[1] if k[0] in (i, j) else k[0]
            S[(t, n)] = _calc_sim(R[t], R[n], imsize)
    # All intermediate merges remain in R, so regions of every scale are
    # reported, as the algorithm intends.
    regions = []
    for k, r in R.items():
        regions.append({
            'rect': (
                r['min_x'], r['min_y'],
                r['max_x'] - r['min_x'], r['max_y'] - r['min_y']),
            'size': r['size'],
            'labels': r['labels']
        })
    return img, regions
|
tpsatish95/OCR-on-Indus-Seals
|
code/Test/selectivesearch.py
|
Python
|
apache-2.0
| 8,799
|
[
"Gaussian"
] |
8a4afc71082768de48a7b5e2afbe1295f87e391e56eee67744d7d65d50842eab
|
#!/usr/bin/env python
import vtk
import numpy as np
from vmtk import vmtkscripts
import argparse
import copy
# creates lines normal to surface for evaluation in the probe image with surface
def warp_surface(args):
    """Create polylines along the surface normals of an input surface.

    For every point of the input surface a polyline of 2*sublayers + 1
    points is laid out along the point normal, spanning from
    +slice_thickness to -slice_thickness around the surface, and the
    resulting polydata is written to args.file_out.

    Expects args.surface, args.file_out, args.slice_thickness and
    args.sublayers (see the argparse setup in __main__).
    """
    print("get lines along normal of surface")
    reader = vmtkscripts.vmtkSurfaceReader()
    reader.InputFileName = args.surface
    reader.Execute()
    Surface = reader.Surface
    # Reuse existing point normals if the surface already carries them.
    narrays = Surface.GetPointData().GetNumberOfArrays()
    has_normals = False
    for i in range(narrays):
        if ( Surface.GetPointData().GetArrayName(i) == "Normals"):
            has_normals = True
            break
    if(has_normals):
        normals = Surface
    else:
        get_normals = vtk.vtkPolyDataNormals()
        get_normals.SetInputData(Surface)
        get_normals.SetFeatureAngle(30.0) # default
        get_normals.SetSplitting(True)
        get_normals.Update()
        get_normals.GetOutput().GetPointData().SetActiveVectors("Normals")
        normals = get_normals.GetOutput()
    dx=args.slice_thickness
    print(dx)
    n_pts = normals.GetNumberOfPoints()
    # Create a vtkCellArray container and store the lines in it
    lines = vtk.vtkCellArray()
    #Create a vtkPoints container and store the points in it
    pts = vtk.vtkPoints()
    count = 0
    sublayer = args.sublayers # no visual difference between 2 and 3 (5 layers vs 7 layers)
    subdx = dx/sublayer
    pts_tot = 2*sublayer + 1
    for i in range(n_pts):
        pt = np.array(normals.GetPoint(i))
        vec = np.array(normals.GetPointData().GetArray("Normals").GetTuple(i))
        # Outermost point: the surface point pushed dx along the normal.
        pt1 = pt + dx*vec
        polyLine = vtk.vtkPolyLine()
        polyLine.GetPointIds().SetNumberOfIds(pts_tot)
        for j in range(pts_tot):
            # Step back through the surface in subdx increments,
            # ending at pt - dx*vec.
            pt2 = pt1 - j*subdx*vec
            pts.InsertNextPoint(pt2)
            polyLine.GetPointIds().SetId(j, count)
            count +=1
        lines.InsertNextCell(polyLine)
    linesPolyData = vtk.vtkPolyData()
    # Add the points to the polydata container
    linesPolyData.SetPoints(pts)
    # Add the lines to the polydata container
    linesPolyData.SetLines(lines)
    writer = vmtkscripts.vmtkSurfaceWriter()
    writer.OutputFileName = args.file_out
    writer.Input = linesPolyData
    writer.Execute()
if __name__=='__main__':
    # Command-line entry point: parse arguments and generate the normal
    # lines for the given surface file.
    parser = argparse.ArgumentParser(description='estimate vertices for uniform point distribution')
    parser.add_argument("-i", dest="surface", required=True, help="input surface file", metavar="FILE")
    parser.add_argument("-o", dest="file_out", required=True, help="output surface file", metavar="FILE")
    parser.add_argument("-t", '--thickness', dest="slice_thickness", type=float, help='half thickness of lines ', default=0.5625)
    parser.add_argument("-l", '--sublayers', dest="sublayers", type=int, help='number of sublayers for lines', default=2)
    args = parser.parse_args()
    #print(args)
    warp_surface(args)
# NOTE(review): the commented-out block below is an earlier experiment
# (warping the surface in/out and extracting the largest region); it is
# kept verbatim for reference and contains unbalanced parentheses.
#surface_out = vtk.vtkPolyData()
#surface_out.DeepCopy(normals.GetOutput())
##surface_out.GetPointData().SetActiveVectors("Normals")
#surface_in = vtk.vtkPolyData()
#surface_in.DeepCopy(normals.GetOutput())
##surface_in.GetPointData().SetActiveVectors("Normals")
#warp1 = vtk.vtkWarpVector()
#warp1.SetScaleFactor(0.5625)
#warp1.SetInputData(surface_out)
#warp1.Update()
#out_largest = vtk.vtkPolyDataConnectivityFilter()
#out_largest.SetInputConnection(warp1.SetOutputPort()
#out_largest.SetExtractionModeToLargestRegion()
#warp2 = vtk.vtkWarpVector()
#warp2.SetScaleFactor(-0.5625)
#warp2.SetInputData(surface_in)
#warp2.Update()
#in_largest = vtk.vtkPolyDataConnectivityFilter()
#in_largest.SetInputConnection(warp2.SetOutputPort()
#in_largest.SetExtractionModeToLargestRegion()
|
kayarre/Tools
|
vmtk/warpsurface.py
|
Python
|
bsd-2-clause
| 3,846
|
[
"VTK"
] |
9b1e8744a014261ca9659d931f2e1caf4799782a1ca41ed308099453a8fcd66b
|
"""
Acceptance tests for studio related to the outline page.
"""
import json
from datetime import datetime, timedelta
import itertools
from pytz import UTC
from bok_choy.promise import EmptyPromise
from nose.plugins.attrib import attr
from ...pages.studio.settings_advanced import AdvancedSettingsPage
from ...pages.studio.overview import CourseOutlinePage, ContainerPage, ExpandCollapseLinkState
from ...pages.studio.utils import add_discussion, drag, verify_ordering
from ...pages.lms.courseware import CoursewarePage
from ...pages.lms.course_nav import CourseNavPage
from ...pages.lms.staff_view import StaffPage
from ...fixtures.config import ConfigModelFixture
from ...fixtures.course import XBlockFixtureDesc
from base_studio_test import StudioCourseTest
from ..helpers import load_data_str
from ...pages.lms.progress import ProgressPage
# Display names for the section/subsection/unit created by the default
# course fixture in CourseOutlineTest.populate_course_fixture.
SECTION_NAME = 'Test Section'
SUBSECTION_NAME = 'Test Subsection'
UNIT_NAME = 'Test Unit'
class CourseOutlineTest(StudioCourseTest):
    """
    Base class for all course outline tests
    """
    def setUp(self):
        """
        Install a course with no content using a fixture.
        """
        super(CourseOutlineTest, self).setUp()
        self.course_outline_page = CourseOutlinePage(
            self.browser, self.course_info['org'], self.course_info['number'], self.course_info['run']
        )
        self.advanced_settings = AdvancedSettingsPage(
            self.browser, self.course_info['org'], self.course_info['number'], self.course_info['run']
        )
    def populate_course_fixture(self, course_fixture):
        """ Install a course with sections/problems, tabs, updates, and handouts """
        course_fixture.add_children(
            XBlockFixtureDesc('chapter', SECTION_NAME).add_children(
                XBlockFixtureDesc('sequential', SUBSECTION_NAME).add_children(
                    XBlockFixtureDesc('vertical', UNIT_NAME).add_children(
                        XBlockFixtureDesc('problem', 'Test Problem 1', data=load_data_str('multiple_choice.xml')),
                        XBlockFixtureDesc('html', 'Test HTML Component'),
                        XBlockFixtureDesc('discussion', 'Test Discussion Component')
                    )
                )
            )
        )
    def do_action_and_verify(self, outline_page, action, expected_ordering):
        """
        Perform the supplied action and then verify the resulting ordering.
        If outline_page is None the course outline is (re)visited first.
        """
        if outline_page is None:
            outline_page = self.course_outline_page.visit()
        action(outline_page)
        verify_ordering(self, outline_page, expected_ordering)
        # Reload the page and expand all subsections to see that the change was persisted.
        outline_page = self.course_outline_page.visit()
        outline_page.q(css='.outline-item.outline-subsection.is-collapsed .ui-toggle-expansion').click()
        verify_ordering(self, outline_page, expected_ordering)
@attr('shard_3')
class CourseOutlineDragAndDropTest(CourseOutlineTest):
    """
    Tests of drag and drop within the outline page.
    """
    __test__ = True
    def populate_course_fixture(self, course_fixture):
        """
        Create a course with one section, two subsections, and four units
        """
        # Drag-handle indices depend on the expansion state of the outline.
        # with collapsed outline
        self.chap_1_handle = 0
        self.chap_1_seq_1_handle = 1
        # with first sequential expanded
        self.seq_1_vert_1_handle = 2
        self.seq_1_vert_2_handle = 3
        self.chap_1_seq_2_handle = 4
        course_fixture.add_children(
            XBlockFixtureDesc('chapter', "1").add_children(
                XBlockFixtureDesc('sequential', '1.1').add_children(
                    XBlockFixtureDesc('vertical', '1.1.1'),
                    XBlockFixtureDesc('vertical', '1.1.2')
                ),
                XBlockFixtureDesc('sequential', '1.2').add_children(
                    XBlockFixtureDesc('vertical', '1.2.1'),
                    XBlockFixtureDesc('vertical', '1.2.2')
                )
            )
        )
    def drag_and_verify(self, source, target, expected_ordering, outline_page=None):
        """ Drag handle *source* onto handle *target* and verify *expected_ordering*. """
        # Plain single-argument lambda: the original `lambda (outline): ...`
        # used Python-2-only tuple-parameter syntax (removed by PEP 3113);
        # this form behaves identically and is also valid Python 3.
        self.do_action_and_verify(
            outline_page,
            lambda outline: drag(outline, source, target),
            expected_ordering
        )
    def test_drop_unit_in_collapsed_subsection(self):
        """
        Drag vertical "1.1.2" from subsection "1.1" into collapsed subsection "1.2" which already
        have its own verticals.
        """
        course_outline_page = self.course_outline_page.visit()
        # expand first subsection
        course_outline_page.q(css='.outline-item.outline-subsection.is-collapsed .ui-toggle-expansion').first.click()
        expected_ordering = [{"1": ["1.1", "1.2"]},
                             {"1.1": ["1.1.1"]},
                             {"1.2": ["1.1.2", "1.2.1", "1.2.2"]}]
        self.drag_and_verify(self.seq_1_vert_2_handle, self.chap_1_seq_2_handle, expected_ordering, course_outline_page)
@attr('shard_3')
class WarningMessagesTest(CourseOutlineTest):
    """
    Feature: Warning messages on sections, subsections, and units
    """
    __test__ = True
    # Warning strings the outline page is expected to display.
    STAFF_ONLY_WARNING = 'Contains staff only content'
    LIVE_UNPUBLISHED_WARNING = 'Unpublished changes to live content'
    FUTURE_UNPUBLISHED_WARNING = 'Unpublished changes to content that will release in the future'
    NEVER_PUBLISHED_WARNING = 'Unpublished units will not be released'
    class PublishState(object):
        """
        Default values for representing the published state of a unit
        """
        NEVER_PUBLISHED = 1
        UNPUBLISHED_CHANGES = 2
        PUBLISHED = 3
        VALUES = [NEVER_PUBLISHED, UNPUBLISHED_CHANGES, PUBLISHED]
    class UnitState(object):
        """ Represents the state of a unit """
        def __init__(self, is_released, publish_state, is_locked):
            """ Creates a new UnitState with the given properties """
            self.is_released = is_released
            self.publish_state = publish_state
            self.is_locked = is_locked
        @property
        def name(self):
            """ Returns an appropriate name based on the properties of the unit """
            result = "Released " if self.is_released else "Unreleased "
            if self.publish_state == WarningMessagesTest.PublishState.NEVER_PUBLISHED:
                result += "Never Published "
            elif self.publish_state == WarningMessagesTest.PublishState.UNPUBLISHED_CHANGES:
                result += "Unpublished Changes "
            else:
                result += "Published "
            result += "Locked" if self.is_locked else "Unlocked"
            return result
    def populate_course_fixture(self, course_fixture):
        """ Install a course with various configurations that could produce warning messages """
        # Define the dimensions that map to the UnitState constructor
        features = [
            [True, False], # Possible values for is_released
            self.PublishState.VALUES, # Possible values for publish_state
            [True, False] # Possible values for is_locked
        ]
        # Add a fixture for every state in the product of features
        course_fixture.add_children(*[
            self._build_fixture(self.UnitState(*state)) for state in itertools.product(*features)
        ])
    def _build_fixture(self, unit_state):
        """ Returns an XBlockFixtureDesc with a section, subsection, and possibly unit that has the given state. """
        name = unit_state.name
        # Released content gets a start date in the past, unreleased one day in the future.
        start = (datetime(1984, 3, 4) if unit_state.is_released else datetime.now(UTC) + timedelta(1)).isoformat()
        subsection = XBlockFixtureDesc('sequential', name, metadata={'start': start})
        # Children of never published subsections will be added on demand via _ensure_unit_present
        return XBlockFixtureDesc('chapter', name).add_children(
            subsection if unit_state.publish_state == self.PublishState.NEVER_PUBLISHED
            else subsection.add_children(
                XBlockFixtureDesc('vertical', name, metadata={
                    'visible_to_staff_only': True if unit_state.is_locked else None
                })
            )
        )
    def test_released_never_published_locked(self):
        """ Tests that released never published locked units display staff only warnings """
        self._verify_unit_warning(
            self.UnitState(is_released=True, publish_state=self.PublishState.NEVER_PUBLISHED, is_locked=True),
            self.STAFF_ONLY_WARNING
        )
    def test_released_never_published_unlocked(self):
        """ Tests that released never published unlocked units display 'Unpublished units will not be released' """
        self._verify_unit_warning(
            self.UnitState(is_released=True, publish_state=self.PublishState.NEVER_PUBLISHED, is_locked=False),
            self.NEVER_PUBLISHED_WARNING
        )
    def test_released_unpublished_changes_locked(self):
        """ Tests that released unpublished changes locked units display staff only warnings """
        self._verify_unit_warning(
            self.UnitState(is_released=True, publish_state=self.PublishState.UNPUBLISHED_CHANGES, is_locked=True),
            self.STAFF_ONLY_WARNING
        )
    def test_released_unpublished_changes_unlocked(self):
        """ Tests that released unpublished changes unlocked units display 'Unpublished changes to live content' """
        self._verify_unit_warning(
            self.UnitState(is_released=True, publish_state=self.PublishState.UNPUBLISHED_CHANGES, is_locked=False),
            self.LIVE_UNPUBLISHED_WARNING
        )
    def test_released_published_locked(self):
        """ Tests that released published locked units display staff only warnings """
        self._verify_unit_warning(
            self.UnitState(is_released=True, publish_state=self.PublishState.PUBLISHED, is_locked=True),
            self.STAFF_ONLY_WARNING
        )
    def test_released_published_unlocked(self):
        """ Tests that released published unlocked units display no warnings """
        self._verify_unit_warning(
            self.UnitState(is_released=True, publish_state=self.PublishState.PUBLISHED, is_locked=False),
            None
        )
    def test_unreleased_never_published_locked(self):
        """ Tests that unreleased never published locked units display staff only warnings """
        self._verify_unit_warning(
            self.UnitState(is_released=False, publish_state=self.PublishState.NEVER_PUBLISHED, is_locked=True),
            self.STAFF_ONLY_WARNING
        )
    def test_unreleased_never_published_unlocked(self):
        """ Tests that unreleased never published unlocked units display 'Unpublished units will not be released' """
        self._verify_unit_warning(
            self.UnitState(is_released=False, publish_state=self.PublishState.NEVER_PUBLISHED, is_locked=False),
            self.NEVER_PUBLISHED_WARNING
        )
    def test_unreleased_unpublished_changes_locked(self):
        """ Tests that unreleased unpublished changes locked units display staff only warnings """
        self._verify_unit_warning(
            self.UnitState(is_released=False, publish_state=self.PublishState.UNPUBLISHED_CHANGES, is_locked=True),
            self.STAFF_ONLY_WARNING
        )
    def test_unreleased_unpublished_changes_unlocked(self):
        """
        Tests that unreleased unpublished changes unlocked units display 'Unpublished changes to content that will
        release in the future'
        """
        self._verify_unit_warning(
            self.UnitState(is_released=False, publish_state=self.PublishState.UNPUBLISHED_CHANGES, is_locked=False),
            self.FUTURE_UNPUBLISHED_WARNING
        )
    def test_unreleased_published_locked(self):
        """ Tests that unreleased published locked units display staff only warnings """
        self._verify_unit_warning(
            self.UnitState(is_released=False, publish_state=self.PublishState.PUBLISHED, is_locked=True),
            self.STAFF_ONLY_WARNING
        )
    def test_unreleased_published_unlocked(self):
        """ Tests that unreleased published unlocked units display no warnings """
        self._verify_unit_warning(
            self.UnitState(is_released=False, publish_state=self.PublishState.PUBLISHED, is_locked=False),
            None
        )
    def _verify_unit_warning(self, unit_state, expected_status_message):
        """
        Verifies that the given unit's messages match the expected messages.
        If expected_status_message is None, then the unit status message is expected to not be present.
        """
        self._ensure_unit_present(unit_state)
        self.course_outline_page.visit()
        section = self.course_outline_page.section(unit_state.name)
        subsection = section.subsection_at(0)
        subsection.expand_subsection()
        unit = subsection.unit_at(0)
        if expected_status_message == self.STAFF_ONLY_WARNING:
            # Staff-only warnings propagate up to the subsection and section.
            self.assertEqual(section.status_message, self.STAFF_ONLY_WARNING)
            self.assertEqual(subsection.status_message, self.STAFF_ONLY_WARNING)
            self.assertEqual(unit.status_message, self.STAFF_ONLY_WARNING)
        else:
            self.assertFalse(section.has_status_message)
            self.assertFalse(subsection.has_status_message)
            if expected_status_message:
                self.assertEqual(unit.status_message, expected_status_message)
            else:
                self.assertFalse(unit.has_status_message)
    def _ensure_unit_present(self, unit_state):
        """ Ensures that a unit with the given state is present on the course outline """
        if unit_state.publish_state == self.PublishState.PUBLISHED:
            return
        name = unit_state.name
        self.course_outline_page.visit()
        subsection = self.course_outline_page.section(name).subsection(name)
        subsection.expand_subsection()
        if unit_state.publish_state == self.PublishState.UNPUBLISHED_CHANGES:
            # Adding a component creates unpublished changes on a published unit.
            unit = subsection.unit(name).go_to()
            add_discussion(unit)
        elif unit_state.publish_state == self.PublishState.NEVER_PUBLISHED:
            subsection.add_unit()
            unit = ContainerPage(self.browser, None)
            unit.wait_for_page()
        if unit.is_staff_locked != unit_state.is_locked:
            unit.toggle_staff_lock()
@attr('shard_3')
class EditingSectionsTest(CourseOutlineTest):
    """
    Feature: Editing Release date, Due date and grading type.
    """
    __test__ = True
    def test_can_edit_subsection(self):
        """
        Scenario: I can edit settings of subsection.
            Given that I have created a subsection
            Then I see release date, due date and grading policy of subsection in course outline
            When I click on the configuration icon
            Then edit modal window is shown
            And release date, due date and grading policy fields present
            And they have correct initial values
            Then I set new values for these fields
            And I click save button on the modal
            Then I see release date, due date and grading policy of subsection in course outline
        """
        self.course_outline_page.visit()
        subsection = self.course_outline_page.section(SECTION_NAME).subsection(SUBSECTION_NAME)
        # Verify that Release date visible by default
        self.assertTrue(subsection.release_date)
        # Verify that Due date and Policy hidden by default
        self.assertFalse(subsection.due_date)
        self.assertFalse(subsection.policy)
        modal = subsection.edit()
        # Verify fields
        self.assertTrue(modal.has_release_date())
        self.assertTrue(modal.has_release_time())
        self.assertTrue(modal.has_due_date())
        self.assertTrue(modal.has_due_time())
        self.assertTrue(modal.has_policy())
        # Verify initial values
        self.assertEqual(modal.release_date, u'1/1/1970')
        self.assertEqual(modal.release_time, u'00:00')
        self.assertEqual(modal.due_date, u'')
        self.assertEqual(modal.due_time, u'')
        self.assertEqual(modal.policy, u'Not Graded')
        # Set new values
        modal.release_date = '3/12/1972'
        modal.release_time = '04:01'
        modal.due_date = '7/21/2014'
        modal.due_time = '23:39'
        modal.policy = 'Lab'
        modal.save()
        self.assertIn(u'Released: Mar 12, 1972', subsection.release_date)
        self.assertIn(u'04:01', subsection.release_date)
        self.assertIn(u'Due: Jul 21, 2014', subsection.due_date)
        self.assertIn(u'23:39', subsection.due_date)
        self.assertIn(u'Lab', subsection.policy)
    def test_can_edit_section(self):
        """
        Scenario: I can edit settings of section.
            Given that I have created a section
            Then I see release date of section in course outline
            When I click on the configuration icon
            Then edit modal window is shown
            And release date field present
            And it has correct initial value
            Then I set new value for this field
            And I click save button on the modal
            Then I see release date of section in course outline
        """
        self.course_outline_page.visit()
        section = self.course_outline_page.section(SECTION_NAME)
        # Verify that Release date visible by default
        self.assertTrue(section.release_date)
        # Verify that Due date and Policy are not present
        self.assertFalse(section.due_date)
        self.assertFalse(section.policy)
        modal = section.edit()
        # Verify fields
        self.assertTrue(modal.has_release_date())
        self.assertFalse(modal.has_due_date())
        self.assertFalse(modal.has_policy())
        # Verify initial value
        self.assertEqual(modal.release_date, u'1/1/1970')
        # Set new value
        modal.release_date = '5/14/1969'
        modal.save()
        self.assertIn(u'Released: May 14, 1969', section.release_date)
        # Verify that Due date and Policy are not present
        self.assertFalse(section.due_date)
        self.assertFalse(section.policy)
    def test_subsection_is_graded_in_lms(self):
        """
        Scenario: I can grade subsection from course outline page.
            Given I visit progress page
            And I see that problem in subsection has grading type "Practice"
            Then I visit course outline page
            And I click on the configuration icon of subsection
            And I set grading policy to "Lab"
            And I click save button on the modal
            Then I visit progress page
            And I see that problem in subsection has grading type "Problem"
        """
        progress_page = ProgressPage(self.browser, self.course_id)
        progress_page.visit()
        progress_page.wait_for_page()
        self.assertEqual(u'Practice', progress_page.grading_formats[0])
        self.course_outline_page.visit()
        subsection = self.course_outline_page.section(SECTION_NAME).subsection(SUBSECTION_NAME)
        modal = subsection.edit()
        # Set new values
        modal.policy = 'Lab'
        modal.save()
        progress_page.visit()
        self.assertEqual(u'Problem', progress_page.grading_formats[0])
    def test_unchanged_release_date_is_not_saved(self):
        """
        Scenario: Saving a subsection without changing the release date will not override the release date
            Given that I have created a section with a subsection
            When I open the settings modal for the subsection
            And I pressed save
            And I open the settings modal for the section
            And I change the release date to 07/20/1969
            And I press save
            Then the subsection and the section have the release date 07/20/1969
        """
        self.course_outline_page.visit()
        # Save the subsection modal without touching its release date.
        modal = self.course_outline_page.section_at(0).subsection_at(0).edit()
        modal.save()
        modal = self.course_outline_page.section_at(0).edit()
        modal.release_date = '7/20/1969'
        modal.save()
        release_text = 'Released: Jul 20, 1969'
        # The subsection inherits the section date, proving the earlier
        # save did not pin the old release date onto the subsection.
        self.assertIn(release_text, self.course_outline_page.section_at(0).release_date)
        self.assertIn(release_text, self.course_outline_page.section_at(0).subsection_at(0).release_date)
@attr('shard_3')
class StaffLockTest(CourseOutlineTest):
    """
    Feature: Sections, subsections, and units can be locked and unlocked from the course outline.
    """

    __test__ = True

    def populate_course_fixture(self, course_fixture):
        """ Create a course with one section, two subsections, and four units """
        # Layout: chapter '1' -> sequentials '1.1', '1.2' -> two verticals each.
        course_fixture.add_children(
            XBlockFixtureDesc('chapter', '1').add_children(
                XBlockFixtureDesc('sequential', '1.1').add_children(
                    XBlockFixtureDesc('vertical', '1.1.1'),
                    XBlockFixtureDesc('vertical', '1.1.2')
                ),
                XBlockFixtureDesc('sequential', '1.2').add_children(
                    XBlockFixtureDesc('vertical', '1.2.1'),
                    XBlockFixtureDesc('vertical', '1.2.2')
                )
            )
        )

    def _verify_descendants_are_staff_only(self, item):
        """Verifies that all the descendants of item are staff only"""
        self.assertTrue(item.is_staff_only)
        # Units have no children() method, so recurse only when present.
        if hasattr(item, 'children'):
            for child in item.children():
                self._verify_descendants_are_staff_only(child)

    def _remove_staff_lock_and_verify_warning(self, outline_item, expect_warning):
        """Removes staff lock from a course outline item and checks whether or not a warning appears."""
        modal = outline_item.edit()
        modal.is_explicitly_locked = False
        # The modal warns only when unlocking would actually make content visible,
        # i.e. when no ancestor lock is still in effect.
        if expect_warning:
            self.assertTrue(modal.shows_staff_lock_warning())
        else:
            self.assertFalse(modal.shows_staff_lock_warning())
        modal.save()

    def _toggle_lock_on_unlocked_item(self, outline_item):
        """Toggles outline_item's staff lock on and then off, verifying the staff lock warning"""
        self.assertFalse(outline_item.has_staff_lock_warning)
        outline_item.set_staff_lock(True)
        self.assertTrue(outline_item.has_staff_lock_warning)
        self._verify_descendants_are_staff_only(outline_item)
        outline_item.set_staff_lock(False)
        self.assertFalse(outline_item.has_staff_lock_warning)

    def _verify_explicit_staff_lock_remains_after_unlocking_parent(self, child_item, parent_item):
        """Verifies that child_item's explicit staff lock remains after removing parent_item's staff lock"""
        child_item.set_staff_lock(True)
        parent_item.set_staff_lock(True)
        self.assertTrue(parent_item.has_staff_lock_warning)
        self.assertTrue(child_item.has_staff_lock_warning)
        parent_item.set_staff_lock(False)
        self.assertFalse(parent_item.has_staff_lock_warning)
        # The child's explicit lock must survive loss of the inherited one.
        self.assertTrue(child_item.has_staff_lock_warning)

    def test_units_can_be_locked(self):
        """
        Scenario: Units can be locked and unlocked from the course outline page
            Given I have a course with a unit
            When I click on the configuration icon
            And I enable explicit staff locking
            And I click save
            Then the unit shows a staff lock warning
            And when I click on the configuration icon
            And I disable explicit staff locking
            And I click save
            Then the unit does not show a staff lock warning
        """
        self.course_outline_page.visit()
        self.course_outline_page.expand_all_subsections()
        unit = self.course_outline_page.section_at(0).subsection_at(0).unit_at(0)
        self._toggle_lock_on_unlocked_item(unit)

    def test_subsections_can_be_locked(self):
        """
        Scenario: Subsections can be locked and unlocked from the course outline page
            Given I have a course with a subsection
            When I click on the subsection's configuration icon
            And I enable explicit staff locking
            And I click save
            Then the subsection shows a staff lock warning
            And all its descendants are staff locked
            And when I click on the subsection's configuration icon
            And I disable explicit staff locking
            And I click save
            Then the subsection does not show a staff lock warning
        """
        self.course_outline_page.visit()
        self.course_outline_page.expand_all_subsections()
        subsection = self.course_outline_page.section_at(0).subsection_at(0)
        self._toggle_lock_on_unlocked_item(subsection)

    def test_sections_can_be_locked(self):
        """
        Scenario: Sections can be locked and unlocked from the course outline page
            Given I have a course with a section
            When I click on the section's configuration icon
            And I enable explicit staff locking
            And I click save
            Then the section shows a staff lock warning
            And all its descendants are staff locked
            And when I click on the section's configuration icon
            And I disable explicit staff locking
            And I click save
            Then the section does not show a staff lock warning
        """
        self.course_outline_page.visit()
        self.course_outline_page.expand_all_subsections()
        section = self.course_outline_page.section_at(0)
        self._toggle_lock_on_unlocked_item(section)

    def test_explicit_staff_lock_remains_after_unlocking_section(self):
        """
        Scenario: An explicitly locked unit is still locked after removing an inherited lock from a section
            Given I have a course with sections, subsections, and units
            And I have enabled explicit staff lock on a section and one of its units
            When I click on the section's configuration icon
            And I disable explicit staff locking
            And I click save
            Then the unit still shows a staff lock warning
        """
        self.course_outline_page.visit()
        self.course_outline_page.expand_all_subsections()
        section = self.course_outline_page.section_at(0)
        unit = section.subsection_at(0).unit_at(0)
        self._verify_explicit_staff_lock_remains_after_unlocking_parent(unit, section)

    def test_explicit_staff_lock_remains_after_unlocking_subsection(self):
        """
        Scenario: An explicitly locked unit is still locked after removing an inherited lock from a subsection
            Given I have a course with sections, subsections, and units
            And I have enabled explicit staff lock on a subsection and one of its units
            When I click on the subsection's configuration icon
            And I disable explicit staff locking
            And I click save
            Then the unit still shows a staff lock warning
        """
        self.course_outline_page.visit()
        self.course_outline_page.expand_all_subsections()
        subsection = self.course_outline_page.section_at(0).subsection_at(0)
        unit = subsection.unit_at(0)
        self._verify_explicit_staff_lock_remains_after_unlocking_parent(unit, subsection)

    def test_section_displays_lock_when_all_subsections_locked(self):
        """
        Scenario: All subsections in section are explicitly locked, section should display staff only warning
            Given I have a course one section and two subsections
            When I enable explicit staff lock on all the subsections
            Then the section shows a staff lock warning
        """
        self.course_outline_page.visit()
        section = self.course_outline_page.section_at(0)
        section.subsection_at(0).set_staff_lock(True)
        section.subsection_at(1).set_staff_lock(True)
        self.assertTrue(section.has_staff_lock_warning)

    def test_section_displays_lock_when_all_units_locked(self):
        """
        Scenario: All units in a section are explicitly locked, section should display staff only warning
            Given I have a course with one section, two subsections, and four units
            When I enable explicit staff lock on all the units
            Then the section shows a staff lock warning
        """
        self.course_outline_page.visit()
        self.course_outline_page.expand_all_subsections()
        section = self.course_outline_page.section_at(0)
        section.subsection_at(0).unit_at(0).set_staff_lock(True)
        section.subsection_at(0).unit_at(1).set_staff_lock(True)
        section.subsection_at(1).unit_at(0).set_staff_lock(True)
        section.subsection_at(1).unit_at(1).set_staff_lock(True)
        self.assertTrue(section.has_staff_lock_warning)

    def test_subsection_displays_lock_when_all_units_locked(self):
        """
        Scenario: All units in subsection are explicitly locked, subsection should display staff only warning
            Given I have a course with one subsection and two units
            When I enable explicit staff lock on all the units
            Then the subsection shows a staff lock warning
        """
        self.course_outline_page.visit()
        self.course_outline_page.expand_all_subsections()
        subsection = self.course_outline_page.section_at(0).subsection_at(0)
        subsection.unit_at(0).set_staff_lock(True)
        subsection.unit_at(1).set_staff_lock(True)
        self.assertTrue(subsection.has_staff_lock_warning)

    def test_section_does_not_display_lock_when_some_subsections_locked(self):
        """
        Scenario: Only some subsections in section are explicitly locked, section should NOT display staff only warning
            Given I have a course with one section and two subsections
            When I enable explicit staff lock on one subsection
            Then the section does not show a staff lock warning
        """
        self.course_outline_page.visit()
        section = self.course_outline_page.section_at(0)
        section.subsection_at(0).set_staff_lock(True)
        self.assertFalse(section.has_staff_lock_warning)

    def test_section_does_not_display_lock_when_some_units_locked(self):
        """
        Scenario: Only some units in section are explicitly locked, section should NOT display staff only warning
            Given I have a course with one section, two subsections, and four units
            When I enable explicit staff lock on three units
            Then the section does not show a staff lock warning
        """
        self.course_outline_page.visit()
        self.course_outline_page.expand_all_subsections()
        section = self.course_outline_page.section_at(0)
        # Lock three of the four units; the unlocked one keeps the section visible.
        section.subsection_at(0).unit_at(0).set_staff_lock(True)
        section.subsection_at(0).unit_at(1).set_staff_lock(True)
        section.subsection_at(1).unit_at(1).set_staff_lock(True)
        self.assertFalse(section.has_staff_lock_warning)

    def test_subsection_does_not_display_lock_when_some_units_locked(self):
        """
        Scenario: Only some units in subsection are explicitly locked, subsection should NOT display staff only warning
            Given I have a course with one subsection and two units
            When I enable explicit staff lock on one unit
            Then the subsection does not show a staff lock warning
        """
        self.course_outline_page.visit()
        self.course_outline_page.expand_all_subsections()
        subsection = self.course_outline_page.section_at(0).subsection_at(0)
        subsection.unit_at(0).set_staff_lock(True)
        self.assertFalse(subsection.has_staff_lock_warning)

    def test_locked_sections_do_not_appear_in_lms(self):
        """
        Scenario: A locked section is not visible to students in the LMS
            Given I have a course with two sections
            When I enable explicit staff lock on one section
            And I click the View Live button to switch to staff view
            Then I see two sections in the sidebar
            And when I switch the view mode to student view
            Then I see one section in the sidebar
        """
        self.course_outline_page.visit()
        self.course_outline_page.add_section_from_top_button()
        self.course_outline_page.section_at(1).set_staff_lock(True)
        self.course_outline_page.view_live()
        courseware = CoursewarePage(self.browser, self.course_id)
        courseware.wait_for_page()
        # Staff view still shows the locked section; student view must hide it.
        self.assertEqual(courseware.num_sections, 2)
        StaffPage(self.browser, self.course_id).set_staff_view_mode('Student')
        self.assertEqual(courseware.num_sections, 1)

    def test_locked_subsections_do_not_appear_in_lms(self):
        """
        Scenario: A locked subsection is not visible to students in the LMS
            Given I have a course with two subsections
            When I enable explicit staff lock on one subsection
            And I click the View Live button to switch to staff view
            Then I see two subsections in the sidebar
            And when I switch the view mode to student view
            Then I see one subsection in the sidebar
        """
        self.course_outline_page.visit()
        self.course_outline_page.section_at(0).subsection_at(1).set_staff_lock(True)
        self.course_outline_page.view_live()
        courseware = CoursewarePage(self.browser, self.course_id)
        courseware.wait_for_page()
        self.assertEqual(courseware.num_subsections, 2)
        StaffPage(self.browser, self.course_id).set_staff_view_mode('Student')
        self.assertEqual(courseware.num_subsections, 1)

    def test_toggling_staff_lock_on_section_does_not_publish_draft_units(self):
        """
        Scenario: Locking and unlocking a section will not publish its draft units
            Given I have a course with a section and unit
            And the unit has a draft and published version
            When I enable explicit staff lock on the section
            And I disable explicit staff lock on the section
            And I click the View Live button to switch to staff view
            Then I see the published version of the unit
        """
        # Create a draft by adding a discussion component to the unit.
        self.course_outline_page.visit()
        self.course_outline_page.expand_all_subsections()
        unit = self.course_outline_page.section_at(0).subsection_at(0).unit_at(0).go_to()
        add_discussion(unit)
        self.course_outline_page.visit()
        self.course_outline_page.expand_all_subsections()
        section = self.course_outline_page.section_at(0)
        section.set_staff_lock(True)
        section.set_staff_lock(False)
        unit = section.subsection_at(0).unit_at(0).go_to()
        unit.view_published_version()
        courseware = CoursewarePage(self.browser, self.course_id)
        courseware.wait_for_page()
        # The published version predates the discussion component, so it is empty.
        self.assertEqual(courseware.num_xblock_components, 0)

    def test_toggling_staff_lock_on_subsection_does_not_publish_draft_units(self):
        """
        Scenario: Locking and unlocking a subsection will not publish its draft units
            Given I have a course with a subsection and unit
            And the unit has a draft and published version
            When I enable explicit staff lock on the subsection
            And I disable explicit staff lock on the subsection
            And I click the View Live button to switch to staff view
            Then I see the published version of the unit
        """
        # Create a draft by adding a discussion component to the unit.
        self.course_outline_page.visit()
        self.course_outline_page.expand_all_subsections()
        unit = self.course_outline_page.section_at(0).subsection_at(0).unit_at(0).go_to()
        add_discussion(unit)
        self.course_outline_page.visit()
        self.course_outline_page.expand_all_subsections()
        subsection = self.course_outline_page.section_at(0).subsection_at(0)
        subsection.set_staff_lock(True)
        subsection.set_staff_lock(False)
        unit = subsection.unit_at(0).go_to()
        unit.view_published_version()
        courseware = CoursewarePage(self.browser, self.course_id)
        courseware.wait_for_page()
        self.assertEqual(courseware.num_xblock_components, 0)

    def test_removing_staff_lock_from_unit_without_inherited_lock_shows_warning(self):
        """
        Scenario: Removing explicit staff lock from a unit which does not inherit staff lock displays a warning.
            Given I have a course with a subsection and unit
            When I enable explicit staff lock on the unit
            And I disable explicit staff lock on the unit
            Then I see a modal warning.
        """
        self.course_outline_page.visit()
        self.course_outline_page.expand_all_subsections()
        unit = self.course_outline_page.section_at(0).subsection_at(0).unit_at(0)
        unit.set_staff_lock(True)
        self._remove_staff_lock_and_verify_warning(unit, True)

    def test_removing_staff_lock_from_subsection_without_inherited_lock_shows_warning(self):
        """
        Scenario: Removing explicit staff lock from a subsection which does not inherit staff lock displays a warning.
            Given I have a course with a section and subsection
            When I enable explicit staff lock on the subsection
            And I disable explicit staff lock on the subsection
            Then I see a modal warning.
        """
        self.course_outline_page.visit()
        self.course_outline_page.expand_all_subsections()
        subsection = self.course_outline_page.section_at(0).subsection_at(0)
        subsection.set_staff_lock(True)
        self._remove_staff_lock_and_verify_warning(subsection, True)

    def test_removing_staff_lock_from_unit_with_inherited_lock_shows_no_warning(self):
        """
        Scenario: Removing explicit staff lock from a unit which also inherits staff lock displays no warning.
            Given I have a course with a subsection and unit
            When I enable explicit staff lock on the subsection
            And I enable explicit staff lock on the unit
            When I disable explicit staff lock on the unit
            Then I do not see a modal warning.
        """
        self.course_outline_page.visit()
        self.course_outline_page.expand_all_subsections()
        subsection = self.course_outline_page.section_at(0).subsection_at(0)
        unit = subsection.unit_at(0)
        subsection.set_staff_lock(True)
        unit.set_staff_lock(True)
        self._remove_staff_lock_and_verify_warning(unit, False)

    def test_removing_staff_lock_from_subsection_with_inherited_lock_shows_no_warning(self):
        """
        Scenario: Removing explicit staff lock from a subsection which also inherits staff lock displays no warning.
            Given I have a course with a section and subsection
            When I enable explicit staff lock on the section
            And I enable explicit staff lock on the subsection
            When I disable explicit staff lock on the subsection
            Then I do not see a modal warning.
        """
        self.course_outline_page.visit()
        self.course_outline_page.expand_all_subsections()
        section = self.course_outline_page.section_at(0)
        subsection = section.subsection_at(0)
        section.set_staff_lock(True)
        subsection.set_staff_lock(True)
        self._remove_staff_lock_and_verify_warning(subsection, False)
@attr('shard_3')
class EditNamesTest(CourseOutlineTest):
    """
    Feature: Click-to-edit section/subsection names
    """

    __test__ = True

    def set_name_and_verify(self, outline_item, old_name, new_name, expected_name):
        """
        Rename ``outline_item`` from ``old_name`` to ``new_name`` and assert that the
        name afterwards equals ``expected_name`` and is no longer editable.
        """
        self.assertEqual(outline_item.name, old_name)
        outline_item.change_name(new_name)
        self.assertFalse(outline_item.in_editable_form())
        self.assertEqual(outline_item.name, expected_name)

    def test_edit_section_name(self):
        """
        Scenario: A section name can be edited in place.
            Given that I have created a section
            When I click on the name of the section, it becomes editable
            And after I edit it and click outside of the name field
            Then the new name is saved and the field is non-editable again
        """
        self.course_outline_page.visit()
        section = self.course_outline_page.section_at(0)
        self.set_name_and_verify(section, 'Test Section', 'Changed', 'Changed')

    def test_edit_subsection_name(self):
        """
        Scenario: A subsection name can be edited in place.
            Given that I have created a subsection
            When I click on the name of the subsection, it becomes editable
            And after I edit it and click outside of the name field
            Then the new name is saved and the field is non-editable again
        """
        self.course_outline_page.visit()
        subsection = self.course_outline_page.section_at(0).subsection_at(0)
        self.set_name_and_verify(subsection, 'Test Subsection', 'Changed', 'Changed')

    def test_edit_empty_section_name(self):
        """
        Scenario: Entering an empty section name leaves the old name intact.
            Given that I have created a section
            And I have clicked to edit its name and entered an empty string
            When I click outside of the edited name
            Then the section name does not change and becomes non-editable
        """
        self.course_outline_page.visit()
        section = self.course_outline_page.section_at(0)
        self.set_name_and_verify(section, 'Test Section', '', 'Test Section')

    def test_edit_empty_subsection_name(self):
        """
        Scenario: Entering an empty subsection name leaves the old name intact.
            Given that I have created a subsection
            And I have clicked to edit its name and entered an empty string
            When I click outside of the edited name
            Then the subsection name does not change and becomes non-editable
        """
        self.course_outline_page.visit()
        subsection = self.course_outline_page.section_at(0).subsection_at(0)
        self.set_name_and_verify(subsection, 'Test Subsection', '', 'Test Subsection')

    def test_editing_names_does_not_expand_collapse(self):
        """
        Scenario: A section stays in the same expand/collapse state while its name is edited.
            Given a collapsed section
            When I click on its name, type a new one, and press ENTER
            Then the section remains collapsed at every step
        """
        self.course_outline_page.visit()
        section = self.course_outline_page.section_at(0)
        section.expand_subsection()
        self.assertFalse(section.in_editable_form())
        self.assertTrue(section.is_collapsed)
        # Start editing: the name field opens without expanding the section.
        section.edit_name()
        self.assertTrue(section.in_editable_form())
        self.assertTrue(section.is_collapsed)
        section.enter_name('Changed')
        self.assertTrue(section.is_collapsed)
        section.finalize_name()
        self.assertTrue(section.is_collapsed)
@attr('shard_3')
class CreateSectionsTest(CourseOutlineTest):
    """
    Feature: Create new sections/subsections/units
    """

    __test__ = True

    def populate_course_fixture(self, course_fixture):
        """ Start with a completely empty course to easily test adding things to it """
        pass

    def _verify_new_section_added(self):
        """
        Assert that exactly one section now exists and that its display name
        is in its editable (click-to-rename) form.

        Shared by the three section-creation tests, which differ only in
        which "+ Add section" control they click.
        """
        self.assertEqual(len(self.course_outline_page.sections()), 1)
        self.assertTrue(self.course_outline_page.section_at(0).in_editable_form())

    def test_create_new_section_from_top_button(self):
        """
        Scenario: Create new section from button at top of page
            Given that I am on the course outline
            When I click the "+ Add section" button at the top of the page
            Then I see a new section added to the bottom of the page
            And the display name is in its editable form.
        """
        self.course_outline_page.visit()
        self.course_outline_page.add_section_from_top_button()
        self._verify_new_section_added()

    def test_create_new_section_from_bottom_button(self):
        """
        Scenario: Create new section from button at bottom of page
            Given that I am on the course outline
            When I click the "+ Add section" button at the bottom of the page
            Then I see a new section added to the bottom of the page
            And the display name is in its editable form.
        """
        self.course_outline_page.visit()
        self.course_outline_page.add_section_from_bottom_button()
        self._verify_new_section_added()

    def test_create_new_section_from_bottom_button_plus_icon(self):
        """
        Scenario: Create new section from button plus icon at bottom of page
            Given that I am on the course outline
            When I click the plus icon in "+ Add section" button at the bottom of the page
            Then I see a new section added to the bottom of the page
            And the display name is in its editable form.
        """
        self.course_outline_page.visit()
        self.course_outline_page.add_section_from_bottom_button(click_child_icon=True)
        self._verify_new_section_added()

    def test_create_new_subsection(self):
        """
        Scenario: Create new subsection
            Given that I have created a section
            When I click the "+ Add subsection" button in that section
            Then I see a new subsection added to the bottom of the section
            And the display name is in its editable form.
        """
        self.course_outline_page.visit()
        self.course_outline_page.add_section_from_top_button()
        self.assertEqual(len(self.course_outline_page.sections()), 1)
        self.course_outline_page.section_at(0).add_subsection()
        subsections = self.course_outline_page.section_at(0).subsections()
        self.assertEqual(len(subsections), 1)
        self.assertTrue(subsections[0].in_editable_form())

    def test_create_new_unit(self):
        """
        Scenario: Create new unit
            Given that I have created a section
            And that I have created a subsection within that section
            When I click the "+ Add unit" button in that subsection
            Then I am redirected to a New Unit page
            And the display name is in its editable form.
        """
        self.course_outline_page.visit()
        self.course_outline_page.add_section_from_top_button()
        self.assertEqual(len(self.course_outline_page.sections()), 1)
        self.course_outline_page.section_at(0).add_subsection()
        self.assertEqual(len(self.course_outline_page.section_at(0).subsections()), 1)
        self.course_outline_page.section_at(0).subsection_at(0).add_unit()
        # Adding a unit navigates away from the outline to the new unit's container page.
        unit_page = ContainerPage(self.browser, None)
        EmptyPromise(unit_page.is_browser_on_page, 'Browser is on the unit page').fulfill()
        self.assertTrue(unit_page.is_inline_editing_display_name())
@attr('shard_3')
class DeleteContentTest(CourseOutlineTest):
    """
    Feature: Deleting sections/subsections/units
    """

    __test__ = True

    def test_delete_section(self):
        """
        Scenario: Delete section
            Given that I am on the course outline
            When I click the delete button for a section on the course outline
            Then I should receive a confirmation message, asking me if I really want to delete the section
            When I click "Yes, I want to delete this component"
            Then the confirmation message should close
            And the section should immediately be deleted from the course outline
        """
        self.course_outline_page.visit()
        self.assertEqual(len(self.course_outline_page.sections()), 1)
        self.course_outline_page.section_at(0).delete()
        self.assertEqual(len(self.course_outline_page.sections()), 0)

    def test_cancel_delete_section(self):
        """
        Scenario: Cancel delete of section
            Given that I clicked the delete button for a section on the course outline
            And I received a confirmation message, asking me if I really want to delete the component
            When I click "Cancel"
            Then the confirmation message should close
            And the section should remain in the course outline
        """
        self.course_outline_page.visit()
        self.assertEqual(len(self.course_outline_page.sections()), 1)
        # cancel=True dismisses the confirmation dialog instead of accepting it.
        self.course_outline_page.section_at(0).delete(cancel=True)
        self.assertEqual(len(self.course_outline_page.sections()), 1)

    def test_delete_subsection(self):
        """
        Scenario: Delete subsection
            Given that I am on the course outline
            When I click the delete button for a subsection on the course outline
            Then I should receive a confirmation message, asking me if I really want to delete the subsection
            When I click "Yes, I want to delete this component"
            Then the confirmation message should close
            And the subsection should immediately be deleted from the course outline
        """
        self.course_outline_page.visit()
        self.assertEqual(len(self.course_outline_page.section_at(0).subsections()), 1)
        self.course_outline_page.section_at(0).subsection_at(0).delete()
        self.assertEqual(len(self.course_outline_page.section_at(0).subsections()), 0)

    def test_cancel_delete_subsection(self):
        """
        Scenario: Cancel delete of subsection
            Given that I clicked the delete button for a subsection on the course outline
            And I received a confirmation message, asking me if I really want to delete the subsection
            When I click "cancel"
            Then the confirmation message should close
            And the subsection should remain in the course outline
        """
        self.course_outline_page.visit()
        self.assertEqual(len(self.course_outline_page.section_at(0).subsections()), 1)
        self.course_outline_page.section_at(0).subsection_at(0).delete(cancel=True)
        self.assertEqual(len(self.course_outline_page.section_at(0).subsections()), 1)

    def test_delete_unit(self):
        """
        Scenario: Delete unit
            Given that I am on the course outline
            When I click the delete button for a unit on the course outline
            Then I should receive a confirmation message, asking me if I really want to delete the unit
            When I click "Yes, I want to delete this unit"
            Then the confirmation message should close
            And the unit should immediately be deleted from the course outline
        """
        self.course_outline_page.visit()
        # Units are only visible once the subsection is expanded.
        self.course_outline_page.section_at(0).subsection_at(0).expand_subsection()
        self.assertEqual(len(self.course_outline_page.section_at(0).subsection_at(0).units()), 1)
        self.course_outline_page.section_at(0).subsection_at(0).unit_at(0).delete()
        self.assertEqual(len(self.course_outline_page.section_at(0).subsection_at(0).units()), 0)

    def test_cancel_delete_unit(self):
        """
        Scenario: Cancel delete of unit
            Given that I clicked the delete button for a unit on the course outline
            And I received a confirmation message, asking me if I really want to delete the unit
            When I click "Cancel"
            Then the confirmation message should close
            And the unit should remain in the course outline
        """
        self.course_outline_page.visit()
        self.course_outline_page.section_at(0).subsection_at(0).expand_subsection()
        self.assertEqual(len(self.course_outline_page.section_at(0).subsection_at(0).units()), 1)
        self.course_outline_page.section_at(0).subsection_at(0).unit_at(0).delete(cancel=True)
        self.assertEqual(len(self.course_outline_page.section_at(0).subsection_at(0).units()), 1)

    def test_delete_all_no_content_message(self):
        """
        Scenario: Delete all sections/subsections/units in a course, "no content" message should appear
            Given that I delete all sections, subsections, and units in a course
            When I visit the course outline
            Then I will see a message that says, "You haven't added any content to this course yet"
            And see a + Add Section button
        """
        self.course_outline_page.visit()
        self.assertFalse(self.course_outline_page.has_no_content_message)
        self.course_outline_page.section_at(0).delete()
        self.assertEqual(len(self.course_outline_page.sections()), 0)
        self.assertTrue(self.course_outline_page.has_no_content_message)
@attr('shard_3')
class ExpandCollapseMultipleSectionsTest(CourseOutlineTest):
"""
Feature: Courses with multiple sections can expand and collapse all sections.
"""
__test__ = True
def populate_course_fixture(self, course_fixture):
""" Start with a course with two sections """
course_fixture.add_children(
XBlockFixtureDesc('chapter', 'Test Section').add_children(
XBlockFixtureDesc('sequential', 'Test Subsection').add_children(
XBlockFixtureDesc('vertical', 'Test Unit')
)
),
XBlockFixtureDesc('chapter', 'Test Section 2').add_children(
XBlockFixtureDesc('sequential', 'Test Subsection 2').add_children(
XBlockFixtureDesc('vertical', 'Test Unit 2')
)
)
)
def verify_all_sections(self, collapsed):
"""
Verifies that all sections are collapsed if collapsed is True, otherwise all expanded.
"""
for section in self.course_outline_page.sections():
self.assertEqual(collapsed, section.is_collapsed)
def toggle_all_sections(self):
"""
Toggles the expand collapse state of all sections.
"""
for section in self.course_outline_page.sections():
section.expand_subsection()
def test_expanded_by_default(self):
"""
Scenario: The default layout for the outline page is to show sections in expanded view
Given I have a course with sections
When I navigate to the course outline page
Then I see the "Collapse All Sections" link
And all sections are expanded
"""
self.course_outline_page.visit()
self.assertEquals(self.course_outline_page.expand_collapse_link_state, ExpandCollapseLinkState.COLLAPSE)
self.verify_all_sections(collapsed=False)
def test_no_expand_link_for_empty_course(self):
"""
Scenario: Collapse link is removed after last section of a course is deleted
Given I have a course with multiple sections
And I navigate to the course outline page
When I will confirm all alerts
And I press the "section" delete icon
Then I do not see the "Collapse All Sections" link
And I will see a message that says "You haven't added any content to this course yet"
"""
self.course_outline_page.visit()
for section in self.course_outline_page.sections():
section.delete()
self.assertEquals(self.course_outline_page.expand_collapse_link_state, ExpandCollapseLinkState.MISSING)
self.assertTrue(self.course_outline_page.has_no_content_message)
def test_collapse_all_when_all_expanded(self):
"""
Scenario: Collapse all sections when all sections are expanded
Given I navigate to the outline page of a course with sections
And all sections are expanded
When I click the "Collapse All Sections" link
Then I see the "Expand All Sections" link
And all sections are collapsed
"""
self.course_outline_page.visit()
self.verify_all_sections(collapsed=False)
self.course_outline_page.toggle_expand_collapse()
self.assertEquals(self.course_outline_page.expand_collapse_link_state, ExpandCollapseLinkState.EXPAND)
self.verify_all_sections(collapsed=True)
def test_collapse_all_when_some_expanded(self):
"""
Scenario: Collapsing all sections when 1 or more sections are already collapsed
Given I navigate to the outline page of a course with sections
And all sections are expanded
When I collapse the first section
And I click the "Collapse All Sections" link
Then I see the "Expand All Sections" link
And all sections are collapsed
"""
self.course_outline_page.visit()
self.verify_all_sections(collapsed=False)
self.course_outline_page.section_at(0).expand_subsection()
self.course_outline_page.toggle_expand_collapse()
self.assertEquals(self.course_outline_page.expand_collapse_link_state, ExpandCollapseLinkState.EXPAND)
self.verify_all_sections(collapsed=True)
def test_expand_all_when_all_collapsed(self):
"""
Scenario: Expanding all sections when all sections are collapsed
Given I navigate to the outline page of a course with multiple sections
And I click the "Collapse All Sections" link
When I click the "Expand All Sections" link
Then I see the "Collapse All Sections" link
And all sections are expanded
"""
self.course_outline_page.visit()
self.course_outline_page.toggle_expand_collapse()
self.assertEquals(self.course_outline_page.expand_collapse_link_state, ExpandCollapseLinkState.EXPAND)
self.course_outline_page.toggle_expand_collapse()
self.assertEquals(self.course_outline_page.expand_collapse_link_state, ExpandCollapseLinkState.COLLAPSE)
self.verify_all_sections(collapsed=False)
def test_expand_all_when_some_collapsed(self):
    """
    Scenario: Expanding all sections when 1 or more sections are already expanded
        Given I navigate to the outline page of a course with multiple sections
        And I click the "Collapse All Sections" link
        When I expand the first section
        And I click the "Expand All Sections" link
        Then I see the "Collapse All Sections" link
        And all sections are expanded
    """
    self.course_outline_page.visit()
    self.course_outline_page.toggle_expand_collapse()
    # assertEqual: assertEquals is a deprecated unittest alias.
    self.assertEqual(self.course_outline_page.expand_collapse_link_state, ExpandCollapseLinkState.EXPAND)
    self.course_outline_page.section_at(0).expand_subsection()
    self.course_outline_page.toggle_expand_collapse()
    self.assertEqual(self.course_outline_page.expand_collapse_link_state, ExpandCollapseLinkState.COLLAPSE)
    self.verify_all_sections(collapsed=False)
@attr('shard_3')
class ExpandCollapseSingleSectionTest(CourseOutlineTest):
    """
    Feature: Courses with a single section can expand and collapse all sections.
    """
    __test__ = True

    def test_no_expand_link_for_empty_course(self):
        """
        Scenario: Collapse link is removed after last section of a course is deleted
            Given I have a course with one section
            And I navigate to the course outline page
            When I will confirm all alerts
            And I press the "section" delete icon
            Then I do not see the "Collapse All Sections" link
            And I will see a message that says "You haven't added any content to this course yet"
        """
        self.course_outline_page.visit()
        self.course_outline_page.section_at(0).delete()
        # assertEqual: assertEquals is a deprecated unittest alias.
        self.assertEqual(self.course_outline_page.expand_collapse_link_state, ExpandCollapseLinkState.MISSING)
        self.assertTrue(self.course_outline_page.has_no_content_message)

    def test_old_subsection_stays_collapsed_after_creation(self):
        """
        Scenario: Collapsed subsection stays collapsed after creating a new subsection
            Given I have a course with one section and subsection
            And I navigate to the course outline page
            Then the subsection is collapsed
            And when I create a new subsection
            Then the first subsection is collapsed
            And the second subsection is expanded
        """
        self.course_outline_page.visit()
        self.assertTrue(self.course_outline_page.section_at(0).subsection_at(0).is_collapsed)
        self.course_outline_page.section_at(0).add_subsection()
        self.assertTrue(self.course_outline_page.section_at(0).subsection_at(0).is_collapsed)
        self.assertFalse(self.course_outline_page.section_at(0).subsection_at(1).is_collapsed)
@attr('shard_3')
class ExpandCollapseEmptyTest(CourseOutlineTest):
    """
    Feature: Courses with no sections initially can expand and collapse all sections after addition.
    """
    __test__ = True

    def populate_course_fixture(self, course_fixture):
        """ Start with an empty course """
        pass

    def test_no_expand_link_for_empty_course(self):
        """
        Scenario: Expand/collapse for a course with no sections
            Given I have a course with no sections
            When I navigate to the course outline page
            Then I do not see the "Collapse All Sections" link
        """
        self.course_outline_page.visit()
        # assertEqual: assertEquals is a deprecated unittest alias.
        self.assertEqual(self.course_outline_page.expand_collapse_link_state, ExpandCollapseLinkState.MISSING)

    def test_link_appears_after_section_creation(self):
        """
        Scenario: Collapse link appears after creating first section of a course
            Given I have a course with no sections
            When I navigate to the course outline page
            And I add a section
            Then I see the "Collapse All Sections" link
            And all sections are expanded
        """
        self.course_outline_page.visit()
        self.assertEqual(self.course_outline_page.expand_collapse_link_state, ExpandCollapseLinkState.MISSING)
        self.course_outline_page.add_section_from_top_button()
        self.assertEqual(self.course_outline_page.expand_collapse_link_state, ExpandCollapseLinkState.COLLAPSE)
        self.assertFalse(self.course_outline_page.section_at(0).is_collapsed)
@attr('shard_3')
class DefaultStatesEmptyTest(CourseOutlineTest):
    """
    Feature: Misc course outline default states/actions when starting with an empty course
    """
    __test__ = True

    def populate_course_fixture(self, course_fixture):
        """ Start with an empty course """
        pass

    def test_empty_course_message(self):
        """
        Scenario: Empty course state
            Given that I am in a course with no sections, subsections, nor units
            When I visit the course outline
            Then I will see a message that says "You haven't added any content to this course yet"
            And see a + Add Section button
        """
        outline = self.course_outline_page
        outline.visit()
        self.assertTrue(outline.has_no_content_message)
        self.assertTrue(outline.bottom_add_section_button.is_present())
@attr('shard_3')
class DefaultStatesContentTest(CourseOutlineTest):
    """
    Feature: Misc course outline default states/actions when starting with a course with content
    """
    __test__ = True

    def test_view_live(self):
        """
        Scenario: View Live version from course outline
            Given that I am on the course outline
            When I click the "View Live" button
            Then a new tab will open to the course on the LMS
        """
        self.course_outline_page.visit()
        self.course_outline_page.view_live()
        courseware = CoursewarePage(self.browser, self.course_id)
        courseware.wait_for_page()
        # The fixture course contains exactly these components, in this order.
        expected_types = ['problem', 'html', 'discussion']
        self.assertEqual(courseware.num_xblock_components, len(expected_types))
        for position, block_type in enumerate(expected_types):
            self.assertEqual(courseware.xblock_component_type(position), block_type)
@attr('shard_3')
class UnitNavigationTest(CourseOutlineTest):
    """
    Feature: Navigate to units
    """
    __test__ = True

    def test_navigate_to_unit(self):
        """
        Scenario: Click unit name to navigate to unit page
            Given that I have expanded a section/subsection so I can see unit names
            When I click on a unit name
            Then I will be taken to the appropriate unit page
        """
        outline = self.course_outline_page
        outline.visit()
        outline.section_at(0).subsection_at(0).expand_subsection()
        unit_page = outline.section_at(0).subsection_at(0).unit_at(0).go_to()
        self.assertTrue(unit_page.is_browser_on_page)
@attr('shard_3')
class PublishSectionTest(CourseOutlineTest):
    """
    Feature: Publish sections.
    """
    __test__ = True

    def populate_course_fixture(self, course_fixture):
        """
        Sets up a course structure with 2 subsections inside a single section.
        The first subsection has 2 units, and the second subsection has one unit.
        """
        self.courseware = CoursewarePage(self.browser, self.course_id)
        self.course_nav = CourseNavPage(self.browser)
        course_fixture.add_children(
            XBlockFixtureDesc('chapter', SECTION_NAME).add_children(
                XBlockFixtureDesc('sequential', SUBSECTION_NAME).add_children(
                    XBlockFixtureDesc('vertical', UNIT_NAME),
                    XBlockFixtureDesc('vertical', 'Test Unit 2'),
                ),
                XBlockFixtureDesc('sequential', 'Test Subsection 2').add_children(
                    XBlockFixtureDesc('vertical', 'Test Unit 3'),
                ),
            ),
        )

    def test_unit_publishing(self):
        """
        Scenario: Can publish a unit and see published content in LMS
            Given I have a section with 2 subsections and 3 unpublished units
            When I go to the course outline
            Then I see publish button for the first unit, subsection, section
            When I publish the first unit
            Then I see that publish button for the first unit disappears
            And I see publish buttons for subsection, section
            And I see the changed content in LMS
        """
        self._add_unpublished_content()
        self.course_outline_page.visit()
        section, subsection, unit = self._get_items()
        # All three levels initially expose a publish action.
        self.assertTrue(unit.publish_action)
        self.assertTrue(subsection.publish_action)
        self.assertTrue(section.publish_action)
        unit.publish()
        # Publishing the unit should clear only the unit's publish action.
        self.assertFalse(unit.publish_action)
        self.assertTrue(subsection.publish_action)
        self.assertTrue(section.publish_action)
        self.courseware.visit()
        self.assertEqual(1, self.courseware.num_xblock_components)

    def test_subsection_publishing(self):
        """
        Scenario: Can publish a subsection and see published content in LMS
            Given I have a section with 2 subsections and 3 unpublished units
            When I go to the course outline
            Then I see publish button for the unit, subsection, section
            When I publish the first subsection
            Then I see that publish button for the first subsection disappears
            And I see that publish buttons disappear for the child units of the subsection
            And I see publish button for section
            And I see the changed content in LMS
        """
        self._add_unpublished_content()
        self.course_outline_page.visit()
        section, subsection, unit = self._get_items()
        self.assertTrue(unit.publish_action)
        self.assertTrue(subsection.publish_action)
        self.assertTrue(section.publish_action)
        self.course_outline_page.section(SECTION_NAME).subsection(SUBSECTION_NAME).publish()
        # Publishing the subsection also publishes its child units.
        self.assertFalse(unit.publish_action)
        self.assertFalse(subsection.publish_action)
        self.assertTrue(section.publish_action)
        self.courseware.visit()
        self.assertEqual(1, self.courseware.num_xblock_components)
        self.course_nav.go_to_sequential_position(2)
        self.assertEqual(1, self.courseware.num_xblock_components)

    def test_section_publishing(self):
        """
        Scenario: Can publish a section and see published content in LMS
            Given I have a section with 2 subsections and 3 unpublished units
            When I go to the course outline
            Then I see publish button for the unit, subsection, section
            When I publish the section
            Then I see that publish buttons disappears
            And I see the changed content in LMS
        """
        self._add_unpublished_content()
        self.course_outline_page.visit()
        section, subsection, unit = self._get_items()
        self.assertTrue(subsection.publish_action)
        self.assertTrue(section.publish_action)
        self.assertTrue(unit.publish_action)
        self.course_outline_page.section(SECTION_NAME).publish()
        # Publishing the section publishes the entire subtree.
        self.assertFalse(subsection.publish_action)
        self.assertFalse(section.publish_action)
        self.assertFalse(unit.publish_action)
        self.courseware.visit()
        self.assertEqual(1, self.courseware.num_xblock_components)
        self.course_nav.go_to_sequential_position(2)
        self.assertEqual(1, self.courseware.num_xblock_components)
        self.course_nav.go_to_section(SECTION_NAME, 'Test Subsection 2')
        self.assertEqual(1, self.courseware.num_xblock_components)

    def _add_unpublished_content(self):
        """
        Adds unpublished HTML content to first three units in the course.
        """
        # xrange: this suite targets Python 2.
        for index in xrange(3):
            self.course_fixture.create_xblock(
                self.course_fixture.get_nested_xblocks(category="vertical")[index].locator,
                XBlockFixtureDesc('html', 'Unpublished HTML Component ' + str(index)),
            )

    def _get_items(self):
        """
        Returns first section, subsection, and unit on the page.
        """
        section = self.course_outline_page.section(SECTION_NAME)
        subsection = section.subsection(SUBSECTION_NAME)
        # Expanding the subsection is required before its units are visible.
        unit = subsection.expand_subsection().unit(UNIT_NAME)
        return (section, subsection, unit)
@attr('shard_3')
class DeprecationWarningMessageTest(CourseOutlineTest):
    """
    Feature: Verify deprecation warning message.
    """
    # Expected literal strings rendered by the outline page's warning banner.
    HEADING_TEXT = 'This course uses features that are no longer supported.'
    COMPONENT_LIST_HEADING = 'You must delete or replace the following components.'
    ADVANCE_MODULES_REMOVE_TEXT = ('To avoid errors, edX strongly recommends that you remove unsupported features '
                                   'from the course advanced settings. To do this, go to the Advanced Settings '
                                   'page, locate the "Advanced Module List" setting, and then delete the following '
                                   'modules from the list.')
    # Display name shown for a deprecated component whose own name is blank.
    DEFAULT_DISPLAYNAME = "Deprecated Component"

    def _add_deprecated_advance_modules(self, block_types):
        """
        Add `block_types` into `Advanced Module List`

        Arguments:
            block_types (list): list of block types
        """
        self.advanced_settings.visit()
        self.advanced_settings.set_values({"Advanced Module List": json.dumps(block_types)})

    def _create_deprecated_components(self):
        """
        Create deprecated components.
        """
        parent_vertical = self.course_fixture.get_nested_xblocks(category="vertical")[0]
        self.course_fixture.create_xblock(
            parent_vertical.locator,
            XBlockFixtureDesc('poll', "Poll", data=load_data_str('poll_markdown.xml'))
        )
        self.course_fixture.create_xblock(parent_vertical.locator, XBlockFixtureDesc('survey', 'Survey'))

    def _verify_deprecation_warning_info(
            self,
            deprecated_blocks_present,
            components_present,
            components_display_name_list=None,
            deprecated_modules_list=None
    ):
        """
        Verify deprecation warning

        Arguments:
            deprecated_blocks_present (bool): deprecated blocks remove text and
                its module list are visible if True else False
            components_present (bool): components list shown if True else False
            components_display_name_list (list): list of components display name
            deprecated_modules_list (list): list of deprecated advance modules
        """
        self.assertTrue(self.course_outline_page.deprecated_warning_visible)
        self.assertEqual(self.course_outline_page.warning_heading_text, self.HEADING_TEXT)
        self.assertEqual(self.course_outline_page.modules_remove_text_shown, deprecated_blocks_present)
        if deprecated_blocks_present:
            self.assertEqual(self.course_outline_page.modules_remove_text, self.ADVANCE_MODULES_REMOVE_TEXT)
            self.assertEqual(self.course_outline_page.deprecated_advance_modules, deprecated_modules_list)
        self.assertEqual(self.course_outline_page.components_visible, components_present)
        if components_present:
            self.assertEqual(self.course_outline_page.components_list_heading, self.COMPONENT_LIST_HEADING)
            self.assertItemsEqual(self.course_outline_page.components_display_names, components_display_name_list)

    def test_no_deprecation_warning_message_present(self):
        """
        Scenario: Verify that deprecation warning message is not shown if no deprecated
            advance modules are not present and also no deprecated component exist in
            course outline.

        When I goto course outline
        Then I don't see any deprecation warning
        """
        self.course_outline_page.visit()
        self.assertFalse(self.course_outline_page.deprecated_warning_visible)

    def test_deprecation_warning_message_present(self):
        """
        Scenario: Verify deprecation warning message if deprecated modules
            and components are present.

        Given I have "poll" advance modules present in `Advanced Module List`
        And I have created 2 poll components
        When I go to course outline
        Then I see poll deprecated warning
        And I see correct poll deprecated warning heading text
        And I see correct poll deprecated warning advance modules remove text
        And I see list of poll components with correct display names
        """
        self._add_deprecated_advance_modules(block_types=['poll', 'survey'])
        self._create_deprecated_components()
        self.course_outline_page.visit()
        self._verify_deprecation_warning_info(
            deprecated_blocks_present=True,
            components_present=True,
            components_display_name_list=['Poll', 'Survey'],
            deprecated_modules_list=['poll', 'survey']
        )

    def test_deprecation_warning_with_no_displayname(self):
        """
        Scenario: Verify deprecation warning message if poll components are present.

        Given I have created 1 poll deprecated component
        When I go to course outline
        Then I see poll deprecated warning
        And I see correct poll deprecated warning heading text
        And I see list of poll components with correct message
        """
        parent_vertical = self.course_fixture.get_nested_xblocks(category="vertical")[0]
        # Create a deprecated component with display_name to be empty and make sure
        # the deprecation warning is displayed with the default display name.
        self.course_fixture.create_xblock(
            parent_vertical.locator,
            XBlockFixtureDesc(category='poll', display_name="", data=load_data_str('poll_markdown.xml'))
        )
        self.course_outline_page.visit()
        self._verify_deprecation_warning_info(
            deprecated_blocks_present=False,
            components_present=True,
            components_display_name_list=[self.DEFAULT_DISPLAYNAME],
        )

    def test_warning_with_poll_advance_modules_only(self):
        """
        Scenario: Verify that deprecation warning message is shown if only
            poll advance modules are present and no poll component exist.

        Given I have poll advance modules present in `Advanced Module List`
        When I go to course outline
        Then I see poll deprecated warning
        And I see correct poll deprecated warning heading text
        And I see correct poll deprecated warning advance modules remove text
        And I don't see list of poll components
        """
        self._add_deprecated_advance_modules(block_types=['poll', 'survey'])
        self.course_outline_page.visit()
        self._verify_deprecation_warning_info(
            deprecated_blocks_present=True,
            components_present=False,
            deprecated_modules_list=['poll', 'survey']
        )

    def test_warning_with_poll_components_only(self):
        """
        Scenario: Verify that deprecation warning message is shown if only
            poll component exist and no poll advance modules are present.

        Given I have created two poll components
        When I go to course outline
        Then I see poll deprecated warning
        And I see correct poll deprecated warning heading text
        And I don't see poll deprecated warning advance modules remove text
        And I see list of poll components with correct display names
        """
        self._create_deprecated_components()
        self.course_outline_page.visit()
        self._verify_deprecation_warning_info(
            deprecated_blocks_present=False,
            components_present=True,
            components_display_name_list=['Poll', 'Survey']
        )
@attr('shard_4')
class SelfPacedOutlineTest(CourseOutlineTest):
    """Test the course outline for a self-paced course."""

    def populate_course_fixture(self, course_fixture):
        """Create a single section/subsection/unit, self-paced course starting tomorrow."""
        course_fixture.add_children(
            XBlockFixtureDesc('chapter', SECTION_NAME).add_children(
                XBlockFixtureDesc('sequential', SUBSECTION_NAME).add_children(
                    XBlockFixtureDesc('vertical', UNIT_NAME)
                )
            ),
        )
        self.course_fixture.add_course_details({
            'self_paced': True,
            'start_date': datetime.now() + timedelta(days=1)
        })
        # Self-paced courses are gated by a config model; enable it for this test.
        ConfigModelFixture('/config/self_paced', {'enabled': True}).install()

    def test_release_dates_not_shown(self):
        """
        Scenario: Ensure that block release dates are not shown on the
            course outline page of a self-paced course.

        Given I am the author of a self-paced course
        When I go to the course outline
        Then I should not see release dates for course content
        """
        self.course_outline_page.visit()
        section = self.course_outline_page.section(SECTION_NAME)
        self.assertEqual(section.release_date, '')
        subsection = section.subsection(SUBSECTION_NAME)
        self.assertEqual(subsection.release_date, '')

    def test_edit_section_and_subsection(self):
        """
        Scenario: Ensure that block release/due dates are not shown
            in their settings modals.

        Given I am the author of a self-paced course
        When I go to the course outline
        And I click on settings for a section or subsection
        Then I should not see release or due date settings
        """
        self.course_outline_page.visit()
        section = self.course_outline_page.section(SECTION_NAME)
        modal = section.edit()
        self.assertFalse(modal.has_release_date())
        self.assertFalse(modal.has_due_date())
        modal.cancel()
        subsection = section.subsection(SUBSECTION_NAME)
        modal = subsection.edit()
        self.assertFalse(modal.has_release_date())
        self.assertFalse(modal.has_due_date())
|
solashirai/edx-platform
|
common/test/acceptance/tests/studio/test_studio_outline.py
|
Python
|
agpl-3.0
| 81,678
|
[
"VisIt"
] |
51f610717c9c07630bc5b4d402c378e65336e33554f3ab5287844bf2accf71f4
|
"""
The ``fatiando`` package contains all the subpackages and modules required for
most tasks.
Modules for each geophysical method are group in subpackages:
* :mod:`gravmag <fatiando.gravmag>`:
Gravity and magnetics (i.e., potential fields)
* :mod:`seismic <fatiando.seismic>`:
Seismics and seismology
* :mod:`geothermal <fatiando.geothermal>`:
Geothermal heat transfer modeling
Modules for gridding, meshing, visualization, user interface, input/output etc:
* :mod:`mesher <fatiando.mesher>`:
Mesh generation and definition of geometric elements
* :mod:`gridder <fatiando.gridder>`:
Grid generation and operations (e.g., interpolation)
* :mod:`vis <fatiando.vis>`:
Plotting utilities for 2D (using matplotlib) and 3D (using mayavi)
* :mod:`datasets <fatiando.datasets>`:
Fetch and load datasets and models from web repositories
* :mod:`gui <fatiando.gui>`:
Graphical user interfaces (still very primitive)
* :mod:`utils <fatiando.utils>`:
Miscelaneous utilities, like mathematical functions, unit conversion, etc
* :mod:`~fatiando.constants`:
Physical constants and unit conversions
Also included is the :mod:`fatiando.inversion` package with utilities for
implementing inverse problems. There you'll find:
* :mod:`~fatiando.inversion.regularization`: Common regularizing functions and
base classes for building custom ones
* :mod:`~fatiando.inversion.base`: Base classes to implement your inverse
problem. They do most of the heavy lifting for you!
* :mod:`~fatiando.inversion.solvers`: Functions for optimization (used by
:mod:`~fatiando.inversion.base` classes)
Inversion methods in Fatiando leverage :mod:`fatiando.inversion`, providing a
common interface and usage patters. For examples, see modules
:mod:`fatiando.seismic.epic2d`,
:mod:`fatiando.seismic.srtomo`,
:mod:`fatiando.gravmag.basin2d`,
:mod:`fatiando.gravmag.euler`,
:mod:`fatiando.gravmag.eqlayer`,
etc.
The design of :mod:`fatiando.inversion` was inspired by `scikit-learn`_, an
amazing machine-learning library.
.. _scikit-learn: http://scikit-learn.org
----
"""
version = '0.2'
|
seancug/python-example
|
fatiando-0.2/fatiando/__init__.py
|
Python
|
gpl-2.0
| 2,087
|
[
"Mayavi"
] |
b7ec935d188be614b8be75d49cb804e4822b3d5d5b58fc9d72c2620b255dc11d
|
"""
Tests for geography support in PostGIS 1.5+
"""
import os
from django.contrib.gis import gdal
from django.contrib.gis.measure import D
from django.test import TestCase
from models import City, County, Zipcode
class GeographyTest(TestCase):
    """Tests for geography-typed fields on PostGIS 1.5+ (City/County/Zipcode models)."""

    def test01_fixture_load(self):
        "Ensure geography features loaded properly."
        self.assertEqual(8, City.objects.count())

    def test02_distance_lookup(self):
        "Testing GeoQuerySet distance lookup support on non-point geography fields."
        z = Zipcode.objects.get(code='77002')
        # `distance_lte` and `dwithin` should select the same set of cities.
        cities1 = list(City.objects
                       .filter(point__distance_lte=(z.poly, D(mi=500)))
                       .order_by('name')
                       .values_list('name', flat=True))
        cities2 = list(City.objects
                       .filter(point__dwithin=(z.poly, D(mi=500)))
                       .order_by('name')
                       .values_list('name', flat=True))
        for cities in [cities1, cities2]:
            self.assertEqual(['Dallas', 'Houston', 'Oklahoma City'], cities)

    def test03_distance_method(self):
        "Testing GeoQuerySet.distance() support on non-point geography fields."
        # `GeoQuerySet.distance` is not allowed on geometry fields.
        htown = City.objects.get(name='Houston')
        # NOTE(review): `qs` is never evaluated or asserted; this only verifies
        # the queryset can be constructed without raising — confirm intended.
        qs = Zipcode.objects.distance(htown.point)

    def test04_invalid_operators_functions(self):
        "Ensuring exceptions are raised for operators & functions invalid on geography fields."
        # Only a subset of the geometry functions & operator are available
        # to PostGIS geography types.  For more information, visit:
        # http://postgis.refractions.net/documentation/manual-1.5/ch08.html#PostGIS_GeographyFunctions
        z = Zipcode.objects.get(code='77002')
        # ST_Within not available.
        self.assertRaises(ValueError, City.objects.filter(point__within=z.poly).count)
        # `@` operator not available.
        self.assertRaises(ValueError, City.objects.filter(point__contained=z.poly).count)

    def test05_geography_layermapping(self):
        "Testing LayerMapping support on models with geography fields."
        # There is a similar test in `layermap` that uses the same data set,
        # but the County model here is a bit different.
        if not gdal.HAS_GDAL: return
        from django.contrib.gis.utils import LayerMapping
        # Getting the shapefile and mapping dictionary.
        shp_path = os.path.realpath(os.path.join(os.path.dirname(__file__), '..', 'data'))
        co_shp = os.path.join(shp_path, 'counties', 'counties.shp')
        co_mapping = {'name' : 'Name',
                      'state' : 'State',
                      'mpoly' : 'MULTIPOLYGON',
                      }
        # Reference county names, number of polygons, and state names.
        names = ['Bexar', 'Galveston', 'Harris', 'Honolulu', 'Pueblo']
        num_polys = [1, 2, 1, 19, 1]  # Number of polygons for each.
        st_names = ['Texas', 'Texas', 'Texas', 'Hawaii', 'Colorado']
        lm = LayerMapping(County, co_shp, co_mapping, source_srs=4269, unique='name')
        lm.save(silent=True, strict=True)
        for c, name, num_poly, state in zip(County.objects.order_by('name'), names, num_polys, st_names):
            self.assertEqual(4326, c.mpoly.srid)  # 4326 == WGS84, the SRID used for geography
            self.assertEqual(num_poly, len(c.mpoly))
            self.assertEqual(name, c.name)
            self.assertEqual(state, c.state)
|
nycholas/ask-undrgz
|
src/ask-undrgz/django/contrib/gis/tests/geogapp/tests.py
|
Python
|
bsd-3-clause
| 3,472
|
[
"VisIt"
] |
60c4716f9018953455c6d6fe602af59679cde32ca94ca8c9bc4e09ad84a9b81b
|
import click
import os
def get_host(host):
    """
    Based on the host argument passed in, determine which host to pass back.

    Currently always returns the AWS host implementation regardless of the
    value of ``host`` (work in progress).
    """
    # WIP: Only aws for now!  Import under a distinct name so the ``host``
    # parameter is not silently shadowed by the imported object.
    from skipper.aws.host import host as aws_host
    return aws_host
class BaseHost(object):
    """Common helpers for provider-specific host implementations.

    Subclasses are expected to supply ``self.creds``, a persistent dict-like
    credentials store with a ``save()`` method (inferred from usage below).
    """

    def check_file(self, message):
        """Prompt (via click) until the user supplies a path to an existing file."""
        exists = False
        while not exists:
            # Expand ``~`` so users can enter home-relative paths.
            path = os.path.expanduser(click.prompt(message))
            exists = os.path.isfile(path)
            if not exists:
                click.echo("Sorry, that doesn't appear to be a valid file.")
        return path

    def get_keys_paths(self):
        """Return SSH key paths, prompting for and persisting any missing ones."""
        public = self.creds['SSH'].get('PUBLIC_KEY')
        if not public:
            public = self.check_file('Please enter the path to a SSH public key')
            # Persist the newly supplied path so we don't prompt again.
            self.creds['SSH']['PUBLIC_KEY'] = public
            self.creds.save()
        private = self.creds['SSH'].get('PRIVATE_KEY')
        if not private:
            private = self.check_file('Please enter the path to a SSH private key')
            self.creds['SSH']['PRIVATE_KEY'] = private
            self.creds.save()
        return {
            "private": private,
            "public": public
        }

    def get_etcd_token(self):
        """Return the etcd discovery token, prompting for (and saving) one if unset."""
        if not self.creds['COREOS']['ETCD_TOKEN']:
            click.echo('\nNo token set for your etcd cluster.')
            self.creds['COREOS']['ETCD_TOKEN'] = click.prompt(
                'Please visit https://discovery.etcd.io/new to generate a new'
                ' one, or enter your existing one'
            )
            self.creds.save()
            click.echo('\n')
        return self.creds['COREOS']['ETCD_TOKEN']
|
cameronmaske/skipper
|
skipper/hosts.py
|
Python
|
bsd-2-clause
| 1,632
|
[
"VisIt"
] |
e9900d4d51ce44bb5a8c6dbf47b0f5b3a06ff7c45a15a171c46c8870677b8fd7
|
"""
Module to set up run time parameters for Clawpack.
The values set in the function setrun are then written out to data files
that will be read in by the Fortran code.
"""
import os
import numpy as np
#------------------------------
def setrun(claw_pkg='amrclaw'):
#------------------------------
    """
    Define the parameters used for running Clawpack.

    The returned object is later written to the data files that the
    Fortran code reads at run time (see the module docstring).

    INPUT:
        claw_pkg expected to be "amrclaw" for this setrun.

    OUTPUT:
        rundata - object of class ClawRunData
    """
    from clawpack.clawutil import data
    assert claw_pkg.lower() == 'amrclaw', "Expected claw_pkg = 'amrclaw'"
    num_dim = 2
    rundata = data.ClawRunData(claw_pkg, num_dim)

    #------------------------------------------------------------------
    # Problem-specific parameters to be written to setprob.data:
    # (read by the Fortran setprob routine)
    #------------------------------------------------------------------
    probdata = rundata.new_UserData(name='probdata',fname='setprob.data')
    probdata.add_param('tau', 0.2, 'relaxation parameter')
    probdata.add_param('uW', 1.0, 'wall velocity')

    #------------------------------------------------------------------
    # Standard Clawpack parameters to be written to claw.data:
    # (or to amr2ez.data for AMR)
    #------------------------------------------------------------------
    clawdata = rundata.clawdata  # initialized when rundata instantiated

    # Set single grid parameters first.
    # See below for AMR parameters.

    # ---------------
    # Spatial domain:
    # ---------------

    # Number of space dimensions:
    clawdata.num_dim = num_dim

    # Lower and upper edge of computational domain:
    clawdata.lower[0] = 0.          # xlower
    clawdata.upper[0] = 1.          # xupper
    clawdata.lower[1] = 0.          # ylower
    clawdata.upper[1] = 1.          # yupper

    # Number of grid cells:
    clawdata.num_cells[0] = 40      # mx
    clawdata.num_cells[1] = 40      # my

    # ---------------
    # Size of system:
    # ---------------

    # Number of equations in the system:
    clawdata.num_eqn = 7

    # Number of auxiliary variables in the aux array (initialized in setaux)
    clawdata.num_aux = 0

    # Index of aux array corresponding to capacity function, if there is one:
    clawdata.capa_index = 0

    # -------------
    # Initial time:
    # -------------

    clawdata.t0 = 0.0

    # Restart from checkpoint file of a previous run?
    # Note: If restarting, you must also change the Makefile to set:
    #    RESTART = True
    # If restarting, t0 above should be from original run, and the
    # restart_file 'fort.chkNNNNN' specified below should be in
    # the OUTDIR indicated in Makefile.

    clawdata.restart = False                # True to restart from prior results
    clawdata.restart_file = 'fort.chk00006' # File to use for restart data

    # -------------
    # Output times:
    #--------------

    # Specify at what times the results should be written to fort.q files.
    # Note that the time integration stops after the final output time.

    clawdata.output_style = 1

    if clawdata.output_style==1:
        # Output ntimes frames at equally spaced times up to tfinal:
        # Can specify num_output_times = 0 for no output
        clawdata.num_output_times = 40
        clawdata.tfinal = 4.0
        clawdata.output_t0 = True  # output at initial (or restart) time?

    elif clawdata.output_style == 2:
        # Specify a list or numpy array of output times:
        # Include t0 if you want output at the initial time.
        clawdata.output_times = [0., 0.1]

    elif clawdata.output_style == 3:
        # Output every step_interval timesteps over total_steps timesteps:
        clawdata.output_step_interval = 2
        clawdata.total_steps = 4
        clawdata.output_t0 = True  # output at initial (or restart) time?

    clawdata.output_format = 'ascii'      # 'ascii', 'binary', 'netcdf'

    clawdata.output_q_components = 'all'   # could be list such as [True,True]
    clawdata.output_aux_components = 'none'  # could be list
    clawdata.output_aux_onlyonce = True    # output aux arrays only at t0

    # ---------------------------------------------------
    # Verbosity of messages to screen during integration:
    # ---------------------------------------------------

    # The current t, dt, and cfl will be printed every time step
    # at AMR levels <= verbosity.  Set verbosity = 0 for no printing.
    #   (E.g. verbosity == 2 means print only on levels 1 and 2.)
    clawdata.verbosity = 0

    # --------------
    # Time stepping:
    # --------------

    # if dt_variable==True:  variable time steps used based on cfl_desired,
    # if dt_variable==False: fixed time steps dt = dt_initial always used.
    clawdata.dt_variable = True

    # Initial time step for variable dt.
    # (If dt_variable==0 then dt=dt_initial for all steps)
    clawdata.dt_initial = 0.016

    # Max time step to be allowed if variable dt used:
    clawdata.dt_max = 1e+99

    # Desired Courant number if variable dt used
    clawdata.cfl_desired = 0.9
    # max Courant number to allow without retaking step with a smaller dt:
    clawdata.cfl_max = 1.0

    # Maximum number of time steps to allow between output times:
    clawdata.steps_max = 100000

    # ------------------
    # Method to be used:
    # ------------------

    # Order of accuracy:  1 => Godunov,  2 => Lax-Wendroff plus limiters
    clawdata.order = 2

    # Use dimensional splitting?
    clawdata.dimensional_split = 1

    # For unsplit method, transverse_waves can be
    #  0 or 'none'      ==> donor cell
    #  1 or 'increment' ==> corner transport of waves
    #  2 or 'all'       ==> corner transport of 2nd order corrections too
    clawdata.transverse_waves = 'none'

    # Number of waves in the Riemann solution:
    clawdata.num_waves = 4

    # List of limiters to use for each wave family:
    # Required:  len(limiter) == num_waves
    # Some options:
    #   0 or 'none'     ==> no limiter (Lax-Wendroff)
    #   1 or 'minmod'   ==> minmod
    #   2 or 'superbee' ==> superbee
    #   3 or 'vanleer'  ==> van Leer
    #   4 or 'mc'       ==> MC limiter
    clawdata.limiter = ['vanleer','vanleer','vanleer','vanleer']

    clawdata.use_fwaves = False    # True ==> use f-wave version of algorithms

    # Source terms splitting:
    #   src_split == 0 or 'none'    ==> no source term (src routine never called)
    #   src_split == 1 or 'godunov' ==> Godunov (1st order) splitting used,
    #   src_split == 2 or 'strang'  ==> Strang (2nd order) splitting used,  not recommended.
    clawdata.source_split = 'godunov'

    # --------------------
    # Boundary conditions:
    # --------------------

    # Number of ghost cells (usually 2)
    clawdata.num_ghost = 2

    # Choice of BCs at xlower and xupper:
    #   0 or 'user'     => user specified (must modify bcNamr.f to use this option)
    #   1 or 'extrap'   => extrapolation (non-reflecting outflow)
    #   2 or 'periodic' => periodic (must specify this at both boundaries)
    #   3 or 'wall'     => solid wall for systems where q(2) is normal velocity
    clawdata.bc_lower[0] = 'user'   # at xlower
    clawdata.bc_upper[0] = 'user'   # at xupper
    clawdata.bc_lower[1] = 'user'   # at ylower
    clawdata.bc_upper[1] = 'user'   # at yupper

    # ---------------
    # Gauges:
    # ---------------
    rundata.gaugedata.gauges = []
    # for gauges append lines of the form  [gaugeno, x, y, t1, t2]
    rundata.gaugedata.gauges.append([1, 0.6, 0.6, 0., 10.])
    rundata.gaugedata.gauges.append([2, 0.7, 0.3, 0., 10.])

    # --------------
    # Checkpointing:
    # --------------

    # Specify when checkpoint files should be created that can be
    # used to restart a computation.

    clawdata.checkpt_style = 0

    if clawdata.checkpt_style == 0:
        # Do not checkpoint at all
        pass

    elif clawdata.checkpt_style == 1:
        # Checkpoint only at tfinal.
        pass

    elif clawdata.checkpt_style == 2:
        # Specify a list of checkpoint times.
        clawdata.checkpt_times = [0.1,0.15]

    elif clawdata.checkpt_style == 3:
        # Checkpoint every checkpt_interval timesteps (on Level 1)
        # and at the final time.
        clawdata.checkpt_interval = 5

    # ---------------
    # AMR parameters:
    # ---------------

    amrdata = rundata.amrdata

    # max number of refinement levels:
    amrdata.amr_levels_max = 4

    # List of refinement ratios at each level (length at least amr_level_max-1)
    amrdata.refinement_ratios_x = [2,2,2]
    amrdata.refinement_ratios_y = [2,2,2]
    amrdata.refinement_ratios_t = [2,2,2]

    # Specify type of each aux variable in amrdata.auxtype.
    # This must be a list of length num_aux, each element of which is one of:
    #   'center',  'capacity', 'xleft', or 'yleft'  (see documentation).
    amrdata.aux_type = []

    # Flag for refinement based on Richardson error estimater:
    amrdata.flag_richardson = False    # use Richardson?
    amrdata.flag_richardson_tol = 0.1  # Richardson tolerance

    # Flag for refinement using routine flag2refine:
    amrdata.flag2refine = True      # use this?
    amrdata.flag2refine_tol = 0.05  # tolerance used in this routine
    # User can modify flag2refine to change the criterion for flagging.
    # Default: check max-norm of difference between q in a cell and
    # each of its neighbors.

    # steps to take on each level L between regriddings of level L+1:
    amrdata.regrid_interval = 2

    # width of buffer zone around flagged points:
    # (typically the same as regrid_interval so waves don't escape):
    amrdata.regrid_buffer_width  = 3

    # clustering alg. cutoff for (# flagged pts) / (total # of cells refined)
    # (closer to 1.0 => more small grids may be needed to cover flagged cells)
    amrdata.clustering_cutoff = 0.7

    # print info about each regridding up to this level:
    amrdata.verbosity_regrid = 3

    # ---------------
    # Regions:
    # ---------------
    regions = rundata.regiondata.regions
    # to specify regions of refinement append lines of the form
    #  [minlevel,maxlevel,t1,t2,x1,x2,y1,y2]

    #  ----- For developers -----
    # Toggle debugging print statements:
    amrdata.dprint = False      # print domain flags
    amrdata.eprint = False      # print err est flags
    amrdata.edebug = False      # even more err est flags
    amrdata.gprint = False      # grid bisection/clustering
    amrdata.nprint = False      # proper nesting output
    amrdata.pprint = False      # proj. of tagged points
    amrdata.rprint = False      # print regridding summary
    amrdata.sprint = False      # space/memory output
    amrdata.tprint = False      # time step reporting each level
    amrdata.uprint = False      # update/upbnd reporting

    return rundata
    # end of function setrun
    # ----------------------
# end of function setrun
# ----------------------
if __name__ == '__main__':
    # Set up run-time parameters and write all data files.
    # Any command-line arguments are forwarded to setrun (e.g. claw_pkg).
    import sys
    rundata = setrun(*sys.argv[1:])
    rundata.write()
|
torrilhon/MyClawpack
|
examples/stokes_2d_drivencavity/setrun.py
|
Python
|
gpl-3.0
| 11,383
|
[
"NetCDF"
] |
20f79b1e456b46b4e15b0d1e2c7ce92a54645fcb1453d4e21e8e336bede1e61c
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# This file is part of the PyNeurActiv project, which aims at providing tools
# to study and model the activity of neuronal cultures.
# Copyright (C) 2017 SENeC Initiative
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This particular file is taken from the `astroML <http://www.astroml.org/>`
# project and is provided under a BSD license.
#
# Modified by Tanguy Fardet (6/26/2017): added min/max_width keywords
"""
Bayesian Block implementation
=============================
Dynamic programming algorithm for finding the optimal adaptive-width histogram.
Based on Scargle et al 2012 [1]_
References
----------
.. [1] http://adsabs.harvard.edu/abs/2012arXiv1207.5578S
"""
import numpy as np
class FitnessFunc(object):
    '''
    Base class for fitness functions

    Each fitness function class has the following:
    - fitness(...) : compute fitness function.
       Arguments accepted by fitness must be among [T_k, N_k, a_k, b_k, c_k]
    - prior(N, Ntot) : compute prior on N given a total number of points Ntot
    '''

    def __init__(self, p0=0.05, gamma=None):
        # p0: false-alarm probability used by the default (p0) prior.
        # gamma: if given, selects the gamma prior and p0 is ignored.
        self.p0 = p0
        self.gamma = gamma

    def validate_input(self, t, x, sigma):
        '''
        Check that input is valid (no-op here; subclasses may override).
        '''
        pass

    def fitness(self, **kwargs):
        # BUG FIX: the original signature was ``def fitness(**kwargs)`` with
        # no ``self``, so calling it on an instance raised a confusing
        # ``TypeError`` (self passed as an unexpected positional argument)
        # instead of the intended ``NotImplementedError``.
        raise NotImplementedError()

    def prior(self, N, Ntot):
        """Return the prior on N blocks given Ntot total data points."""
        if self.gamma is None:
            return self.p0_prior(N, Ntot)
        else:
            return self.gamma_prior(N, Ntot)

    def p0_prior(self, N, Ntot):
        # eq. 21 from Scargle 2012
        return 4 - np.log(73.53 * self.p0 * (N ** -0.478))

    def gamma_prior(self, N, Ntot):
        '''
        Basic prior, parametrized by gamma (eq. 3 in Scargle 2012)
        '''
        if self.gamma == 1:
            return 0
        else:
            return (np.log(1 - self.gamma)
                    - np.log(1 - self.gamma ** (Ntot + 1))
                    + N * np.log(self.gamma))

    # the fitness_args property will return the list of arguments accepted by
    # the method fitness(). This allows more efficient computation below.
    @property
    def args(self):
        try:
            # Python 2
            return self.fitness.func_code.co_varnames[1:]
        except AttributeError:
            # Python 3
            return self.fitness.__code__.co_varnames[1:]
class Events(FitnessFunc):
    """
    Fitness for binned or unbinned events

    Parameters
    ----------
    p0 : float
        False alarm probability, used to compute the prior on N
        (see eq. 21 of Scargle 2012). Default prior is for p0 = 0.
    gamma : float or None
        If specified, then use this gamma to compute the general prior form,
        p ~ gamma^N. If gamma is specified, p0 is ignored.
    """

    def fitness(self, N_k, T_k):
        # eq. 19 from Scargle 2012: N_k * log(N_k / T_k).
        # Blocks with zero counts or zero width contribute nothing.
        valid = (N_k != 0) & (T_k != 0)
        out = np.zeros(len(N_k))
        counts = N_k[valid]
        widths = T_k[valid]
        out[valid] = counts * (np.log(counts) - np.log(widths))
        return out

    def prior(self, N, Ntot):
        if self.gamma is not None:
            return self.gamma_prior(N, Ntot)
        # eq. 21 from Scargle 2012
        return 4 - np.log(73.53 * self.p0 * (N ** -0.478))
class RegularEvents(FitnessFunc):
    """
    Fitness for regular events

    This is for data which has a fundamental "tick" length, so that all
    measured values are multiples of this tick length. In each tick, there
    are either zero or one counts.

    Parameters
    ----------
    dt : float
        tick rate for data
    gamma : float
        specifies the prior on the number of bins: p ~ gamma^N
    """

    def __init__(self, dt, p0=0.05, gamma=None):
        self.dt = dt
        self.p0 = p0
        self.gamma = gamma

    def validate_input(self, t, x, sigma):
        # each tick may hold only zero or one count
        if list(np.unique(x)) not in ([0], [1], [0, 1]):
            raise ValueError("Regular events must have only 0 and 1 in x")

    def fitness(self, T_k, N_k):
        # Eq. 75 of Scargle 2012
        M_k = T_k / self.dt            # number of ticks in each block
        occupancy = N_k * 1. / M_k     # N/M occupancy per block
        if np.any(occupancy > 1 + 1E-8):
            import warnings
            warnings.warn('regular events: N/M > 1. '
                          'Is the time step correct?')
        complement = 1 - occupancy
        # clip non-positive values to 1 so log() evaluates to 0 there
        occupancy[occupancy <= 0] = 1
        complement[complement <= 0] = 1
        return N_k * np.log(occupancy) + (M_k - N_k) * np.log(complement)
class PointMeasures(FitnessFunc):
    """
    Fitness for point measures

    Parameters
    ----------
    gamma : float
        specifies the prior on the number of bins: p ~ gamma^N
        if gamma is not specified, then a prior based on simulations
        will be used (see sec 3.3 of Scargle 2012)
    """

    def __init__(self, p0=None, gamma=None):
        self.p0 = p0
        self.gamma = gamma

    def fitness(self, a_k, b_k):
        # eq. 41 from Scargle 2012
        return b_k ** 2 / (4 * a_k)

    def prior(self, N, Ntot):
        # prefer gamma prior, then p0 prior, then the simulation-based fit
        if self.gamma is not None:
            return self.gamma_prior(N, Ntot)
        if self.p0 is not None:
            return self.p0_prior(N, Ntot)
        # eq. at end of sec 3.3 in Scargle 2012
        return 1.32 + 0.577 * np.log10(N)
def bayesian_blocks(t, x=None, sigma=None, fitness='events', min_width=0.,
                    max_width=np.inf, **kwargs):
    """
    Bayesian Blocks Implementation

    This is a flexible implementation of the Bayesian Blocks algorithm
    described in Scargle 2012 [1]_

    Parameters
    ----------
    t : array_like
        data times (one dimensional, length N)
    x : array_like (optional)
        data values
    sigma : array_like or float (optional)
        data errors
    fitness : str or object
        the fitness function to use.
        If a string, the following options are supported:

        - 'events' : binned or unbinned event data
            extra arguments are `p0`, which gives the false alarm probability
            to compute the prior, or `gamma` which gives the slope of the
            prior on the number of bins.
            For this method, additional `min_width` and `max_width` keywords
            can be provided to prevent excessively large or small bins.
        - 'regular_events' : non-overlapping events measured at multiples
            of a fundamental tick rate, `dt`, which must be specified as an
            additional argument. The prior can be specified through `gamma`,
            which gives the slope of the prior on the number of bins.
        - 'measures' : fitness for a measured sequence with Gaussian errors
            The prior can be specified using `gamma`, which gives the slope
            of the prior on the number of bins. If `gamma` is not specified,
            then a simulation-derived prior will be used.

        Alternatively, the fitness can be a user-specified object of
        type derived from the FitnessFunc class.
    min_width : float, optional (default: 0.)
        Minimum width accepted for a bin.
    max_width : float, optional (default: infinity)
        Maximum acceptable width for a bin.

    Returns
    -------
    edges : ndarray
        array containing the (N+1) bin edges

    Examples
    --------
    Event data:

    >>> t = np.random.normal(size=100)
    >>> bins = bayesian_blocks(t, fitness='events', p0=0.01)

    Event data with repeats:

    >>> t = np.random.normal(size=100)
    >>> t[80:] = t[:20]
    >>> bins = bayesian_blocks(t, fitness='events', p0=0.01)

    Regular event data:

    >>> dt = 0.01
    >>> t = dt * np.arange(1000)
    >>> x = np.zeros(len(t))
    >>> x[np.random.randint(0, len(t), len(t) // 10)] = 1
    >>> bins = bayesian_blocks(t, fitness='regular_events', dt=dt, gamma=0.9)

    Measured point data with errors:

    >>> t = 100 * np.random.random(100)
    >>> x = np.exp(-0.5 * (t - 50) ** 2)
    >>> sigma = 0.1
    >>> x_obs = np.random.normal(x, sigma)
    >>> bins = bayesian_blocks(t, fitness='measures')

    References
    ----------
    .. [1] Scargle, J `et al.` (2012)
        http://adsabs.harvard.edu/abs/2012arXiv1207.5578S

    See Also
    --------
    :func:`astroML.plotting.hist` : histogram plotting function which can make
        use of bayesian blocks.
    """
    # validate array input
    t = np.asarray(t, dtype=float)
    if x is not None:
        x = np.asarray(x)
    if sigma is not None:
        sigma = np.asarray(sigma)

    # verify the fitness function
    if fitness == 'events':
        if x is not None and np.any(x % 1 > 0):
            raise ValueError("x must be integer counts for fitness='events'")
        fitfunc = Events(**kwargs)
    elif fitness == 'regular_events':
        if x is not None and (np.any(x % 1 > 0) or np.any(x > 1)):
            raise ValueError("x must be 0 or 1 for fitness='regular_events'")
        fitfunc = RegularEvents(**kwargs)
    elif fitness == 'measures':
        if x is None:
            raise ValueError("x must be specified for fitness='measures'")
        fitfunc = PointMeasures(**kwargs)
    else:
        # duck-typed custom fitness: must expose args, fitness() and prior()
        if not (hasattr(fitness, 'args') and
                hasattr(fitness, 'fitness') and
                hasattr(fitness, 'prior')):
            raise ValueError("fitness not understood")
        fitfunc = fitness

    # find unique values of t
    t = np.array(t, dtype=float)
    assert t.ndim == 1
    unq_t, unq_ind, unq_inv = np.unique(t, return_index=True,
                                        return_inverse=True)

    # if x is not specified, x will be counts at each time
    if x is None:
        if sigma is not None:
            raise ValueError("If sigma is specified, x must be specified")

        if len(unq_t) == len(t):
            # all times distinct: one event per time
            x = np.ones_like(t)
        else:
            # repeated times: count events per unique time
            x = np.bincount(unq_inv)

        t = unq_t
        sigma = 1

    # if x is specified, then we need to sort t and x together
    else:
        x = np.asarray(x)

        if len(t) != len(x):
            raise ValueError("Size of t and x does not match")

        if len(unq_t) != len(t):
            raise ValueError("Repeated values in t not supported when "
                             "x is specified")
        t = unq_t
        x = x[unq_ind]

    # verify the given sigma value: scalar, length-1 or length-N
    N = t.size
    if sigma is not None:
        sigma = np.asarray(sigma)
        if sigma.shape not in [(), (1,), (N,)]:
            raise ValueError('sigma does not match the shape of x')
    else:
        sigma = 1

    # validate the input
    fitfunc.validate_input(t, x, sigma)

    # compute values needed for computation, below
    # (only the quantities the chosen fitness function actually consumes)
    if 'a_k' in fitfunc.args:
        ak_raw = np.ones_like(x) / sigma / sigma
    if 'b_k' in fitfunc.args:
        bk_raw = x / sigma / sigma
    if 'c_k' in fitfunc.args:
        ck_raw = x * x / sigma / sigma

    # create length-(N + 1) array of cell edges
    edges = np.concatenate([t[:1],
                            0.5 * (t[1:] + t[:-1]),
                            t[-1:]])
    # block_length[k] = distance from edge k to the final time; differences
    # of this array give candidate block widths below
    block_length = t[-1] - edges

    # arrays to store the best configuration
    best = np.zeros(N, dtype=float)
    last = np.zeros(N, dtype=int)

    #-----------------------------------------------------------------
    # Start with first data cell; add one cell at each iteration
    #-----------------------------------------------------------------
    for R in range(N):
        # Compute fit_vec : fitness of putative last block (end at R)
        kwds = {}

        # T_k: width/duration of each block
        # NOTE(review): clipping to [min_width, max_width] implements the
        # added min/max_width keywords by saturating block widths rather
        # than forbidding such blocks — confirm this is the intended effect.
        if 'T_k' in fitfunc.args:
            kwds['T_k'] = np.clip(
                block_length[:R + 1] - block_length[R + 1],
                min_width, max_width)

        # N_k: number of elements in each block
        if 'N_k' in fitfunc.args:
            kwds['N_k'] = np.cumsum(x[:R + 1][::-1])[::-1]

        # a_k: eq. 31
        if 'a_k' in fitfunc.args:
            kwds['a_k'] = 0.5 * np.cumsum(ak_raw[:R + 1][::-1])[::-1]

        # b_k: eq. 32
        if 'b_k' in fitfunc.args:
            kwds['b_k'] = - np.cumsum(bk_raw[:R + 1][::-1])[::-1]

        # c_k: eq. 33
        if 'c_k' in fitfunc.args:
            kwds['c_k'] = 0.5 * np.cumsum(ck_raw[:R + 1][::-1])[::-1]

        # evaluate fitness function
        fit_vec = fitfunc.fitness(**kwds)

        A_R = fit_vec - fitfunc.prior(R + 1, N)
        A_R[1:] += best[:R]

        i_max = np.argmax(A_R)
        last[R] = i_max
        best[R] = A_R[i_max]

    #-----------------------------------------------------------------
    # Now find changepoints by iteratively peeling off the last block
    #-----------------------------------------------------------------
    change_points = np.zeros(N, dtype=int)
    i_cp = N
    ind = N
    while True:
        i_cp -= 1
        change_points[i_cp] = ind
        if ind == 0:
            break
        ind = last[ind - 1]
    change_points = change_points[i_cp:]

    return edges[change_points]
|
SENeC-Initiative/PyNeurActiv
|
analysis/bayesian_blocks.py
|
Python
|
gpl-3.0
| 13,573
|
[
"Gaussian"
] |
67d70e8aa8e7485a86dece8e03fd6790b1afdcb489d84a7dd48b1470bb75619a
|
import math
import numpy as np
__all__ = ["plot_disks_1d", "plot_disks"]
def plot_disks_1d(plt, pts, weights, total_area):
    """Plot one disk per quadrature point, sized by its weight.

    The disk diameters sum up to ``total_area``; non-negative weights are
    drawn in blue, negative ones in red.
    """
    weight_sum = math.fsum(weights)
    radii = 0.5 * abs(weights) / weight_sum * total_area
    colors = ["tab:blue" if w >= 0 else "tab:red" for w in weights]
    _plot_disks_helpers(plt, pts, radii, colors)
def plot_disks(plt, pts, weights, total_area):
    """Plot one disk per quadrature point, sized by its weight.

    Radii are chosen such that the disk areas sum up to ``total_area``;
    colors follow matplotlib 2.0's scheme (blue for non-negative weights,
    red for negative ones).
    """
    # coerce possibly-exact (e.g. sympy) values to plain floats
    to_float = np.vectorize(float)
    pts = to_float(pts)
    weights = to_float(weights)
    weight_sum = math.fsum(weights)
    radii = np.sqrt(abs(weights) / weight_sum * total_area / math.pi)
    colors = ["tab:blue" if w >= 0 else "tab:red" for w in weights]
    _plot_disks_helpers(plt, pts, radii, colors)
def _plot_disks_helpers(plt, pts, radii, colors):
for pt, radius, color in zip(pts, radii, colors):
# highlight circle center
plt.plot([pt[0]], [pt[1]], linestyle="None", marker=".", color=color)
# Choose radius such that the sum of areas of the circles equals total_area.
# Make sure to set the line width to 0,
# <https://github.com/matplotlib/matplotlib/issues/17421>.
circ = plt.Circle((pt[0], pt[1]), radius, color=color, alpha=0.5, linewidth=0)
plt.gca().add_patch(circ)
def show_mpl(points, weights, volume, edges, balls=None):
    """Render a quadrature scheme with matplotlib.

    Scheme points are drawn as spheres whose volumes sum up to ``volume``
    (blue for non-negative weights, red for negative ones); ``edges`` are
    drawn as black lines; optional ``balls`` are extra (center, radius)
    pairs drawn in light gray.  Returns the pyplot module after showing
    the figure.
    """
    from matplotlib import pyplot as plt
    from mpl_toolkits.mplot3d import Axes3D

    # coerce possibly-exact (e.g. sympy) values to plain floats for plotting
    flt = np.vectorize(float)
    points = flt(points)
    weights = flt(weights)

    def plot_spheres(plt, ax, pts, radii, colors):
        # angular mesh resolution; resolution of each sphere scales with
        # its radius so small spheres don't waste mesh points
        h = 1.0e-2
        for tp, r, color in zip(pts, radii, colors):
            # https://matplotlib.org/examples/mplot3d/surface3d_demo2.html
            # Compute sphere for every point anew. This is more costly on the
            # numerical side, but gives the flexibility of drawing sphere of
            # different size with different number of points. Another options
            # would be to precompute x, y, z before the loop, but this can be
            # heavy on the graphics output. See
            # <https://stackoverflow.com/q/45324258/353337>.
            u = np.linspace(0, 2 * np.pi, int(2 * np.pi / h * r) + 1)
            v = np.linspace(0, np.pi, int(np.pi / h * r) + 1)
            _x = np.outer(np.cos(u), np.sin(v))
            _y = np.outer(np.sin(u), np.sin(v))
            _z = np.outer(np.ones(np.size(u)), np.cos(v))

            # highlight ball center
            plt.plot(
                [tp[0]], [tp[1]], [tp[2]], linestyle="None", marker=".", color=color
            )

            ax.plot_surface(
                r * _x + tp[0],
                r * _y + tp[1],
                r * _z + tp[2],
                color=color,
                alpha=0.3,
                linewidth=1,
            )

        ax.set_axis_off()
        return

    balls = [] if balls is None else balls

    ax = plt.axes(projection=Axes3D.name)
    # ax.set_aspect("equal")
    ax.set_axis_off()

    for edge in edges:
        plt.plot(*edge, color="k", linestyle="-")

    plot_spheres(
        plt,
        ax,
        points,
        # Choose radius such that the sum of volumes of the balls equals
        # total_volume.
        radii=np.cbrt(abs(weights) / math.fsum(weights) * volume / (4.0 / 3.0 * np.pi)),
        colors=["tab:blue" if weight >= 0 else "tab:red" for weight in weights],
    )

    for ball in balls:
        plot_spheres(plt, ax, [ball[0]], [ball[1]], ["#dddddd"])

    plt.show()
    return plt
# def show_mayavi(points, weights, volume, edges, balls=None):
# import mayavi.mlab as mlab
#
# mlab.figure(bgcolor=(1.0, 1.0, 1.0))
#
# for edge in edges:
# mlab.plot3d(*edge, tube_radius=0.5e-2, color=(0.0, 0.0, 0.0))
#
# blue = (31.0 / 255.0, 119.0 / 255.0, 180.0 / 255.0)
# red = (84.0 / 255.0, 15.0 / 255.0, 16.0 / 255.0)
#
# h = 1.0e-2
# sum_weights = math.fsum(weights)
# for tp, weight in zip(points, weights):
# # Choose radius such that the sum of volumes of the balls equals
# # total_volume.
# r = (abs(weight) / sum_weights * volume / (4.0 / 3.0 * np.pi)) ** (1.0 / 3.0)
#
# # Create a sphere
# u = np.linspace(0, 2 * np.pi, int(2 * np.pi / h * r) + 1)
# v = np.linspace(0, np.pi, int(np.pi / h * r) + 1)
# sin_u, cos_u = np.sin(u), np.cos(u)
# sin_v, cos_v = np.sin(v), np.cos(v)
# _x = np.outer(cos_u, sin_v)
# _y = np.outer(sin_u, sin_v)
# _z = np.outer(np.ones(np.size(u)), cos_v)
#
# mlab.mesh(
# r * _x + tp[0],
# r * _y + tp[1],
# r * _z + tp[2],
# color=blue if weight >= 0 else red,
# opacity=1.0,
# )
#
# balls = [] if balls is None else balls
# for ball in balls:
# tp = ball[0]
# r = ball[1]
#
# # Create a sphere
# u = np.linspace(0, 2 * np.pi, int(2 * np.pi / h * r) + 1)
# v = np.linspace(0, np.pi, int(np.pi / h * r) + 1)
# sin_u, cos_u = np.sin(u), np.cos(u)
# sin_v, cos_v = np.sin(v), np.cos(v)
# _x = np.outer(cos_u, sin_v)
# _y = np.outer(sin_u, sin_v)
# _z = np.outer(np.ones(np.size(u)), cos_v)
#
# mlab.mesh(
# r * _x + tp[0], r * _y + tp[1], r * _z + tp[2], color=[0, 0, 0], opacity=1.0
# )
#
# mlab.show()
# return
def show_vtk(points, weights, volume, edges, balls=None, render=True):
    """Render a quadrature scheme with VTK.

    Scheme points are drawn as spheres whose volumes sum up to ``volume``
    (blue for non-negative weights, red for negative ones, matching the
    matplotlib backend); ``edges`` are drawn as black lines; optional
    ``balls`` are extra (center, radius) pairs drawn semi-transparently.
    Set ``render=False`` to build the scene without opening a window.
    """
    import vtk

    def get_line_actor(x0, x1):
        # black line segment from x0 to x1
        source = vtk.vtkLineSource()
        source.SetPoint1(x0)
        source.SetPoint2(x1)
        # mapper
        mapper = vtk.vtkPolyDataMapper()
        mapper.SetInputConnection(source.GetOutputPort())
        # actor
        actor = vtk.vtkActor()
        actor.SetMapper(mapper)
        # color actor
        actor.GetProperty().SetColor(0, 0, 0)
        return actor

    def get_sphere_actor(x0, r, color, opacity=1.0):
        # sphere of radius r centered at x0
        # Generate polygon data for a sphere
        sphere = vtk.vtkSphereSource()
        sphere.SetCenter(x0)
        sphere.SetRadius(r)
        sphere.SetPhiResolution(100)
        sphere.SetThetaResolution(100)
        # Create a mapper for the sphere data
        sphere_mapper = vtk.vtkPolyDataMapper()
        # sphere_mapper.SetInput(sphere.GetOutput())
        sphere_mapper.SetInputConnection(sphere.GetOutputPort())
        # Connect the mapper to an actor
        sphere_actor = vtk.vtkActor()
        sphere_actor.SetMapper(sphere_mapper)
        sphere_actor.GetProperty().SetColor(color)
        sphere_actor.GetProperty().SetOpacity(opacity)
        return sphere_actor

    # coerce possibly-exact (e.g. sympy) values to plain floats
    flt = np.vectorize(float)
    points = flt(points)
    weights = flt(weights)

    balls = [] if balls is None else balls

    line_actors = [get_line_actor(edge[:, 0], edge[:, 1]) for edge in edges]

    blue = np.array([31.0, 119.0, 180.0]) / 255.0
    red = np.array([84.0, 15.0, 16.0]) / 255.0

    # Choose radii such that the sum of volumes of the balls equals `volume`.
    radii = np.cbrt(abs(weights) / math.fsum(weights) * volume / (4.0 / 3.0 * np.pi))
    sphere_actors = [
        # BUG FIX: was `weight > 0.0`, which colored zero-weight points red
        # while the matplotlib backend (`weight >= 0`) colors them blue;
        # use >= here for consistency across backends.
        get_sphere_actor(pt, radius, color=blue if weight >= 0.0 else red)
        for pt, weight, radius in zip(points.T, weights, radii)
    ]
    sphere_actors.extend(
        [
            get_sphere_actor(
                np.array(ball[0]),
                ball[1],
                # black (the former `[0,0,0]/255.0` was a no-op division)
                color=np.array([0.0, 0.0, 0.0]),
                opacity=0.5,
            )
            for ball in balls
        ]
    )

    # Create a renderer and add the sphere actor to it
    renderer = vtk.vtkRenderer()
    renderer.SetBackground(1.0, 1.0, 1.0)
    # Available in more recent versions of VTK
    # <https://vtk.org/doc/nightly/html/classvtkViewport.html#aed4374e05dbbea1692f7c9c865407664>
    # renderer.SetBackgroundAlpha(1.0)

    for sphere_actor in sphere_actors:
        renderer.AddActor(sphere_actor)
    for line_actor in line_actors:
        renderer.AddActor(line_actor)

    # Create a render window
    render_window = vtk.vtkRenderWindow()
    render_window.AddRenderer(renderer)

    # Create an interactor
    interactor = vtk.vtkRenderWindowInteractor()
    interactor.SetRenderWindow(render_window)

    if render:
        # Initialize the interactor and start the rendering loop
        interactor.Initialize()
        render_window.Render()
        interactor.Start()

    # # Screenshot
    # TODO transparent background
    # w2if = vtk.vtkWindowToImageFilter()
    # w2if.SetInput(render_window)
    # w2if.Update()
    # writer = vtk.vtkPNGWriter()
    # writer.SetFileName('screenshot.png')
    # writer.SetInputConnection(w2if.GetOutputPort())
    # writer.Write()
    return
# Dispatch table: backend name -> corresponding show_* implementation.
backend_to_function = {
    # "mayavi": show_mayavi,
    "mpl": show_mpl,
    "vtk": show_vtk,
}
|
nschloe/quadpy
|
src/quadpy/helpers/plot.py
|
Python
|
mit
| 8,907
|
[
"Mayavi",
"VTK"
] |
3817a6aa42f46108402137960fc86185cf9a4367c3a309a3b9637e3c77ee5735
|
"""
Test rdkit_grid_featurizer module.
"""
import os
import unittest
import numpy as np
np.random.seed(123)
from deepchem.feat import rdkit_grid_featurizer as rgf
def random_string(length, chars=None):
    """Return a random string of ``length`` characters drawn from ``chars``.

    By default the alphabet is ASCII letters (included twice, so letters
    are weighted double) plus SMILES-like special characters.
    """
    import string
    if chars is None:
        special = '()[]+-.=#@/\\'
        chars = list(2 * string.ascii_letters + special)
    return ''.join(np.random.choice(chars, length))
class TestHelperFunctions(unittest.TestCase):
    """
    Test helper functions defined in rdkit_grid_featurizer module.

    Uses the 3ws9 protein/ligand files shipped next to this test module as
    fixtures for the molecule-loading and geometry helpers.
    """

    def setUp(self):
        # TODO test more formats for ligand
        current_dir = os.path.dirname(os.path.realpath(__file__))
        self.protein_file = os.path.join(current_dir,
                                         '3ws9_protein_fixer_rdkit.pdb')
        self.ligand_file = os.path.join(current_dir, '3ws9_ligand.sdf')

    def test_load_molecule(self):
        # adding hydrogens and charges is tested in dc.utils
        from rdkit.Chem.AllChem import Mol
        for add_hydrogens in (True, False):
            for calc_charges in (True, False):
                mol_xyz, mol_rdk = rgf.load_molecule(self.ligand_file, add_hydrogens,
                                                     calc_charges)
                num_atoms = mol_rdk.GetNumAtoms()
                self.assertIsInstance(mol_xyz, np.ndarray)
                self.assertIsInstance(mol_rdk, Mol)
                # one 3D coordinate triple per atom
                self.assertEqual(mol_xyz.shape, (num_atoms, 3))

    def test_generate_random_unit_vector(self):
        for _ in range(100):
            # NOTE(review): the double underscore matches the name exported
            # by rdkit_grid_featurizer — confirm before "fixing" it here.
            u = rgf.generate_random__unit_vector()
            # 3D vector with unit length
            self.assertEqual(u.shape, (3,))
            self.assertAlmostEqual(np.linalg.norm(u), 1.0)

    def test_generate_random_rotation_matrix(self):
        # very basic test, we check if rotations actually work in test_rotate_molecules
        for _ in range(100):
            m = rgf.generate_random_rotation_matrix()
            self.assertEqual(m.shape, (3, 3))

    def test_rotate_molecules(self):
        # check if distances do not change
        vectors = np.random.rand(4, 2, 3)
        norms = np.linalg.norm(vectors[:, 1] - vectors[:, 0], axis=1)
        vectors_rot = np.array(rgf.rotate_molecules(vectors))
        norms_rot = np.linalg.norm(vectors_rot[:, 1] - vectors_rot[:, 0], axis=1)
        self.assertTrue(np.allclose(norms, norms_rot))

        # check if it works for molecules with different numbers of atoms
        coords = [np.random.rand(n, 3) for n in (10, 20, 40, 100)]
        coords_rot = rgf.rotate_molecules(coords)
        self.assertEqual(len(coords), len(coords_rot))

    def test_compute_pairwise_distances(self):
        n1 = 10
        n2 = 50
        coords1 = np.random.rand(n1, 3)
        coords2 = np.random.rand(n2, 3)

        distance = rgf.compute_pairwise_distances(coords1, coords2)
        self.assertEqual(distance.shape, (n1, n2))
        self.assertTrue((distance >= 0).all())
        # random coords between 0 and 1, so the max possible distance in sqrt(2)
        # NOTE(review): points live in the 3D unit cube, where the maximum
        # distance is sqrt(3); this sqrt(2) bound could fail sporadically —
        # confirm the intended bound.
        self.assertTrue((distance <= 2.0 ** 0.5).all())

        # check if correct distance metric was used
        coords1 = np.array([[0, 0, 0], [1, 0, 0]])
        coords2 = np.array([[1, 0, 0], [2, 0, 0], [3, 0, 0]])
        distance = rgf.compute_pairwise_distances(coords1, coords2)
        self.assertTrue((distance == [[1, 2, 3], [0, 1, 2]]).all())

    def test_unit_vector(self):
        for _ in range(10):
            vector = np.random.rand(3)
            norm_vector = rgf.unit_vector(vector)
            self.assertAlmostEqual(np.linalg.norm(norm_vector), 1.0)

    def test_angle_between(self):
        for _ in range(10):
            v1 = np.random.rand(3,)
            v2 = np.random.rand(3,)
            angle = rgf.angle_between(v1, v2)
            self.assertLessEqual(angle, np.pi)
            self.assertGreaterEqual(angle, 0.0)
            # identical / opposite vectors give the extreme angles
            self.assertAlmostEqual(rgf.angle_between(v1, v1), 0.0)
            self.assertAlmostEqual(rgf.angle_between(v1, -v1), np.pi)

    def test_hash_ecfp(self):
        from six import integer_types
        for power in (2, 16, 64):
            for _ in range(10):
                string = random_string(10)
                string_hash = rgf.hash_ecfp(string, power)
                self.assertIsInstance(string_hash, integer_types)
                # hash must lie in [0, 2 ** power)
                self.assertLess(string_hash, 2 ** power)
                self.assertGreaterEqual(string_hash, 0)

    def test_hash_ecfp_pair(self):
        from six import integer_types
        for power in (2, 16, 64):
            for _ in range(10):
                string1 = random_string(10)
                string2 = random_string(10)
                pair_hash = rgf.hash_ecfp_pair((string1, string2), power)
                self.assertIsInstance(pair_hash, integer_types)
                # hash must lie in [0, 2 ** power)
                self.assertLess(pair_hash, 2 ** power)
                self.assertGreaterEqual(pair_hash, 0)

    def test_convert_atom_to_voxel(self):
        # 20 points with coords between -5 and 5, centered at 0
        coords_range = 10
        xyz = (np.random.rand(20, 3) - 0.5) * coords_range

        for idx in np.random.choice(20, 6):
            for box_width in (10, 20, 40):
                for voxel_width in (0.5, 1, 2):
                    voxel = rgf.convert_atom_to_voxel(xyz, idx, box_width, voxel_width)
                    self.assertIsInstance(voxel, list)
                    self.assertEqual(len(voxel), 1)
                    self.assertIsInstance(voxel[0], np.ndarray)
                    self.assertEqual(voxel[0].shape, (3,))
                    self.assertIs(voxel[0].dtype, np.dtype('int'))
                    # indices are positive
                    self.assertTrue((voxel[0] >= 0).all())
                    # coordinates were properly translated and scaled
                    self.assertTrue(
                        (voxel[0] < (box_width + coords_range) / 2.0 / voxel_width).all())
                    self.assertTrue(
                        np.allclose(voxel[0],
                                    np.floor((xyz[idx] + box_width / 2.0) / voxel_width)))

        # for coordinates outside of the box function should properly transform them
        # to indices and warn the user
        for args in ((np.array([[0, 1, 6]]), 0, 10, 1.0), (np.array([[0, 4, -6]]),
                                                           0, 10, 1.0)):
            # TODO check if function warns. There is assertWarns method in unittest,
            # but it is not implemented in 2.7 and buggy in 3.5 (issue 29620)
            voxel = rgf.convert_atom_to_voxel(*args)
            self.assertTrue(
                np.allclose(voxel[0], np.floor((args[0] + args[2] / 2.0) / args[3])))

    def test_convert_atom_pair_to_voxel(self):
        # 20 points with coords between -5 and 5, centered at 0
        coords_range = 10
        xyz1 = (np.random.rand(20, 3) - 0.5) * coords_range
        xyz2 = (np.random.rand(20, 3) - 0.5) * coords_range

        # 3 pairs of indices; pair voxel must equal the two single-atom voxels
        for idx1, idx2 in np.random.choice(20, (3, 2)):
            for box_width in (10, 20, 40):
                for voxel_width in (0.5, 1, 2):
                    v1 = rgf.convert_atom_to_voxel(xyz1, idx1, box_width, voxel_width)
                    v2 = rgf.convert_atom_to_voxel(xyz2, idx2, box_width, voxel_width)
                    v_pair = rgf.convert_atom_pair_to_voxel((xyz1, xyz2), (idx1, idx2),
                                                            box_width, voxel_width)
                    self.assertEqual(len(v_pair), 2)
                    self.assertTrue((v1 == v_pair[0]).all())
                    self.assertTrue((v2 == v_pair[1]).all())

    def test_compute_charge_dictionary(self):
        from rdkit.Chem.AllChem import ComputeGasteigerCharges
        for fname in (self.ligand_file, self.protein_file):
            _, mol = rgf.load_molecule(fname)
            ComputeGasteigerCharges(mol)
            charge_dict = rgf.compute_charge_dictionary(mol)
            # one numeric charge per atom index
            self.assertEqual(len(charge_dict), mol.GetNumAtoms())
            for i in range(mol.GetNumAtoms()):
                self.assertIn(i, charge_dict)
                self.assertIsInstance(charge_dict[i], (float, int))
class TestPiInteractions(unittest.TestCase):
    """Tests for the pi-stacking and cation-pi interaction helpers.

    Fixtures: a flat cyclobutane ring built from SMILES (for ring geometry
    helpers) and the sanitized 3ws9 protein/ligand pair (for the
    interaction-detection functions).
    """

    def setUp(self):
        current_dir = os.path.dirname(os.path.realpath(__file__))

        # simple flat ring
        from rdkit.Chem import MolFromSmiles
        self.cycle4 = MolFromSmiles('C1CCC1')
        self.cycle4.Compute2DCoords()

        # load and sanitize two real molecules
        _, self.prot = rgf.load_molecule(
            os.path.join(current_dir, '3ws9_protein_fixer_rdkit.pdb'),
            add_hydrogens=False,
            calc_charges=False,
            sanitize=True)

        _, self.lig = rgf.load_molecule(
            os.path.join(current_dir, '3ws9_ligand.sdf'),
            add_hydrogens=False,
            calc_charges=False,
            sanitize=True)

    def test_compute_ring_center(self):
        # FIXME might break with different version of rdkit
        # the 2D-embedded 4-ring is centered at the origin
        self.assertTrue(
            np.allclose(rgf.compute_ring_center(self.cycle4, range(4)), 0))

    def test_compute_ring_normal(self):
        # FIXME might break with different version of rdkit
        # the flat ring lies in the xy-plane, so the normal is +/- z
        normal = rgf.compute_ring_normal(self.cycle4, range(4))
        self.assertTrue(
            np.allclose(np.abs(normal / np.linalg.norm(normal)), [0, 0, 1]))

    def test_is_pi_parallel(self):
        ring1_center = np.array([0.0, 0.0, 0.0])
        ring2_center_true = np.array([4.0, 0.0, 0.0])
        ring2_center_false = np.array([10.0, 0.0, 0.0])

        ring1_normal_true = np.array([1.0, 0.0, 0.0])
        ring1_normal_false = np.array([0.0, 1.0, 0.0])

        # normals need not be normalized or point the same way
        for ring2_normal in (np.array([2.0, 0, 0]), np.array([-3.0, 0, 0])):
            # parallel normals
            self.assertTrue(
                rgf.is_pi_parallel(ring1_center, ring1_normal_true, ring2_center_true,
                                   ring2_normal))
            # perpendicular normals
            self.assertFalse(
                rgf.is_pi_parallel(ring1_center, ring1_normal_false,
                                   ring2_center_true, ring2_normal))
            # too far away
            self.assertFalse(
                rgf.is_pi_parallel(ring1_center, ring1_normal_true,
                                   ring2_center_false, ring2_normal))

    def test_is_pi_t(self):
        ring1_center = np.array([0.0, 0.0, 0.0])
        ring2_center_true = np.array([4.0, 0.0, 0.0])
        ring2_center_false = np.array([10.0, 0.0, 0.0])

        ring1_normal_true = np.array([0.0, 1.0, 0.0])
        ring1_normal_false = np.array([1.0, 0.0, 0.0])

        for ring2_normal in (np.array([2.0, 0, 0]), np.array([-3.0, 0, 0])):
            # perpendicular normals
            self.assertTrue(
                rgf.is_pi_t(ring1_center, ring1_normal_true, ring2_center_true,
                            ring2_normal))
            # parallel normals
            self.assertFalse(
                rgf.is_pi_t(ring1_center, ring1_normal_false, ring2_center_true,
                            ring2_normal))
            # too far away
            self.assertFalse(
                rgf.is_pi_t(ring1_center, ring1_normal_true, ring2_center_false,
                            ring2_normal))

    def test_compute_pi_stack(self):
        # order of the molecules shouldn't matter
        dicts1 = rgf.compute_pi_stack(self.prot, self.lig)
        dicts2 = rgf.compute_pi_stack(self.lig, self.prot)
        for i, j in ((0, 2), (1, 3)):
            self.assertEqual(dicts1[i], dicts2[j])
            self.assertEqual(dicts1[j], dicts2[i])

        # with this criteria we should find both types of stacking
        for d in rgf.compute_pi_stack(
                self.lig, self.prot, dist_cutoff=7, angle_cutoff=40.):
            self.assertGreater(len(d), 0)

    def test_is_cation_pi(self):
        cation_position = np.array([[2.0, 0.0, 0.0]])
        ring_center_true = np.array([4.0, 0.0, 0.0])
        ring_center_false = np.array([10.0, 0.0, 0.0])
        ring_normal_true = np.array([1.0, 0.0, 0.0])
        ring_normal_false = np.array([0.0, 1.0, 0.0])

        # parallel normals
        self.assertTrue(
            rgf.is_cation_pi(cation_position, ring_center_true, ring_normal_true))
        # perpendicular normals
        self.assertFalse(
            rgf.is_cation_pi(cation_position, ring_center_true, ring_normal_false))
        # too far away
        self.assertFalse(
            rgf.is_cation_pi(cation_position, ring_center_false, ring_normal_true))

    def test_compute_cation_pi(self):
        # TODO find better example, currently dicts are empty
        # (only exercises that the calls run without error)
        dicts1 = rgf.compute_cation_pi(self.prot, self.lig)
        dicts2 = rgf.compute_cation_pi(self.lig, self.prot)

    def test_compute_binding_pocket_cation_pi(self):
        # TODO find better example, currently dicts are empty
        prot_dict, lig_dict = rgf.compute_binding_pocket_cation_pi(
            self.prot, self.lig)

        # expected result: merge of counts from both call orders
        exp_prot_dict, exp_lig_dict = rgf.compute_cation_pi(self.prot, self.lig)
        add_lig, add_prot = rgf.compute_cation_pi(self.lig, self.prot)
        for exp_dict, to_add in ((exp_prot_dict, add_prot), (exp_lig_dict,
                                                             add_lig)):
            for atom_idx, count in to_add.items():
                if atom_idx not in exp_dict:
                    exp_dict[atom_idx] = count
                else:
                    exp_dict[atom_idx] += count

        self.assertEqual(prot_dict, exp_prot_dict)
        self.assertEqual(lig_dict, exp_lig_dict)
class TestFeaturizationFunctions(unittest.TestCase):
"""
Test functions calculating features defined in rdkit_grid_featurizer module.
"""
def setUp(self):
current_dir = os.path.dirname(os.path.realpath(__file__))
self.protein_file = os.path.join(current_dir,
'3ws9_protein_fixer_rdkit.pdb')
self.ligand_file = os.path.join(current_dir, '3ws9_ligand.sdf')
def test_compute_all_ecfp(self):
_, mol = rgf.load_molecule(self.ligand_file)
num_atoms = mol.GetNumAtoms()
for degree in range(1, 4):
# TODO test if dict contains smiles
ecfp_all = rgf.compute_all_ecfp(mol, degree=degree)
self.assertIsInstance(ecfp_all, dict)
self.assertEqual(len(ecfp_all), num_atoms)
self.assertEqual(list(ecfp_all.keys()), list(range(num_atoms)))
num_ind = np.random.choice(range(1, num_atoms))
indices = list(np.random.choice(num_atoms, num_ind, replace=False))
ecfp_selected = rgf.compute_all_ecfp(mol, indices=indices, degree=degree)
self.assertIsInstance(ecfp_selected, dict)
self.assertEqual(len(ecfp_selected), num_ind)
self.assertEqual(sorted(ecfp_selected.keys()), sorted(indices))
def test_featurize_binding_pocket_ecfp(self):
prot_xyz, prot_rdk = rgf.load_molecule(self.protein_file)
lig_xyz, lig_rdk = rgf.load_molecule(self.ligand_file)
distance = rgf.compute_pairwise_distances(
protein_xyz=prot_xyz, ligand_xyz=lig_xyz)
# check if results are the same if we provide precomputed distances
prot_dict, lig_dict = rgf.featurize_binding_pocket_ecfp(
prot_xyz,
prot_rdk,
lig_xyz,
lig_rdk,
)
prot_dict_dist, lig_dict_dist = rgf.featurize_binding_pocket_ecfp(
prot_xyz, prot_rdk, lig_xyz, lig_rdk, pairwise_distances=distance)
# ...but first check if we actually got two dicts
self.assertIsInstance(prot_dict, dict)
self.assertIsInstance(lig_dict, dict)
self.assertEqual(prot_dict, prot_dict_dist)
self.assertEqual(lig_dict, lig_dict_dist)
# check if we get less features with smaller distance cutoff
prot_dict_d2, lig_dict_d2 = rgf.featurize_binding_pocket_ecfp(
prot_xyz,
prot_rdk,
lig_xyz,
lig_rdk,
cutoff=2.0,
)
prot_dict_d6, lig_dict_d6 = rgf.featurize_binding_pocket_ecfp(
prot_xyz,
prot_rdk,
lig_xyz,
lig_rdk,
cutoff=6.0,
)
self.assertLess(len(prot_dict_d2), len(prot_dict))
# ligands are typically small so all atoms might be present
self.assertLessEqual(len(lig_dict_d2), len(lig_dict))
self.assertGreater(len(prot_dict_d6), len(prot_dict))
self.assertGreaterEqual(len(lig_dict_d6), len(lig_dict))
# check if using different ecfp_degree changes anything
prot_dict_e3, lig_dict_e3 = rgf.featurize_binding_pocket_ecfp(
prot_xyz,
prot_rdk,
lig_xyz,
lig_rdk,
ecfp_degree=3,
)
self.assertNotEqual(prot_dict_e3, prot_dict)
self.assertNotEqual(lig_dict_e3, lig_dict)
def test_compute_splif_features_in_range(self):
    """SPLIF dicts map in-range (protein, ligand) atom index pairs to ECFP pairs."""
    prot_xyz, prot_rdk = rgf.load_molecule(self.protein_file)
    lig_xyz, lig_rdk = rgf.load_molecule(self.ligand_file)
    prot_num_atoms = prot_rdk.GetNumAtoms()
    lig_num_atoms = lig_rdk.GetNumAtoms()
    distance = rgf.compute_pairwise_distances(
        protein_xyz=prot_xyz, ligand_xyz=lig_xyz)
    # Exercise two disjoint distance bins.
    for bins in ((0, 2), (2, 3)):
        splif_dict = rgf.compute_splif_features_in_range(
            prot_rdk,
            lig_rdk,
            distance,
            bins,
        )
        self.assertIsInstance(splif_dict, dict)
        for (prot_idx, lig_idx), ecfp_pair in splif_dict.items():
            # Keys are atom-index pairs, each within its molecule's range.
            for idx in (prot_idx, lig_idx):
                self.assertIsInstance(idx, (int, np.int64))
            self.assertGreaterEqual(prot_idx, 0)
            self.assertLess(prot_idx, prot_num_atoms)
            self.assertGreaterEqual(lig_idx, 0)
            self.assertLess(lig_idx, lig_num_atoms)
            for ecfp in ecfp_pair:
                # Each ECFP is a '<atom_idx>,<fragment>' string; the index
                # must parse as a non-negative int.
                ecfp_idx, ecfp_frag = ecfp.split(',')
                ecfp_idx = int(ecfp_idx)
                self.assertGreaterEqual(ecfp_idx, 0)
                # TODO upperbound?
def test_featurize_splif(self):
    """featurize_splif equals per-bin compute_splif_features_in_range results."""
    prot_xyz, prot_rdk = rgf.load_molecule(self.protein_file)
    lig_xyz, lig_rdk = rgf.load_molecule(self.ligand_file)
    distance = rgf.compute_pairwise_distances(
        protein_xyz=prot_xyz, ligand_xyz=lig_xyz)
    bins = [(1, 2), (2, 3)]
    dicts = rgf.featurize_splif(
        prot_xyz,
        prot_rdk,
        lig_xyz,
        lig_rdk,
        contact_bins=bins,
        pairwise_distances=distance,
        ecfp_degree=2)
    # Expected result: one dict per contact bin, computed independently.
    expected_dicts = [
        rgf.compute_splif_features_in_range(
            prot_rdk, lig_rdk, distance, c_bin, ecfp_degree=2) for c_bin in bins
    ]
    self.assertIsInstance(dicts, list)
    self.assertEqual(dicts, expected_dicts)
class TestRdkitGridFeaturizer(unittest.TestCase):
    """
    Test RdkitGridFeaturizer class defined in rdkit_grid_featurizer module.
    """

    def setUp(self):
        # Locate the test protein/ligand structures shipped with the dock package.
        current_dir = os.path.dirname(os.path.realpath(__file__))
        package_dir = os.path.dirname(os.path.dirname(current_dir))
        self.protein_file = os.path.join(package_dir, 'dock', 'tests',
                                         '1jld_protein.pdb')
        self.ligand_file = os.path.join(package_dir, 'dock', 'tests',
                                        '1jld_ligand.sdf')

    def test_default_featurizer(self):
        # test if default parameters work
        featurizer = rgf.RdkitGridFeaturizer()
        self.assertIsInstance(featurizer, rgf.RdkitGridFeaturizer)
        feature_tensor, _ = featurizer.featurize_complexes([self.ligand_file],
                                                           [self.protein_file])
        self.assertIsInstance(feature_tensor, np.ndarray)

    def test_example_featurizer(self):
        # check if use-case from examples works
        featurizer = rgf.RdkitGridFeaturizer(
            voxel_width=16.0,
            feature_types=['ecfp', 'splif', 'hbond', 'salt_bridge'],
            ecfp_power=9,
            splif_power=9,
            flatten=True)
        feature_tensor, _ = featurizer.featurize_complexes([self.ligand_file],
                                                           [self.protein_file])
        self.assertIsInstance(feature_tensor, np.ndarray)

    def test_force_flatten(self):
        # test if input is flattened when flat features are used
        featurizer = rgf.RdkitGridFeaturizer(
            feature_types=['ecfp_hashed'], flatten=False)
        featurizer.flatten = True  # False should be ignored with ecfp_hashed
        feature_tensor, _ = featurizer.featurize_complexes([self.ligand_file],
                                                           [self.protein_file])
        self.assertIsInstance(feature_tensor, np.ndarray)
        # One hashed ECFP vector per molecule (protein + ligand).
        self.assertEqual(feature_tensor.shape, (1, 2 * 2**featurizer.ecfp_power))

    def test_combined(self):
        """Combined voxel and flat feature tensors have the expected lengths."""
        ecfp_power = 5
        splif_power = 5
        # test voxel features
        featurizer = rgf.RdkitGridFeaturizer(
            voxel_width=1.0,
            box_width=20.0,
            feature_types=['voxel_combined'],
            ecfp_power=ecfp_power,
            splif_power=splif_power,
            flatten=False,
            sanitize=True)
        feature_tensor, _ = featurizer.featurize_complexes([self.ligand_file],
                                                           [self.protein_file])
        self.assertIsInstance(feature_tensor, np.ndarray)
        # Channels: ECFP + per-bin SPLIF + per-bin hbond + 5 extra channels.
        voxel_total_len = (
            2**ecfp_power +
            len(featurizer.cutoffs['splif_contact_bins']) * 2**splif_power + len(
                featurizer.cutoffs['hbond_dist_bins']) + 5)
        self.assertEqual(feature_tensor.shape, (1, 20, 20, 20, voxel_total_len))
        # test flat features
        featurizer = rgf.RdkitGridFeaturizer(
            voxel_width=1.0,
            feature_types=['flat_combined'],
            ecfp_power=ecfp_power,
            splif_power=splif_power,
            sanitize=True)
        feature_tensor, _ = featurizer.featurize_complexes([self.ligand_file],
                                                           [self.protein_file])
        self.assertIsInstance(feature_tensor, np.ndarray)
        flat_total_len = (
            3 * 2**ecfp_power +
            len(featurizer.cutoffs['splif_contact_bins']) * 2**splif_power + len(
                featurizer.cutoffs['hbond_dist_bins']))
        self.assertEqual(feature_tensor.shape, (1, flat_total_len))
        # check if aromatic features are ignored if sanitize=False
        featurizer = rgf.RdkitGridFeaturizer(
            voxel_width=16.0,
            feature_types=['all_combined'],
            ecfp_power=ecfp_power,
            splif_power=splif_power,
            flatten=True,
            sanitize=False)
        self.assertTrue('pi_stack' not in featurizer.feature_types)
        self.assertTrue('cation_pi' not in featurizer.feature_types)
        feature_tensor, _ = featurizer.featurize_complexes([self.ligand_file],
                                                           [self.protein_file])
        self.assertIsInstance(feature_tensor, np.ndarray)
        # all_combined = voxel + flat, minus one duplicated ECFP block and
        # the 3 aromatic-dependent channels dropped by sanitize=False.
        total_len = voxel_total_len + flat_total_len - 3 - 2**ecfp_power
        self.assertEqual(feature_tensor.shape, (1, total_len))

    def test_custom_cutoffs(self):
        # User-supplied cutoffs should be stored verbatim.
        custom_cutoffs = {
            'hbond_dist_bins': [(2., 3.), (3., 3.5)],
            'hbond_angle_cutoffs': [5, 90],
            'splif_contact_bins': [(0, 3.5), (3.5, 6.0)],
            'ecfp_cutoff': 5.0,
            'sybyl_cutoff': 3.0,
            'salt_bridges_cutoff': 4.0,
            'pi_stack_dist_cutoff': 5.0,
            'pi_stack_angle_cutoff': 15.0,
            'cation_pi_dist_cutoff': 5.5,
            'cation_pi_angle_cutoff': 20.0,
        }
        rgf_featurizer = rgf.RdkitGridFeaturizer(**custom_cutoffs)
        self.assertEqual(rgf_featurizer.cutoffs, custom_cutoffs)

    def test_rotations(self):
        # nb_rotations=3 yields original + 3 rotated copies along axis 1.
        featurizer = rgf.RdkitGridFeaturizer(
            nb_rotations=3,
            feature_types=['voxel_combined'],
            flatten=False,
            sanitize=True)
        feature_tensors, _ = featurizer.featurize_complexes([self.ligand_file],
                                                            [self.protein_file])
        self.assertEqual(feature_tensors.shape, (1, 4, 16, 16, 16, 40))

    def test_voxelize(self):
        """_voxelize produces correctly-shaped grids; ligand fits, protein doesn't."""
        prot_xyz, prot_rdk = rgf.load_molecule(self.protein_file)
        lig_xyz, lig_rdk = rgf.load_molecule(self.ligand_file)
        # Center both molecules on the ligand centroid.
        centroid = rgf.compute_centroid(lig_xyz)
        prot_xyz = rgf.subtract_centroid(prot_xyz, centroid)
        lig_xyz = rgf.subtract_centroid(lig_xyz, centroid)
        prot_ecfp_dict, lig_ecfp_dict = rgf.featurize_binding_pocket_ecfp(
            prot_xyz, prot_rdk, lig_xyz, lig_rdk)
        box_w = 20
        f_power = 5
        rgf_featurizer = rgf.RdkitGridFeaturizer(
            box_width=box_w,
            ecfp_power=f_power,
            feature_types=['all_combined'],
            flatten=True,
            sanitize=True)
        prot_tensor = rgf_featurizer._voxelize(
            rgf.convert_atom_to_voxel,
            rgf.hash_ecfp,
            prot_xyz,
            feature_dict=prot_ecfp_dict,
            channel_power=f_power)
        self.assertEqual(prot_tensor.shape, tuple([box_w] * 3 + [2**f_power]))
        all_features = prot_tensor.sum()
        # protein is too big for the box, some features should be missing
        self.assertGreater(all_features, 0)
        self.assertLess(all_features, prot_rdk.GetNumAtoms())
        lig_tensor = rgf_featurizer._voxelize(
            rgf.convert_atom_to_voxel,
            rgf.hash_ecfp,
            lig_xyz,
            feature_dict=lig_ecfp_dict,
            channel_power=f_power)
        self.assertEqual(lig_tensor.shape, tuple([box_w] * 3 + [2**f_power]))
        all_features = lig_tensor.sum()
        # whole ligand should fit in the box
        self.assertEqual(all_features, lig_rdk.GetNumAtoms())
|
ktaneishi/deepchem
|
deepchem/feat/tests/test_rdkit_grid_features.py
|
Python
|
mit
| 23,883
|
[
"RDKit"
] |
a5dac4af989526c372177aef735f2d91c5c8d51e4fb5b7fc2bf2df2c29e79265
|
''' Volume processing for next round of refinement.
This pySPIDER batch file (`spi-prepvol`) takes sets of three volumes (full volume and two half volumes)
produced by alignment/refinement and then does the following:
#. Masks the half volumes
#. Calculates the resolution between the half volumes
#. Filters the full volume
#. Masks the full filtered volume
Tips
====
#. The input files should be the full volume, followed by the two half volumes.
#. If the raw volumes follow the default |spi| naming scheme (e.g. raw_vol01.spi raw1_vol01.spi raw2_vol01.spi) then
you may use the following as the input file: raw*_vol01.spi (rather than raw_vol01.spi raw1_vol01.spi raw2_vol01.spi)
Examples
========
.. sourcecode :: sh
# Source AutoPart - FrankLab only
$ source /guam.raid.cluster.software/arachnid/arachnid.rc
# Calculate the resolution of two half volumes and filter the input raw volume to the resolution
$ spi-prepvol raw_vol01.spi raw1_vol01.spi raw2_vol01.spi -p params.spi -o filt_vol_0001.spi
# Do the same as the first example, but apply a Gaussian mask of width 3 pixels to the output volume
$ spi-prepvol raw_vol01.spi raw1_vol01.spi raw2_vol01.spi -p params.spi -o filt_vol_0001.spi --volume-mask G --mask-edge-width 3
# Do the same as the first example, but apply a user-defined mask to the half volumes before resolution calculation
$ spi-prepvol raw_vol01.spi raw1_vol01.spi raw2_vol01.spi -p params.spi -o filt_vol_0001.spi --resolution-mask user_defined_mask.spi
.. todo::
#. Ensure mask files exist
#. Append local root in setup_options to mask files
#. Decimate mask files
Critical Options
================
.. program:: spi-prepvol
.. option:: -i <FILENAME1,FILENAME2>, --input-files <FILENAME1,FILENAME2>, FILENAME1 FILENAME2
List of input filenames containing volumes triples, full_vol, half_vol_1, half_vol_2
If you use the parameters `-i` or `--inputfiles` they must be comma separated
(no spaces). If you do not use a flag, then separate by spaces. For a
very large number of files (>5000) use `-i "filename*"`
.. option:: -o <FILENAME>, --output <FILENAME>
Output filename for the filtered, masked volume as well as base output name for FSC curve (`res_$output`)
.. option:: -p <FILENAME>, --param-file <FILENAME>
Path to SPIDER params file
.. option:: --bin-factor <FLOAT>
Number of times to decimate params file
.. option:: --volume-mask (A, C, G or <FILENAME>)
Set the type of mask: C for cosine and G for Gaussian and N for no mask and A for adaptive tight mask or a filepath for external mask
Low-pass Filter Options
=======================
.. option:: --filter-type <INT>
Type of low-pass filter to use with resolution: [1] Fermi(SP, fermi_temp) [2] Butterworth (SP-bp_pass, SP+bp_stop) [3] Gaussian (SP)
.. option:: --fermi-temp <FLOAT>
Fall off for Fermi filter (both high pass and low pass)
.. option:: --bw-pass <FLOAT>
Offset for pass band of the butterworth lowpass filter (sp-bw_pass)
.. option:: --bw-stop <FLOAT>
Offset for stop band of the butterworth lowpass filter (sp+bw_stop)
High-pass Filter Options
========================
.. option:: --hp-radius <FLOAT>
The spatial frequency to high-pass filter (if > 0.5, then assume its resolution and calculate spatial frequency, if 0 the filter is disabled)
.. option:: --hp-type <INT>
Type of high-pass filter to use with resolution: [0] None [1] Fermi(hp_radius, fermi_temp) [2] Butterworth (hp_radius-bp_pass, hp_radius+bp_stop) [3] Gaussian (hp_radius)
.. option:: --hp-bw_pass <FLOAT>
Offset for the pass band of the butterworth highpass filter (hp_radius-bw_pass)
.. option:: --hp-bw_stop <FLOAT>
Offset for the stop band of the butterworth highpass filter (hp_radius+bw_stop)
.. option:: --hp-temp <FLOAT>
Temperature factor for the fermi filter
Spherical Mask Options
=======================
.. option:: --mask-edge-width <INT>
Set edge width of the mask (for Gaussian this is the half-width)
Adaptive Tight Mask Options
===========================
.. option:: --threshold <STR or FLOAT>
Threshold for density or 'A' for auto threshold
.. option:: --ndilate <INT>
Number of times to dilate the mask
.. option:: --gk-size <INT>
Size of the real space Gaussian kernel (must be odd!)
.. option:: --gk-sigma <FLOAT>
Width of the real space Gaussian kernel
Resolution Mask Options
=======================
.. option:: --resolution-mask : str
Set the type of mask: C for cosine and G for Gaussian and N for no mask and A for adaptive tight mask or a filepath for external mask
.. option:: --res-edge-width : int
Set edge width of the mask (for Gaussian this is the half-width)
.. option:: --res-threshold : str
Threshold for density or 'A' for auto threshold
.. option:: --res-ndilate : int
Number of times to dilate the mask
.. option:: --res-gk-size : int
Size of the real space Gaussian kernel (must be odd!)
.. option:: --res-gk-sigma : float
Width of the real space Gaussian kernel
Resolution Options
==================
These options are passed to SPIDER's `rf_3` command, the default values are generally fine for
most experiments.
.. option:: --ring-width <float>
Shell thickness in reciprocal space sampling units (Default: 0.5)
.. option:: --lower-scale <float>
Lower range of scale factors by which the second Fourier must be multiplied for the comparison (Default: 0.2)
.. option:: --upper-scale <float>
Upper range of scale factors by which the second Fourier must be multiplied for the comparison (Default: 2.0)
.. option:: --missing-ang <choice('C' or 'W')>
'C' if you have a missing cone and 'W' if you have a missing wedge (Default: 'C')
.. option:: --max-tilt <float>
Angle of maximum tilt angle in degrees (Default: 90.0)
.. option:: --noise-factor <float>
Factor given here determines the FSCCRIT. Here 3.0 corresponds to the 3 sigma criterion i.e., 3/SQRT(N),
where N is number of voxels for a given shell.
Other Options
=============
This is not a complete list of options available to this script, for additional options see:
#. :ref:`Options shared by all scripts ... <shared-options>`
#. :ref:`Options shared by |spi| scripts... <spider-options>`
#. :ref:`Options shared by MPI-enabled scripts... <mpi-options>`
#. :ref:`Options shared by file processor scripts... <file-proc-options>`
#. :ref:`Options shared by SPIDER params scripts... <param-options>`
.. todo:: filter_volume_highpass what to do about it?
.. Created on Jul 15, 2011
.. codeauthor:: Robert Langlois <rl2528@columbia.edu>
'''
from ..core.app import program
from ..core.metadata import spider_params, spider_utility, format_utility
from ..core.parallel import mpi_utility
from ..core.spider import spider
import filter_volume, resolution, mask_volume, enhance_volume
import logging
_logger = logging.getLogger(__name__)
_logger.setLevel(logging.DEBUG)
def process(filename, output, **extra):
    ''' Post-process a single reconstructed volume for the next refinement round

    :Parameters:

    filename : str
               Input volume file
    output : str
             Output reference file
    extra : dict
            Unused key word arguments

    :Returns:

    filename : str
               Filename for correct location
    '''

    # Keep the output numbering in sync with the input when the file follows
    # the SPIDER naming scheme (e.g. raw_vol0001.spi)
    if spider_utility.is_spider_filename(filename):
        output = spider_utility.spider_filename(output, filename)
    volume_output = format_utility.add_prefix(output, "vol_")
    current_resolution = post_process(filename, output=output, output_volume=volume_output, **extra)
    _logger.info("Resolution = %f" % current_resolution)
    return filename
def post_process(files, spi, output, output_volume="", min_resolution=0.0, add_resolution=0.0, enhance=False, **extra):
    ''' Postprocess reconstructed volumes for next round of refinement

    :Parameters:

    files : tuple
            Filenames of the full volume and the two half volumes (full, half1, half2)
    spi : spider.Session
          Current SPIDER session
    output : str
             Output filename base for postprocessed volume
    min_resolution : float
                     Minimum resolution for filtering the structure
    add_resolution : float
                     Additional amount to add to resolution before filtering the next reference
    output_volume : str
                    Output filename for the reconstructed volume (if empty, `vol_$output` will be used). half volumes will be prefixed with `h1_` and `h2_` and the raw volume, `raw_`
    enhance : bool
              Output an enhanced density map
    extra : dict
            Unused keyword arguments

    :Returns:

    resolution : float
                 Current resolution for the reconstruction
    '''

    if output_volume == "": output_volume = format_utility.add_prefix(output, "vol_")
    # FSC between the half volumes; `sp` is the spatial frequency at the cutoff,
    # `apix` the (possibly decimated) pixel size used during estimation
    sp, fsc, apix = resolution.estimate_resolution(files[1], files[2], spi, format_utility.add_prefix(output, "dres_"), **extra)
    # Rescale size parameters to the pixel size returned by the estimation
    extra['pixel_diameter'] *= extra['apix']/apix
    # NOTE(review): int() of the ratio truncates before multiplying -- a ratio
    # below 2 leaves `window` unchanged; confirm this is intended
    extra['window'] *= int(extra['apix']/apix)
    extra['apix']=apix
    # Resolution in Angstroms corresponding to the FSC cutoff frequency
    res = extra['apix']/sp
    if add_resolution > 0.0:
        # Filter slightly beyond the measured resolution
        sp = extra['apix'] / (add_resolution+res)
    if (add_resolution+res) < min_resolution: sp = extra['apix']/min_resolution
    if extra['pre_filter'] > 0 and res > extra['pre_filter']: extra['pre_filter'] = res
    filename = files[0]
    # High-pass, low-pass, then mask the full volume in place at `output_volume`
    filename = filter_volume.filter_volume_highpass(filename, spi, outputfile=output_volume, **extra)
    filename = filter_volume.filter_volume_lowpass(filename, spi, sp, outputfile=output_volume, **extra)
    #filename = center_volume(filename, spi, output_volume)
    filename = mask_volume.mask_volume(filename, output_volume, spi, **extra)
    if enhance:
        enhance_volume.enhance_volume(filename, spi, extra['apix'] / res, output, prefix="enh_", **extra)
    return res
def center_volume(filename, spi, output):
    ''' Center the volume in the box

    :Parameters:

    filename : str
               Input volume file
    spi : spider.Session
          Current SPIDER session
    output : str
             Output centered volume file

    :Returns:

    output : str
             Output centered volume file
    '''

    # Work on a copy when input and output name the same file
    if filename == output:
        filename = spi.cp(filename)
    center = spi.cg_ph(filename)
    # Shift by the trailing components of the center-of-gravity result
    return spi.sh_f(filename, center[3:], outputfile=output)
def initialize(files, param):
    ''' Open a SPIDER session, load the params file, and group inputs into triples

    Input files are expected in repeating (full_vol, half1_vol, half2_vol) order;
    check_options has already verified the count is a multiple of 3.
    '''
    param['spi'] = spider.open_session(files, **param)
    spider_params.read(param['spi'].replace_ext(param['param_file']), param)
    pfiles = [(files[i], files[i + 1], files[i + 2]) for i in xrange(0, len(files), 3)]
    if len(pfiles) > 1 and param['worker_count'] > 1:
        # Each worker opens its own session in init_process
        param['spi'].close()
        param['spi'] = None
    return pfiles
def init_process(process_number, rank, input_files, **extra):
    ''' Open a per-worker SPIDER session for a child process '''
    # Derive a globally unique rank from the MPI rank and the local process number
    worker_rank = mpi_utility.get_size(**extra) * rank + process_number
    return {'spi': spider.open_session(input_files, rank=worker_rank, **extra)}
def finalize(files, **extra):
    ''' Log completion of the whole run (no per-file cleanup required) '''
    # Finalize global parameters for the script
    _logger.info("Completed")
def setup_options(parser, pgroup=None, main_option=False):
    ''' Register command-line options for this script

    Main options (-i/-o/-e and the SPIDER params) are only added when the
    script is run directly; filter options are always pulled from the
    filter_volume docstring.
    '''
    # Setup options for automatic option parsing
    from ..core.app.settings import setup_options_from_doc
    if main_option:
        pgroup.add_option("-i", input_files=[], help="List of input filenames containing volumes triples, full_vol, half_vol_1, half_vol_2", required_file=True, gui=dict(filetype="file-list"))
        pgroup.add_option("-o", output="", help="Output filename for the filtered, masked volume as well as base output name for FSC curve (`res_$output`)", gui=dict(filetype="save"), required_file=True)
        pgroup.add_option("-e", enhance=False, help="Enhance the output volume")
        spider_params.setup_options(parser, pgroup, True)
    # Filter options are generated from the filter function's docstring
    setup_options_from_doc(parser, filter_volume.filter_volume_highpass, group=pgroup)
    if main_option:
        parser.change_default(thread_count=4, log_level=3)
def check_options(options, main_option=False):
    ''' Validate parsed options; raises OptionValueError on invalid input

    NOTE: this module uses Python 2 `raise Exception, "msg"` syntax.
    '''
    # Check if the option values are valid
    from ..core.app.settings import OptionValueError
    if main_option:
        spider_params.check_options(options)
        # Volumes must come in (full, half1, half2) triples
        if len(options.input_files)%3 != 0:
            _logger.debug("Found: %s"%",".join(options.input_files))
            raise OptionValueError, "Requires input files in sets of 3, e.g. full_vol,half1_vol,half2_vol - found %d"%len(options.input_files)
        # Multiple inputs require SPIDER-style numeric suffixes for output numbering
        if not spider_utility.test_valid_spider_input(options.input_files):
            raise OptionValueError, "Multiple input files must have numeric suffix, e.g. vol0001.spi"
def main():
    ''' Main entry point: dispatch to the hybrid (serial/MPI) program runner '''
    # Main entry point for this script
    program.run_hybrid_program(__name__,
        description = '''Prepare a volume for refinement

                        $ %prog raw_vol01.spi raw1_vol01.spi raw2_vol01.spi -p params.spi -o filt_vol_0001.spi

                        http://guam/vispider/vispider/manual.html#module-vispider.batch.prepare_volume

                        Uncomment (but leave a space before) the following lines to run current configuration file on

                        source /guam.raid.cluster.software/arachnid/arachnid.rc
                        nohup %prog -c $PWD/$0 > `basename $0 cfg`log &
                        exit 0
                      ''',
        supports_MPI=True,
        use_version = False,
        max_filename_len = 78,
    )
def dependents():
    """Modules whose options and setup this script depends on."""
    return [filter_volume, resolution, mask_volume, enhance_volume]
if __name__ == "__main__": main()
|
ezralanglois/arachnid
|
arachnid/pyspider/prepare_volume.py
|
Python
|
gpl-2.0
| 14,002
|
[
"Gaussian"
] |
7e8a5aba845cc697fdedaddf1140bf20ae09a3916b35d8b32d2b38ea52936ce8
|
import copy
import itertools
import megadb.tree as tree
import megadb.algebra as algebra
from megadb.algebra.parser import print_parse_tree
class BaseOptimizator(object):
    """Abstract base class for query-tree optimizators."""

    def run(self, tree):
        """Transform *tree* and return the optimized root; subclasses must override."""
        raise NotImplementedError()
def tree_traverse(root, type, visit):
    """Pre-order walk: call *visit* on every node that is an instance of *type*.

    The child list is snapshotted before visiting, so *visit* may detach or
    rewire the current node without disturbing the iteration.
    """
    if isinstance(root, tree.TreeNode):
        pending = list(root.children)
    else:
        pending = []
    if isinstance(root, type):
        visit(root)
    for child in pending:
        tree_traverse(child, type, visit)
def tree_traverse_first(root, type, visit):
    """Pre-order search: call *visit* on the first *type* instance encountered.

    Returns the result of that single *visit* call (children of a matching
    node are not traversed); returns None when no node matches or every
    recursive result is falsy.
    """
    if isinstance(root, tree.TreeNode):
        pending = list(root.children)
    else:
        pending = []
    if isinstance(root, type):
        return visit(root)
    for child in pending:
        found = tree_traverse_first(child, type, visit)
        if found:
            return found
def convert_cascading_selections(root):
    """Split every multi-condition Selection into a chain of single-condition ones.

    The original node is detached and replaced in place by a cascade of new
    Selection nodes, one per condition.
    """
    def visit_selection(node):
        # Single condition: nothing to split
        if len(node.conds) == 1:
            return
        # Shadows the outer `root` deliberately: tracks the bottom of the
        # growing cascade, starting from the replaced node's parent
        root = node.parent
        for c in node.conds:
            new_node = algebra.Selection(root, [c])
            root = new_node
        # Reattach the original children below the last new selection
        for c in node.children:
            c.parent = root
        # Detach the replaced multi-condition node
        node.parent = None
    tree_traverse(root, algebra.Selection, visit_selection)
def collect_namespaces(node):
    """Return the set of relation names reachable in the subtree under *node*."""
    if isinstance(node, algebra.Relation):
        return set([str(node.name)])
    namespaces = set()
    for child in node.children:
        namespaces = namespaces | collect_namespaces(child)
    return namespaces
def clone_tree(root):
    """Return a deep copy of the (sub)tree rooted at *root*."""
    return copy.deepcopy(root)
def clone_partial_tree(node):
    """Deep-copy the tree above *node*, excluding the subtree rooted at *node*.

    Returns the cloned parent (with *node*'s subtree removed from its
    children), or None when *node* is the root.
    """
    if node.parent is None:
        return None
    else:
        # Temporarily hide *node* from its parent's child list so deepcopy
        # skips its subtree, then restore the original list afterwards.
        old_children = node.parent.children
        new_children = [c for c in old_children if c is not node]
        node.parent.children = new_children
        new_parent = copy.deepcopy(node.parent)
        node.parent.children = old_children
        # Recursively clone the ancestors and stitch them onto the clone.
        parent_of_new_parent = clone_partial_tree(new_parent)
        if parent_of_new_parent is not None:
            new_parent.parent = parent_of_new_parent
        return new_parent
def find_root(node):
    """Follow parent links upward and return the topmost ancestor of *node*."""
    current = node
    while current.parent:
        current = current.parent
    return current
def extract_join_order(node):
    """Flatten a chain of NaturalJoins into its participant subtrees.

    Returns [] when *node* is not a join. When one child is itself a join,
    the non-join child is emitted first and the nested join is flattened
    recursively.
    """
    if not isinstance(node, algebra.NaturalJoin):
        return []
    lc, rc = node.children
    # Normalize so that a nested join (if any) ends up in lc
    if isinstance(rc, algebra.NaturalJoin):
        rc, lc = lc, rc
    if isinstance(lc, algebra.NaturalJoin):
        participants = [rc]
        participants.extend(extract_join_order(lc))
        return participants
    return [lc, rc]
def enumerate_join_orders(root):
    """Enumerate alternative join orders for the first NaturalJoin under *root*.

    Returns a list of cloned trees covering both left-deep and bushy join
    shapes; falls back to [root] when the tree contains no join.
    NOTE(review): assumes the join node has a parent (clone_partial_tree
    returns None for a root node) -- confirm joins are never the tree root.
    """
    def enumerate_join_order_left_deep(node):
        # Every left-deep shape: choose an initial pair, then fold the
        # remaining participants onto it in every possible order.
        def combine(x, y):
            join = algebra.NaturalJoin(None)
            new_y = clone_tree(y)
            new_y.parent = join
            x.parent = join
            return join
        enums = []
        participants = set(extract_join_order(node))
        combs = itertools.combinations(participants, 2)
        for p1, p2 in combs:
            for perm in itertools.permutations(participants - set((p1, p2))):
                init_join = algebra.NaturalJoin(None)
                new_p1 = clone_tree(p1)
                new_p2 = clone_tree(p2)
                new_p1.parent = init_join
                new_p2.parent = init_join
                # Python 2 builtin reduce: left-fold perm onto the seed join
                join_tree = reduce(combine, perm, init_join)
                # Graft the rebuilt join into a clone of the rest of the tree
                partial_tree = clone_partial_tree(node)
                join_tree.parent = partial_tree
                enums.append(find_root(partial_tree))
        return enums
    def enumerate_join_order_bushy(node):
        def combine(pair):
            # Join a 2-tuple of subtrees; pass a singleton through unchanged
            if len(pair) == 1:
                return pair[0]
            else:
                join = algebra.NaturalJoin(None)
                new_x = clone_tree(pair[0])
                new_y = clone_tree(pair[1])
                new_x.parent = join
                new_y.parent = join
                return join
        def build_two_pair(parts):
            # All ways to partition *parts* into unordered pairs, plus at
            # most one leftover singleton
            if len(parts) == 0:
                return []
            elif len(parts) == 1:
                return [[(parts.pop(),)]]
            results = []
            part_sets = set(parts)
            for p1, p2 in itertools.combinations(part_sets, 2):
                rest = build_two_pair(part_sets - set((p1, p2)))
                if rest:
                    for r in rest:
                        r.append((p1, p2))
                    results.append(rest)
                else:
                    results.append([[(p1, p2)]])
            return list(itertools.chain.from_iterable(results))
        def enumerate_pairs(parts):
            # Recursively fold pairings until a single tree remains
            results = []
            pairs = build_two_pair(parts)
            for p in pairs:
                # Python 2 map: returns a list
                folded = map(combine, p)
                if len(folded) == 1:
                    new_tree = clone_partial_tree(node)
                    folded[0].parent = new_tree
                    results.append(find_root(new_tree))
                else:
                    results.extend(enumerate_pairs(folded))
            return results
        participants = set(extract_join_order(node))
        return enumerate_pairs(participants)
    left_deep_trees = tree_traverse_first(root, algebra.NaturalJoin, enumerate_join_order_left_deep) or []
    bushy_trees = tree_traverse_first(root, algebra.NaturalJoin, enumerate_join_order_bushy) or []
    combined = left_deep_trees + bushy_trees
    return combined or [root]
def enumerate_selections(root):
    """
    Enumerate trees obtained by permuting runs of consecutive Selections.

    1. traverse to leafnode
    2. climb up to find selections
    3. if there is consecutive selections: build permutations of selections into new trees
    """
    def climb_up(node):
        # Each leaf is processed once; a marker attribute prevents
        # re-visiting the same leaf across repeated traversals below
        if getattr(node, 'traversed', None):
            return []
        node.traversed = True
        enums = []
        while node:
            # Walk upward until the next Selection (or past the root)
            while node and not isinstance(node, algebra.Selection):
                prev = node
                node = node.parent
            if node is None:
                return enums
            # `start` is the child immediately below the selection run
            start = prev
            selects = []
            # Collect the whole run of consecutive Selections
            while isinstance(node, algebra.Selection):
                selects.append(node)
                node = node.parent
            if len(selects) > 1:
                # Rebuild the run in every alternative order on a cloned tree
                # (clone_tree deep-copies the parent chain as well)
                select_perms = itertools.permutations(selects)
                for perms in select_perms:
                    new_tree = clone_tree(start)
                    new_end = new_tree
                    for _ in selects:
                        new_end = new_end.parent
                    for p in perms:
                        new_selection = algebra.Selection(None, p.conds[:])
                        new_tree.parent = new_selection
                        new_tree = new_selection
                    # Splice the permuted run in place of the original one
                    new_tree.parent = new_end.parent
                    new_end.parent = None
                    enums.append(find_root(new_tree))
    enums = tree_traverse_first(root, tree.LeafNode, climb_up)
    while enums:
        # Keep enumerating from other (unvisited) leaves until no new
        # variants appear
        next_results = []
        for e in enums:
            next_enums = tree_traverse_first(e, tree.LeafNode, climb_up)
            if next_enums:
                next_results.extend(next_enums)
        if len(next_results) == 0:
            break
        enums = next_results
    return enums or [root]
class PushSelectionDownOptimizator(BaseOptimizator):
    """
    Push selection conditions below cross products when they touch only one side.

    1. find selection
    2. find join
    3. check namespace set of two children
    4. move applicable conditions to corresponding side
        -> recursive on children
    """
    def run(self, root):
        def visit_selection(selection):
            def visit_join(join):
                child_p, child_q = join.children
                # collect namespaces of two children
                ns_p = collect_namespaces(child_p)
                ns_q = collect_namespaces(child_q)
                new_conds = []
                for cond in selection.conds:
                    # Relation names referenced by this condition's fields
                    related_ns = set()
                    if isinstance(cond.x, algebra.Field) and cond.x.namespace:
                        related_ns.add(cond.x.namespace)
                    if isinstance(cond.y, algebra.Field) and cond.y.namespace:
                        related_ns.add(cond.y.namespace)
                    # An unqualified field cannot be attributed to either
                    # side, so it blocks the subset tests below
                    if ((isinstance(cond.x, algebra.Field) and cond.x.namespace is None) or
                        (isinstance(cond.y, algebra.Field) and cond.y.namespace is None)):
                        related_ns.add(None)
                    if related_ns <= ns_p or related_ns <= ns_q:
                        # Condition touches only one side: insert a new
                        # single-condition Selection directly above that child
                        new_selection = algebra.Selection(join, [cond])
                        if related_ns <= ns_p:
                            child_p.parent = new_selection
                            child_p = new_selection
                        else:
                            child_q.parent = new_selection
                            child_q = new_selection
                    else:
                        new_conds.append(cond)
                assert len(selection.children) == 1
                # Keep only the conditions that could not be pushed down
                selection.conds = new_conds
            # NOTE(review): traversal targets CartesianProduct nodes (not
            # NaturalJoin) -- conditions are pushed below cross products only
            tree_traverse(selection, algebra.CartesianProduct, visit_join)
            if not selection.conds:
                # Every condition was pushed down: splice this node out
                selection.children[0].parent = selection.parent
                selection.parent = None
        tree_traverse(root, algebra.Selection, visit_selection)
        # Re-split any remaining multi-condition selections
        convert_cascading_selections(root)
        return root
#########
class CostBasedOptimizator(BaseOptimizator):
    """Base class for optimizators that consult table statistics.

    *stats* maps a relation name to ``[tuple_count, {attribute: value_count}]``
    (see the subclasses' cost estimators); it is stored by reference, not copied.
    """

    def __init__(self, stats):
        self.stats = stats
class CartesianProductToThetaJoinOptimizator(CostBasedOptimizator):
    """
    Notice: apply this after push selections down optimizator (or conds will be folded in join)

    1. find selection
    2. check that its child is cross join
    3. if yes: merge two node into one thetajoin or natrualjoin
    """
    def run(self, root):
        def extract_fields(node):
            # All fields available under *node*, qualified by relation name
            # (field names come from the statistics dictionary)
            if isinstance(node, algebra.Relation):
                fnames = self.stats[str(node.name)][1].keys()
                fields = map(lambda x: algebra.Field.from_components(x, str(node.name)), fnames)
                return set(fields)
            elif isinstance(node, algebra.Selection):
                return extract_fields(node.children[0])
            elif isinstance(node, algebra.CartesianProduct):
                return extract_fields(node.children[0]) | extract_fields(node.children[1])
            else:
                raise NotImplementedError()
        def can_do_natural_join(conds, cross_children):
            # Natural join applies iff every condition is field == field with
            # matching names, and together they cover exactly the attributes
            # common to both sides
            fs_left, fs_right = map(extract_fields, cross_children)
            fns_left = {x.name for x in fs_left}
            fns_right = {x.name for x in fs_right}
            common_attrs = fns_left & fns_right
            join_attrs = set()
            for cond in conds:
                if not isinstance(cond.x, algebra.Field) or not isinstance(cond.y, algebra.Field):
                    return False
                if cond.x.name != cond.y.name:
                    return False
                join_attrs.add(cond.x.name)
            return common_attrs == join_attrs
        def visit_selection(selection):
            if (selection.children
                and isinstance(selection.children[0], algebra.CartesianProduct)):
                # Gather the whole cascade of selections above the cross product
                prev_selection = None
                curr_selection = selection
                all_conds = []
                while isinstance(curr_selection, algebra.Selection):
                    all_conds.extend(curr_selection.conds)
                    prev_selection = curr_selection
                    curr_selection = curr_selection.parent
                cross_join = selection.children[0]
                # Attach the new join where the topmost selection used to hang
                if can_do_natural_join(all_conds, cross_join.children):
                    join = algebra.NaturalJoin(prev_selection.parent)
                else:
                    join = algebra.ThetaJoin(prev_selection.parent, all_conds)
                # Reparent the cross product's children under the new join;
                # detaching prev_selection drops the old selection chain
                for c in cross_join.children[:]:
                    c.parent = join
                prev_selection.parent = None
                # The rewrite may expose further selection/cross-product pairs
                tree_traverse(join, algebra.Selection, visit_selection)
        tree_traverse(root, algebra.Selection, visit_selection)
        return root
class GreedyJoinOrderOptimizator(CostBasedOptimizator):
    """
    1. find NaturalJoin
    2. extract_join_order on join
    3. compute estimation for each participant
    4. using T(P) * T(Q) / (max{V(P, a), V(Q, a)}) to fold
    """
    def run(self, root):
        def estimate_stat(p):
            # [tuple_count, {attr: distinct_values}] estimate for one participant
            if isinstance(p, algebra.Relation):
                return copy.copy(self.stats[str(p.name)])
            elif isinstance(p, algebra.Selection):
                # T(S) = T(R) / V(R, a); the selected attribute collapses to 1 value
                child_stat = estimate_stat(p.children[0])
                cond = p.conds[0]
                attr_name = cond.x.name if isinstance(cond.x, algebra.Field) else cond.y.name
                var = child_stat[1][attr_name]
                child_stat[1][attr_name] = 1
                return [float(child_stat[0]) / var, child_stat[1]]
            raise NotImplementedError()
        def estimate_cost(p, q):
            # Estimated stats of joining p and q; each is a (node, stat) pair
            p_stat, q_stat = p[1], q[1]
            new_table_size = float(p_stat[0] * q_stat[0])
            # Python 2: dict.items() returns lists, concatenated with +
            new_value_set = dict(p_stat[1].items() + q_stat[1].items())
            common_attrs = (set(p_stat[1]) & set(q_stat[1]))
            # if not common_attrs:
            # equal to Cartesian product
            # return [p_stat[0] * q_stat[0], new_value_set]
            for common_attr in common_attrs:
                # T(P join Q) = T(P)*T(Q) / max(V(P,a), V(Q,a)) per shared attribute
                new_table_size /= max(p_stat[1][common_attr], q_stat[1][common_attr])
                new_value_set[common_attr] = min(p_stat[1][common_attr], q_stat[1][common_attr])
            return [new_table_size, new_value_set]
        def visit_join(join):
            participants = extract_join_order(join)
            # Python 2: map/zip return lists, so remove/append below work
            estimations = map(estimate_stat, participants)
            participants = zip(participants, estimations)
            while len(participants) > 1:
                # Greedily merge the pair with the smallest estimated size
                min_pair = None
                min_cost = None
                for p1, p2 in itertools.combinations(participants, 2):
                    cost = estimate_cost(p1, p2)
                    if min_cost is None or min_cost[0] > cost[0]:
                        min_pair = (p1, p2)
                        min_cost = cost
                for p in min_pair:
                    participants.remove(p)
                new_join = algebra.NaturalJoin(None)
                for p, _ in min_pair:
                    p.parent = new_join
                participants.append([new_join, min_cost])
            # Splice the rebuilt join in place of the original one
            new_join, _ = participants[0]
            new_join.parent = join.parent
            join.parent = None
        tree_traverse_first(root, algebra.NaturalJoin, visit_join)
        return root
# contributed by Ray Chien
class EnumerationBasedOptimizator(CostBasedOptimizator):
    """Pick the cheapest tree among enumerated selection orders, then join orders.

    Cost is the summed estimated size of all intermediate results.
    """
    def run(self, root):
        def cost(node, cost_list):
            # Returns [tuple_count, {attr: distinct_values}] for *node* and
            # appends every intermediate result size to *cost_list*
            if isinstance(node, algebra.Relation):
                return copy.copy(self.stats[str(node.name)])
            elif isinstance(node, algebra.Projection):
                return cost(node.children[0], cost_list)
            elif isinstance(node, algebra.Selection): # T(S) = T(R) / V(R, a)
                child = cost(node.children[0], cost_list)
                cond = node.conds[0]
                attr_name = cond.x.name if isinstance(cond.x, algebra.Field) else cond.y.name
                var = child[1][attr_name]
                new_t = float(child[0]) / var
                cost_list.append(new_t)
                return [new_t, child[1]]
            else: # T(R) * T(S) / max(V(R, a), V(S, a))
                child_r = cost(node.children[0], cost_list)
                child_s = cost(node.children[1], cost_list)
                new_t = float(child_r[0] * child_s[0])
                # Python 2: dict.items() returns lists, concatenated with +
                new_v = dict(child_r[1].items() + child_s[1].items())
                common_attrs = set(child_r[1]) & set(child_s[1])
                for common_attr in common_attrs:
                    new_t /= max(child_r[1][common_attr], child_s[1][common_attr])
                cost_list.append(new_t)
                return [new_t, new_v]
        def find_optimized_tree(root, enumerator):
            # Exhaustively score each candidate by total intermediate size
            optimized_tree = None
            smallest_cost = float('inf')
            possible_trees = enumerator(root)
            for tree in possible_trees:
                cost_list = []
                cost(tree, cost_list)
                total_cost = sum(cost_list)
                if total_cost < smallest_cost:
                    smallest_cost = total_cost
                    optimized_tree = tree
            return optimized_tree
        # Optimize selection order first, then join order on the winner
        best_selection_tree = find_optimized_tree(root, enumerate_selections)
        best_join_order_tree = find_optimized_tree(best_selection_tree, enumerate_join_orders)
        return best_join_order_tree
# contributed by Jianqing Zhang and Jian Yuan
class GreedyOptimizator(CostBasedOptimizator):
    """Cost-based optimizer using a greedy join-ordering heuristic.

    A single traversal of the input tree collects the projection fields,
    the selection conditions, and the relations participating in joins,
    together with their statistics (tuple counts and per-attribute value
    counts).  Selections on the same relation are ordered by estimated
    output size, and the join tree is rebuilt bottom-up by repeatedly
    joining the currently cheapest pair of relations/subtrees.
    """

    def run(self, root):
        """Return a new, greedily optimized plan tree built from `root`."""
        # order in statforS indicates the selection order
        self.statforS = {}  # relation name -> list of [attr, newT, variances] per selection
        self.statforJ = {}  # relation name -> [T, variances], maintained across joins
        self.projFields = []  # fields of the top-level projection
        self.relationTobeJoin = []  # names of relations / composite subtrees still to join
        self.forSelec = []  # selection conditions harvested from the tree
        self.cascadeSele = False
        self.subTrees = {}  # composite-relation name -> root node of built subtree

        def Traverse(node):
            """Walk the original tree, harvesting operators and statistics."""
            # if node type is 'Projection', then record the fields to perform Projection
            if isinstance(node, algebra.Projection):
                self.projFields = node.fields
            # record the relations to perform Natural Join
            if isinstance(node, algebra.NaturalJoin):
                if isinstance(node.children[0], algebra.Relation):
                    self.relationTobeJoin.append(str(node.children[0].name))
                    # set up a dictionary to store the statistics used for Natural Join
                    self.statforJ[str(node.children[0].name)] = self.stats[str(node.children[0].name)]
                elif isinstance(node.children[1], algebra.Relation):
                    self.relationTobeJoin.append(str(node.children[1].name))
                    # set up a dictionary to store the statistics used for Natural Join
                    # NOTE(review): this branch records statistics for children[0] even
                    # though the relation appended above is children[1]; looks like a
                    # copy-paste slip -- confirm intended behavior before changing.
                    self.statforJ[str(node.children[0].name)] = self.stats[str(node.children[0].name)]
            if isinstance(node, algebra.Selection):
                rName = node.conds[0].x.namespace
                attrName = node.conds[0].x.name
                cond = node.conds[0]
                # record the condition to perform Selection
                self.forSelec.append(cond)
                assert(self.stats.has_key(rName))
                # set up a new dictionary to store statistics used for Selection only
                if not self.statforS.has_key(rName):
                    self.statforS[rName] = []
                else:
                    self.cascadeSele = True # used to decide whether to determine Selection Order
                T = self.stats[rName][0] # number of tuples in Relation "rName"
                newT = T / self.stats[rName][1][attrName]
                assert(newT >= 1)
                # temperary dictionary to store post selection statistics
                tmp = {}
                for eachAttr in self.stats[rName][1].keys():
                    if eachAttr == attrName:
                        newVar = 1
                        tmp[eachAttr] = newVar
                    else:
                        # asumption that after selection, variance of other attributes won't change unless it's greater than the new T
                        newVar = min(self.stats[rName][1][eachAttr], newT)
                        tmp[eachAttr] = newVar
                data = []
                data.append(attrName) # different from original statistics, used to tell the selection is performed on which attribute
                data.append(newT)
                data.append(tmp)
                self.statforS[rName].append(data)
                # also should update the dictionary of statistics for Natural Join
                if not self.statforJ.has_key(rName):
                    self.statforJ[rName] = []
                    self.statforJ[rName].append(newT)
                    self.statforJ[rName].append(tmp)
                else:
                    newStat = []
                    newT = self.statforJ[rName][0] / self.statforJ[rName][1][attrName] # cascade Selection
                    assert(newT >= 1)
                    tmp = {}
                    for eachAttr in self.statforJ[rName][1].keys():
                        if eachAttr == attrName:
                            newVar = 1
                            tmp[eachAttr] = newVar
                        else:
                            # asumption that after selection, variance of other attributes won't change unless it's greater than the new T
                            newVar = min(self.statforJ[rName][1][eachAttr], newT)
                            tmp[eachAttr] = newVar
                    newStat.append(newT)
                    newStat.append(tmp)
                    self.statforJ[rName] = newStat
            if isinstance(node, tree.LeafNode):
                if isinstance(node.parent, algebra.Selection):
                    self.relationTobeJoin.append(str(node.name))
            if isinstance(node, tree.TreeNode):
                for child in node.children:
                    Traverse(child)
        Traverse(root)

        def SelectOrder():
            """Order cascaded selections on a relation by estimated output size."""
            if self.cascadeSele == False:
                return
            for each in self.statforS.keys():
                # sort the statistic based on # of tuples to implement the select order
                self.statforS[each] = sorted(self.statforS[each], key=lambda item: item[1])
        SelectOrder()

        def JoinOrder():
            """Recursively build the join tree, always joining the cheapest pair first.

            A relation name containing a space denotes a composite subtree
            already built and stored in self.subTrees.
            """
            newNode = None
            # if only two relations to be join, then no need to decide the join order
            if len(self.relationTobeJoin) == 2:
                # add the last relation into the joinOrder list
                newNode = algebra.NaturalJoin(newNode)
                newRName = self.relationTobeJoin[0] + ' ' + self.relationTobeJoin[1]
                if self.relationTobeJoin[0].find(' ') == -1:
                    #algebra.Relation(newNode, self.relationTobeJoin[0])
                    thisNode = None
                    thisNode = algebra.Relation(newNode, self.relationTobeJoin[0])
                    ########################################################### Add selection node
                    if self.relationTobeJoin[0] in self.statforS:
                        for e in self.statforS[self.relationTobeJoin[0]]:
                            seleAttr = e[0]
                            for eCond in self.forSelec:
                                if eCond.x.namespace == self.relationTobeJoin[0] and eCond.x.name == seleAttr:
                                    tNode = algebra.Selection(newNode, [eCond])
                                    thisNode.parent = tNode
                                    thisNode = tNode
                    ###########################################################
                    # other operand may be an already-built composite subtree
                    if not len(self.subTrees) == 0:
                        self.subTrees[self.relationTobeJoin[1]].parent = newNode
                        del self.subTrees[self.relationTobeJoin[1]]
                if self.relationTobeJoin[1].find(' ') == -1:
                    #algebra.Relation(newNode, self.relationTobeJoin[1])
                    thisNode = None
                    thisNode = algebra.Relation(newNode, self.relationTobeJoin[1])
                    ########################################################### Add selection node
                    if self.relationTobeJoin[1] in self.statforS:
                        for e in self.statforS[self.relationTobeJoin[1]]:
                            seleAttr = e[0]
                            for eCond in self.forSelec:
                                if eCond.x.namespace == self.relationTobeJoin[1] and eCond.x.name == seleAttr:
                                    tNode = algebra.Selection(newNode, [eCond])
                                    thisNode.parent = tNode
                                    thisNode = tNode
                    ###########################################################
                    # other operand may be an already-built composite subtree
                    if not len(self.subTrees) == 0:
                        self.subTrees[self.relationTobeJoin[0]].parent = newNode
                        del self.subTrees[self.relationTobeJoin[0]]
                # both operands are composite subtrees
                if not self.relationTobeJoin[0].find(' ') == -1 and not self.relationTobeJoin[1].find(' ') == -1:
                    self.subTrees[self.relationTobeJoin[0]].parent = newNode
                    self.subTrees[self.relationTobeJoin[1]].parent = newNode
                    del self.subTrees[self.relationTobeJoin[0]]
                    del self.subTrees[self.relationTobeJoin[1]]
                self.subTrees[newRName] = newNode
                # Now subTrees dict should only have one entry
                assert(len(self.subTrees) == 1)
                return newNode
            tmp1 = [] # tmp1 temperarily store the cost of all possible join pairs
            # for each possible relation join pairs
            for each in self.relationTobeJoin:
                for another in self.relationTobeJoin:
                    if self.relationTobeJoin.index(each) >= self.relationTobeJoin.index(another):
                        continue
                    else:
                        # try to find if they have common attributes
                        attr = None
                        attrEach = set(self.statforJ[each][1].keys())
                        attrAnot = set(self.statforJ[another][1].keys())
                        attr = list(attrEach.intersection(attrAnot))
                        numofAttr = len(attr)
                        if numofAttr > 0:
                            # calculate the cost of the join of these two relations
                            T1 = self.statforJ[each][0]
                            T2 = self.statforJ[another][0]
                            cost = T1 * T2
                            # if have multiple attributes in common
                            for i in range(0, numofAttr): # remember 'i' won't reach numofAttr
                                cost = cost / max(self.statforJ[each][1][attr[i]], self.statforJ[another][1][attr[i]])
                            if cost == 0:
                                cost = 1
                            tmp2 = [] # tmp2 temperarily store the cost information of one possible join pair
                            tmp2.append(each)
                            tmp2.append(another)
                            tmp2.append(cost)
                            tmp1.append(tmp2)
            # sort the list tmp1 based on the cost
            # the first pair has the least cost
            tmp1 = sorted(tmp1, key=lambda item: item[2])
            # Construct the sub-tree using this least cost pair
            newNode = algebra.NaturalJoin(newNode)
            newRName = tmp1[0][0] + ' ' + tmp1[0][1] # name of the post-join relation is indicated by the space in the name
            if tmp1[0][0].find(' ') == -1:
                thisNode = None
                thisNode = algebra.Relation(newNode, tmp1[0][0])
                ############################################# # add selection node
                if tmp1[0][0] in self.statforS:
                    for e in self.statforS[tmp1[0][0]]:
                        seleAttr = e[0]
                        for eCond in self.forSelec:
                            if eCond.x.namespace == tmp1[0][0] and eCond.x.name == seleAttr:
                                tNode = None
                                tNode = algebra.Selection(newNode, [eCond])
                                thisNode.parent = tNode
                                thisNode = tNode
                #############################################
            else:
                self.subTrees[tmp1[0][0]].parent = newNode
                del self.subTrees[tmp1[0][0]]
            if tmp1[0][1].find(' ') == -1:
                thisNode = None
                thisNode = algebra.Relation(newNode, tmp1[0][1])
                ############################################# # add selection node
                if tmp1[0][1] in self.statforS:
                    for e in self.statforS[tmp1[0][1]]:
                        seleAttr = e[0]
                        for eCond in self.forSelec:
                            if eCond.x.namespace == tmp1[0][1] and eCond.x.name == seleAttr:
                                tNode = None
                                tNode = algebra.Selection(newNode, [eCond])
                                thisNode.parent = tNode
                                thisNode = tNode
                #############################################
            else:
                self.subTrees[tmp1[0][1]].parent = newNode
                del self.subTrees[tmp1[0][1]]
            # update the sub-tree dictionary
            self.subTrees[newRName] = newNode
            # update the statistic for join
            newRSize = tmp1[0][2]
            # add a new entry in the statforJ dictionary
            self.statforJ[newRName] = []
            self.statforJ[newRName].append(newRSize)
            data = {} # data dictionary to temperarily store the variance information of the new post-join relation
            # determine the value of variance for each attribute of the new post-join relation
            for attr in self.statforJ[tmp1[0][0]][1].keys():
                if data.has_key(attr):
                    if self.statforJ[tmp1[0][0]][1][attr] < data[attr]:
                        data[attr] = self.statforJ[tmp1[0][0]][1][attr]
                else:
                    data[attr] = min(self.statforJ[tmp1[0][0]][1][attr], newRSize)
            for attr in self.statforJ[tmp1[0][1]][1].keys():
                if data.has_key(attr):
                    if self.statforJ[tmp1[0][1]][1][attr] < data[attr]:
                        data[attr] = self.statforJ[tmp1[0][1]][1][attr]
                else:
                    data[attr] = min(self.statforJ[tmp1[0][1]][1][attr], newRSize)
            self.statforJ[newRName].append(data)
            # delete the two entries for the relations been joined
            del self.statforJ[tmp1[0][0]]
            del self.statforJ[tmp1[0][1]]
            # update relation to be join
            self.relationTobeJoin.append(newRName)
            self.relationTobeJoin.remove(tmp1[0][0])
            self.relationTobeJoin.remove(tmp1[0][1])
            # for the new relationTobeJoin and new statistic for join, continue to do JoinOrder()
            newNode = JoinOrder()
            return newNode

        def treeBuild():
            """Build the selection/join part of the plan (everything below the projection)."""
            newTree = None
            if len(self.relationTobeJoin) == 1: # no relation to perform join
                assert(len(self.statforS) <= 1)
                rName = self.statforS.keys()[0]
                newTree = algebra.Relation(newTree, rName)
                for e in self.statforS[rName]:
                    seleAttr = e[0]
                    for eCond in self.forSelec:
                        if eCond.x.namespace == rName and eCond.x.name == seleAttr:
                            tNode = None
                            tNode = algebra.Selection(tNode, [eCond])
                            newTree.parent = tNode
                            newTree = tNode
                return newTree
            else:
                newTree = JoinOrder()
                return newTree
        newTree = treeBuild()

        def AddProject(newTree):
            """Put the recorded projection back on top of the rebuilt tree."""
            newRoot = algebra.Projection(None, self.projFields)
            newTree.parent = newRoot
            return newRoot
        newTree = AddProject(newTree)
        return newTree
|
itswindtw/pyMega
|
megadb/optimization/optimizator.py
|
Python
|
mit
| 31,843
|
[
"VisIt"
] |
8d7cf604d6e99c73e1c70be8aeeef703dcdcb315ce9e75309347c9e5e00270bd
|
#
# Copyright (c) 2016 nexB Inc. and others. All rights reserved.
# http://nexb.com and https://github.com/nexB/scancode-toolkit/
# The ScanCode software is licensed under the Apache License version 2.0.
# Data generated with ScanCode require an acknowledgment.
# ScanCode is a trademark of nexB Inc.
#
# You may not use this software except in compliance with the License.
# You may obtain a copy of the License at: http://apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software distributed
# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
#
# When you publish or redistribute any data created with ScanCode or any ScanCode
# derivative work, you must accompany this data with the following acknowledgment:
#
# Generated with ScanCode and provided on an "AS IS" BASIS, WITHOUT WARRANTIES
# OR CONDITIONS OF ANY KIND, either express or implied. No content created from
# ScanCode should be considered or used as legal advice. Consult an Attorney
# for any legal advice.
# ScanCode is a free software code scanning tool from nexB Inc. and others.
# Visit https://github.com/nexB/scancode-toolkit/ for support and download.
from __future__ import absolute_import, print_function
import os
from commoncode.testcase import FileBasedTesting
from scancode import api
class TestAPI(FileBasedTesting):
    """Tests for the top-level scancode.api functions."""
    test_data_dir = os.path.join(os.path.dirname(__file__), 'data')

    def test_get_package_infos_can_pickle(self):
        """Package infos must be picklable with the default protocol.

        Pickling with HIGHEST_PROTOCOL is expected to fail for this data;
        if it ever starts succeeding, the test fails so the expectation
        can be revisited.
        """
        test_file = self.get_test_loc('api/package/package.json')
        package = api.get_package_infos(test_file)
        import pickle
        import cPickle
        try:
            _pickled = pickle.dumps(package, pickle.HIGHEST_PROTOCOL)
            _cpickled = cPickle.dumps(package, pickle.HIGHEST_PROTOCOL)
        except Exception:
            # Expected path: HIGHEST_PROTOCOL cannot serialize this data.
            # The default protocol must still work (raises if it does not).
            _pickled = pickle.dumps(package)
            _cpickled = cPickle.dumps(package)
        else:
            # Bug fix: the original wrapped self.fail() in a bare `except:`
            # block, which swallowed the AssertionError raised by fail() and
            # made this test impossible to ever fail. Using try/except/else
            # keeps the failure visible and catches only real pickling errors.
            self.fail('pickle.HIGHEST_PROTOCOL used to fail to pickle this data')

    def test_get_file_infos_has_no_nulls(self):
        """No is_* flag returned by get_file_infos may be None."""
        # note the test file is EMPTY on purpose to generate all False is_* flags
        test_dir = self.get_test_loc('api/info')
        info = api.get_file_infos(test_dir, as_list=False)
        is_key_values = [v for k, v in info.items() if k.startswith('is_')]
        assert all(v is not None for v in is_key_values)
|
yasharmaster/scancode-toolkit
|
tests/scancode/test_api.py
|
Python
|
apache-2.0
| 2,534
|
[
"VisIt"
] |
ca20e486c32430ae298936779aa769954a458be6082808fb4468a013aadf28cc
|
#!/usr/bin/env python2
"""Script to retrieve and back up calendars and contacts from Google."""
import datetime
import httplib2
import os
import sys
from apiclient import discovery
from gdata import gauth
from gdata.contacts import client as contacts_client
from oauth2client import client as oauth_client
from oauth2client import file as oauth_file
import gflags
import calendar_pb2
import contacts_pb2
import flags # pylint: disable=unused-import
import utils
FLAGS = gflags.FLAGS
class GdataBackup(object):
    """Encapsulating class for script."""

    def __init__(self):
        # Lazily-built OAuth flow and token (see the properties below).
        self._flow = None
        self._oauth_token = None
        # Credentials are persisted next to the config in credentials.dat.
        self._storage = oauth_file.Storage(
            os.path.join(
                utils.resolve_path(FLAGS.config_dir),
                "credentials.dat"
            )
        )
        self._credentials = self._storage.get()

    @property
    def flow(self):
        """Get the OAuth flow object."""
        if self._flow is None:
            # Out-of-band flow: the user pastes the auth code manually.
            self._flow = oauth_client.flow_from_clientsecrets(
                os.path.join(
                    utils.resolve_path(FLAGS.config_dir),
                    "client_secrets.json"
                ),
                scope=[
                    "https://www.google.com/m8/feeds",
                    "https://www.googleapis.com/auth/calendar"
                ],
                redirect_uri="urn:ietf:wg:oauth:2.0:oob"
            )
            self._flow.user_agent = self.__class__.__name__
        return self._flow

    @property
    def credentials(self):
        """Get the OAuth credentials object used for the calendar client.

        Interactive: prompts on stdin for an auth code when there are no
        stored (or only invalid) credentials, then persists the result.
        """
        if self._credentials is None or self._credentials.invalid == True:
            auth_uri = self.flow.step1_get_authorize_url()
            utils.output((
                "Please visit {0} in your browser to authorise the app. Once "
                "authorised, return here and enter the provided auth code.\n\n"
            ).format(auth_uri), 0)
            auth_code = raw_input("Enter the auth code: ")
            self._credentials = self.flow.step2_exchange(auth_code)
            self._storage.locked_put(self._credentials)
        return self._credentials

    @property
    def oauth_token(self):
        """Get the OAuth token object used for the contacts client."""
        if self._oauth_token is None:
            # Reuse the calendar-flow credentials for the gdata contacts API.
            self._oauth_token = gauth.OAuth2Token(
                self.flow.client_id,
                self.flow.client_secret,
                self.flow.scope,
                self.flow.user_agent,
                self.flow.auth_uri,
                self.flow.token_uri,
                self.credentials.access_token,
                self.credentials.refresh_token,
                self.flow.revoke_uri
            )
        return self._oauth_token

    def contacts(self):
        """Download and save all contacts."""
        client = contacts_client.ContactsClient(
            auth_token=self.oauth_token,
            source="GData-Backup"
        )
        feed = client.GetContacts()
        utils.output("Starting contacts backup.\n", 1)
        contact_list_pb = contacts_pb2.ContactList()
        count = 0
        # Follow the feed's "next" links until the last page is consumed.
        while feed is not None:
            for contact in feed.entry:
                utils.contact_to_protobuf(contact, contact_list_pb)
                count += 1
                utils.output("\rBacked up {0} contacts".format(count), 2)
            next_uri = feed.GetNextLink()
            if next_uri:
                feed = client.GetContacts(uri=next_uri.href)
            else:
                feed = None
        # Add newline after the counter stops counting
        utils.output("\n", 2)
        # Timestamped output file so successive backups never clobber each other.
        contact_filename = os.path.join(
            utils.resolve_path(FLAGS.storage_dir),
            "contacts-{0}.binproto".format(
                datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S")
            )
        )
        # NOTE(review): SerializeToString() yields binary data; "w+" works on
        # Python 2 (this script's target) but would need "wb" on Python 3.
        with open(contact_filename, "w+") as contact_file:
            contact_file.write(contact_list_pb.SerializeToString())
        utils.output("Finished contacts backup.\n", 1)

    def calendar(self):
        """Download and save all calendars and events."""
        service = discovery.build(
            "calendar",
            "v3",
            http=self.credentials.authorize(httplib2.Http())
        )
        utils.output("Starting calendar backup.\n", 1)
        calendar_list = calendar_pb2.CalendarList()
        page_token = None
        # Page through the user's calendar list.
        while True:
            calendars = service.calendarList().list(
                pageToken=page_token
            ).execute()
            for calendar in calendars["items"]:
                # Only back up calendars the user owns, not subscriptions.
                if calendar["accessRole"] == "owner":
                    utils.output(
                        "Backing up {0}\n".format(calendar["summary"]),
                        2
                    )
                    calendar_pb = calendar_list.calendar.add()
                    calendar_pb.summary = calendar["summary"]
                    calendar_pb.timezone = calendar["timeZone"]
                    # description/location are optional in the API response.
                    try:
                        calendar_pb.description = calendar["description"]
                    except KeyError:
                        pass
                    try:
                        calendar_pb.location = calendar["location"]
                    except KeyError:
                        pass
                    count = 0
                    event_page_token = None
                    first_request = True
                    # Page through this calendar's events.
                    while True:
                        events = service.events().list(
                            calendarId=calendar["id"],
                            pageToken=event_page_token,
                            singleEvents=True
                        ).execute()
                        if first_request:
                            first_request = False
                            # NOTE(review): Calendar API v3 responses use camelCase
                            # keys ("defaultReminders"); "default_reminders" likely
                            # never matches, so the KeyError path always fires and
                            # reminders are silently skipped -- confirm against the
                            # API reference.
                            try:
                                for reminder in events["default_reminders"]:
                                    calendar_pb.default_reminder.add(
                                        method=reminder["method"],
                                        minutes=reminder["minutes"]
                                    )
                            except KeyError:
                                pass
                        for event in events["items"]:
                            utils.event_to_protobuf(event, calendar_pb)
                            count += 1
                            utils.output(
                                "\rBacked up {0} events".format(count),
                                2
                            )
                        event_page_token = events.get("nextPageToken")
                        if not event_page_token:
                            break
                    # Add newline after the counter stops counting
                    utils.output("\n", 2)
            page_token = calendars.get("nextPageToken")
            if not page_token:
                break
        # Timestamped output file so successive backups never clobber each other.
        calendar_filename = os.path.join(
            utils.resolve_path(FLAGS.storage_dir),
            "calendar-{0}.binproto".format(
                datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S")
            )
        )
        # NOTE(review): binary protobuf written in text mode -- fine on Python 2.
        with open(calendar_filename, "w+") as calendar_file:
            calendar_file.write(calendar_list.SerializeToString())
        utils.output("Finished calendar backup.\n", 1)
def main(argv):
    """Parse the command-line flags, then run the requested backup routines."""
    try:
        argv = FLAGS(argv)
    except gflags.FlagsError as error:
        # Bad flags: report the problem plus the generated usage text and bail.
        usage = "{0}\nUsage: {1} ARGS\n{2}".format(error, sys.argv[0], FLAGS)
        sys.stderr.write(usage)
        sys.exit(1)
    runner = GdataBackup()
    if FLAGS.calendar:
        runner.calendar()
    if FLAGS.contacts:
        runner.contacts()
if __name__ == "__main__":
    # Script entry point: hand the raw argv to main() so gflags can parse it.
    main(sys.argv)
|
toastwaffle/GData-Backup
|
backup.py
|
Python
|
mit
| 7,896
|
[
"VisIt"
] |
5664a374fa8487afa348a210af57bd525b06a4d01f475f211b6312706fccc946
|
# -*- coding: utf-8 -*-
#
"""Testing script for wlsqm, doubles as a usage example.
-JJ 2016-11-10
"""
from __future__ import division, print_function, absolute_import
import time
import numpy as np
import sympy as sy
import scipy.spatial # cKDTree
import matplotlib.pyplot as plt
import mpl_toolkits.mplot3d.axes3d as p3
try:
import wlsqm
except ImportError:
import sys
sys.exit("WLSQM not found; is it installed?")
import sudoku_lhs
# from various scripts, e.g. miniprojects/misc/tworods/main2.py
def axis_marginize(ax, epsx, epsy):
    """Pad the axis limits of `ax` by the given fractional margins.

    epsx/epsy are fractions of the current width/height added on each side.
    """
    xmin, xmax, ymin, ymax = ax.axis()
    dx = (xmax - xmin) * epsx
    dy = (ymax - ymin) * epsy
    ax.axis([xmin - dx, xmax + dx, ymin - dy, ymax + dy])
# from find_neighbors2.py
class SimpleTimer:
    """Context manager that prints the wall-clock time spent in its body."""
    def __init__(self, label="", n=None):
        self.label = label  # prefix for the printed report; empty -> generic text
        self.n = n          # repetitions done inside the block (for per-run average)
    def __enter__(self):
        self.t0 = time.time()
        return self
    def __exit__(self, errtype, errvalue, traceback):
        dt = time.time() - self.t0
        if self.label:
            identifier = "%s" % self.label
        else:
            identifier = "time taken: "
        if self.n is not None:
            avg = ", avg. %gs per run" % (dt/self.n)
        else:
            avg = ""
        print( "%s%gs%s" % (identifier, dt, avg) )
# many simultaneous local models, 2D
#
def testmany2d():
    """Fit many simultaneous local 2D surrogate models over a point cloud.

    Exercises both the driver-mode API (fit_2D_many_parallel) and the
    expert-mode API (ExpertSolver) of wlsqm on a stratified sample of a
    known analytic function, then plots the patched global model against
    the exact surface.
    """
    #########################
    # config
    #########################
    ntasks = 8 # OpenMP parallelization
    axislims = [0., 1., 0., 1.] # [xmin, xmax, ymin, ymax], for plotting
    nvis = 201 # number of visualization points per axis
    expr = sy.sympify("sin(pi*x) * cos(pi*y)")
    points_per_axis = 100 # for point cloud generation
    r = 5e-2 # neighborhood radius
    max_nk = 100 # maximum number of neighbor points to accept into each neighborhood (affects memory allocation)
    knowns = 1 # function value is known
    fit_order = 4
    weighting_method = wlsqm.WEIGHT_CENTER
    max_iter = 10 # for iterative fitting method
    reps = 20 # for demonstration of solving multiple times using the same geometry
    #########################
    # the test itself
    #########################
    print()
    print( "=" * 79 )
    print( "many neighborhoods, 2D case" )
    print( "=" * 79 )
    print()
    # create a stratified point cloud
    print( "generating sudoku sample" )
    with SimpleTimer(label=(" done in ")) as s:
        S,m = sudoku_lhs.sample(2, points_per_axis, 1)
        bins_per_axis = m*points_per_axis
        S = S / float(bins_per_axis - 1) # scale the sample from [0, bins_per_axis-1]**2 to [0, 1]**2
        npoints = len(S)
    print( " %d points" % (npoints) )
    # index the point cloud for fast neighbor searching
    print( "indexing sample" )
    with SimpleTimer(label=(" done in ")) as s:
        tree = scipy.spatial.cKDTree( data=S )
    # If this was an IBVP, we would here get the previous state of the unknown field.
    #
    # In this example, we just sample our function f().
    #
    lambdify_numpy_2d = lambda expr: sy.lambdify(("x","y"), expr, modules="numpy") # SymPy expr --> lambda(x,y)
    f = lambdify_numpy_2d(expr)
    dfdx = lambdify_numpy_2d(sy.diff(expr, "x"))
    dfdy = lambdify_numpy_2d(sy.diff(expr, "y"))
    print( "evaluating example function" )
    with SimpleTimer(label=(" done in ")) as s:
        no = wlsqm.number_of_dofs( dimension=2, order=fit_order )
        fi = np.empty( (npoints,no), dtype=np.float64 )
        fi[:,0] = f( S[:,0], S[:,1] ) # fi[i,0] contains the function value at point S[i,:]
    # find the neighborhoods
    print( "generating neighborhoods for each point" )
    with SimpleTimer(label=(" done in ")) as s:
        hoods = np.zeros( (npoints,max_nk), dtype=np.int32 ) # neighbor point indices (pointing to rows in S[])
        nk = np.empty( (npoints,), dtype=np.int32 ) # number of neighbors, i.e. nk[i] is the number of actually used columns in hoods[i,:]
        for i in range(npoints):
            I = tree.query_ball_point( S[i], r ) # indices of neighbors of S[i] at distance <= r (but also including S[i] itself!)
            I = [ idx for idx in I if idx != i ] # exclude S[i] itself
            if len(I) > max_nk:
                I = I[:max_nk]
            I = np.array( I, dtype=np.int32 )
            nk[i] = len(I)
            hoods[i,:nk[i]] = I
    # DEBUG
    print( "number of neighbors min = %g, avg = %g, max = %g" % ( np.min(nk), np.mean(nk), np.max(nk) ) )
    print( "neighbor lists for each problem instance:" )
    print( hoods )
    print( "number of neighbors for each problem instance:" )
    print( nk )
    # perform the fitting
    print( "fitting %d local surrogate models of order %d, driver mode (fit each model once)" % (npoints, fit_order) )
    # per-model parameter arrays: every model uses the same settings here
    fit_order_array = fit_order * np.ones( (npoints,), dtype=np.int32 )
    knowns_array = knowns * np.ones( (npoints,), dtype=np.int64 )
    wm_array = weighting_method * np.ones( (npoints,), dtype=np.int32 )
    with SimpleTimer(label=(" done in ")) as s:
#        max_iterations_taken = wlsqm.fit_2D_many( xk=S[hoods], fk=fi[hoods,0], nk=nk,
#                                                  xi=S, fi=fi,
#                                                  sens=None, do_sens=False,
#                                                  order=fit_order_array, knowns=knowns_array, weighting_method=wm_array,
#                                                  debug=False )
        max_iterations_taken = wlsqm.fit_2D_many_parallel( xk=S[hoods], fk=fi[hoods,0], nk=nk,
                                                           xi=S, fi=fi,
                                                           sens=None, do_sens=False,
                                                           order=fit_order_array, knowns=knowns_array, weighting_method=wm_array,
                                                           ntasks=ntasks, debug=False )
#        max_iterations_taken = wlsqm.fit_2D_iterative_many( xk=S[hoods], fk=fi[hoods,0], nk=nk,
#                                                            xi=S, fi=fi,
#                                                            sens=None, do_sens=False,
#                                                            order=fit_order_array, knowns=knowns_array, weighting_method=wm_array,
#                                                            max_iter=max_iter, debug=False )
#        max_iterations_taken = wlsqm.fit_2D_iterative_many_parallel( xk=S[hoods], fk=fi[hoods,0], nk=nk,
#                                                                     xi=S, fi=fi,
#                                                                     sens=None, do_sens=False,
#                                                                     order=fit_order_array, knowns=knowns_array, weighting_method=wm_array,
#                                                                     max_iter=max_iter, ntasks=ntasks, debug=False )
    # Expert mode: allows solving multiple times (with new fk data) in the same geometry, performing the prepare step only once.
    #
    # This is especially good for a large number of repetitions with ALGO_BASIC, where a large majority of the computational cost comes from the prepare step.
    #
    # The total advantage is slightly smaller for a small number of repetitions with ALGO_ITERATIVE,
    # since the iterative mode already uses this strategy internally (also when invoked in driver mode).
    #
    print( "fitting %d local surrogate models of order %d, expert mode" % (npoints, fit_order) )
    print( " init" )
    with SimpleTimer(label=(" done in ")) as s:
        solver = wlsqm.ExpertSolver( dimension=2, nk=nk, order=fit_order_array, knowns=knowns_array, weighting_method=wm_array, algorithm=wlsqm.ALGO_BASIC, do_sens=False, max_iter=max_iter, ntasks=ntasks, debug=False )
    print( " prepare" )
    with SimpleTimer(label=(" done in ")) as s:
        solver.prepare( xi=S, xk=S[hoods] )
    print( " fit (each model %d times)" % (reps) )
    with SimpleTimer(label=(" %d reps done in " % reps), n=reps) as s:
        for k in range(reps):
            solver.solve( fk=fi[hoods,0], fi=fi, sens=None )
    # DEBUG
    print( "max corrective iterations taken: %d" % (max_iterations_taken) )
    # see that we got the derivatives at each point
    if fit_order > 0: # no derivatives if piecewise constant fit
        print( dfdx( S[:,0], S[:,1] ) - fi[:,1] )
        print( dfdy( S[:,0], S[:,1] ) - fi[:,2] )
    #########################
    # plotting
    #########################
    # dense regular grid for visualizing exact vs. interpolated surfaces
    xx = np.linspace(axislims[0], axislims[1], nvis)
    yy = np.linspace(axislims[2], axislims[3], nvis)
    X,Y = np.meshgrid(xx, yy)
    W = f(X,Y)
    shp = np.shape(X)
    Xlin = np.reshape(X, -1)
    Ylin = np.reshape(Y, -1)
    x = np.empty( (len(Xlin), 2), dtype=np.float64 )
    x[:,0] = Xlin
    x[:,1] = Ylin
    print( "preparing to interpolate global model" )
    with SimpleTimer(label=(" done in ")) as s:
        solver.prep_interpolate()
    print( "interpolating global model to %d points" % (len(Xlin)) )
    with SimpleTimer(label=(" done in ")) as s:
        W2,dummy = solver.interpolate( x, mode='continuous', r=r ) # slow, continuous
#        W2,dummy = solver.interpolate( x, mode='nearest' ) # fast, surprisingly accurate if a reasonable number of points (and continuous-looking although technically has jumps over Voronoi cell boundaries)
    W2 = np.reshape( W2, shp )
    # make 3d plot of the function
    #
    # see http://matplotlib.sourceforge.net/examples/mplot3d/lines3d_demo.html
    fig = plt.figure(3, figsize=(12,12))
    plt.clf()
    # Axes3D has a tendency to underestimate how much space it needs; it draws its labels
    # outside the window area in certain orientations.
    #
    # This causes the labels to be clipped, which looks bad. We prevent this by creating the axes
    # in a slightly smaller rect (leaving a margin). This way the labels will show - outside the Axes3D,
    # but still inside the figure window.
    #
    # The final touch is to set the window background to a matching white, so that the
    # background of the figure appears uniform.
    #
    fig.patch.set_color( (1,1,1) )
    fig.patch.set_alpha( 1.0 )
    x0y0wh = [ 0.02, 0.02, 0.96, 0.96 ] # left, bottom, width, height (here as fraction of subplot area)
#    # compute the corresponding figure coordinates for the 2x1 subplot layout
#    x0y0wh[0] = 0.5 + 0.5*x0y0wh[0] # left
#    x0y0wh[2] = 0.5*x0y0wh[2] # width
    ax = p3.Axes3D(fig, rect=x0y0wh)
    stride = max(1, (nvis-1)//10) # pick a good-looking stride (for lines; we actually have more vertices, making a smoother-looking curve between the lines)
    # use linewidth=0 to remove the wireframe if desired.
#    surf = ax.plot_surface(X,Y,W, rstride=stride, cstride=stride, cmap=matplotlib.cm.Blues_r, clim=[fmin,fmax], linewidth=0.25, alpha=0.5)
    ax.plot_wireframe(X,Y,W, rstride=stride, cstride=stride, color='k', linewidth=0.5, linestyle='solid')
#    plt.colorbar(surf, shrink=0.5, aspect=5)
#    plt.colorbar(surf, shrink=0.96)
    # sampled points
    if points_per_axis < 50:
        ax.plot( S[:,0], S[:,1], f( S[:,0], S[:,1] ), linestyle='none', marker='o', markeredgecolor='r', markerfacecolor='none' ) # exact
    # surrogate model (global, patched)
    ax.plot_wireframe(X,Y,W2, rstride=stride, cstride=stride, color='r', linewidth=0.5, linestyle='solid')
#    ax.view_init(20, -48)
#    ax.view_init(18, -46)
#    ax.view_init(18, -128)
    ax.view_init(34, 140)
    ax.axis('tight')
    ax.set_zlim(-1.01, 1.01)
    plt.xlabel('$x$')
    plt.ylabel('$y$')
    ax.set_title('f(x,y)')
    print( " uninit" )
    with SimpleTimer(label=(" done in ")) as s:
        del solver
# one local model, 3D
#
def test3d():
    """Fit one local WLSQM surrogate polynomial in 3D and validate it.

    A manufactured SymPy expression (whose derivatives are known analytically)
    is sampled at generated neighbor points around the point xi; wlsqm then
    fits a polynomial surrogate model there. The fitted function value and
    derivatives at xi, the model values at the neighbor points, and the worst
    fit error over the whole fitted neighborhood are printed against the
    exact analytical values.

    All settings are hardcoded in the "config" section below; the function
    takes no parameters and returns None (output goes to stdout).
    """
    #########################
    # config
    #########################
    axislims = [0., 1., 0., 1.] # [xmin, xmax, ymin, ymax], for plotting
    nvis = 101 # number of visualization points per axis

    # Let's manufacture a solution (for which we know the derivatives analytically):
    #
    expr = sy.sympify("sin(pi*x) * cos(pi*y) * exp(z)")
    # expr = sy.sympify("exp(x)*exp(y)*exp(z)")
    # expr = sy.sympify("1*x + 2*y + 3*z")
    # expr = sy.sympify("0 + 1*x + 2*y + 3*z + 4*x**2 + 5*x*y + 6*y**2 + 7*y*z + 8*z**2 + 9*x*z + 10*x**3 + 11*x**2*y + 12*x*y**2 + 13*y**3 + 14*y**2*z + 15*y*z**2 + 16*z**3 + 17*z**2*x + 18*z*x**2 + 19*x*y*z")

    noise_eps = 0#1e-3 # introduce this much Gaussian noise into each sampled function value (use 0. to turn off)

    xi = np.array( (0.45, 0.25, 0.35) ) # point (x,y,z) where we wish to find the derivatives
    # xi = np.array( (0., 0., 0.) ) # point (x,y,z) where we wish to find the derivatives

    # Degree of the surrogate polynomial; a full polynomial of this order will be used.
    #
    # In the fit, when compared to the original function (if any is available),
    # usually the highest order will be nonsense, and the lower orders will be pretty accurate.
    #
    # (I.e. the unfittable part seems to favor the highest order; which OTOH has the highest spatial frequency. Maybe there's something here?)
    #
    fit_order = 4 # 0 (constant), 1 (linear), 2 (quadratic), 3 (cubic) or 4 (quartic)

    # weighting_method = wlsqm.WEIGHT_UNIFORM # best overall fit for function values
    weighting_method = wlsqm.WEIGHT_CENTER # emphasize center to improve derivatives at the point xi

    max_iter = 100 # maximum number of refinement iterations for iterative fitting

    do_sens = False # do sensitivity analysis of solution? ( d( fi[j] ) / d( fk[k] ) )
    debug = False#True # print row scaling and condition number information? (if True, then do_sens must be False; the combination with both True is not supported)

    # Bitmask of what we know at point xi. In this example, just set the bits;
    # the data (from expr) will be automatically inserted into fi[].
    #
    # See the constants b3_* in wlsqm.fitter.defs.
    #
    knowns = 1

    # How many neighbor points to generate (to simulate the meshless 'grid').
    #
    # At least n_unknowns points are needed to make the model fitting work at all
    # (but then the fit will be nonsensical, since it is possible to make the polynomial
    # pass through exactly those points).
    #
    # n_unknows + 1 is the first value that makes the fitting overdetermined,
    # i.e. where the least-squares procedure starts providing any advantage.
    #
    # Here "unknown" means any element of fi[] not tagged as known in the "knowns" bitmask.
    #
    nk = 200 # used if grid_type == 'random'
    r = 1e-1 # neighborhood radius

    # grid_type = 'random'
    grid_type = 'stencil'
    # grid_type = 'sudoku'

    #########################
    # the test itself
    #########################

    print()
    print( "=" * 79 )
    print( "3D case" )
    print( "=" * 79 )
    print()
    print( "expr: %s, xi = %s" % (expr, xi) )

    labels = ["F",
              "DX", "DY", "DZ",
              "DX2", "DXDY", "DY2", "DYDZ", "DZ2", "DXDZ",
              "DX3", "DX2DY", "DXDY2", "DY3", "DY2DZ", "DYDZ2", "DZ3", "DXDZ2", "DX2DZ", "DXDYDZ",
              "DX4", "DX3DY", "DX2DY2", "DXDY3", "DY4", "DY3DZ", "DY2DZ2", "DYDZ3", "DZ4", "DXDZ3", "DX2DZ2", "DX3DZ", "DX2DYDZ", "DXDY2DZ", "DXDYDZ2" ]
    print( "legend: %s" % ("\t".join(labels)) )

    # render the "knowns" bitmask as a tab-separated list of derivative labels
    knowns_str = ""
    for j in range(wlsqm.SIZE3): # SIZE3 = maximum size of c matrix for 3D case
        if j > 0:
            knowns_str += '\t'
        if knowns & (1 << j):
            knowns_str += labels[j]
    print( "knowns: %s" % knowns_str )
    # # http://stackoverflow.com/questions/699866/python-int-to-binary
    # print "knowns (mask): %s" % format(knowns, '010b')[::-1]

    print( "surrogate order: %d" % fit_order )
    if noise_eps > 0.:
        print( "simulating noisy input with eps = %g" % noise_eps )

    # SymPy expr --> lambda(x,y)
    lambdify_numpy_3d = lambda expr: sy.lambdify(("x","y","z"), expr, modules="numpy")

    # the function itself, and all its partial derivatives up to 4th order
    f = lambdify_numpy_3d(expr)
    dfdx = lambdify_numpy_3d(sy.diff(expr, "x"))
    dfdy = lambdify_numpy_3d(sy.diff(expr, "y"))
    dfdz = lambdify_numpy_3d(sy.diff(expr, "z"))
    d2fdx2 = lambdify_numpy_3d(sy.diff(expr, "x", 2))
    d2fdxdy = lambdify_numpy_3d(sy.diff( sy.diff(expr, "x"), "y" ))
    d2fdy2 = lambdify_numpy_3d(sy.diff(expr, "y", 2))
    d2fdydz = lambdify_numpy_3d(sy.diff( sy.diff(expr, "y"), "z" ))
    d2fdz2 = lambdify_numpy_3d(sy.diff(expr, "z", 2))
    d2fdxdz = lambdify_numpy_3d(sy.diff( sy.diff(expr, "x"), "z" ))
    d3fdx3 = lambdify_numpy_3d(sy.diff(expr, "x", 3))
    d3fdx2dy = lambdify_numpy_3d(sy.diff( sy.diff(expr, "x", 2), "y" ))
    d3fdxdy2 = lambdify_numpy_3d(sy.diff( sy.diff(expr, "x"), "y", 2 ))
    d3fdy3 = lambdify_numpy_3d(sy.diff(expr, "y", 3))
    d3fdy2dz = lambdify_numpy_3d(sy.diff( sy.diff(expr, "y", 2), "z" ))
    d3fdydz2 = lambdify_numpy_3d(sy.diff( sy.diff(expr, "y"), "z", 2 ))
    d3fdz3 = lambdify_numpy_3d(sy.diff(expr, "z", 3))
    d3fdxdz2 = lambdify_numpy_3d(sy.diff( sy.diff(expr, "x"), "z", 2 ))
    d3fdx2dz = lambdify_numpy_3d(sy.diff( sy.diff(expr, "x", 2), "z" ))
    d3fdxdydz = lambdify_numpy_3d(sy.diff( sy.diff( sy.diff(expr, "x"), "y"), "z"))
    d4fdx4 = lambdify_numpy_3d(sy.diff(expr, "x", 4))
    d4fdx3dy = lambdify_numpy_3d(sy.diff( sy.diff(expr, "x", 3), "y" ))
    d4fdx2dy2 = lambdify_numpy_3d(sy.diff( sy.diff(expr, "x", 2), "y", 2 ))
    d4fdxdy3 = lambdify_numpy_3d(sy.diff( sy.diff(expr, "x"), "y", 3 ))
    d4fdy4 = lambdify_numpy_3d(sy.diff(expr, "y", 4))
    d4fdy3dz = lambdify_numpy_3d(sy.diff( sy.diff(expr, "y", 3), "z" ))
    d4fdy2dz2 = lambdify_numpy_3d(sy.diff( sy.diff(expr, "y", 2), "z" , 2))
    d4fdydz3 = lambdify_numpy_3d(sy.diff( sy.diff(expr, "y"), "z", 3 ))
    d4fdz4 = lambdify_numpy_3d(sy.diff(expr, "z", 4))
    d4fdxdz3 = lambdify_numpy_3d(sy.diff( sy.diff(expr, "x"), "z", 3 ))
    d4fdx2dz2 = lambdify_numpy_3d(sy.diff( sy.diff(expr, "x", 2), "z" , 2))
    d4fdx3dz = lambdify_numpy_3d(sy.diff( sy.diff(expr, "x", 3), "z" ))
    d4fdx2dydz = lambdify_numpy_3d(sy.diff( sy.diff( sy.diff(expr, "x", 2), "y"), "z"))
    d4fdxdy2dz = lambdify_numpy_3d(sy.diff( sy.diff( sy.diff(expr, "x"), "y", 2), "z"))
    d4fdxdydz2 = lambdify_numpy_3d(sy.diff( sy.diff( sy.diff(expr, "x"), "y"), "z", 2))

    # list so we can refer to the functions by indices
    # NOTE: ordering matches the "labels" list above (and thus the fi[] component ordering).
    funcs = ( f,
              dfdx, dfdy, dfdz,
              d2fdx2, d2fdxdy, d2fdy2, d2fdydz, d2fdz2, d2fdxdz,
              d3fdx3, d3fdx2dy, d3fdxdy2, d3fdy3, d3fdy2dz, d3fdydz2, d3fdz3, d3fdxdz2, d3fdx2dz, d3fdxdydz,
              d4fdx4, d4fdx3dy, d4fdx2dy2, d4fdxdy3, d4fdy4, d4fdy3dz, d4fdy2dz2, d4fdydz3, d4fdz4, d4fdxdz3, d4fdx2dz2, d4fdx3dz, d4fdx2dydz, d4fdxdy2dz, d4fdxdydz2
            )

    # create neighbor points xk around the point xi - this simulates our meshless 'grid'
    #
    if grid_type == 'random':
        xk = np.tile(xi, (nk,1)) + r*2.*( np.random.sample( (nk,3) ) - 0.5 )
    elif grid_type == 'stencil':
        points_per_axis = max(1,fit_order) + 1
        tt = np.linspace(-1., 1., points_per_axis)
        X,Y,Z = np.meshgrid(tt,tt,tt)
        X = np.reshape(X, -1)
        Y = np.reshape(Y, -1)
        Z = np.reshape(Z, -1)
        # convert to list of (x,y) pairs, rejecting the point (0,0) (that represents xi itself), if present
        point_list = [ (x,y,z) for x,y,z in zip(X,Y,Z) if (x,y,z) != (0.,0.,0.) ]
        nk = len(point_list)
        xk = np.array( [ ( xi[0] + r*p[0], xi[1] + r*p[1], xi[2] + r*p[2] ) for p in point_list ] )
    elif grid_type == 'sudoku':
        points_per_axis = max(1,fit_order) + 1
        S,m = sudoku_lhs.sample(3, points_per_axis, 1)
        bins_per_axis = points_per_axis*m
        S = S / float(bins_per_axis - 1) # scale the sample from [0, bins_per_axis-1]**3 to [0, 1]**3
        S = 2. * (S - 0.5) # move to [-1, 1]**3

        # If points_per_axis is odd, a bin exists exactly at the center, so Sudoku LHS may place one point there.
        #
        # This would coincide with the point xi, so it is not useful, because we want the neighbors to be
        # distinct from xi.
        #
        # Thus, for odd points_per_axis, filter the sample to remove the point at the origin if it happens to be there.
        # Note that because of the scaling, the coordinates might not be exactly zero. We HACK by checking numerical equality;
        # a proper solution would be to filter S before the conversion to float.
        #
        if points_per_axis % 2 == 1:
            point_list = S.tolist()
            oldlen = len(point_list)
            point_list = [item for item in point_list if not (abs(item[0]) < 1e-8 and abs(item[1]) < 1e-8 and abs(item[2]) < 1e-8)]
            S = np.array(point_list)
            if len(point_list) < oldlen:
                print( "Sudoku LHS sampled the point at the origin; discarding it from the sample" )
        nk = len(S)
        xk = np.tile(xi, (nk,1)) + r*S
    else:
        raise ValueError("Unknown grid_type '%s'; valid: 'random', 'stencil', 'sudoku'" % grid_type)

    # sample the function values at the neighbor points xk (these are used to fit the surrogate model)
    #
    sample_also_xi_str = " (and xi itself)" if knowns & 1 else ""
    print( "sampling %d points%s" % (nk, sample_also_xi_str) )
    fk = np.empty( (nk,), dtype=np.float64 )
    for k in range(nk):
        fk[k] = f( xk[k,0], xk[k,1], xk[k,2] )

    # simulate numerical errors by adding noise to the neighbor point function value samples
    #
    if noise_eps > 0.:
        # # uniform
        # noise = noise_eps*2.*(np.random.sample( np.shape(fk) ) - 0.5)
        # Gaussian, truncated (clipped to +-3 sigma so outliers cannot exceed noise_eps)
        mu = 0.0
        sigma = noise_eps / 3.
        noise = np.random.normal( loc=mu, scale=sigma, size=np.shape(fk) )
        noise[noise < -3.*sigma] = -3.*sigma
        noise[noise > +3.*sigma] = +3.*sigma
        fk += noise

    # set knowns *at point xi*
    #
    # we use nan to spot unfilled entries
    fi = np.nan * np.empty( (wlsqm.SIZE3,), dtype=np.float64 ) # F, DX, DY, DZ, ... at point xi
    for d in range(wlsqm.SIZE3):
        if knowns & (1 << d):
            fi[d] = funcs[d]( xi[0], xi[1], xi[2] ) # fill in the known value # TODO: add noise here too?

    # allocate array for sensitivity data
    #
    # for output; sens[k,j] = d(fi[j])/d(fk[k]) if f[i] unknown
    # nan if fi[j] known
    #
    # Note that if order=1, the part on second derivatives is not touched (so that an (nk,3) array
    # is valid); hence we pre-fill by nan.
    #
    if do_sens:
        sens = np.nan * np.empty( (nk,wlsqm.SIZE3), dtype=np.float64 )
    else:
        sens = None

    # fit the surrogate model (see wlsqm.fitter.simple for detailed documentation)
    #
    if debug:
        print() # blank line before debug info
    iterations_taken = wlsqm.fit_3D_iterative( xk, fk, xi, fi, sens, do_sens=do_sens, order=fit_order, knowns=knowns, debug=debug, weighting_method=weighting_method, max_iter=max_iter )
    # iterations_taken = wlsqm.fit_3D( xk, fk, xi, fi, sens, do_sens=do_sens, order=fit_order, knowns=knowns, debug=debug, weighting_method=weighting_method )
    print( "refinement iterations taken: %d" % iterations_taken )

    # check exact solution and relative error
    #
    exact = np.array( [func( xi[0], xi[1], xi[2] ) for func in funcs] )
    err = (fi - exact)
    print()
    print( "derivatives at xi:" )
    print( "exact:" )
    print( exact )
    print( "wlsqm solution:" )
    print( fi )
    if do_sens:
        print( "sensitivity:" )
        print( sens )
    print( "abs error:" )
    print( err )
    print( "rel error:" )
    print( (err / exact) )

    #########################
    # plotting
    #########################
    # NOTE(review): despite the section name, the 3D case only evaluates the model
    # on a grid and reports errors; no matplotlib figure is created here.

    # surrogate model - the returned fi[] are actually the coefficients of a polynomial
    model = wlsqm.lambdify_fit( xi, fi, dimension=3, order=fit_order ) # lambda x,y : ...
    print()
    print( "function values at neighbor points:" )
    fxk = f( xk[:,0], xk[:,1], xk[:,2] )
    mxk = model( xk[:,0], xk[:,1], xk[:,2] )
    print( "exact:" )
    print( fxk )
    print( "wlsqm solution:" )
    print( mxk )
    print( "abs error:" )
    errf = mxk - fxk
    print( errf )
    print( "rel error:" )
    print( (errf / fxk) )

    # comparison over the whole fitted neighborhood [xi - r, xi + r]**3
    xx2 = np.linspace(xi[0] - r, xi[0] + r, nvis)
    yy2 = np.linspace(xi[1] - r, xi[1] + r, nvis)
    zz2 = np.linspace(xi[2] - r, xi[2] + r, nvis)
    X2,Y2,Z2 = np.meshgrid(xx2, yy2, zz2)
    W2 = model(X2,Y2,Z2)
    W3 = f(X2,Y2,Z2)
    diff = W2 - W3 # fitted - exact
    idx = np.argmax(np.abs( diff )) # flat index into the 3D array (argmax without axis= flattens)
    diff_lin = np.reshape(diff, -1)
    W3_lin = np.reshape(W3, -1)
    maxerr_abs = diff_lin[idx]
    maxerr_rel = diff_lin[idx] / W3_lin[idx]
    print( "largest absolute total fit error (over the domain of the fit, not just the neighbor points):" )
    print( "absolute: %g" % (maxerr_abs) )
    print( "relative: %g" % (maxerr_rel) )
# one local model, 2D
#
def test2d():
    """Fit one local WLSQM surrogate polynomial in 2D, validate and plot it.

    A manufactured SymPy expression (whose derivatives are known analytically)
    is sampled at generated neighbor points around the point xi; wlsqm then
    fits a polynomial surrogate model there. The fitted value and derivatives
    at xi, the model values at the neighbor points, and the worst fit error
    over the fitted neighborhood are printed against the exact analytical
    values. Finally a matplotlib figure is drawn: a 2D map of the sampling
    geometry and a 3D wireframe of the function vs. the surrogate.

    All settings are hardcoded in the "config" section below; the function
    takes no parameters and returns None (output goes to stdout and figure 2).
    """
    #########################
    # config
    #########################
    axislims = [0., 1., 0., 1.] # [xmin, xmax, ymin, ymax], for plotting
    nvis = 101 # number of visualization points per axis

    # Let's manufacture a solution (for which we know the derivatives analytically):
    #
    # expr = sy.sympify("2*x + 3*y")
    # expr = sy.sympify("0.2*x + 0.3*y")
    # expr = sy.sympify("1.0 + 2*x + 3*y + 4*x**2 + 5*x*y + 6*y**2")
    # expr = sy.sympify("0.1 + 0.2*x + 0.3*y + 0.4*x**2 + 0.5*x*y + 0.6*y**2")
    # expr = sy.sympify("sin(pi*x)")
    expr = sy.sympify("sin(pi*x) * cos(pi*y)")
    # expr = sy.sympify("exp(x) * 1/(1 + y) - 1")
    # expr = sy.sympify("exp(x) * log(1 + y)")
    # expr = sy.sympify("1.0 + 2*x + 3*y + 4*x**2 + 5*x*y + 6*y**2 + 7*x**3 + 8*y**4")

    noise_eps = 0#1e-3 # introduce this much Gaussian noise into each sampled function value (use 0. to turn off)

    xi = np.array( (0.45, 0.25) ) # point (x,y) where we wish to find the derivatives

    # Degree of the surrogate polynomial; a full polynomial of this order will be used.
    #
    # In the fit, when compared to the original function (if any is available),
    # usually the highest order will be nonsense, and the lower orders will be pretty accurate.
    #
    # (I.e. the unfittable part seems to favor the highest order; which OTOH has the highest spatial frequency. Maybe there's something here?)
    #
    fit_order = 4 # 0 (constant), 1 (linear), 2 (quadratic), 3 (cubic) or 4 (quartic)

    # weighting_method = wlsqm.WEIGHT_UNIFORM # best overall fit for function values
    weighting_method = wlsqm.WEIGHT_CENTER # emphasize center to improve derivatives at the point xi

    max_iter = 100 # maximum number of refinement iterations for iterative fitting

    do_sens = False # do sensitivity analysis of solution? ( d( fi[j] ) / d( fk[k] ) )
    debug = False#True # print row scaling and condition number information? (if True, then do_sens must be False; the combination with both True is not supported)

    # Bitmask of what we know at point xi. In this example, just set the bits;
    # the data (from expr) will be automatically inserted into fi[].
    #
    # Bits from least sig. to most sig.: F, DX, DY, DX2, DXDY, DY2, ... (see ordering of "labels", below)
    #
    knowns = 1

    # How many neighbor points to generate (to simulate the meshless 'grid').
    #
    # At least n_unknowns points are needed to make the model fitting work at all
    # (but then the fit will be nonsensical, since it is possible to make the polynomial
    # pass through exactly those points).
    #
    # n_unknows + 1 is the first value that makes the fitting overdetermined,
    # i.e. where the least-squares procedure starts providing any advantage.
    #
    # Here "unknown" means any element of fi[] not tagged as known in the "knowns" bitmask.
    #
    nk = 24 # used if grid_type == 'random'
    r = 1e-1 # neighborhood radius

    # grid_type = 'random'
    # grid_type = 'stencil'
    grid_type = 'sudoku'

    #########################
    # the test itself
    #########################

    print()
    print( "=" * 79 )
    print( "2D case" )
    print( "=" * 79 )
    print()
    print( "expr: %s, xi = %s" % (expr, xi) )

    labels = ["F", "DX", "DY", "DX2", "DXDY", "DY2", "DX3", "DX2DY", "DXDY2", "DY3", "DX4", "DX3DY", "DX2DY2", "DXDY3", "DY4"]
    print( "legend: %s" % ("\t".join(labels)) )

    # render the "knowns" bitmask as a tab-separated list of derivative labels
    knowns_str = ""
    for j in range(wlsqm.SIZE2): # SIZE2 = maximum size of c matrix for 2D case
        if j > 0:
            knowns_str += '\t'
        if knowns & (1 << j):
            knowns_str += labels[j]
    print( "knowns: %s" % knowns_str )
    # # http://stackoverflow.com/questions/699866/python-int-to-binary
    # print ("knowns (mask): %s" % format(knowns, '010b')[::-1] )

    print( "surrogate order: %d" % fit_order )
    if noise_eps > 0.:
        print( "simulating noisy input with eps = %g" % noise_eps )

    # SymPy expr --> lambda(x,y)
    lambdify_numpy_2d = lambda expr: sy.lambdify(("x","y"), expr, modules="numpy")

    # the function itself, and all its partial derivatives up to 4th order
    f = lambdify_numpy_2d(expr)
    dfdx = lambdify_numpy_2d(sy.diff(expr, "x"))
    dfdy = lambdify_numpy_2d(sy.diff(expr, "y"))
    d2fdx2 = lambdify_numpy_2d(sy.diff(expr, "x", 2))
    d2fdxdy = lambdify_numpy_2d(sy.diff( sy.diff(expr, "x"), "y" ))
    d2fdy2 = lambdify_numpy_2d(sy.diff(expr, "y", 2))
    d3fdx3 = lambdify_numpy_2d(sy.diff(expr, "x", 3))
    d3fdx2dy = lambdify_numpy_2d(sy.diff( sy.diff(expr, "x", 2), "y" ))
    d3fdxdy2 = lambdify_numpy_2d(sy.diff( sy.diff(expr, "x"), "y", 2 ))
    d3fdy3 = lambdify_numpy_2d(sy.diff(expr, "y", 3))
    d4fdx4 = lambdify_numpy_2d(sy.diff(expr, "x", 4))
    d4fdx3dy = lambdify_numpy_2d(sy.diff( sy.diff(expr, "x", 3), "y" ))
    d4fdx2dy2 = lambdify_numpy_2d(sy.diff( sy.diff(expr, "x", 2), "y", 2 ))
    d4fdxdy3 = lambdify_numpy_2d(sy.diff( sy.diff(expr, "x"), "y", 3 ))
    d4fdy4 = lambdify_numpy_2d(sy.diff(expr, "y", 4))
    funcs = (f, dfdx, dfdy, d2fdx2, d2fdxdy, d2fdy2, d3fdx3, d3fdx2dy, d3fdxdy2, d3fdy3, d4fdx4, d4fdx3dy, d4fdx2dy2, d4fdxdy3, d4fdy4) # list so we can refer to the functions by indices

    # create neighbor points xk around the point xi - this simulates our meshless 'grid'
    #
    if grid_type == 'random':
        xk = np.tile(xi, (nk,1)) + r*2.*( np.random.sample( (nk,2) ) - 0.5 )
    elif grid_type == 'stencil':
        points_per_axis = max(1,fit_order) + 1
        tt = np.linspace(-1., 1., points_per_axis)
        X,Y = np.meshgrid(tt,tt)
        X = np.reshape(X, -1)
        Y = np.reshape(Y, -1)
        # convert to list of (x,y) pairs, rejecting the point (0,0) (that represents xi itself), if present
        point_list = [ (x,y) for x,y in zip(X,Y) if (x,y) != (0.,0.) ]
        nk = len(point_list)
        xk = np.array( [ ( xi[0] + r*p[0], xi[1] + r*p[1] ) for p in point_list ] )
    elif grid_type == 'sudoku':
        points_per_axis = max(1,fit_order) + 1
        S,m = sudoku_lhs.sample(2, points_per_axis, 1)
        bins_per_axis = points_per_axis*m
        S = S / float(bins_per_axis - 1) # scale the sample from [0, bins_per_axis-1]**2 to [0, 1]**2
        S = 2. * (S - 0.5) # move to [-1, 1]**2

        # If points_per_axis is odd, a bin exists exactly at the center, so Sudoku LHS may place one point there.
        #
        # This would coincide with the point xi, so it is not useful, because we want the neighbors to be
        # distinct from xi.
        #
        # Thus, for odd points_per_axis, filter the sample to remove the point at the origin if it happens to be there.
        # Note that because of the scaling, the coordinates might not be exactly zero. We HACK by checking numerical equality;
        # a proper solution would be to filter S before the conversion to float.
        #
        if points_per_axis % 2 == 1:
            point_list = S.tolist()
            oldlen = len(point_list)
            point_list = [ item for item in point_list if not (abs(item[0]) < 1e-8 and abs(item[1]) < 1e-8) ]
            S = np.array(point_list)
            if len(point_list) < oldlen:
                print( "Sudoku LHS sampled the point at the origin; discarding it from the sample" )
        nk = len(S)
        xk = np.tile(xi, (nk,1)) + r*S
    else:
        raise ValueError("Unknown grid_type '%s'; valid: 'random', 'stencil', 'sudoku'" % grid_type)

    # sample the function values at the neighbor points xk (these are used to fit the surrogate model)
    #
    sample_also_xi_str = " (and xi itself)" if knowns & 1 else ""
    print( "sampling %d points%s" % (nk, sample_also_xi_str) )
    fk = np.empty( (nk,), dtype=np.float64 )
    for k in range(nk):
        fk[k] = f( xk[k,0], xk[k,1] )

    # simulate numerical errors by adding noise to the neighbor point function value samples
    #
    if noise_eps > 0.:
        # # uniform
        # noise = noise_eps*2.*(np.random.sample( np.shape(fk) ) - 0.5)
        # Gaussian, truncated (clipped to +-3 sigma so outliers cannot exceed noise_eps)
        mu = 0.0
        sigma = noise_eps / 3.
        noise = np.random.normal( loc=mu, scale=sigma, size=np.shape(fk) )
        noise[noise < -3.*sigma] = -3.*sigma
        noise[noise > +3.*sigma] = +3.*sigma
        fk += noise

    # set knowns *at point xi*
    #
    # we use nan to spot unfilled entries
    fi = np.nan * np.empty( (wlsqm.SIZE2,), dtype=np.float64 ) # F, DX, DY, DX2, DXDY, DY2, DX3, DX2DY, DXDY2, DY3 at point xi
    for d in range(wlsqm.SIZE2):
        if knowns & (1 << d):
            fi[d] = funcs[d]( xi[0], xi[1] ) # fill in the known value # TODO: add noise here too?

    # allocate array for sensitivity data
    #
    # for output; sens[k,j] = d(fi[j])/d(fk[k]) if f[i] unknown
    # nan if fi[j] known
    #
    # Note that if order=1, the part on second derivatives is not touched (so that an (nk,3) array
    # is valid); hence we pre-fill by nan.
    #
    if do_sens:
        sens = np.nan * np.empty( (nk,wlsqm.SIZE2), dtype=np.float64 )
    else:
        sens = None

    # fit the surrogate model (see wlsqm.fitter.simple for detailed documentation)
    #
    if debug:
        print() # blank line before debug info
    iterations_taken = wlsqm.fit_2D_iterative( xk, fk, xi, fi, sens, do_sens=do_sens, order=fit_order, knowns=knowns, debug=debug, weighting_method=weighting_method, max_iter=max_iter )
    print( "refinement iterations taken: %d" % iterations_taken )

    # check exact solution and relative error
    #
    exact = np.array( [func( xi[0], xi[1] ) for func in funcs] )
    err = (fi - exact)
    print()
    print( "derivatives at xi:" )
    print( "exact:" )
    print( exact )
    print( "wlsqm solution:" )
    print( fi )
    if do_sens:
        print( "sensitivity:" )
        print( sens )
    print( "abs error:" )
    print( err )
    print( "rel error:" )
    print( (err / exact) )

    #########################
    # plotting
    #########################

    # visualization grid over the whole plot domain (for the exact function)
    xx = np.linspace(axislims[0], axislims[1], nvis)
    yy = np.linspace(axislims[2], axislims[3], nvis)
    X,Y = np.meshgrid(xx, yy)
    W = f(X,Y)

    # surrogate model - the returned fi[] are actually the coefficients of a polynomial
    model = wlsqm.lambdify_fit( xi, fi, dimension=2, order=fit_order ) # lambda x,y : ...

    # visualization grid over the fitted neighborhood only
    xx2 = np.linspace(xi[0] - r, xi[0] + r, nvis)
    yy2 = np.linspace(xi[1] - r, xi[1] + r, nvis)
    X2,Y2 = np.meshgrid(xx2, yy2)
    W2 = model(X2,Y2)

    # # It is also possible to interpolate the model using the C API wrapper directly.
    # # The result is exactly the same; sometimes this API may be more convenient.
    # #
    # # Note that for the C API, the points x to which to interpolate the model must be formatted as x[k,:] = (xk,yk).
    # #
    # shp = np.shape(X2)
    # X2lin = np.reshape(X2, -1)
    # Y2lin = np.reshape(Y2, -1)
    # temp_x = np.array( [ (x,y) for x,y in zip(X2lin,Y2lin) ] )
    # out = wlsqm.interpolate_fit( xi, fi, dimension=2, order=fit_order, x=temp_x )
    # out = np.reshape( out, shp )
    # print()
    # print( "difference between Python and C API model interpolation:" )
    # print( out - W2 ) # should be close to zero

    print()
    print( "function values at neighbor points:" )
    fxk = f( xk[:,0], xk[:,1] )
    mxk = model( xk[:,0], xk[:,1] )
    print( "exact:" )
    print( fxk )
    print( "wlsqm solution:" )
    print( mxk )
    print( "abs error:" )
    errf = mxk - fxk
    print( errf )
    print( "rel error:" )
    print( (errf / fxk) )

    # comparison over the whole fitted neighborhood
    W3 = f(X2,Y2)
    diff = W2 - W3 # fitted - exact
    idx = np.argmax(np.abs( diff )) # flat index into the 2D array (argmax without axis= flattens)
    diff_lin = np.reshape(diff, -1)
    W3_lin = np.reshape(W3, -1)
    maxerr_abs = diff_lin[idx]
    maxerr_rel = diff_lin[idx] / W3_lin[idx]
    print( "largest absolute total fit error (over the domain of the fit, not just the neighbor points):" )
    print( "absolute: %g" % (maxerr_abs) )
    print( "relative: %g" % (maxerr_rel) )

    fig = plt.figure(2, figsize=(12,6)) # for 2x1 subplots
    # fig = plt.figure(2, figsize=(12,12))
    fig.clf()

    # left subplot: 2D top-down view of the sampling geometry
    ax = plt.subplot(1,2, 1)
    # outline of the whole plot domain
    ax.plot( (xx[0], xx[-1]), (yy[0], yy[0]), 'k-' )
    ax.plot( (xx[-1], xx[-1]), (yy[0], yy[-1]), 'k-' )
    ax.plot( (xx[0], xx[-1]), (yy[-1], yy[-1]), 'k-' )
    ax.plot( (xx[0], xx[0]), (yy[0], yy[-1]), 'k-' )
    # the neighbor points
    ax.plot( xk[:,0], xk[:,1], linestyle='none', marker='o', markeredgecolor='r', markerfacecolor='none' )
    # outline of the sampled neighborhood [xi - r, xi + r]**2
    ax.plot( (xi[0] - r, xi[0] + r), (xi[1] - r, xi[1] - r), 'r-' )
    ax.plot( (xi[0] + r, xi[0] + r), (xi[1] - r, xi[1] + r), 'r-' )
    ax.plot( (xi[0] - r, xi[0] + r), (xi[1] + r, xi[1] + r), 'r-' )
    ax.plot( (xi[0] - r, xi[0] - r), (xi[1] - r, xi[1] + r), 'r-' )
    # the point xi itself
    ax.plot( (xi[0],), (xi[1],), linestyle='none', marker='x', markeredgecolor='k', markerfacecolor='none' )
    plt.axis('tight')
    axis_marginize(ax, 0.02, 0.02)
    plt.grid(b=True, which='both')
    plt.xlabel('x')
    plt.ylabel('y')

    # right subplot: 3D wireframe of the function vs. the surrogate
    plt.subplot(1,2, 2)

    # make 3d plot of the function
    #
    # see http://matplotlib.sourceforge.net/examples/mplot3d/lines3d_demo.html

    # Axes3D has a tendency to underestimate how much space it needs; it draws its labels
    # outside the window area in certain orientations.
    #
    # This causes the labels to be clipped, which looks bad. We prevent this by creating the axes
    # in a slightly smaller rect (leaving a margin). This way the labels will show - outside the Axes3D,
    # but still inside the figure window.
    #
    # The final touch is to set the window background to a matching white, so that the
    # background of the figure appears uniform.
    #
    fig.patch.set_color( (1,1,1) )
    fig.patch.set_alpha( 1.0 )
    x0y0wh = [ 0.02, 0.02, 0.96, 0.96 ] # left, bottom, width, height (here as fraction of subplot area)

    # compute the corresponding figure coordinates for the 2x1 subplot layout
    x0y0wh[0] = 0.5 + 0.5*x0y0wh[0] # left
    x0y0wh[2] = 0.5*x0y0wh[2] # width

    ax = p3.Axes3D(fig, rect=x0y0wh)

    stride = max(1, (nvis-1)//10) # pick a good-looking stride (for lines; we actually have more vertices, making a smoother-looking curve between the lines)
    # use linewidth=0 to remove the wireframe if desired.
    # surf = ax.plot_surface(X,Y,W, rstride=stride, cstride=stride, cmap=matplotlib.cm.Blues_r, clim=[fmin,fmax], linewidth=0.25, alpha=0.5)
    ax.plot_wireframe(X,Y,W, rstride=stride, cstride=stride, color='k', linewidth=0.5, linestyle='solid')
    # plt.colorbar(surf, shrink=0.5, aspect=5)
    # plt.colorbar(surf, shrink=0.96)

    # sampled points
    if noise_eps > 0.:
        ax.plot( xk[:,0], xk[:,1], f( xk[:,0], xk[:,1] ), linestyle='none', marker='o', markeredgecolor='k', markerfacecolor='none' ) # exact
        ax.plot( xk[:,0], xk[:,1], fk[:], linestyle='none', marker='o', markeredgecolor='r', markerfacecolor='none' ) # including noise
    else:
        ax.plot( xk[:,0], xk[:,1], f( xk[:,0], xk[:,1] ), linestyle='none', marker='o', markeredgecolor='r', markerfacecolor='none' ) # exact

    # surrogate model
    ax.plot_wireframe(X2,Y2,W2, rstride=stride, cstride=stride, color='r', linewidth=0.5, linestyle='solid')

    # point xi
    ax.plot( (xi[0],), (xi[1],), f( xi[0], xi[1] ), linestyle='none', marker='x', markeredgecolor='k', markerfacecolor='none' )

    # ax.view_init(20, -48)
    # ax.view_init(18, -46)
    # ax.view_init(18, -128)
    ax.view_init(34, 140)
    ax.axis('tight')
    ax.set_zlim(-1.01, 1.01)
    plt.xlabel('$x$')
    plt.ylabel('$y$')
    ax.set_title('f(x,y)')
# one local model, 1D
#
def test1d():
    """Fit one local WLSQM surrogate polynomial in 1D, validate and plot it.

    A manufactured SymPy expression (whose derivatives are known analytically)
    is sampled at generated neighbor points around the point xi; wlsqm then
    fits a polynomial surrogate model there. The fitted value and derivatives
    at xi, the model (and its derivatives) at the neighbor points, and the
    worst fit error over the fitted neighborhood are printed against the
    exact analytical values; a matplotlib figure of the function, the
    surrogate and the sampling geometry is drawn.

    All settings are hardcoded in the "config" section below; the function
    takes no parameters and returns None (output goes to stdout and figure 1).
    """
    #########################
    # config
    #########################
    axislims = [0., 1., 0., 1.] # [xmin, xmax, ymin, ymax], for plotting
    nvis = 101 # number of visualization points

    # Let's manufacture a solution (for which we know the derivatives analytically):
    #
    # expr = sy.sympify("2*x")
    # expr = sy.sympify("0.2*x")
    # expr = sy.sympify("1.0 + 2*x + 4*x**2")
    # expr = sy.sympify("0.1 + 0.2*x + 0.4*x**2")
    expr = sy.sympify("sin(pi*x)")
    # expr = sy.sympify("1 / (1 + x)")
    # expr = sy.sympify("exp(x)")
    # expr = sy.sympify("log(1 + x)")
    # expr = sy.sympify("1.0 + 2*x + 4*x**2 + 7*x**3")

    noise_eps = 0#1e-3 # introduce this much Gaussian noise into each sampled function value (use 0. to turn off)

    xi = 0.45 # point x where we wish to find the derivatives

    # Degree of the surrogate polynomial.
    #
    # In the fit, when compared to the original function (if any is available),
    # usually the highest order will be nonsense, and the lower orders will be pretty accurate.
    #
    fit_order = 4 # 0 (constant), 1 (linear), 2 (quadratic), 3 (cubic) or 4 (quartic)

    # weighting_method = wlsqm.WEIGHT_UNIFORM # best overall fit for function values
    weighting_method = wlsqm.WEIGHT_CENTER # emphasize center to improve derivatives at the point xi

    max_iter = 100 # maximum number of refinement iterations for iterative fitting

    do_sens = False # do sensitivity analysis of solution? ( d( fi[j] ) / d( fk[k] ) )
    debug = False#True # print row scaling and condition number information? (if True, then do_sens must be False; the combination with both True is not supported)

    # Bitmask of what we know at point xi. In this example, just set the bits;
    # the data (from expr) will be automatically inserted into fi[].
    #
    # Bits from least sig. to most sig.: F, DX, DX2, DX3, DX4
    #
    knowns = 1

    # How many neighbor points to generate (to simulate the meshless 'grid').
    #
    # At least n_unknowns points are needed to make the model fitting work at all
    # (but then the fit will be nonsensical, since it is possible to make the polynomial
    # pass through exactly those points).
    #
    # n_unknows + 1 is the first value that makes the fitting overdetermined,
    # i.e. where the least-squares procedure starts providing any advantage.
    #
    # Here "unknown" means any element of fi[] not tagged as known in the "knowns" bitmask.
    #
    nk = 7 # used if grid_type == 'random'
    r = 1e-1 # neighborhood radius

    # grid_type = 'random'
    grid_type = 'stencil'

    #########################
    # the test itself
    #########################

    print()
    print( "=" * 79 )
    print( "1D case" )
    print( "=" * 79 )
    print()
    print( "expr: %s, xi = %s" % (expr, xi) )

    labels = ["F", "DX", "DX2", "DX3", "DX4"]
    print( "legend: %s" % ("\t".join(labels)) )

    # render the "knowns" bitmask as a tab-separated list of derivative labels
    knowns_str = ""
    for j in range(wlsqm.SIZE1): # SIZE1 = maximum size of c matrix for 1D case
        if j > 0:
            knowns_str += '\t'
        if knowns & (1 << j):
            knowns_str += labels[j]
    print( "knowns: %s" % knowns_str )

    print( "surrogate order: %d" % fit_order )
    if noise_eps > 0.:
        print( "simulating noisy input with eps = %g" % noise_eps )

    # SymPy expr --> lambda(x)
    lambdify_numpy_1d = lambda expr: sy.lambdify(("x"), expr, modules="numpy")

    # the function itself, and its derivatives up to 4th order
    f = lambdify_numpy_1d(expr)
    dfdx = lambdify_numpy_1d(sy.diff(expr, "x"))
    d2fdx2 = lambdify_numpy_1d(sy.diff(expr, "x", 2))
    d3fdx3 = lambdify_numpy_1d(sy.diff(expr, "x", 3))
    d4fdx4 = lambdify_numpy_1d(sy.diff(expr, "x", 4))
    funcs = (f, dfdx, d2fdx2, d3fdx3, d4fdx4) # list so we can refer to the functions by indices

    # create neighbor points xk around the point xi - this simulates our meshless 'grid'
    #
    if grid_type == 'random':
        xk = xi + r*2.*( np.random.sample( (nk,) ) - 0.5 )
    elif grid_type == 'stencil':
        points_per_axis = max(1,fit_order) + 1
        tt = np.linspace(-1., 1., points_per_axis)
        # reject the point at the origin if it is there
        point_list = [ x for x in tt if x != 0. ]
        xk = np.array( [ xi + r*p for p in point_list ] )
        nk = len(xk)
    else:
        raise ValueError("Unknown grid_type '%s'; valid: 'random', 'stencil'" % grid_type)

    # sample the function values at the neighbor points xk (these are used to fit the surrogate model)
    #
    sample_also_xi_str = " (and xi itself)" if knowns & 1 else ""
    print( "sampling %d points%s" % (nk, sample_also_xi_str) )
    fk = np.empty( (nk,), dtype=np.float64 )
    for k in range(nk):
        fk[k] = f( xk[k] )

    # simulate numerical errors by adding noise to the neighbor point function value samples
    #
    if noise_eps > 0.:
        # # uniform
        # noise = noise_eps*2.*(np.random.sample( np.shape(fk) ) - 0.5)
        # Gaussian, truncated (clipped to +-3 sigma so outliers cannot exceed noise_eps)
        mu = 0.0
        sigma = noise_eps / 3.
        noise = np.random.normal( loc=mu, scale=sigma, size=np.shape(fk) )
        noise[noise < -3.*sigma] = -3.*sigma
        noise[noise > +3.*sigma] = +3.*sigma
        fk += noise

    # set knowns *at point xi*
    #
    # we use nan to spot unfilled entries
    fi = np.nan * np.empty( (wlsqm.SIZE1,), dtype=np.float64 )
    for d in range(wlsqm.SIZE1):
        if knowns & (1 << d):
            fi[d] = funcs[d]( xi ) # fill in the known value # TODO: add noise here too?

    # allocate array for sensitivity data
    #
    # for output; sens[k,j] = d(fi[j])/d(fk[k]) if f[i] unknown
    # nan if fi[j] known
    #
    # Note that if order=1, the part on second derivatives is not touched (so that an (nk,3) array
    # is valid); hence we pre-fill by nan.
    #
    if do_sens:
        sens = np.nan * np.empty( (nk,wlsqm.SIZE1), dtype=np.float64 )
    else:
        sens = None

    # do the numerical differentiation
    #
    # xk : in, (nk,) array of neighbor point coordinates
    # fk : in, (nk,) array of function values at the neighbor points
    # xi : in, double, coordinate of the point xi
    # fi : in/out: if order=2, (3,) array containing (f, dfdx, d2fdx2) at point xi
    #              if order=1, (2,) array containing (f, dfdx) at point xi
    #              on input: those elements must be filled that correspond to the bitmask "knowns".
    #              on output: the unknown elements will be filled in (leaving the knowns untouched).
    # sens : out: if order=2, (nk,3) array containing sensitivity information.
    #             if order=1, (nk,2) array containing sensitivity information.
    #             if fi[j] is unknown: sens[k,j] = d( fi[j] ) / d( fk[k] )
    #             if fi[j] is known: sens[:,j] = nan (to indicate "not applicable").
    # order : in, order of the surrogate polynomial. Can be 1 or 2.
    #         Linear fit gives first derivatives only and has O(h**2) error.
    #         Quadratic fit gives first and second derivatives and has O(h**3) error.
    # knowns : in, bitmask describing what is known about the function at the point xi.
    #          See the b1_* (bitmask, 1D case) constants.
    #
    if debug:
        print() # blank line before debug info
    iterations_taken = wlsqm.fit_1D_iterative( xk, fk, xi, fi, sens, do_sens=do_sens, order=fit_order, knowns=knowns, debug=debug, weighting_method=weighting_method, max_iter=max_iter )
    print( "refinement iterations taken: %d" % iterations_taken )

    # check exact solution and relative error
    #
    exact = np.array( [func( xi ) for func in funcs] )
    err = (fi - exact)
    print()
    print( "derivatives at xi:" )
    print( "exact:" )
    print( exact )
    print( "wlsqm solution:" )
    print( fi )
    if do_sens:
        print( "sensitivity:" )
        print( sens )
    print( "abs error:" )
    print( err )
    print( "rel error:" )
    print( (err / exact) )

    #########################
    # plotting
    #########################
    nvis = 10001 # override for 1D: dense sampling is cheap here
    xx = np.linspace(axislims[0], axislims[1], nvis)
    ww = f(xx)

    # surrogate model - the returned fi[] are actually the coefficients of a polynomial
    model = wlsqm.lambdify_fit( xi, fi, dimension=1, order=fit_order ) # lambda x : ...
    xx2 = np.linspace(xi - r, xi + r, nvis)
    ww2 = model(xx2)

    # # It is also possible to interpolate the model using the C API wrapper directly.
    # # The result is exactly the same; sometimes this API may be more convenient.
    # #
    # # Note that for the C API, the points x to which to interpolate the model must be formatted as x[:] = (xk).
    # #
    # out = wlsqm.interpolate_fit( xi, fi, dimension=1, order=fit_order, x=xx2 )
    # print
    # print "difference between Python and C API model interpolation:"
    # print out - ww2 # should be close to zero

    print()
    print( "function values (and derivatives) at neighbor points:" )
    flags = [ wlsqm.i1_F, wlsqm.i1_X, wlsqm.i1_X2, wlsqm.i1_X3, wlsqm.i1_X4 ]
    for label,func,flag in zip(labels,funcs,flags):
        m = wlsqm.lambdify_fit( xi, fi, dimension=1, order=fit_order, diff=flag ) # using diff=..., derivatives of the model can be lambdified, too
        fxk = func( xk )
        mxk = m( xk )
        print( label )
        print( "exact:" )
        print( fxk )
        print( "wlsqm solution:" )
        print( mxk )
        print( "abs error:" )
        errf = mxk - fxk
        print( errf )
        print( "rel error:" )
        print( (errf / fxk) )

    # comparison over the whole fitted neighborhood [xi - r, xi + r]
    ww3 = f(xx2)
    diff = ww2 - ww3 # fitted - exact
    idx = np.argmax(np.abs( diff ))
    maxerr_abs = diff[idx]
    maxerr_rel = diff[idx] / ww3[idx]
    print( "largest absolute total fit error (over the domain of the fit, not just the neighbor points):" )
    print( "absolute: %g" % (maxerr_abs) )
    print( "relative: %g" % (maxerr_rel) )

    fig = plt.figure(1, figsize=(6,6))
    fig.clf()
    ax = plt.subplot(1,1, 1)

    # the function
    ax.plot( xx, ww, color='k', linewidth=0.5, linestyle='solid' )

    # surrogate model
    ax.plot( xx2, ww2, color='r', linewidth=1., linestyle='solid' )

    # sampled points
    #
    fxk = f(xk)
    if noise_eps > 0.:
        ax.plot( xk, fxk, linestyle='none', marker='o', markeredgecolor='k', markerfacecolor='none' ) # exact
        ax.plot( xk, fk, linestyle='none', marker='o', markeredgecolor='r', markerfacecolor='none' ) # including noise
    else:
        ax.plot( xk, fxk, linestyle='none', marker='o', markeredgecolor='r', markerfacecolor='none' ) # exact

    # helper lines for easier reading of figure
    # ax.plot( np.tile(xk, (2,1)), np.tile((-0.05, 0.05), (nk,1)).T, 'k--' )
    tmp = np.zeros( (2,nk), dtype=np.float64 ) # for vertical lines from zero level to f(xk)
    tmp[1,:] = fxk
    ax.plot( np.tile(xk, (2,1)), tmp, 'r--', linewidth=0.25 )

    # sampled region
    ax.plot( (xi - r, xi + r), (0., 0.), 'r-', linewidth=2. )
    ax.plot( (xi - r, xi - r), (0., f(xi-r)), 'r--', linewidth=0.5 )
    ax.plot( (xi + r, xi + r), (0., f(xi+r)), 'r--', linewidth=0.5 )

    # point xi
    ax.plot( xi, f(xi), linestyle='none', marker='x', markeredgecolor='k', markerfacecolor='none' )

    plt.axis('tight')
    axis_marginize(ax, 0.02, 0.02)
    plt.grid(b=True, which='both')
    plt.xlabel('x')
    plt.ylabel('y')
def main():
    """Run every example case in turn, then display the figures."""
    test3d()
    test2d()
    test1d()
    testmany2d()
    # wlsqm.test_pointer_wrappers()
    print()
    # Blocks until all matplotlib windows are closed.
    plt.show()

if __name__ == '__main__':
    main()
|
Technologicat/python-wlsqm
|
examples/wlsqm_example.py
|
Python
|
bsd-2-clause
| 52,676
|
[
"Gaussian"
] |
491ec84afcbef3646c8084d37fbbe961cf28b732990516af0a5a935d15625c2f
|
#--------------------------------------------------------------------------
# Software: InVesalius - Software de Reconstrucao 3D de Imagens Medicas
# Copyright: (C) 2001 Centro de Pesquisas Renato Archer
# Homepage: http://www.softwarepublico.gov.br
# Contact: invesalius@cti.gov.br
# License: GNU - GPL 2 (LICENSE.txt/LICENCA.txt)
#--------------------------------------------------------------------------
# Este programa e software livre; voce pode redistribui-lo e/ou
# modifica-lo sob os termos da Licenca Publica Geral GNU, conforme
# publicada pela Free Software Foundation; de acordo com a versao 2
# da Licenca.
#
# Este programa eh distribuido na expectativa de ser util, mas SEM
# QUALQUER GARANTIA; sem mesmo a garantia implicita de
# COMERCIALIZACAO ou de ADEQUACAO A QUALQUER PROPOSITO EM
# PARTICULAR. Consulte a Licenca Publica Geral GNU para obter mais
# detalhes.
#--------------------------------------------------------------------------
import sys
import vtk
import wx
from wx.lib.pubsub import pub as Publisher
import vtk_utils as vu
# Module-level GUI progress reporter. Several functions below rebind the name
# locally with a different step count (e.g. JoinSeedsParts, SplitDisconectedParts).
UpdateProgress = vu.ShowProgress()
def ApplyDecimationFilter(polydata, reduction_factor):
    """Return a decimated copy of the given vtkPolyData.

    reduction_factor is passed straight to vtkQuadricDecimation's
    TargetReduction (fraction of triangles to remove).
    """
    # vtkQuadricDecimation was found to give better results than vtkDecimatePro.
    decimate = vtk.vtkQuadricDecimation()
    decimate.SetInput(polydata)
    decimate.SetTargetReduction(reduction_factor)
    decimate.GetOutput().ReleaseDataFlagOn()
    # Report pipeline progress to the GUI.
    progress_cb = lambda obj, evt: UpdateProgress(
        decimate, "Reducing number of triangles...")
    decimate.AddObserver("ProgressEvent", progress_cb)
    return decimate.GetOutput()
def ApplySmoothFilter(polydata, iterations, relaxation_factor):
    """Return a smoothed copy of the given vtkPolyData.

    iterations and relaxation_factor map directly onto the corresponding
    vtkSmoothPolyDataFilter parameters.
    """
    smooth = vtk.vtkSmoothPolyDataFilter()
    smooth.SetInput(polydata)
    smooth.SetNumberOfIterations(iterations)
    smooth.SetFeatureAngle(80)
    smooth.SetRelaxationFactor(relaxation_factor)
    smooth.FeatureEdgeSmoothingOn()
    smooth.BoundarySmoothingOn()
    smooth.GetOutput().ReleaseDataFlagOn()
    # Report pipeline progress to the GUI.
    progress_cb = lambda obj, evt: UpdateProgress(
        smooth, "Smoothing surface...")
    smooth.AddObserver("ProgressEvent", progress_cb)
    return smooth.GetOutput()
def FillSurfaceHole(polydata):
"""
Fill holes in the given polydata.
"""
# Filter used to detect and fill holes. Only fill
print "Filling polydata"
filled_polydata = vtk.vtkFillHolesFilter()
filled_polydata.SetInput(polydata)
filled_polydata.SetHoleSize(500)
return filled_polydata.GetOutput()
def CalculateSurfaceVolume(polydata):
    """Return the volume enclosed by the given vtkPolyData surface."""
    mass_properties = vtk.vtkMassProperties()
    mass_properties.SetInput(polydata)
    return mass_properties.GetVolume()
def CalculateSurfaceArea(polydata):
    """Return the surface area of the given vtkPolyData."""
    mass_properties = vtk.vtkMassProperties()
    mass_properties.SetInput(polydata)
    return mass_properties.GetSurfaceArea()
def Merge(polydata_list):
    """Merge a list of vtkPolyData objects into a single polydata.

    Each input is triangulated and appended; the combined mesh is then
    passed through vtkCleanPolyData to merge duplicate points at seams.
    """
    append = vtk.vtkAppendPolyData()
    for polydata in polydata_list:
        triangle = vtk.vtkTriangleFilter()
        triangle.SetInput(polydata)
        append.AddInput(triangle.GetOutput())
    clean = vtk.vtkCleanPolyData()
    clean.SetInput(append.GetOutput())
    # Bug fix: the cleaned output was built but never used — the function
    # returned append.GetOutput(), leaving vtkCleanPolyData as dead code.
    return clean.GetOutput()
def Export(polydata, filename, bin=False):
    """Write polydata to an XML .vtp file at `filename`.

    bin: True selects binary data mode, False ASCII.
    """
    writer = vtk.vtkXMLPolyDataWriter()
    # NOTE(review): leftover debug print — consider removing.
    print filename, type(filename)
    # VTK expects a byte string filename (Python 2).
    writer.SetFileName(filename.encode('utf-8'))
    if bin:
        writer.SetDataModeToBinary()
    else:
        writer.SetDataModeToAscii()
    writer.SetInput(polydata)
    writer.Write()
def Import(filename):
    """Read and return a vtkPolyData from an XML .vtp file.

    Python 2: unicode filenames are encoded with wx's default encoding
    before being handed to VTK.
    """
    reader = vtk.vtkXMLPolyDataReader()
    if isinstance(filename, unicode):
        reader.SetFileName(filename.encode(wx.GetDefaultPyEncoding()))
    else:
        reader.SetFileName(filename)
    reader.Update()
    return reader.GetOutput()
def JoinSeedsParts(polydata, point_id_list):
    """
    The function require vtkPolyData and point id
    from vtkPolyData.

    Extracts the connected regions containing the given seed point ids
    and returns them as a new, deep-copied vtkPolyData.
    """
    conn = vtk.vtkPolyDataConnectivityFilter()
    conn.SetInput(polydata)
    conn.SetExtractionModeToPointSeededRegions()
    # Local progress reporter: one step per seed plus one for the filter run
    # (shadows the module-level UpdateProgress on purpose).
    UpdateProgress = vu.ShowProgress(1 + len(point_id_list))
    pos = 1
    for seed in point_id_list:
        conn.AddSeed(seed)
        UpdateProgress(pos, _("Analysing selected regions..."))
        pos += 1
    conn.AddObserver("ProgressEvent", lambda obj, evt:
                     UpdateProgress(conn, "Getting selected parts"))
    conn.Update()
    # Deep copy so the result is detached from the filter's output buffer.
    result = vtk.vtkPolyData()
    result.DeepCopy(conn.GetOutput())
    result.Update()
    return result
def SelectLargestPart(polydata):
    """Return the largest connected region of `polydata` as a new,
    deep-copied vtkPolyData."""
    UpdateProgress = vu.ShowProgress(1)
    conn = vtk.vtkPolyDataConnectivityFilter()
    conn.SetInput(polydata)
    conn.SetExtractionModeToLargestRegion()
    conn.AddObserver("ProgressEvent", lambda obj, evt:
                     UpdateProgress(conn, "Getting largest part..."))
    conn.Update()
    # Deep copy so the result is detached from the filter's output buffer.
    result = vtk.vtkPolyData()
    result.DeepCopy(conn.GetOutput())
    result.Update()
    return result
def SplitDisconectedParts(polydata):
    """Split `polydata` into a list of vtkPolyData, one per connected region."""
    conn = vtk.vtkPolyDataConnectivityFilter()
    conn.SetInput(polydata)
    # First pass: count the regions.
    conn.SetExtractionModeToAllRegions()
    conn.Update()
    nregions = conn.GetNumberOfExtractedRegions()
    # Second pass: extract one region at a time.
    conn.SetExtractionModeToSpecifiedRegions()
    conn.Update()
    polydata_collection = []
    # Update progress value in GUI (only when there is more than one region).
    progress = nregions -1
    if progress:
        UpdateProgress = vu.ShowProgress(progress)
    for region in xrange(nregions):
        conn.InitializeSpecifiedRegionList()
        conn.AddSpecifiedRegion(region)
        conn.Update()
        # Deep copy each region out of the shared filter output.
        p = vtk.vtkPolyData()
        p.DeepCopy(conn.GetOutput())
        p.Update()
        polydata_collection.append(p)
        if progress:
            UpdateProgress(region, _("Splitting disconnected regions..."))
    return polydata_collection
|
givanaldo/invesalius3
|
invesalius/data/polydata_utils.py
|
Python
|
gpl-2.0
| 6,513
|
[
"VTK"
] |
027ac74bafd16d9ebe6da314a6aec2833cbecf9a74c466a78aead89c2613490c
|
# Copyright 2014-2020 The PySCF Developers. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import numpy
from pyscf.pbc.df import mdf
import pyscf.pbc.gto as pgto
from pyscf.pbc.lib import kpts_helper
from pyscf import ao2mo
# Shared test fixture: a small helium-dimer cell used by all tests below.
L = 5.
n = 3
cell = pgto.Cell()
cell.a = numpy.diag([L,L,L])
cell.mesh = numpy.array([n,n,n])
cell.atom = '''He 3. 2. 3.
He 1. 1. 1.'''
cell.basis = 'ccpvdz'
cell.verbose = 0
cell.rcut = 17
cell.build(0,0)
# Number of AO basis functions in the fixture cell.
nao = cell.nao_nr()
def tearDownModule():
    """Release the module-level Cell fixture after the test run."""
    global cell
    del cell
class KnownValues(unittest.TestCase):
    """Regression tests comparing MDF's AO->MO transforms against
    index-by-index einsum references built from the AO ERIs."""

    def test_eri1111(self):
        # Four distinct random k-points whose sum is zero (momentum conservation).
        kpts = numpy.random.random((4,3)) * .25
        kpts[3] = -numpy.einsum('ij->j', kpts[:3])
        with_df = mdf.MDF(cell).set(auxbasis='weigend')
        with_df.linear_dep_threshold = 1e-7
        with_df.kpts = kpts
        mo =(numpy.random.random((nao,nao)) +
             numpy.random.random((nao,nao))*1j)
        # Reference: transform the AO ERIs one index at a time.
        eri = with_df.get_eri(kpts).reshape((nao,)*4)
        eri0 = numpy.einsum('pjkl,pi->ijkl', eri , mo.conj())
        eri0 = numpy.einsum('ipkl,pj->ijkl', eri0, mo       )
        eri0 = numpy.einsum('ijpl,pk->ijkl', eri0, mo.conj())
        eri0 = numpy.einsum('ijkp,pl->ijkl', eri0, mo       )
        eri1 = with_df.ao2mo(mo, kpts)
        self.assertAlmostEqual(abs(eri1.reshape(eri0.shape)-eri0).sum(), 0, 9)

    def test_eri0110(self):
        # Pairwise-equal k-points: (k0, k1, k1, k0).
        kpts = numpy.random.random((4,3)) * .25
        kpts[3] = kpts[0]
        kpts[2] = kpts[1]
        with_df = mdf.MDF(cell).set(auxbasis='weigend')
        with_df.linear_dep_threshold = 1e-7
        with_df.kpts = kpts
        mo =(numpy.random.random((nao,nao)) +
             numpy.random.random((nao,nao))*1j)
        # Reference: transform the AO ERIs one index at a time.
        eri = with_df.get_eri(kpts).reshape((nao,)*4)
        eri0 = numpy.einsum('pjkl,pi->ijkl', eri , mo.conj())
        eri0 = numpy.einsum('ipkl,pj->ijkl', eri0, mo       )
        eri0 = numpy.einsum('ijpl,pk->ijkl', eri0, mo.conj())
        eri0 = numpy.einsum('ijkp,pl->ijkl', eri0, mo       )
        eri1 = with_df.ao2mo(mo, kpts)
        self.assertAlmostEqual(abs(eri1.reshape(eri0.shape)-eri0).sum(), 0, 8)

    def test_eri0000(self):
        # Gamma-point only (all k-points zero); also checks the real-MO path.
        with_df = mdf.MDF(cell).set(auxbasis='weigend')
        with_df.linear_dep_threshold = 1e-7
        with_df.kpts = numpy.zeros((4,3))
        mo =(numpy.random.random((nao,nao)) +
             numpy.random.random((nao,nao))*1j)
        eri = ao2mo.restore(1, with_df.get_eri(with_df.kpts), nao)
        eri0 = numpy.einsum('pjkl,pi->ijkl', eri , mo.conj())
        eri0 = numpy.einsum('ipkl,pj->ijkl', eri0, mo       )
        eri0 = numpy.einsum('ijpl,pk->ijkl', eri0, mo.conj())
        eri0 = numpy.einsum('ijkp,pl->ijkl', eri0, mo       )
        eri1 = with_df.ao2mo(mo, with_df.kpts)
        self.assertAlmostEqual(abs(eri1.reshape(eri0.shape)-eri0).sum(), 0, 9)
        # Same check with real MO coefficients and compact=False.
        mo = mo.real
        eri0 = numpy.einsum('pjkl,pi->ijkl', eri , mo.conj())
        eri0 = numpy.einsum('ipkl,pj->ijkl', eri0, mo       )
        eri0 = numpy.einsum('ijpl,pk->ijkl', eri0, mo.conj())
        eri0 = numpy.einsum('ijkp,pl->ijkl', eri0, mo       )
        eri1 = with_df.ao2mo(mo, with_df.kpts, compact=False)
        self.assertAlmostEqual(abs(eri1.reshape(eri0.shape)-eri0).sum(), 0, 9)

    def test_ao2mo_7d(self):
        # Separate, smaller fixture with a 1x3x1 k-point mesh.
        L = 3.
        n = 6
        cell = pgto.Cell()
        cell.a = numpy.diag([L,L,L])
        cell.mesh = [n,n,n]
        cell.atom = '''He 2. 2.2 2.
He 1.2 1. 1.'''
        cell.basis = {'He': [[0, (1.2, 1)], [1, (0.6, 1)]]}
        cell.verbose = 0
        cell.build(0,0)
        kpts = cell.make_kpts([1,3,1])
        nkpts = len(kpts)
        nao = cell.nao_nr()
        numpy.random.seed(1)
        mo =(numpy.random.random((nkpts,nao,nao)) +
             numpy.random.random((nkpts,nao,nao))*1j)
        with_df = mdf.MDF(cell, kpts)
        out = with_df.ao2mo_7d(mo, kpts)
        # Reference: per-(ki,kj,kk) ao2mo with kl fixed by momentum conservation.
        ref = numpy.empty_like(out)
        kconserv = kpts_helper.get_kconserv(cell, kpts)
        for ki, kj, kk in kpts_helper.loop_kkk(nkpts):
            kl = kconserv[ki, kj, kk]
            tmp = with_df.ao2mo((mo[ki], mo[kj], mo[kk], mo[kl]), kpts[[ki,kj,kk,kl]])
            ref[ki,kj,kk] = tmp.reshape([nao]*4)
        self.assertAlmostEqual(abs(out-ref).max(), 0, 12)
if __name__ == '__main__':
    # Allow running this test module directly.
    print("Full Tests for mdf ao2mo")
    unittest.main()
|
sunqm/pyscf
|
pyscf/pbc/df/test/test_mdf_ao2mo.py
|
Python
|
apache-2.0
| 4,910
|
[
"PySCF"
] |
59906813e3be4cb9a6153d369816de555b6b0466a264693a455e0cafd38898f2
|
#!/usr/bin/python
# -*- coding: UTF-8 -*-
# Introduction: This script used to get G+C content
# 3rd position of synonymous codons (GC3s)
# from each fasta format sequence
# Input is a Seq format object (Bio.SeqIO)
# Created by galaxy on 17-3-12 4:12pm
def read_seq(seq):
    """Split *seq* into successive 3-character codon strings (uppercased).

    The last element may be shorter than 3 characters when the sequence
    length is not a multiple of 3.
    """
    seq_string = str(seq).upper()
    max_seq = len(seq_string)
    return [seq_string[i:i + 3] for i in range(0, max_seq, 3)]

def get_gc3s(seq, precision=2):
    """Return the GC3s value of *seq*.

    GC3s is the fraction of codons whose third position is G or C, counted
    over codons that have synonymous alternatives: ATG (Met) and TGG (Trp)
    are excluded (no synonyms), as are the stop codons TAA, TAG and TGA.
    Trailing partial codons are skipped.

    precision: number of decimals in the rounded result.
    Raises ZeroDivisionError when no countable codon is present.
    """
    # Set for O(1) lookup; the original list also contained 'TGG' twice.
    exclude_codons = {'ATG', 'TGG', 'TAA', 'TAG', 'TGA'}
    seq_codons = read_seq(seq)
    gc_content = 0
    ex = 0
    for each_codon in seq_codons:
        if len(each_codon) < 3:
            # Trailing partial codon (length not a multiple of 3); the
            # original raised IndexError here — skip it and drop it from
            # the denominator instead.
            ex += 1
            continue
        if each_codon in exclude_codons:
            ex += 1
        elif each_codon[2] in ('G', 'C'):
            gc_content += 1
    seq_gc3s = gc_content / (len(seq_codons) - ex)
    return round(seq_gc3s, precision)
|
cvn001/codonPY
|
src/get_GC3s_from_fasta.py
|
Python
|
mit
| 926
|
[
"Galaxy"
] |
ecb35480746c5037f5c1013d5873c565c34b9ef8faecab0a8fd9d43c34295ba6
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
########### Attension
# [n] means n-dimension array
# [m x n] means (m x n)-dimension array
# no [] means 1-dimension data
import copy
import random as rand

import numpy as np
import numpy.linalg as nplnlg

import neuro_function as nf
class NeuroElement:
    """Base part of a network: a dimension descriptor (`size`) and a value."""
    def __init__(self) : self.size = []; self.value = []
    def make_elment(self): return self

# Weights
class NeuroWeight(NeuroElement):
    """Weight matrix between two layers, initialised uniformly in [-0.1, 0.1]."""
    def __init__(self):
        # Bug fix: the original called NeuroElement().__init__(), which
        # initialised a throwaway instance and left `self` without the
        # `size`/`value` attributes. Initialise this instance instead.
        NeuroElement.__init__(self)
    def make_elment(self, dim):
        """Create a [dim[0] x dim[1]] random weight matrix; returns self."""
        if len(dim) == 2: self.value = np.matrix([[rand.uniform(-0.1, 0.1) for j in range(0, dim[1])] for k in range(0, dim[0])], dtype = 'float')
#        if len(dim) == 2: self.value = np.matrix([[rand.uniform(-0.5, 0.5) for j in range(0, dim[1])] for k in range(0, dim[0])], dtype = 'float')
        self.size = np.array(dim)
        return self
    def __deepcopy__(self, memo):
        mine = NeuroWeight(); mine.value = copy.deepcopy(self.value); mine.size = copy.deepcopy(self.size);
        return mine

class NeuroNode(NeuroElement):
    """Layer of neurons: `value` holds activations, `func` the output function."""
    def __init__(self):
        # Same fix as NeuroWeight.__init__: initialise this instance, not a
        # temporary one, so `size`/`value` exist before make_elment is called.
        NeuroElement.__init__(self)
        self.func = 'sigmoid'
    def make_elment(self, dim, func = 'sigmoid'):
        """Create a zeroed [1 x dim[0]] activation row and pick the output
        function by name; returns self."""
        if len(dim) == 1: self.value = np.array([np.zeros(dim)], dtype = 'float')
        if   func == 'sigmoid'   : self.func = nf.sigmoid
        elif func == 'tanh'      : self.func = nf.tanh
        elif func == 'linear'    : self.func = nf.linear
        elif func == 'perceptron': self.func = nf.perceptron
        elif func == 'softmax'   : self.func = nf.softmax
        self.size = np.array(dim)
        return self
    def __deepcopy__(self, memo):
        mine = NeuroNode(); mine.value = copy.deepcopy(self.value); mine.size = copy.deepcopy(self.size); mine.func = copy.deepcopy(self.func);
        return mine
# Multi layer neuralnet
class Mln:
    """Multi-layer feed-forward neural network trained by back-propagation.

    `neural_element` interleaves NeuroNode and NeuroWeight objects in the
    order: input node, weights, hidden node, weights, ..., output node.
    `name_of_element` labels each slot as 'node' or 'weight' in parallel.
    """
    def __init__(self):
        # neural_element : This means that input node, weights, hidden node, weights, ..., outputs node are existed sequencially.
        self.neural_element = [] ; self.bios = 0.0 ; self.teach = [] ; self.error = [] ; self.mse = 0.0;
        self.learning_rate = 0.15; self.number_of_element = 0; self.number_of_layer = 0; self.name_of_element = []

    def __deepcopy__(self, memo):
        # Elements delegate to their own __deepcopy__; numpy buffers are
        # duplicated with np.copy.
        mine = Mln()
        for i in range(0, self.number_of_element): mine.neural_element.append(copy.deepcopy(self.neural_element[i]))
        mine.bios = self.bios; mine.teach = np.copy(self.teach); mine.error = np.copy(self.error); mine.mse = self.mse
        mine.learning_rate = self.learning_rate; mine.number_of_element = self.number_of_element; mine.number_of_layer = self.number_of_layer
        for i in range(0, self.number_of_element): mine.name_of_element.append(self.name_of_element[i])
        return mine

    # Initialization (Not Constructor)
    # network_dims    : [input_layer_dimension, hidden_layer_dimension1, hidden_layer_dimension2, ..., output_layer_fimension]
    # output_funcion  : output function of all neurons such as 'perceptron', 'sigmoid', 'tanh'
    # learning_rate   : learning rate which use for back propergation (0 < learning_rate <= 1)
    # bios            : all layer's bios value
    # NOTE(review): `output_function = ['sigmoid']` is a mutable default
    # argument; it is not mutated here, but `None` + fallback would be safer.
    def make_neuralnet(self, network_dims, output_function = ['sigmoid'], learning_rate = 0.15, bios = 1.0, solved = 'fitting'):
        if len(output_function) + 1 != len(network_dims): print "Invallid number of element: network_dims and output_function is not same."; exit(0)
        self.learning_rate = learning_rate; self.bios = bios
        # make number of element & layer
        self.number_of_layer = len(network_dims); self.number_of_element = len(network_dims)*2-1;
        # make nodes and weights of all layers
        func = ['linear'] + [func for func in output_function]
        for i in range(0, self.number_of_layer): self.neural_element.append(NeuroNode().make_elment([network_dims[i]], func[i]))
        for i in range(0, self.number_of_layer-1): self.neural_element.insert(i*2+1, NeuroWeight().make_elment(np.array(network_dims[i:i+2])))
        # elements_names presents that a element of parameter of 'neural_elements' is 'node', 'weight', 'error'.
        for i in range(0, self.number_of_layer): self.name_of_element.append('node'); self.name_of_element.append('weight')
        self.name_of_element.pop()
        # make teach & error signals
        self.teach = np.matrix([np.zeros(network_dims[len(network_dims)-1])]); self.error = np.matrix([np.zeros(network_dims[len(network_dims)-1])])
        self.solved = solved
        self.total_error = 0.
        return self

    # show any elements on neural network
    # elemets_name : elament's name which you want to show such as following:
    #                'function': show output functions
    #                'teach'   : show teach signals
    #                'weight'  : show weights
    #                'node'    : show node signals
    #                'err'     : show error signals
    #                'ttlerr'  : show total error signals, if you select "solved = 'fitting'" then you can see MSE, and "solved = 'classification'" then you can see total error.
    def show_element(self, element_name):
        if element_name == 'teach' : print '// Teaches //' , self.teach; return
        elif element_name == 'err' : print '// Output Errors //' , self.error; return
        elif element_name == 'ttlerr' : print '// Output Total Error //' , self.total_error ; return
        elif element_name == 'input' : print '// Input //' , self.neural_element[0].value ; return
        elif element_name == 'output' : print '// Output //' , self.neural_element[len(self.neural_element)-1].value ; return
        elif element_name == 'weight' : print '// Bios of all Layer //' , self.bios
        # Walk the element list, labelling each layer by position.
        names = ['Input'] + ['Hidden' for i in range(0, self.number_of_layer-2)] + ['Output']
        for i in range(0, len(self.name_of_element)):
            if element_name == 'weight' and self.name_of_element[i] == 'weight':
                messages = '// Weights of ' + names[0] ; del names[0]; print messages + ' to ' + names[0] + ' Layer //'; print self.neural_element[i].value
            elif element_name == 'node' and self.name_of_element[i] == 'node' :
                messages = '// Nodes of ' + names[0] ; del names[0]; print messages + ' //' ; print self.neural_element[i].value
            elif element_name == 'func' and self.name_of_element[i] == 'node' :
                messages = '// Functions of ' + names[0] ; del names[0]; print messages + ' //' ; print self.neural_element[i].func

    # set input signals
    # input_data : input signals, same dimeision as number of input layer's node.
    def input_signals(self, input_data):
        # set input signals to input layer's node
        tmp_data = np.matrix(input_data)
        if self.neural_element[0].value.shape == tmp_data.shape: self.neural_element[0].value = tmp_data
        else: print 'Invallid size of input data:', tmp_data.shape; print 'You must use size of input data:', self.neural_element[0].value.shape

    # set teach signals
    # teach_data : teach signals, same dimeision as number of output layer's node.
    def teach_signals(self, teach_data):
        # set teach signals to output layer's node
        tmp_data = np.matrix(teach_data)
        if self.teach.shape == tmp_data.shape: self.teach = tmp_data
        else: print 'Invallid size of teach data:', tmp_data.shape; print 'You must use size of teach data:', self.teach.shape

    # caliculate err signals
    # call after call those; input_to_neuron(), teach_to_neuron(), output_for_neuron()
    def error_signals(self):
        # solved problem
        if self.solved == 'fitting':
            # Fitting: plain residual (output - teach).
            self.error = self.neural_element[self.number_of_element-1].value - self.teach
            self.total_error = np.sum(self.error)
        elif self.solved == 'classification':
            # softmax: cross-entropy terms t * log(y)
            if self.neural_element[self.number_of_element-1].func == nf.softmax:
                self.error = np.array(self.teach) * np.array(np.log(self.neural_element[self.number_of_element-1].value))
                self.total_error = np.sum(self.error)
            # sigmoid: binary cross-entropy terms
            elif self.neural_element[self.number_of_element-1].func == nf.sigmoid:
                self.error = np.array(self.teach.T) *np.array(np.log(self.neural_element[self.number_of_element-1].value)) + np.array(1 - self.teach.T) * np.array(np.log(1 - self.neural_element[self.number_of_element-1].value))
                self.total_error = np.sum(self.error)
            # tanh: not implemented
            elif self.neural_element[self.number_of_element-1].func == nf.tanh:
                pass

    # caliculate output signals (forward pass through all layers)
    def output_signals(self):
        tmp_output = self.neural_element[0].value + self.bios
        for i in range(1, self.number_of_element):
            if self.name_of_element[i] == 'weight': tmp_output = np.dot(tmp_output, self.neural_element[i].value);
            elif self.name_of_element[i] == 'node' :
                # softmax function or not
                if self.neural_element[i].func != nf.softmax:
                    # Element-wise activation with bias added per element.
                    tmp_output = np.matrix([self.neural_element[i].func(element + self.bios) for element in np.nditer(tmp_output)]).reshape(tmp_output.shape);
                    self.neural_element[i].value = tmp_output
                else:
                    # add neuron outputs with bios
                    tmp_output = tmp_output + self.bios;
                    # apply softmax function to upper value
                    tmp_output = self.neural_element[i].func(tmp_output)
                    self.neural_element[i].value = tmp_output

    # Back-propagate de/dy through every weight layer; returns a per-element
    # list of weight deltas (empty entries for node slots).
    def caliculate_backpropergation(self, de_dy):
        errs = de_dy
        # make update value pool
        delta_pool = [[]]
        for i in range(1, self.number_of_element):
            if self.name_of_element[i] == 'weight': delta_pool.append([np.matrix(np.zeros(self.neural_element[i].value.shape))])
            else : delta_pool.append([])
        # caliculate on output layer
        [delta_pool[self.number_of_element-2], delta_part] = \
            nf.back_propergation(\
                errs, \
                np.array(self.neural_element[self.number_of_element-1].value), \
                self.neural_element[self.number_of_element-3].value.T, \
                self.neural_element[self.number_of_element-1].func,\
                self.learning_rate,\
                solved = self.solved);
        errs = np.array(np.dot(delta_part, self.neural_element[self.number_of_element-2].value.T));
        # Hidden layers, walking backwards two slots (node+weight) at a time.
        for i in range(self.number_of_element-3, 1, -2):
            [delta_pool[i-1], delta_part] = \
                nf.back_propergation(\
                    errs, \
                    np.array(self.neural_element[i].value), \
                    self.neural_element[i-2].value.T, \
                    self.neural_element[i].func, \
                    self.learning_rate, \
                    solved = 'fitting')
            errs = np.array(np.dot(delta_part, self.neural_element[i-1].value.T));
        return delta_pool

    # Gradient of the error w.r.t. the network output for the current problem.
    # NOTE(review): `errs` is unbound for solved='classification' with a tanh
    # output (the `pass` branch) — that path raises NameError if reached.
    def caliculate_de_dy(self, teach_data):
        # caliculate de/dy
        # solved problem
        if self.solved == 'fitting':
            errs = -np.array(self.neural_element[self.number_of_element-1].value - teach_data)
        elif self.solved == 'classification':
            # softmax
            if self.neural_element[self.number_of_element-1].func == nf.softmax:
                errs = np.array(self.neural_element[self.number_of_element-1].value - teach_data)
            # sigmoid
            elif self.neural_element[self.number_of_element-1].func == nf.sigmoid:
                errs = np.array(self.neural_element[self.number_of_element-1].value - teach_data)
            # tanh
            elif self.neural_element[self.number_of_element-1].func == nf.tanh:
                pass
        return errs

    # learn (using input signals to reach teach signals)
    # input_data : input signals, same dimeision as number of input layer's node.
    # teach_data : teach signals, same dimeision as number of output layer's node.
    def learn(self, input_data, teach_data):
        # set input and teach signals, and caliculate output and err signals.
        self.input_signals(input_data); self.teach_signals(teach_data); self.output_signals(); self.error_signals()
        errs = self.caliculate_de_dy(teach_data)
        delta_pool = self.caliculate_backpropergation(errs)
        # update all weights
        for i in range(1, self.number_of_element):
            if self.name_of_element[i] == 'weight': self.neural_element[i].value -= delta_pool[i]

    # Batch variant: accumulate the per-sample weight deltas over n samples,
    # then apply the summed deltas once.
    def batch_learn(self, input_data_set, teach_data_set, n):
        '''
        # set input and teach signals, and caliculate output and err signals.
        self.input_signals(input_data_set[0]); self.teach_signals(teach_data_set[0]); self.output_signals(); self.error_signals()
        err_pool = self.caliculate_de_dy(teach_data_set[0])
#        print "err:"; print err_pool
        for i in range(1, n):
            self.input_signals(input_data_set[i]); self.teach_signals(teach_data_set[i]); self.output_signals(); self.error_signals()
            err_pool += self.caliculate_de_dy(teach_data_set[i])
#            print "Add err:"; print self.caliculate_de_dy(teach_data_set[i])
#        print "AFTER err:"; print err_pool
        delta_pool = self.caliculate_backpropergation(err_pool)
        # update all weights
        for i in range(1, self.number_of_element):
            if self.name_of_element[i] == 'weight':
#                print "BEFORE:"; print self.neural_element[i].value
#                print "ADD   :"; print delta_pool[i]
#                print "ADD/n :"; print delta_pool[i] / n
                self.neural_element[i].value -= delta_pool[i] / n
#                self.neural_element[i].value -= delta_pool[i]
#                print "AFTER :";print self.neural_element[i].value
        '''
        # set input and teach signals, and caliculate output and err signals.
        self.input_signals(input_data_set[0]); self.teach_signals(teach_data_set[0]); self.output_signals(); self.error_signals()
        err_pool = self.caliculate_de_dy(teach_data_set[0])
        delta_pool = self.caliculate_backpropergation(err_pool)
#        print "------delta_pool [0]------";
#        for i in delta_pool:
#            print i
        # NOTE(review): the inner loop reuses the name `i` of the outer loop;
        # harmless (the outer range iterator reassigns it) but confusing.
        for i in range(1, n):
            self.input_signals(input_data_set[i]); self.teach_signals(teach_data_set[i]); self.output_signals(); self.error_signals()
            err_pool = self.caliculate_de_dy(teach_data_set[i])
            delta = self.caliculate_backpropergation(err_pool)
            for i in range(0, len(delta)):
                delta_pool[i] += delta[i]
#            print "------Add delta_pool [%d]------" % i;
#            for item in delta:
#                print item
#            print "------delta_pool [%d]------" % i;
#            for item in delta_pool:
#                print item
        # update all weights
        for i in range(1, self.number_of_element):
            if self.name_of_element[i] == 'weight':
#                print "BEFORE:"; print self.neural_element[i].value
#                print "ADD   :"; print delta_pool[i]
#                print "ADD/n :"; print delta_pool[i] / n
                self.neural_element[i].value -= delta_pool[i]
#                print "AFTER :";print self.neural_element[i].value

    # test (using input signals to reach teach signals)
    # input_data : input signals, same dimeision as number of input layer's node.
    # teach_data : teach signals, same dimeision as number of output layer's node.
    def test(self, input_data, teach_data):
        # set input and teach signals, and caliculate output and err signals.
        self.input_signals(input_data); self.teach_signals(teach_data); self.output_signals(); self.error_signals()

    # NOTE(review): this method references `network_dims`, which is not
    # defined in this scope (the parameter is `add_network_dims`), so calling
    # it raises NameError. Confirm the intended dimensions before using it.
    def add_node(self, add_network_dims, output_funcion = 'sigmoid'):
        # make number of element & layer
        self.number_of_layer += len(add_network_dims);
        self.number_of_element += len(add_network_dims)*2-1;
        # make nodes and weights of all layers
        func = ['linear'] + [output_funcion for i in range(0, len(network_dims)-1)]
        for i in range(0, self.number_of_layer): self.neural_element.append(NeuroNode().make_elment([network_dims[i]], func[i]))
        for i in range(0, self.number_of_layer-1): self.neural_element.insert(i*2+1, NeuroWeight().make_elment(np.array(network_dims[i:i+2])))
        # elements_names presents that a element of parameter of 'neural_elements' is 'node', 'weight', 'error'.
        for i in range(0, self.number_of_layer): self.name_of_element.append('node'); self.name_of_element.append('weight')
        self.name_of_element.pop()
        # make teach & error signals
        self.teach = np.matrix([np.zeros(network_dims[len(network_dims)-1])]); self.error = np.matrix([np.zeros(network_dims[len(network_dims)-1])])
        return self

    def get_max_output_index(self):
        # Index of the strongest output neuron (argmax over the output layer).
        return np.argmax(self.neural_element[len(self.neural_element)-1].value)

    def get_min_output_index(self):
        # Index of the weakest output neuron.
        return np.argmin(self.neural_element[len(self.neural_element)-1].value)

    def get_output(self):
        # Raw activation row of the output layer.
        return self.neural_element[len(self.neural_element)-1].value

    def get_error(self):
        # Error signal from the most recent error_signals() call.
        return self.error
|
WaterIsland/DLStudy
|
mln/BatchLearning/Mln.py
|
Python
|
mit
| 17,689
|
[
"NEURON"
] |
eed8b300c43b1b9677b343bdc4b7f9e40637d2c97b30d1d4052153bc190e8dd7
|
try:
import freenect
except:
print "No freenect support"
import logging
import numpy as np
from threading import Thread
import time
# Depth readings above this value are clamped before transmission.
MAXDEPTH = 1200
# Size in bytes of one 640x480 frame of 16-bit depth samples.
FRAMEBYTES = 640*480*2
class KinectLoop(Thread):
    """Thread wrapping freenect.runloop: streams depth frames to `server`
    and applies tilt changes requested via `server.tilt`."""
    def __init__(self, server, **kwargs):
        super(KinectLoop, self).__init__(**kwargs)
        self.server = server
        self.killfreenect = False   # set by kill() to stop the runloop
        self.last_tilt = 0          # last tilt angle actually sent to the device
    def body(self, dev, ctx):
        # Called by freenect on every loop iteration.
        if self.killfreenect:
            raise freenect.Kill("Killing freenect runloop")
        if self.server.tilt is None or self.server.tilt == self.last_tilt:
            return
        else:
            freenect.set_tilt_degs(dev, self.server.tilt)
            self.last_tilt = self.server.tilt
    def kill(self):
        """Request the freenect runloop to stop on its next iteration."""
        self.killfreenect = True
    def run(self):
        freenect.runloop(body=self.body, depth=createdepthhandler(self.server))
class Subject(Thread):
    """A Thread that maintains a registry of observers keyed by id."""

    def __init__(self, observers=None, **kwargs):
        super(Subject, self).__init__(**kwargs)
        if observers is None:
            observers = {}
        self.observers = observers

    def add_observer(self, oid, observer):
        """Register *observer* under *oid*, replacing any existing entry."""
        self.observers[oid] = observer

    def remove_observer(self, oid):
        """Deregister the observer stored under *oid*; warn if absent."""
        if oid in self.observers:
            del self.observers[oid]
        else:
            logging.warn("Could not find observer id {0}".format(oid))
class KinectSubject(Subject):
    """Subject backed by a real Kinect: clamps each depth frame to
    `max_depth`, flattens it to a bytearray, and broadcasts it to observers."""
    def __init__(self, max_depth=1200, observers=None, **kwargs):
        super(KinectSubject, self).__init__(observers, **kwargs)
        self.max_depth = max_depth
        self.killfreenect = False   # set by kill() to stop the runloop
        self.last_tilt = 0          # tilt last applied to the device
        self.tilt = 0               # tilt requested via set_tilt()
        self.depth = None           # most recent frame as a bytearray
    def body(self, dev, ctx):
        # Per-iteration callback: honour kill requests and tilt changes.
        if self.killfreenect:
            raise freenect.Kill("Killing freenect runloop")
        if self.tilt != self.last_tilt:
            freenect.set_tilt_degs(dev, self.tilt)
            self.last_tilt = self.tilt
    def depth_handler(self, dev, depth, timestamp):
        # Clamp far readings, flatten to bytes, then notify every observer.
        depth[np.where(depth > self.max_depth)] = self.max_depth
        self.depth = bytearray(depth.flatten())
        for observer in self.observers.values():
            observer(self.depth)
    def kill(self):
        self.killfreenect = True
    def run(self):
        freenect.runloop(body=self.body, depth=self.depth_handler)
    def set_tilt(self, degs):
        self.tilt = degs
class KinectFactory(object):
    """Process-wide singleton access to the KinectSubject thread."""
    KINECTSUBJECT = None
    @staticmethod
    def create_kinect():
        # Lazily create and start the singleton subject.
        if KinectFactory.KINECTSUBJECT is None:
            KinectFactory.KINECTSUBJECT = KinectSubject()
        # NOTE(review): Thread.isAlive() was removed in Python 3.9; switch to
        # is_alive() if this code is ever ported to Python 3.
        if not KinectFactory.KINECTSUBJECT.isAlive():
            KinectFactory.KINECTSUBJECT.start()
        return KinectFactory.KINECTSUBJECT
    @staticmethod
    def kill():
        if KinectFactory.KINECTSUBJECT is not None:
            KinectFactory.KINECTSUBJECT.kill()
class KinectFile(Subject):
    """Subject that replays recorded depth frames from a file at ~30 fps,
    looping back to the start of the recording at EOF."""
    def __init__(self, filename, filemem=False, observers=None, **kwargs):
        super(KinectFile, self).__init__(observers, **kwargs)
        self.filename = filename
        self.curbyte = 0            # read offset used in in-memory mode
        self.filemem = filemem      # True: whole file in memory; False: stream
        self.reader = open(self.filename, 'rb').read() if self.filemem else open(self.filename, 'rb')
        self.killkinectfile = False # set by kill() to stop run()
    def get_depth(self):
        """Return the next frame as a bytearray, wrapping around at EOF."""
        if self.filemem:
            depth = self.reader[self.curbyte:self.curbyte+FRAMEBYTES]
            if not depth:
                self.curbyte = 0
                depth = self.reader[self.curbyte:self.curbyte+FRAMEBYTES]
            self.curbyte += FRAMEBYTES
        else:
            depth = self.reader.read(FRAMEBYTES)
            if not depth:
                self.reader.seek(0)
                depth = self.reader.read(FRAMEBYTES)
        return bytearray(depth)
    def kill(self):
        self.killkinectfile = True
    def run(self):
        while True:
            if self.killkinectfile:
                try:
                    self.reader.close()
                except AttributeError:
                    # In-memory mode holds a bytes object with no close().
                    pass
                break
            depth = self.get_depth()
            for observer in self.observers.values():
                observer(depth)
            # Pace playback at roughly 30 frames per second.
            time.sleep(1.0/30)
def createdepthhandler(server):
    """Build a freenect depth callback bound to `server`."""
    def handledepth(dev, depth, timestamp):
        """Flatten the 2D depth array into a 1D array, convert it into a
        bytearray, and send it to the client."""
        # truncate at max_depth
        depth[np.where(depth > MAXDEPTH)] = MAXDEPTH
        # # gaussian blur the depth values
        # depth = filters.gaussian_filter(
        #     depth,
        #     sigma=3,
        #     mode='nearest'
        # )
        array1d = depth.flatten()
        barray = bytearray(array1d)
        server.sendMessage(barray)
    return handledepth
def startloop(server):
    """Blocking entry point: run freenect with a depth handler bound to `server`."""
    freenect.runloop(
        depth=createdepthhandler(server)
    )
|
parthaa/Kimotion
|
server/input.py
|
Python
|
gpl-2.0
| 4,836
|
[
"Gaussian"
] |
5e128e6a0bfea110ec30ad2b2ae5ff4cb0a843e8e2b785f02b66d350174c6dd2
|
#
# python-ipfix (c) 2013 Brian Trammell.
#
# Many thanks to the mPlane consortium (http://www.ict-mplane.eu) for
# its material support of this effort.
#
# This program is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import with_statement
import xml.etree.ElementTree as etree
from warnings import warn
from . import types, ie, compat
from .compat import urlreq
from io import open
def iana_xml_to_iespec(uri = "http://www.iana.org/assignments/ipfix/ipfix.xml"):
    """Fetch the IANA IPFIX registry XML from `uri` and return a list of
    iespec strings ("name(num)<type>[length]") for every record whose name,
    dataType and elementId are all present and whose type is known."""
    iespecs = []
    nsmap = { "iana" : "http://www.iana.org/assignments" }
    res = urlreq.urlopen(uri)
    root = etree.parse(res).getroot()
    for recelem in root.iterfind("iana:registry[@id='ipfix-information-elements']/iana:record", nsmap):
        (name, typename, num) = (None, None, None)
        # Field tags are namespace-qualified; match on the tag suffix.
        for fieldelem in recelem.iter():
            if fieldelem.tag[-4:] == "name":
                name = fieldelem.text
            elif fieldelem.tag[-8:] == "dataType":
                typename = fieldelem.text
            elif fieldelem.tag[-9:] == "elementId":
                num = fieldelem.text
        if name and typename and num:
            try:
                ietype = types.for_name(typename)
                iespecs.append("%s(%u)<%s>[%u]" % (name, int(num), ietype.name, ietype.length))
            except:
                # Best-effort: records with unknown/unsupported types are skipped.
                pass
    return iespecs
def reverse_iespec(iespec):
(name, pen, num, typename, length) = ie.parse_spec(iespec)
revname = "reverse" + name[0].capitalize() + name[1:]
if pen:
num |= 0x4000
else:
pen = 29305
return "%s(%u/%u)<%s>[%u]" % (revname, pen, num, typename, length)
def write_specfile(filename, iespecs):
with open(filename, "w") as f:
for spec in iespecs:
f.write(spec)
f.write("\n")
|
britram/python-ipfix
|
ipfix/ieutils.py
|
Python
|
lgpl-3.0
| 2,386
|
[
"Brian"
] |
53e2f4eb5a57865d11444d847fb1e0062142c690d88b25f3e428cfe6ae860fd2
|
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class RRbgl(RPackage):
"""An interface to the BOOST graph library
A fairly extensive and comprehensive interface to the graph algorithms
contained in the BOOST library."""
homepage = "https://bioconductor.org/packages/RBGL"
git = "https://git.bioconductor.org/packages/RBGL.git"
version('1.66.0', commit='bf0c111dbc231de6d3423c28e115b54fb010e1ea')
version('1.60.0', commit='ef24c17c411659b8f030602bd9781c534d6ec93b')
version('1.58.2', commit='086ad0c6bab7be29311b6ae14fd39df7a21331a6')
version('1.56.0', commit='a1fa9d89c6a3401892c5dd1493df6a14031f0912')
version('1.54.0', commit='e9c743d380e83c155495cb8732102f01f213c905')
version('1.52.0', commit='93e8fcfafec8f1cd5638fe30dc0f9506d15b49c0')
depends_on('r-graph', type=('build', 'run'))
depends_on('r-bh', when='@1.60.0:', type=('build', 'run'))
|
LLNL/spack
|
var/spack/repos/builtin/packages/r-rbgl/package.py
|
Python
|
lgpl-2.1
| 1,084
|
[
"Bioconductor"
] |
f24660b58fa3160a7cca110765f3cdc1f716d1a801969aff69c3ad507b86986c
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
__RCSID__ = "$Id$"
import socket
import time
import os
try:
import selectors
except ImportError:
import selectors2 as selectors
from DIRAC.Core.DISET.private.Transports.BaseTransport import BaseTransport
from DIRAC.FrameworkSystem.Client.Logger import gLogger
from DIRAC.Core.Utilities.ReturnValues import S_ERROR, S_OK
class PlainTransport(BaseTransport):
def initAsClient(self):
timeout = None
if "timeout" in self.extraArgsDict:
timeout = self.extraArgsDict["timeout"]
try:
self.oSocket = socket.create_connection(self.stServerAddress, timeout)
except socket.error as e:
if e.args[0] != 115:
return S_ERROR("Can't connect: %s" % str(e))
# Connect in progress
sel = selectors.DefaultSelector()
sel.register(self.oSocket, selectors.EVENT_READ)
if not sel.select(timeout=self.extraArgsDict["timeout"]):
self.oSocket.close()
return S_ERROR("Connection timeout")
errno = self.oSocket.getsockopt(socket.SOL_SOCKET, socket.SO_ERROR)
if errno != 0:
return S_ERROR("Can't connect: %s" % str((errno, os.strerror(errno))))
self.remoteAddress = self.oSocket.getpeername()
return S_OK(self.oSocket)
def initAsServer(self):
if not self.serverMode():
raise RuntimeError("Must be initialized as server mode")
try:
self.oSocket = socket.socket(socket.AF_INET6, socket.SOCK_STREAM)
except socket.error:
# IPv6 is probably disabled on this node, try IPv4 only instead
self.oSocket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
if self.bAllowReuseAddress:
self.oSocket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self.oSocket.bind(self.stServerAddress)
self.oSocket.listen(self.iListenQueueSize)
return S_OK(self.oSocket)
def close(self):
gLogger.debug("Closing socket")
try:
self.oSocket.shutdown(socket.SHUT_RDWR)
except Exception:
pass
self.oSocket.close()
def setClientSocket(self, oSocket):
if self.serverMode():
raise RuntimeError("Mustbe initialized as client mode")
self.oSocket = oSocket
if "timeout" in self.extraArgsDict:
self.oSocket.settimeout(self.extraArgsDict["timeout"])
self.remoteAddress = self.oSocket.getpeername()
def acceptConnection(self):
# HACK: Was = PlainTransport( self )
oClientTransport = PlainTransport(self.stServerAddress)
oClientSocket, stClientAddress = self.oSocket.accept()
oClientTransport.setClientSocket(oClientSocket)
return S_OK(oClientTransport)
def _read(self, bufSize=4096, skipReadyCheck=False):
start = time.time()
timeout = False
if "timeout" in self.extraArgsDict:
timeout = self.extraArgsDict["timeout"]
while True:
if timeout:
if time.time() - start > timeout:
return S_ERROR("Socket read timeout exceeded")
try:
data = self.oSocket.recv(bufSize)
return S_OK(data)
except socket.error as e:
if e.errno == 11:
time.sleep(0.001)
else:
return S_ERROR("Exception while reading from peer: %s" % str(e))
except Exception as e:
return S_ERROR("Exception while reading from peer: %s" % str(e))
def _write(self, buf):
sentBytes = 0
timeout = False
if "timeout" in self.extraArgsDict:
timeout = self.extraArgsDict["timeout"]
if timeout:
start = time.time()
while sentBytes < len(buf):
try:
if timeout:
if time.time() - start > timeout:
return S_ERROR("Socket write timeout exceeded")
sent = self.oSocket.send(buf[sentBytes:])
if sent == 0:
return S_ERROR("Connection closed by peer")
if sent > 0:
sentBytes += sent
except socket.error as e:
if e.errno == 11:
time.sleep(0.001)
else:
return S_ERROR("Exception while sending to peer: %s" % str(e))
except Exception as e:
return S_ERROR("Error while sending: %s" % str(e))
return S_OK(sentBytes)
def checkSanity(*args, **kwargs):
return S_OK({})
def delegate(delegationRequest, kwargs):
"""
Check delegate!
"""
return S_OK()
|
ic-hep/DIRAC
|
src/DIRAC/Core/DISET/private/Transports/PlainTransport.py
|
Python
|
gpl-3.0
| 4,874
|
[
"DIRAC"
] |
ec14b14655e5cbc0b167e37389a76dc5c508578bc02ab9fd923fef0ef5fcfd5d
|
"""
Calculated formation energies for inorganic crystals from Materials Project.
"""
import os
import deepchem as dc
from deepchem.molnet.load_function.molnet_loader import TransformerGenerator, _MolnetLoader
from deepchem.data import Dataset
from typing import List, Optional, Tuple, Union
MPFORME_URL = 'https://deepchemdata.s3-us-west-1.amazonaws.com/datasets/mp_formation_energy.tar.gz'
MPFORME_TASKS = ['formation_energy']
class _MPFormationLoader(_MolnetLoader):
def create_dataset(self) -> Dataset:
dataset_file = os.path.join(self.data_dir, 'mp_formation_energy.json')
targz_file = os.path.join(self.data_dir, 'mp_formation_energy.tar.gz')
if not os.path.exists(dataset_file):
if not os.path.exists(targz_file):
dc.utils.data_utils.download_url(
url=MPFORME_URL, dest_dir=self.data_dir)
dc.utils.data_utils.untargz_file(targz_file, self.data_dir)
loader = dc.data.JsonLoader(
tasks=self.tasks,
feature_field="structure",
label_field="formation_energy",
featurizer=self.featurizer)
return loader.create_dataset(dataset_file)
def load_mp_formation_energy(
featurizer: Union[dc.feat.Featurizer, str] = dc.feat.SineCoulombMatrix(),
splitter: Union[dc.splits.Splitter, str, None] = 'random',
transformers: List[Union[TransformerGenerator, str]] = ['normalization'],
reload: bool = True,
data_dir: Optional[str] = None,
save_dir: Optional[str] = None,
**kwargs
) -> Tuple[List[str], Tuple[Dataset, ...], List[dc.trans.Transformer]]:
"""Load mp formation energy dataset.
Contains 132752 calculated formation energies and inorganic
crystal structures from the Materials Project database. In benchmark
studies, random forest models achieved a mean average error of
0.116 eV/atom during five-folded nested cross validation on this
dataset.
For more details on the dataset see [1]_. For more details
on previous benchmarks for this dataset, see [2]_.
Parameters
----------
featurizer: Featurizer or str
the featurizer to use for processing the data. Alternatively you can pass
one of the names from dc.molnet.featurizers as a shortcut.
splitter: Splitter or str
the splitter to use for splitting the data into training, validation, and
test sets. Alternatively you can pass one of the names from
dc.molnet.splitters as a shortcut. If this is None, all the data
will be included in a single dataset.
transformers: list of TransformerGenerators or strings
the Transformers to apply to the data. Each one is specified by a
TransformerGenerator or, as a shortcut, one of the names from
dc.molnet.transformers.
reload: bool
if True, the first call for a particular featurizer and splitter will cache
the datasets to disk, and subsequent calls will reload the cached datasets.
data_dir: str
a directory to save the raw data in
save_dir: str
a directory to save the dataset in
Returns
-------
tasks, datasets, transformers : tuple
tasks : list
Column names corresponding to machine learning target variables.
datasets : tuple
train, validation, test splits of data as
``deepchem.data.datasets.Dataset`` instances.
transformers : list
``deepchem.trans.transformers.Transformer`` instances applied
to dataset.
References
----------
.. [1] A. Jain*, S.P. Ong*, et al. (*=equal contributions) The Materials Project:
A materials genome approach to accelerating materials innovation APL Materials,
2013, 1(1), 011002. doi:10.1063/1.4812323 (2013).
.. [2] Dunn, A. et al. "Benchmarking Materials Property Prediction Methods: The Matbench
Test Set and Automatminer Reference Algorithm." https://arxiv.org/abs/2005.00707 (2020)
Examples
--------
>>>
>> import deepchem as dc
>> tasks, datasets, transformers = dc.molnet.load_mp_formation_energy()
>> train_dataset, val_dataset, test_dataset = datasets
>> n_tasks = len(tasks)
>> n_features = train_dataset.get_data_shape()[0]
>> model = dc.models.MultitaskRegressor(n_tasks, n_features)
"""
loader = _MPFormationLoader(featurizer, splitter, transformers, MPFORME_TASKS,
data_dir, save_dir, **kwargs)
return loader.load_dataset('mp-forme', reload)
|
deepchem/deepchem
|
deepchem/molnet/load_function/material_datasets/load_mp_formation_energy.py
|
Python
|
mit
| 4,318
|
[
"CRYSTAL"
] |
3ac43596436f4a7898ea485eac86d8123a09dce877404eb6c7285aa599f54315
|
#!/usr/bin/python
# Copyright 2007-2011 David Michael Bryson, all rights reserved.
# http://programerror.com/software/testrunner
#
# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
# following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the
# following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the
# following disclaimer in the documentation and/or other materials provided with the distribution.
# 3. Neither the name of David Michael Bryson, nor the names of contributors may be used to endorse or promote
# products derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY DAVID MICHAEL BRYSON AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL DAVID MICHAEL BRYSON OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: David M. Bryson <david@programerror.com>, Brian Baer <baerb@msu.edu>
#
import ConfigParser
import difflib
import dircache
import fnmatch
import getopt
import os
import shutil
import string
import subprocess
import sys
import tempfile
import threading
import time
import xml.dom.minidom
# This file has been formatted for editors/terminals 125 characters wide
# Global Constants
# ---------------------------------------------------------------------------------------------------------------------------
TESTRUNNER_VERSION = "2.0"
TESTRUNNER_COPYRIGHT = "2007-2011"
TRUE_STRINGS = ("y","Y","yes","Yes","true","True","1")
RESAVAIL = True
# -- Directory/Filename Definitions
CONFIGDIR = "config"
EXPECTDIR = "expected"
PERFDIR = "perf~" # subversion, by default, ignores files/dirs with ~ at the end
TEST_LIST = "test_list"
PERF_BASE = "baseline"
EXPECTED_IGNORE = (".gitignore",)
# Global Variables
# ---------------------------------------------------------------------------------------------------------------------------
settings = {} # {string:string}
tmpdir = None # string
# Conditional Imports
# ---------------------------------------------------------------------------------------------------------------------------
try:
import resource
except ImportError:
RESAVAIL = False
# Global Helper Functions
# ---------------------------------------------------------------------------------------------------------------------------
# void usage() {
def usage():
global settings
usagestr = """
Usage: %(_testrunner_name)s [options] [testname ...]
Runs the specified tests. Test names can be unix-style globbing patterns.
If no tests are specified all available tests will be run and new expected
results generated, where applicable.
Options:
--builddir=dir [%(builddir)s]
Set the path to the build directory.
-f | --force-perf
Force active tests to be treated as peformance tests, regardless of
individual test configuration.
-h | --help
Display this message
--help-test-cfg
Display a sample test configuration file
--ignore-consistency
Ignore consistency tests altogether. Performance results are not saved.
Valid only when used in conjunction with -p or --run-perf-tests.
-j number [%(cpus)d]
Set the number of concurrent tests to run. i.e. - the number of CPUs
that are available.
-l | --list-tests
List all available tests and exits. Tests that will require new
expected results will have an asterisk preceeding the name.
--long-tests
Run tests that have been marked as 'long' running.
--mode=option [%(mode)s]
Set the test runner mode. Options are 'local' and 'slave'.
Local mode generates expected results and adds them to the repository,
if enabled has been found. Slave mode disables expected results generation.
--scm=option [%(scm)s]
Set the source code management system.
Options are 'git', 'svn', and 'none'.
None disables all automatic source code management operations.
-p | --run-perf-tests
Run available performance tests.
--reset-expected
Reset the expected results for active consistency tests
--reset-perf-base
Reset performance test baseline results. Old baseline results are
saved in the 'perf' directory.
--show-diff
Show full file diff for failed consistency tests.
--diff-max-threshold=option [1536]
Threshold size, in bytes, at which testrunner will no longer
calculate detailed diffs. Prevents diff hangups.
--skip-tests
Do not run tests. Only generate new results, where applicable.
-g path | --git=path [%(git)s]
Set the path to the Git command line utility.
-s path | --svn=path [%(svn)s]
Set the path to the Subversion command line utility.
--svnversion=path [%(svnversion)s]
Set the path to the Subversion 'svnversion' command line utility.
--svnmetadir=dir [%(svnmetadir)s]
Set the name of the Subversion metadata directory.
--testdir=dir [%(testdir)s]
Set the path to the directory containing tests.
-v | --verbose
Enable verbose output, showing all test output.
--version
Show version information.
""" % settings
print usagestr
# } // End of usage()
# void sample_test_list() {
def sample_test_list():
global settings, TEST_LIST
test_list = ";--- Begin Test Configuration File (%s) ---" % TEST_LIST
test_list += """
[main]
; Command line arguments to pass to the application
args =
app = %(default_app)s ; Application path to test
nonzeroexit = disallow ; Exit code handling (disallow, allow, or require)
; disallow - treat non-zero exit codes as failures
; allow - all exit codes are acceptable
; require - treat zero exit codes as failures, useful
; for creating tests for app error checking
createdby = ; Who created the test
email = ; Email address for the test's creator
[consistency]
enabled = yes ; Is this test a consistency test?
long = no ; Is this test a long test?
[performance]
enabled = no ; Is this test a performance test?
long = no ; Is this test a long test?
; The following variables can be used in constructing setting values by calling
; them with %(variable_name)s. For example see 'app' above.
;
"""
sk = settings.keys()
sk.sort()
for set in sk:
if set[0] != "_":
test_list += "; %s \n" % set
test_list += ";--- End Test Configuration File ---"
print test_list
# } // End of sample_test_list()
# void version() {
def version():
global TESTRUNNER_VERSION, TESTRUNNER_COPYRIGHT
versionstr = """
TestRunner v%s
Copyright %s David Michael Bryson, all rights reserved.
This software is open source, subject to certain conditions.
See the supplied license for details.
http://www.programerror.com/software/testrunner
""" % (TESTRUNNER_VERSION, TESTRUNNER_COPYRIGHT)
print versionstr
# } // End of version()
# // Calculate the median of a sequence
# int med(int[] seq) {
def med(seq):
seq.sort()
idx = len(seq) / 2
if len(seq) % 2 == 1: return seq[idx]
else: return (seq[idx] + seq[idx - 1]) / 2
# } // End of med()
class SCMWrapper_Git:
def __init__(self):
global settings
self.cmd = settings["git"]
self.submodule = settings["git_submodule"]
print self.submodule
def goSubmodule(self):
if self.submodule != "":
os.chdir(self.submodule)
def retSubmodule(self):
if self.submodule != "":
os.chdir("..")
def getVersionString(self, path):
self.goSubmodule()
rev = "exported"
try:
gverp = os.popen("%s describe" % (self.cmd))
rev = gverp.readline().strip()
gverp.close()
if rev == "": rev = "exported"
except (IOError, OSError): pass
self.retSubmodule();
return rev
def deleteMetadata(self, path): pass
def removeMetadataFromDirList(self, dirs): pass
def addDirectory(self, dir):
self.goSubmodule()
open(os.path.join(dir, ".gitignore"), "a").close()
ecode = os.spawnlp(os.P_WAIT, self.cmd, self.cmd, "add", dir)
if ecode != 0:
self.retSubmodule()
return False
self.retSubmodule()
return True
def removeDirectory(self, dir):
self.goSubmodule()
ecode = os.spawnlp(os.P_WAIT, self.cmd, self.cmd, "rm", "-r", dir)
if ecode != 0:
self.retSubmodule()
return False
if os.path.exists(dir):
self.retSubmodule()
return False
self.retSubmodule()
return True
class SCMWrapper_SVN:
def __init__(self):
global settings
self.cmd = settings["svn"]
self.version = settings["svnversion"]
self.metadir = settings["svnmetadir"]
if not os.path.exists(self.metadir):
print "Warning: Current directory does not appear to be a SVN working copy"
def getVersionString(self, path):
rev = "exported"
try:
sverp = os.popen("cd %s; %s" % (path, self.version))
rev = sverp.readline().strip()
sverp.close()
if rev == "": rev = "exported"
except (IOError, OSError): pass
return rev
def deleteMetadata(self, path):
# Remove copied svn metadata directories
for root, dirs, files in os.walk(path):
if svnmetadir in dirs: dirs.remove(self.metadir)
try:
shutil.rmtree(os.path.join(root, self.metadir))
except (IOError, OSError): pass
def removeMetadataFromDirList(self, dirs):
if self.metadir in dirs: dirs.remove(self.metadir)
def addDirectory(self, dir):
ecode = os.spawnlp(os.P_WAIT, self.cmd, self.cmd, "add", dir)
if ecode != 0: return False
return True
def removeDirectory(self, dir):
print "Error: SVN does not currently support directory removal"
return False
class SCMWrapper_None:
def __init__(self): pass
def getVersionString(self, path): return "exported"
def deleteMetadata(self, path): pass
def removeMetadataFromDirList(self, dirs): pass
def addDirectory(self, dir): return True
def removeDirectory(self, dir):
try:
shutil.rmtree(dir, True)
except (IOError, OSError): return False
return True
# Main Test Class - does the actual work for performing individual tests and reporting results
# ---------------------------------------------------------------------------------------------------------------------------
# class cTest {
class cTest:
NOTFOUND = "file not found"
DONOTMATCH = "content does not match"
# cTest::cTest(string name, string tdir) {
def __init__(self, name, tdir):
global settings, TRUE_STRINGS, RESAVAIL, EXPECTDIR, PERFDIR, TEST_LIST, PERF_BASE
self.name = name
self.tdir = tdir
self.scm = settings["scm"]
if settings.has_key("skip-tests"): self.skip = True
else: self.skip = False
self.cfg = ConfigParser.ConfigParser(settings)
self.cfg.read([os.path.join(tdir, TEST_LIST)])
expectdir = os.path.join(tdir, EXPECTDIR)
if os.path.exists(expectdir) and os.path.isdir(expectdir): self.has_expected = True
else: self.has_expected = False
perfdir = os.path.join(tdir, PERFDIR)
if os.path.exists(perfdir) and os.path.isdir(perfdir) and os.path.isfile(os.path.join(perfdir, PERF_BASE)):
self.has_perf_base = True
else: self.has_perf_base = False
if self.has_perf_base and settings.has_key("_reset_perf_base"):
try:
rev = self.scm.getVersionString(self.tdir)
oname = "perf-%s-reset-%s" % (time.strftime("%Y-%m-%d-%H.%M.%S"), rev)
shutil.move(os.path.join(perfdir, PERF_BASE), os.path.join(perfdir, oname))
print "%s : performance baseline reset" % name
except (IOError, OSError, shutil.Error): pass
# Load the App for the test and check that it exists
try:
self.app = self.cfg.get('main', 'app', False, settings)
except:
self.app = settings['default_app']
self.app = os.path.abspath(self.app)
if not os.path.exists(self.app):
print "Error: Application (%s) not found" % self.app
sys.exit(-1)
if not os.path.isfile(self.app):
print "Error: Application (%s) is not a file" % self.app
sys.exit(-1)
self.args = self.getConfig("main", "args", "")
if self.getConfig("consistency", "enabled", "yes") in TRUE_STRINGS: self.consistency_enabled = True
else: self.consistency_enabled = False
if self.getConfig("performance", "enabled", "no") in TRUE_STRINGS and RESAVAIL: self.performance_enabled = True
else: self.performance_enabled = False
self.success = True
self.result = "passed"
self.disabled = False
self.exitcode = 0
self.errors = []
self.psuccess = True
self.pdisabled = False
self.presult = "passed"
# } // End of cTest::cTest()
# string cTest::getConfig(string sect, string opt, string default)
def getConfig(self, sect, opt, default):
global settings
try:
return self.cfg.get(sect, opt, False, settings)
except:
return default
# } // End of cTest::getConfig()
def getName(self): return self.name
# bool cTest::isConsistencyTest() {
def isConsistencyTest(self): return self.consistency_enabled
# } // End of isConsistencyTest()
# bool cTest::wasConsistencySkipped() {
def wasConsistencySkipped(self): return self.disabled
# } // End of wasConsistencySkipped()
# bool cTest::isPerformanceTest() {
def isPerformanceTest(self): return self.performance_enabled
# } // End of isPerformanceTest()
# bool cTest::wasPerformanceSkipped() {
def wasPerformanceSkipped(self): return self.pdisabled
# } // End of wasPerformanceSkipped()
# void cTest::runConsistencyTest() {
def runConsistencyTest(self, dolongtest):
global settings, tmpdir, CONFIGDIR, EXPECTDIR
confdir = os.path.join(self.tdir, CONFIGDIR)
rundir = os.path.join(tmpdir, self.name)
expectdir = os.path.join(self.tdir, EXPECTDIR)
if not self.isConsistencyTest():
self.result = "skipped (not a consistency test)"
self.disabled = True
return
# If no expected results exist and in slave mode
if not self.has_expected and settings["mode"] == "slave":
self.result = "skipped (no expected results)"
self.disabled = True
return
if settings.has_key("_reset_expected"):
if not self.scm.removeDirectory(expectdir):
print "Error: unable to remove expected directory for reset"
self.success = False
return
self.has_expected = False
if self.has_expected and self.skip:
self.result = "skipped"
self.disabled = True
return
if self.getConfig("consistency", "long", "no") in TRUE_STRINGS and not dolongtest:
self.result = "skipped (long)"
self.disabled = True
return
# Create test directory and populate with config
try:
shutil.copytree(confdir, rundir)
except (IOError, OSError), e:
print "Error: unable to create run dir"
print " -- root cause: %s" % e
self.success = False
return
self.scm.deleteMetadata(rundir)
# Run test app, capturing output and exitcode
p = subprocess.Popen("cd %s; %s %s" % (rundir, self.app, self.args), shell=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, close_fds=True)
# Process output from app
# Note: must at least swallow app output so that the process output buffer does not fill and block execution
if settings.has_key("_verbose"): print
for line in p.stdout:
if settings.has_key("_verbose"):
sys.stdout.write("%s output: %s" % (self.name, line))
sys.stdout.flush()
self.exitcode = p.wait()
# Check exit code, depending on mode setting
nz = self.getConfig("main", "nonzeroexit", "disallow")
if (nz == "disallow" and self.exitcode != 0) or (nz == "require" and self.exitcode == 0):
self.success = False
try:
shutil.rmtree(rundir, True) # Clean up test directory
except (IOError, OSError): pass
return
# Build dictionary of config structure
confstruct = {}
for root, dirs, files in os.walk(confdir):
self.scm.removeMetadataFromDirList(dirs)
for file in files:
path = os.path.abspath(os.path.join(root, file))
key = path[len(confdir) + 1:] # remove confdir from path
confstruct[key] = path
# If no expected results exist, defer processing of new expected results to results phase
if not self.has_expected:
self.confstruct = confstruct
return
# Build dicitonary of expected structure
expectstruct = {}
for root, dirs, files in os.walk(expectdir):
self.scm.removeMetadataFromDirList(dirs)
for ifile in EXPECTED_IGNORE:
if ifile in files:
files.remove(ifile)
for file in files:
path = os.path.abspath(os.path.join(root, file))
key = path[len(expectdir) + 1:] # remove confdir from path
expectstruct[key] = [path, False, cTest.NOTFOUND]
# Traverse results, comparing with expected
for root, dirs, files in os.walk(rundir):
for file in files:
path = os.path.abspath(os.path.join(root, file))
key = path[len(rundir) + 1:] # remove confdir from path
if expectstruct.has_key(key):
# string[] getStippedLines(string filename) {
def getStrippedLines(filename):
fp = open(filename, "U")
filelines = fp.readlines()
fp.close()
retlines = []
for line in filelines:
line = string.lstrip(line)
if len(line) != 0 and line[0] != "#": retlines.append(line)
return retlines
# } // End of getStrippedLines()
match = True
if os.path.getsize(expectstruct[key][0]) < settings['diff-max-threshold']:
# Generate the diff between the two files, ignoring comments and blank lines
differ = difflib.Differ()
elines = getStrippedLines(path)
tlines = getStrippedLines(expectstruct[key][0])
diff = list(differ.compare(tlines, elines))
for line in diff:
if line[0] != ' ':
expectstruct[key][2] = cTest.DONOTMATCH
if settings.has_key("show-diff"):
expectstruct[key][2] += "\n\n"
for l in diff: expectstruct[key][2] += l
expectstruct[key][2] += "\n"
match = False
break
else:
elines = getStrippedLines(path)
tlines = getStrippedLines(expectstruct[key][0])
for i in range(len(elines)):
if elines[i] != tlines[i]:
expectstruct[key][2] = cTest.DONOTMATCH
match = False
break
expectstruct[key][1] = match
for key in expectstruct.keys():
entry = expectstruct[key]
if not entry[1]:
self.errors.append("%s : %s" % (key, entry[2]))
self.success = False
# Clean up test directory
try:
shutil.rmtree(rundir, True)
except (IOError, OSError): pass
# } // End of cTest::runConsistencyTest()
# void cTest::runPerformanceTest() {
def runPerformanceTest(self, dolongtest, saveresults):
global settings, tmpdir, CONFIGDIR, PERFDIR, TRUE_STRINGS, PERF_BASE
if self.has_perf_base and self.skip:
self.presult = "skipped"
self.pdisabled = True
return
if self.getConfig("performance", "long", "no") in TRUE_STRINGS and not dolongtest:
self.presult = "skipped (long)"
self.pdisabled = True
return
confdir = os.path.join(self.tdir, CONFIGDIR)
rundir = os.path.join(tmpdir, self.name)
perfdir = os.path.join(self.tdir, PERFDIR)
# Create test directory and populate with config
try:
shutil.copytree(confdir, rundir)
except (IOError, OSError):
self.psuccess = False
self.presult = "error occurred creating run directory"
return
self.scm.deleteMetadata(rundir)
# Run test X times, take min value
nz = self.getConfig("main", "nonzeroexit", "disallow")
r_times = []
t_times = []
for i in range(settings["perf_repeat"]):
t_start = time.time()
res_start = resource.getrusage(resource.RUSAGE_CHILDREN)
# Run test app, capturing output and exitcode
p = subprocess.Popen("cd %s; %s %s" % (rundir, self.app, self.args), shell=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, close_fds=True)
for line in p.stdout: pass
exitcode = p.wait()
res_end = resource.getrusage(resource.RUSAGE_CHILDREN)
t_end = time.time()
# Check exit code
if (nz == "disallow" and exitcode != 0) or (nz == "require" and exitcode == 0):
try:
shutil.rmtree(rundir, True) # Clean up test directory
except (IOError, OSError): pass
self.psuccess = False
self.presult = "test app returned non-zero exit code"
return
r_times.append(res_end.ru_utime - res_start.ru_utime)
t_times.append(t_end - t_start)
# Load baseline results
r_base = 0.0
t_base = 0.0
basepath = os.path.join(perfdir, PERF_BASE)
if self.has_perf_base:
try:
fp = open(basepath, "r")
line = fp.readline()
vals = line.split(',')
r_base = float(vals[0].strip())
t_base = float(vals[4].strip())
fp.close()
except (IOError):
self.has_perf_base = False
r_min = min(r_times)
r_max = max(r_times)
r_ave = sum(r_times) / len(r_times)
r_med = med(r_times)
t_min = min(t_times)
t_max = max(t_times)
t_ave = sum(t_times) / len(t_times)
t_med = med(t_times)
# If no baseline results exist, write out results
if not self.has_perf_base:
if saveresults:
try:
if not os.path.exists(perfdir):
os.mkdir(perfdir)
if not os.path.isdir(perfdir):
try:
shutil.rmtree(rundir, True) # Clean up test directory
except (IOError, OSError): pass
self.psuccess = False
self.presult = "unable to write out baseline, file exists"
return
fp = open(basepath, "w")
fp.write("%f,%f,%f,%f,%f,%f,%f,%f\n" % (r_min, r_max, r_ave, r_med, t_min, t_max, t_ave, t_med))
fp.flush()
fp.close()
except (IOError):
try:
shutil.rmtree(rundir, True) # Clean up test directory
except (IOError, OSError): pass
self.psuccess = False
self.presult = "error occurred writing baseline results"
return
self.presult = "new baseline - wall time: %3.4f user time: %3.4f" % (t_min, r_min)
else:
self.presult = "*unsaved* baseline - wall time: %3.4f user time: %3.4f" % (t_min, r_min)
try:
shutil.rmtree(rundir, True) # Clean up test directory
except (IOError, OSError): pass
return
# Compare results with baseline
r_margin = settings["perf_user_margin"] * r_base
r_umargin = r_base + r_margin
r_lmargin = r_base - r_margin
r_ratio = r_min / r_base
t_margin = settings["perf_wall_margin"] * t_base
t_umargin = t_base + t_margin
t_lmargin = t_base - t_margin
t_ratio = t_min / t_base
if r_min > r_umargin or t_min > t_umargin:
self.psuccess = False
self.presult = "failed"
elif r_min < r_lmargin or t_min < t_lmargin:
if saveresults:
# new baseline, move old baseline and write out new results
try:
rev = self.scm.getVersionString(self.tdir)
oname = "perf-%s-prev-%s" % (time.strftime("%Y-%m-%d-%H.%M.%S"), rev)
shutil.move(basepath, os.path.join(perfdir, oname))
fp = open(basepath, "w")
fp.write("%f,%f,%f,%f,%f,%f,%f,%f\n" % (r_min, r_max, r_ave, r_med, t_min, t_max, t_ave, t_med))
fp.flush()
fp.close()
except (IOError, OSError, shutil.Error):
print "Warning: error updating '%s' performance baseline" % self.name
self.presult = "exceeded"
# Print output on all tests
self.presult += "\n - wall: %2.2f base = %3.4f test = %3.4f" % (t_ratio, t_base, t_min)
self.presult += "\n - user: %2.2f base = %3.4f test = %3.4f" % (r_ratio, r_base, r_min)
# Clean up test directory
try:
shutil.rmtree(rundir, True)
except (IOError, OSError): pass
# } // End of cTest::runPerformanceTest()
# bool cTest::handleNewExpected() {
def handleNewExpected(self):
    """Promote this test's latest run output into its expected results.

    Copies the temporary run directory into the test's expected-results
    directory, strips configuration files (inputs, not outputs) out of the
    copy, removes the run directory, and registers the new directory with
    the SCM wrapper.

    Returns True on success (and always in slave mode, which never writes
    expected results), False if the copy failed.
    """
    global settings, EXPECTDIR
    # Slaves never generate expected results; report success without acting
    if settings["mode"] == "slave": return True
    rundir = os.path.join(tmpdir, self.name)
    expectdir = os.path.join(self.tdir, EXPECTDIR)
    try:
        shutil.copytree(rundir, expectdir)
    except (IOError, OSError), e:
        return False
    # Remove the test's configuration files from the copied tree
    for cfile in self.confstruct.keys():
        try:
            os.remove(os.path.join(expectdir, cfile))
        except OSError, e:
            print "Warning: failed to remove conf file (%s) from expected" % cfile
            print " -- root cause: %s" % e
    try:
        shutil.rmtree(rundir, True) # Clean up test directory
    except (IOError, OSError): pass
    # Place the new expected results under version control
    return self.scm.addDirectory(expectdir)
# } // End of cTest::handleNewExpected()
# bool cTest::reportConsistencyResults() {
def reportConsistencyResults(self):
    """Print this test's consistency outcome to stdout.

    On success without stored expected results, tries to generate them via
    handleNewExpected(); if that fails the test is marked as failed.
    Returns self.success (possibly updated by this call).
    """
    global settings
    print "%s :" % self.name,
    if self.success:
        if self.has_expected:
            print self.result
        else:
            # No stored expected results yet - try to create them now
            if self.handleNewExpected():
                if settings["mode"] == "slave":
                    print "skipped (no expected results)"
                else:
                    print "new expected results generated"
            else:
                print "unable to process new expected results"
                self.success = False
    else:
        print "failed\n"
        if self.exitcode != 0:
            # Non-zero exit status: decode the wait()-style status word
            print "exit code: %d" % os.WEXITSTATUS(self.exitcode)
            print "term signal: %d" % os.WTERMSIG(self.exitcode)
        else:
            # Clean exit, but the output differed from expected
            print "output variance(s):"
            for err in self.errors: print err
        print "\n"
    return self.success
# } // End of cTest::reportConsistencyResults()
# bool cTest::getConsistencyResults() {
def getConsistencyResults(self):
    """Return a (success, message) pair describing the consistency run.

    The message mirrors what reportConsistencyResults() prints: either the
    stored result string, or a failure summary built from the exit status
    (non-zero exit) or the recorded output variances (clean exit).
    """
    global settings
    if self.success:
        return (self.success, self.result)
    parts = ["failed\n"]
    if self.exitcode != 0:
        parts.append("exit code: %d\n" % os.WEXITSTATUS(self.exitcode))
        parts.append("term signal: %d\n" % os.WTERMSIG(self.exitcode))
    else:
        parts.append("output variance(s):\n")
        for variance in self.errors:
            parts.append(variance + "\n")
    return (self.success, "".join(parts))
# } // End of cTest::getConsistencyResults()
# bool cTest::reportPerformanceResults() {
def reportPerformanceResults(self):
    """Print the one-line performance outcome and return its success flag."""
    print "%s : %s" % (self.name, self.presult)
    return self.psuccess
# } // End of cTest::reportPerformanceResults()
# bool cTest::getPerformanceResults() {
def getPerformanceResults(self):
    """Return a (success, message) pair for this test's performance run."""
    outcome = (self.psuccess, self.presult)
    return outcome
# } // End of cTest::getPerformanceResults()
# void cTest::describe() {
def describe(self):
    """Print a one-line test summary: a '*' marks tests without stored
    expected results, 'c'/'p' mark consistency/performance enablement,
    followed by the test name.
    """
    if self.has_expected: print "  ",
    else: print " *",
    if self.consistency_enabled: print "c",
    else: print " ",
    if self.performance_enabled: print "p ",
    else: print "  ",
    print self.name
# } // End of cTest::describe()
# } // End of class cTest
# Main Test Running Functions - subroutines called by main to do the testing work
# ---------------------------------------------------------------------------------------------------------------------------
# (int, int) runConsistencyTests(cTest[] tests) {
def runConsistencyTests(alltests, dolongtests):
global settings, tmpdir
tests = []
for test in alltests:
if test.isConsistencyTest(): tests.append(test)
if len(tests) == 0:
print "No Consistency Tests Available (or Specified)."
return (0, 0)
print "\nRunning Consistency Tests:\n"
# Run Tests
sem = threading.BoundedSemaphore(settings["cpus"])
ti = 0
for test in tests:
# void runTestWrapper(cTest test, Semaphore sem) {
def runTestWrapper(test, sem):
test.runConsistencyTest(dolongtests)
sem.release()
# } // End of runTestWrapper()
sem.acquire()
ti += 1
sys.stdout.write("[% 3d of %d] %s\n" % (ti, len(tests), test.name[:65]))
sys.stdout.flush()
tthread = threading.Thread(target=runTestWrapper, args=(test, sem))
tthread.start()
for i in range(settings["cpus"]): sem.acquire()
sys.stdout.write("\n\n")
sys.stdout.flush()
# Report Results
success = 0
disabled = 0
fail = 0
for test in tests:
if test.reportConsistencyResults():
if test.wasConsistencySkipped():
disabled += 1
else:
success += 1
else: fail += 1
return (success, disabled, fail)
# } // End of runConsistencyTests()
# (int, int) runPerformanceTests(cTest[] tests) {
def runPerformanceTests(alltests, dolongtests, force, saveresults):
global settings, tmpdir
tests = []
if force: tests = alltests
else:
for test in alltests:
if test.isPerformanceTest(): tests.append(test)
if len(tests) == 0:
print "No Performance Tests Available (or Specified)."
return (0, 0)
print "\nRunning Performance Tests:\n"
# Run Tests
ti = 0
for test in tests:
ti += 1
sys.stdout.write("[% 3d of %d] %s\n" % (ti, len(tests), test.name[:65]))
sys.stdout.flush()
test.runPerformanceTest(dolongtests, saveresults)
sys.stdout.write("\n\n")
sys.stdout.flush()
# Report Results
success = 0
disabled = 0
fail = 0
for test in tests:
if test.reportPerformanceResults():
if test.wasPerformanceSkipped():
disabled += 1
else:
success += 1
else: fail += 1
return (success, disabled, fail)
# } // End of runPerformanceTests()
# Main - load configuration and call the workhorse routines
# ---------------------------------------------------------------------------------------------------------------------------
# int main(string[] argv) {
def main(argv):
    """Entry point: load configuration, parse the command line, discover
    tests under the test directory and run the consistency and/or
    performance suites.

    Returns 0 on success, the number of failed tests on failure, and -1 on
    usage or configuration errors.
    """
    global settings, tmpdir, CONFIGDIR
    scriptdir = os.path.abspath(os.path.dirname(argv[0]))
    # Read Configuration File
    cfg = ConfigParser.ConfigParser(settings)
    cfg.read([os.path.join(scriptdir, "testrunner.cfg")])
    # getConfig - embedded function to wrap loading configuration settings w/defaults
    # -------------------------------------------------------------------------------
    # string getConfig(string sect, string opt, string default) {
    def getConfig(sect, opt, default):
        try:
            global settings
            val = cfg.get(sect, opt, False, settings)
            return val
        except:
            # Any lookup failure (missing section/option) yields the default
            return default
    # } // End of getConfig()
    # Setup Global Settings
    # - settings that begin with an underscore (i.e. _testrunner_name) are for internal use and are not intended for
    #   use as variables in test_list configuration files
    settings["default_app"] = "" # App is defined later, since values like builddir can be modified by cmdline settings
    settings["builddir"] = getConfig("testrunner", "builddir", "build")
    settings["mode"] = getConfig("testrunner", "mode", "local")
    settings["scm"] = getConfig("testrunner", "scm", "none")
    settings["git"] = getConfig("git", "cmd", "git")
    settings["git_submodule"] = getConfig("git", "submodule", "")
    settings["svn"] = getConfig("svn", "cmd", "svn")
    settings["svnversion"] = getConfig("svn", "svnversion", "svnversion")
    settings["svnmetadir"] = getConfig("svn", "metadir", ".svn")
    settings["testdir"] = getConfig("testrunner", "testdir", "tests")
    settings["_testrunner_name"] = "testrunner.py"
    settings["perf_user_margin"] = float(getConfig("performance","usermargin",.05))
    settings["perf_wall_margin"] = float(getConfig("performance","wallmargin",.05))
    settings["perf_repeat"] = int(getConfig("performance","repeat",5))
    settings["cpus"] = 1
    settings["diff-max-threshold"] = 1536
    # Process Command Line Arguments
    try:
        opts, args = getopt.getopt(argv[1:], "fhj:lm:pg:s:v", \
            ["diff-max-threshold=","builddir=", "force-perf", "help", "help-test-cfg", "ignore-consistency", "list-tests", "long-tests", \
             "mode=", "scm=", "reset-expected", "reset-perf-base", "run-perf-tests", "show-diff", "skip-tests", "git=", "svnmetadir=", "svn=", "svnversion=", \
             "testdir=", "verbose", "version", "xml-report=", "-testrunner-name="])
    except getopt.GetoptError:
        usage()
        return -1
    # Define Option Flags
    opt_forceperf = False
    opt_ignoreconsistency = False
    opt_listtests = False
    opt_long = False
    opt_runperf = False
    opt_showhelp = False
    opt_showtestcfg = False
    opt_showversion = False
    # Process Supplied Options
    for opt, arg in opts:
        if opt in ("-h", "--help"):
            opt_showhelp = True
        elif opt == "--builddir":
            settings["builddir"] = arg
        elif opt == "--diff-max-threshold":
            settings["diff-max-threshold"] = arg
        elif opt == "--help-test-cfg":
            opt_showtestcfg = True
        elif opt == "-j":
            # Worker thread count; clamp to at least one
            cpus = int(arg)
            if cpus < 1: cpus = 1
            settings["cpus"] = cpus
        elif opt == "--scm":
            settings["scm"] = arg
        elif opt in ("-f", "--force-perf"):
            opt_forceperf = True
        elif opt == "--ignore-consistency":
            opt_ignoreconsistency = True
        elif opt in ("-l", "--list-tests"):
            opt_listtests = True
        elif opt == "--long-tests":
            opt_long = True
        elif opt in ("-m", "--mode"):
            settings["mode"] = arg
        elif opt == "--reset-expected":
            settings["_reset_expected"] = ""
        elif opt == "--reset-perf-base":
            settings["_reset_perf_base"] = ""
        elif opt in ("-p", "--run-perf-tests"):
            opt_runperf = True
        elif opt == "--show-diff":
            settings["show-diff"] = ""
        elif opt == "--skip-tests":
            settings["skip-tests"] = ""
        elif opt in ("-g", "--git"):
            settings["git"] = arg
        elif opt == "--svnmetadir":
            settings["svnmetadir"] = arg
        elif opt in ("-s", "--svn"):
            settings["svn"] = arg
        elif opt == "--svnversion":
            settings["svnversion"] = arg
        elif opt == "--testdir":
            settings["testdir"] = arg
        elif opt in ("-v", "--verbose"):
            settings["_verbose"] = ""
        elif opt == "--version":
            opt_showversion = True
        elif opt == "--xml-report":
            settings["xml_report"] = arg
        elif opt == "---testrunner-name":
            # Internal option; the declared long option "-testrunner-name="
            # is matched by getopt as "---testrunner-name"
            settings["_testrunner_name"] = arg
    # Show help or version and exit, if requested to do so
    if opt_showhelp or opt_showtestcfg or opt_showversion:
        if opt_showversion: version()
        if opt_showhelp: usage()
        if opt_showtestcfg: sample_test_list()
        return 0
    # Get the path to the test directory
    testdir = os.path.abspath(getConfig("main", "testdir", "."))
    settings["testdir"] = testdir
    # Re-read Configuration File with filled settings
    cfg = ConfigParser.ConfigParser(settings)
    cfg.read([os.path.join(scriptdir, "testrunner.cfg")])
    # Load the default app to test
    try:
        settings["default_app"] = os.path.abspath(cfg.get("main", "app"))
    except:
        print "Warning: No default app configured"
    # Replace the scm name string with a concrete SCM wrapper object
    if settings["scm"] == "git":
        settings["scm"] = SCMWrapper_Git()
    elif settings["scm"] == "svn":
        settings["scm"] = SCMWrapper_SVN()
    elif settings["scm"] == "none":
        settings["scm"] = SCMWrapper_None()
    else:
        print "Error: Unsupported SCM '%s'" % (settings["scm"])
        return -1
    # Load in all tests
    print "Reading Test Configurations:\n"
    tests = []
    prefix_filter = ["."]
    dlist = []
    if len(args) != 0:
        # Positional args are fnmatch patterns selecting test directories
        for d in dircache.listdir(testdir):
            for a in args:
                if fnmatch.fnmatch(d, a):
                    dlist.append(d)
                    break
    else:
        dlist = dircache.listdir(testdir)
        # Without explicit patterns, also skip underscore-prefixed entries
        prefix_filter.append("_")
    dircache.annotate(testdir, dlist)
    for d in dlist:
        # Directories with preceeding underscore or period are ignored, as are files
        # (dircache.annotate appends "/" to directory names)
        if d[0] in prefix_filter or d[len(d) - 1] != "/": continue
        name = d[:len(d) - 1]
        curtdir = os.path.join(testdir, name)
        contents = dircache.listdir(curtdir)
        if CONFIGDIR in contents:
            test = cTest(name, curtdir)
            test.describe()
            tests.append(test)
    # If selected, display available tests and exit
    if opt_listtests:
        return 0
    # Make temp directory to hold active tests
    tmpdir = tempfile.mkdtemp("_testrunner")
    success = 0
    disabled = 0
    fail = 0
    try:
        # Run Consistency Tests
        csuccess = 0
        cdisabled = 0
        cfail = 0
        if (not opt_runperf or not opt_ignoreconsistency):
            (csuccess, cdisabled, cfail) = runConsistencyTests(tests, opt_long)
        success += csuccess
        disabled += cdisabled
        fail += cfail
        # Run Performance Tests - only when consistency passed (or is ignored)
        psuccess = 0
        pdisabled = 0
        pfail = 0
        if (opt_ignoreconsistency or fail == 0) and opt_runperf:
            (psuccess, pdisabled, pfail) = runPerformanceTests(tests, opt_long, opt_forceperf, not opt_ignoreconsistency)
        success += psuccess
        disabled += pdisabled
        fail += pfail
        # Emit a JUnit-style XML report if --xml-report was given
        if settings.has_key("xml_report"):
            f = open(settings["xml_report"], "w")
            f.write("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n")
            f.write("<testsuites tests=\"%d\" failures=\"%d\" disabled=\"%d\" errors=\"0\" time=\"0\" name=\"AllTests\">\n" % (success + disabled + fail, fail, disabled))
            f.write("  <testsuite name=\"ConsistencyTests\" tests=\"%d\" failures=\"%d\" disabled=\"%d\" errors=\"0\" time=\"0\">\n" % (csuccess + cdisabled + cfail, cfail, cdisabled))
            for test in tests:
                if test.isConsistencyTest():
                    (tsuccess, message) = test.getConsistencyResults()
                    run = "run"
                    if test.wasConsistencySkipped(): run = "disabled"
                    if not tsuccess:
                        f.write("    <testcase name=\"%s\" status=\"%s\" time=\"0\" classname=\"ConsistencyTests\">\n" % (test.getName(), run))
                        f.write("      <failure message=\"%s\" type=\"\"/>\n" % (message))
                        f.write("    </testcase>\n")
                    else:
                        f.write("    <testcase name=\"%s\" status=\"%s\" time=\"0\" classname=\"ConsistencyTests\"/>\n" % (test.getName(), run))
            f.write("  </testsuite>\n");
            if (opt_ignoreconsistency or fail == 0) and opt_runperf:
                f.write("  <testsuite name=\"PerformanceTests\" tests=\"%d\" failures=\"%d\" disabled=\"%d\" errors=\"0\" time=\"0\">\n" % (psuccess + pdisabled + pfail, pfail, pdisabled))
                for test in tests:
                    if test.isPerformanceTest():
                        (tsuccess, message) = test.getPerformanceResults()
                        run = "run"
                        if test.wasPerformanceSkipped(): run = "disabled"
                        if not tsuccess:
                            f.write("    <testcase name=\"%s\" status=\"%s\" time=\"0\" classname=\"ConsistencyTests\">\n" % (test.getName(), run))
                            f.write("      <failure message=\"%s\" type=\"\"/>\n" % (message))
                            f.write("    </testcase>\n")
                        else:
                            f.write("    <testcase name=\"%s\" status=\"%s\" time=\"0\" classname=\"ConsistencyTests\"/>\n" % (test.getName(), run))
                f.write("  </testsuite>\n");
            f.write("</testsuites>\n")
            f.close()
        # Clean up test directory
        try:
            shutil.rmtree(tmpdir, True)
        except (IOError, OSError): pass
        if fail == 0:
            print "\nAll tests passed."
            return 0
        else:
            if disabled != 0:
                print "\n%d of %d tests failed (%d disabled)." % (fail, fail + disabled + success, disabled)
            else:
                print "\n%d of %d tests failed." % (fail, fail + success)
            return fail
    except (KeyboardInterrupt):
        # NOTE(review): user-visible message contains a typo ("Terminanting"),
        # and this branch falls through returning None, so an interrupted run
        # exits with status 0 and leaks tmpdir -- confirm intended.
        print "\nInterrupted... Terminanting Tests."
# } // End of main()
# void _main() { // Main entry point when called as standalone script
if __name__ == "__main__":
    # Exit status is main()'s return: 0 on success, failed-test count otherwise
    sys.exit(main(sys.argv))
# } // End of _main()
|
rholder613/AvidaAR
|
avida-master/avida-core/tests/_testrunner/testrunner.py
|
Python
|
gpl-2.0
| 41,438
|
[
"Brian"
] |
32df6344f17362c2819cb85fab82561b2494d334288649605d0257d77d1bacc7
|
# Shared methods for the github email hook and cron job
#
# Copyright (C) 2015 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA. Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#
# Author(s): David Shea <dshea@redhat.com>
import os
import requests
import threading
import smtplib
import pymongo
import iso8601
import email.utils
from email.mime.text import MIMEText
from github_email_hook.constants import PULL_REQUEST_COLLECTION
def json_to_email_date(date):
    """ Convert a JSON (ISO 8601) date string to an email (RFC 2822) string """
    # iso8601 yields an aware datetime; its POSIX timestamp feeds
    # email.utils.formatdate, which renders the RFC 2822 form.
    parsed = iso8601.parse_date(date)
    return email.utils.formatdate(timeval=parsed.timestamp())
def send_email(msg):
    """ Send a email.message object """
    # The recipient always comes from configuration; drop any preset To
    del msg['To']
    msg['To'] = os.environ['GHEH_EMAIL_TO']
    # Add an Approved header if requested (e.g. for moderated lists)
    if 'GHEH_EMAIL_APPROVED' in os.environ:
        msg['Approved'] = os.environ['GHEH_EMAIL_APPROVED']
    # Pick the SMTP port: explicit setting wins, then 587 for TLS, else 25
    try:
        smtp_port = os.environ['GHEH_SMTP_PORT']
    except KeyError:
        smtp_port = 587 if os.environ.get('GHEH_SMTP_TLS', False) else 25

    def _deliver(message):
        conn = smtplib.SMTP(os.environ['GHEH_SMTP_SERVER'], port=smtp_port)
        if os.environ.get('GHEH_SMTP_TLS', False):
            conn.starttls()
            # Resend EHLO over TLS
            conn.ehlo()
        if 'GHEH_SMTP_USER' in os.environ and 'GHEH_SMTP_PASSWORD' in os.environ:
            conn.login(os.environ['GHEH_SMTP_USER'], os.environ['GHEH_SMTP_PASSWORD'])
        conn.send_message(message)
        print("Message %s sent" % message['Message-Id'])

    # Don't hold up the web service while email is going
    threading.Thread(target=_deliver, args=(msg,)).start()
def get_github(url, etag=None):
    """ Retrieve a URL from github

    Returns a requests.Response object
    """
    headers = {}
    # Authenticated requests get a much higher rate limit
    if 'GHEH_GITHUB_OAUTH' in os.environ:
        headers['Authorization'] = 'token %s' % os.environ['GHEH_GITHUB_OAUTH']
    # Conditional request: github replies 304 when the etag still matches
    if etag:
        headers['If-None-Match'] = etag
    return requests.get(url, headers=headers)
def pull_request_msg_id(pull_request):
    """ Generate a message ID from a pull_request json object.

    The ID generated will be of the form:
    pull-request.<id>.<head.sha>@<base.ref>.<base.id>
    """
    base = pull_request["base"]
    return "<pull-request.{0}.{1}@{2}.{3}>".format(
        pull_request["id"],
        pull_request["head"]["sha"],
        base["ref"],
        base["repo"]["id"],
    )
def patch_msg_id(pull_request, patch_sha):
    """ Generate a message ID for a patch within a pull request. """
    base = pull_request["base"]
    return "<patch.{0}.{1}.{2}@{3}.{4}>".format(
        pull_request["id"],
        pull_request["head"]["sha"],
        patch_sha,
        base["ref"],
        base["repo"]["id"],
    )
def pull_request_subject(pull_request):
    """ Generate a subject line for a pull request. """
    base = pull_request["base"]
    return "[{0}/pulls/{1} {2}] {3}".format(
        base["repo"]["full_name"],
        pull_request["number"],
        base["ref"],
        pull_request["title"],
    )
def email_footer(url, msg_type="pull request"):
    """ Return a footer with a link to the pull request.

    url should contain the link.
    """
    return "\n-- \nTo view this {0} on github, visit {1}".format(msg_type, url)
def handle_commit_comment(data):
    """ Handle a commit comment event.

    This event is triggered by commit comments on commits in *our* repo.
    Commit comments on the copy of the commit in *their* repo are handled
    by the cron job, because we can't get push events for those.
    """
    # TODO Message-Id
    # Find the pull request that this commit is part of, if any
    client = pymongo.MongoClient(os.environ[os.environ["GHEH_DB_ENVVAR"]])
    db = client[os.environ['GHEH_DB_NAME']]
    pull_request_coll = db[PULL_REQUEST_COLLECTION]
    record = pull_request_coll.find_one({'commit_list.sha': data['comment']['commit_id']})
    if not record:
        # TODO maybe do something with comments outside a pull request at some
        # point. I dunno.
        return
    # Find the record for the commit itself
    commit = [c for c in record['commit_list'] if c['sha'] == data['comment']['commit_id']][0]
    # Re-create the patch subject ("Re: [<ref> <n>/<total>] <summary>") so the
    # reply threads under the original patch mail
    subject = "Re: [%s %d/%d] %s" % \
            (record['pull_request']['base']['ref'],
             record['commit_list'].index(commit) + 1, len(record['commit_list']),
             commit['commit']['message'].split('\n')[0])
    # Mail is sent from the configured address, labelled with the commenter's login
    from_addr = "%s <%s>" % (data["comment"]["user"]["login"], os.environ["GHEH_EMAIL_FROM"])
    if data["comment"]["line"]:
        # TODO maybe fetch the file and try to create some context
        body = "In reply to line %d of %s:\n\n" % (data["comment"]["line"], data["comment"]["path"])
    else:
        body = ""
    body += data["comment"]["body"]
    body += '\n' + email_footer(data["comment"]["html_url"], msg_type="comment")
    msg = MIMEText(body)
    msg['From'] = from_addr
    msg['Subject'] = subject
    # Thread the mail as a reply to the patch message for this commit
    msg['In-Reply-To'] = patch_msg_id(record['pull_request'], commit['sha'])
    msg['Date'] = json_to_email_date(data["comment"]["updated_at"])
    send_email(msg)
|
rhinstaller/github-email-hook
|
github_email_hook/__init__.py
|
Python
|
gpl-2.0
| 6,225
|
[
"VisIt"
] |
b6e927b107ad45eadf307b597a3b360c9d503a3116bc35a2fa3befab44a8c1bc
|
#!/usr/bin/python
# Driver script: set up the module path, then build and run the pipeline.
import os
import task
import sys
import pipeline
#import QC
#import Bwa
# Make the local drmaa-python checkout importable before importing drmaa
lib_path = os.path.abspath(os.path.join('drmaa-python'))
sys.path.append(lib_path)
import drmaa
# Construct the pipeline, reset its state, then execute it end to end
p = pipeline.pipeline();
p.clean()
p.processApp()
p.pj_initialize()
p.run()
#print "Log> Job Start"
#q = TT.TT()
#q.setName("TT task-sample")
#q.init()
#q.run()
#q.finish()
|
s4553711/HiScript
|
src/main.py
|
Python
|
mit
| 361
|
[
"BWA"
] |
d3ea8e7aedab531b173f09a569eb85aa9d62304519e3ecee42408ba4970179bd
|
from ...utils.helpers import CacheDict
from ...exceptions import SmtlibError
from .expression import *
from functools import lru_cache
import copy
import logging
import operator
import math
import threading
from decimal import Decimal
logger = logging.getLogger(__name__)
class Visitor:
    """Class/Type Visitor

    Inherit your class visitor from this one and get called on a different
    visiting function for each type of expression. It will call the first
    implemented method for the __mro__ class order.
    For example for a BitVecAdd it will try
    visit_BitVecAdd() if not defined then it will try with
    visit_BitVecOperation() if not defined then it will try with
    visit_BitVec() if not defined then it will try with
    visit_Expression()

    Other class named visitors are:
    visit_BitVec()
    visit_Bool()
    visit_Array()
    """

    def __init__(self, cache=None, **kwargs):
        super().__init__()
        # _stack holds intermediate results during the explicit DFS in visit()
        self._stack = []
        # A cache dict may be injected so several visitors share results
        self._cache = {} if cache is None else cache

    def push(self, value):
        """Push an intermediate result; None is not a legal value."""
        assert value is not None
        self._stack.append(value)

    def pop(self):
        """Pop and return the top intermediate result, or None when empty."""
        if len(self._stack) == 0:
            return None
        result = self._stack.pop()
        return result

    @property
    def result(self):
        # Exactly one value must remain after a complete traversal
        assert len(self._stack) == 1
        return self._stack[-1]

    def _method(self, expression, *args):
        """Dispatch to the most specific visit_<ClassName> method.

        Walks the expression's MRO (excluding object); the first visitor
        method that returns a non-None value wins.  If none does, the
        expression is rebuilt from args.
        """
        for cls in expression.__class__.__mro__[:-1]:
            sort = cls.__name__
            methodname = "visit_%s" % sort
            if hasattr(self, methodname):
                value = getattr(self, methodname)(expression, *args)
                if value is not None:
                    return value
        return self._rebuild(expression, args)

    def visit(self, node, use_fixed_point=False):
        """
        The entry point of the visitor.
        The exploration algorithm is a DFS post-order traversal
        The implementation used two stacks instead of a recursion
        The final result is store in self.result

        :param node: Node to explore
        :type node: Expression
        :param use_fixed_point: if True, it runs _methods until a fixed point is found
        :type use_fixed_point: Bool
        """
        if isinstance(node, ArrayProxy):
            node = node.array
        cache = self._cache
        visited = set()
        stack = []
        stack.append(node)
        while stack:
            node = stack.pop()
            if node in cache:
                self.push(cache[node])
            elif isinstance(node, Operation):
                if node in visited:
                    # Second encounter: children already processed, so their
                    # results are on the result stack
                    operands = [self.pop() for _ in range(len(node.operands))]
                    value = self._method(node, *operands)
                    visited.remove(node)
                    self.push(value)
                    cache[node] = value
                else:
                    # First encounter: re-queue the node, then its children
                    visited.add(node)
                    stack.append(node)
                    stack.extend(node.operands)
            else:
                # Leaf node (no operands): visit directly
                self.push(self._method(node))
        if use_fixed_point:
            # Re-run the traversal on the output until it stops changing
            old_value = None
            new_value = self.pop()
            while old_value is not new_value:
                self.visit(new_value)
                old_value = new_value
                new_value = self.pop()
            self.push(new_value)

    @staticmethod
    def _rebuild(expression, operands):
        """Return expression with its operands replaced, copying only when
        at least one operand actually changed (identity comparison)."""
        if isinstance(expression, Operation):
            if any(x is not y for x, y in zip(expression.operands, operands)):
                aux = copy.copy(expression)
                aux._operands = operands
                return aux
        return expression
class Translator(Visitor):
    """Simple visitor to translate an expression into something else"""

    def _method(self, expression, *args):
        """Dispatch like Visitor._method, but require a translation.

        Raises SmtlibError when no visit_<ClassName> method produced a
        value, instead of rebuilding the expression.
        """
        # Special case. Need to get the unsleeved version of the array
        if isinstance(expression, ArrayProxy):
            expression = expression.array
        assert expression.__class__.__mro__[-1] is object
        for cls in expression.__class__.__mro__:
            sort = cls.__name__
            methodname = f"visit_{sort:s}"
            if hasattr(self, methodname):
                value = getattr(self, methodname)(expression, *args)
                if value is not None:
                    return value
        raise SmtlibError(f"No translation for this {expression}")
class GetDeclarations(Visitor):
    """Simple visitor to collect all variables in an expression or set of
    expressions
    """

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Accumulates every *Variable node encountered during traversal
        self.variables = set()

    def _visit_variable(self, expression):
        self.variables.add(expression)

    # All variable flavors share the same collector
    visit_ArrayVariable = _visit_variable
    visit_BitVecVariable = _visit_variable
    visit_BoolVariable = _visit_variable

    @property
    def result(self):
        # Unlike Visitor.result, the answer is the collected variable set
        return self.variables
class GetDepth(Translator):
    """Visitor that computes the depth of an expression tree.

    Leaves count as depth 1; each operation contributes one level above the
    deepest of its operands.
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

    def visit_Expression(self, expression):
        # Any non-operation expression (constant/variable) is a leaf
        return 1

    def _visit_operation(self, expression, *operands):
        # operands already hold the depths of the children
        return 1 + max(operands)

    visit_ArraySelect = _visit_operation
    visit_ArrayOperation = _visit_operation
    visit_BoolOperation = _visit_operation
    visit_BitVecOperation = _visit_operation
def get_depth(exp):
    """Return the depth of the expression tree *exp* (leaves count as 1)."""
    depth_visitor = GetDepth()
    depth_visitor.visit(exp)
    return depth_visitor.result
class PrettyPrinter(Visitor):
    """Visitor that renders an expression tree as an indented text outline.

    depth, when given, limits how many operation levels are expanded;
    deeper subtrees are elided as "...".
    """

    def __init__(self, depth=None, **kwargs):
        super().__init__(**kwargs)
        self.output = ""   # accumulated rendered text
        self.indent = 0    # current indentation, two spaces per tree level
        self.depth = depth

    def _print(self, s, e=None):
        # e is accepted (and currently unused) so callers can pass the expression
        self.output += " " * self.indent + str(s)  # + '(%016x)'%hash(e)
        self.output += "\n"

    def visit(self, expression):
        """
        Overload Visitor.visit because:
        - We need a pre-order traversal
        - We use a recursion as it makes it easier to keep track of the indentation
        """
        self._method(expression)

    def _method(self, expression, *args):
        """
        Overload Visitor._method because we want to stop to iterate over the
        visit_ functions as soon as a valid visit_ function is found
        """
        assert expression.__class__.__mro__[-1] is object
        for cls in expression.__class__.__mro__:
            sort = cls.__name__
            methodname = "visit_%s" % sort
            method = getattr(self, methodname, None)
            if method is not None:
                method(expression, *args)
                return
        return

    def _visit_operation(self, expression, *operands):
        # Print the node, then recurse into children one indent level deeper
        self._print(expression.__class__.__name__, expression)
        self.indent += 2
        if self.depth is None or self.indent < self.depth * 2:
            for o in expression.operands:
                self.visit(o)
        else:
            # Depth limit reached: elide the subtree
            self._print("...")
        self.indent -= 2
        return ""

    visit_ArraySelect = _visit_operation
    visit_ArrayOperation = _visit_operation
    visit_BoolOperation = _visit_operation
    visit_BitVecOperation = _visit_operation

    def visit_BitVecExtract(self, expression):
        # Extract nodes additionally show their bit range {begining:end}
        self._print(
            expression.__class__.__name__ + "{%d:%d}" % (expression.begining, expression.end),
            expression,
        )
        self.indent += 2
        if self.depth is None or self.indent < self.depth * 2:
            for o in expression.operands:
                self.visit(o)
        else:
            self._print("...")
        self.indent -= 2
        return ""

    def _visit_constant(self, expression):
        self._print(expression.value)
        return ""

    visit_BitVecConstant = _visit_constant
    visit_BoolConstant = _visit_constant

    def _visit_variable(self, expression):
        self._print(expression.name)
        return ""

    visit_ArrayVariable = _visit_variable
    visit_BitVecVariable = _visit_variable
    visit_BoolVariable = _visit_variable

    @property
    def result(self):
        # The full text accumulated by _print during traversal
        return self.output
def pretty_print(expression, **kwargs):
    """Render *expression* as an indented multi-line string.

    Non-Expression values are simply converted with str().  Keyword
    arguments (e.g. depth) are forwarded to PrettyPrinter.
    """
    if isinstance(expression, Expression):
        printer = PrettyPrinter(**kwargs)
        printer.visit(expression)
        return printer.result
    return str(expression)
class ConstantFolderSimplifier(Visitor):
    """Visitor that folds operations over all-Constant operands into a
    single Constant.

    Each visit_* method returns a folded Constant (carrying the original
    expression's taint) when every operand is a Constant, or None to leave
    the expression unchanged.
    """

    def __init__(self, **kw):
        super().__init__(**kw)

    # Expression class -> Python operator used by _visit_operation to fold
    # constant operands.
    # NOTE(review): BitVecNot/BoolNot map to logical `not` while BitVecNeg
    # maps to bitwise `~`, and arithmetic results are not masked to the
    # bitvector width here -- confirm these match the intended fixed-width
    # semantics.
    operations = {
        BitVecMod: operator.__mod__,
        BitVecAdd: operator.__add__,
        BitVecSub: operator.__sub__,
        BitVecMul: operator.__mul__,
        BitVecShiftLeft: operator.__lshift__,
        BitVecShiftRight: operator.__rshift__,
        BitVecAnd: operator.__and__,
        BitVecOr: operator.__or__,
        BitVecXor: operator.__xor__,
        BitVecNot: operator.__not__,
        BitVecNeg: operator.__invert__,
        BoolAnd: operator.__and__,
        BoolEqual: operator.__eq__,
        BoolOr: operator.__or__,
        BoolNot: operator.__not__,
        UnsignedLessThan: operator.__lt__,
        UnsignedLessOrEqual: operator.__le__,
        UnsignedGreaterThan: operator.__gt__,
        UnsignedGreaterOrEqual: operator.__ge__,
    }

    def visit_BitVecUnsignedDiv(self, expression, *operands) -> Optional[BitVecConstant]:
        """Fold unsigned division of two constants (truncating)."""
        if all(isinstance(o, Constant) for o in operands):
            a = operands[0].value
            b = operands[1].value
            # NOTE(review): this guards the dividend (a), not the divisor
            # (b); b == 0 with a != 0 raises ZeroDivisionError -- confirm
            # whether the divisor was meant to be checked instead.
            if a == 0:
                ret = 0
            else:
                ret = math.trunc(Decimal(a) / Decimal(b))
            return BitVecConstant(size=expression.size, value=ret, taint=expression.taint)
        return None

    def visit_LessThan(self, expression, *operands) -> Optional[BoolConstant]:
        """Fold signed < over two constants."""
        if all(isinstance(o, Constant) for o in operands):
            a = operands[0].signed_value
            b = operands[1].signed_value
            return BoolConstant(value=a < b, taint=expression.taint)
        return None

    def visit_LessOrEqual(self, expression, *operands) -> Optional[BoolConstant]:
        """Fold signed <= over two constants."""
        if all(isinstance(o, Constant) for o in operands):
            a = operands[0].signed_value
            b = operands[1].signed_value
            return BoolConstant(value=a <= b, taint=expression.taint)
        return None

    def visit_GreaterThan(self, expression, *operands) -> Optional[BoolConstant]:
        """Fold signed > over two constants."""
        if all(isinstance(o, Constant) for o in operands):
            a = operands[0].signed_value
            b = operands[1].signed_value
            return BoolConstant(value=a > b, taint=expression.taint)
        return None

    def visit_GreaterOrEqual(self, expression, *operands) -> Optional[BoolConstant]:
        """Fold signed >= over two constants."""
        if all(isinstance(o, Constant) for o in operands):
            a = operands[0].signed_value
            b = operands[1].signed_value
            return BoolConstant(value=a >= b, taint=expression.taint)
        return None

    def visit_BitVecDiv(self, expression, *operands) -> Optional[BitVecConstant]:
        """Fold signed division of two constants (truncating).

        Operands are reinterpreted as signed via the sign bit before
        dividing; a zero divisor folds to 0.
        """
        if all(isinstance(o, Constant) for o in operands):
            signmask = operands[0].signmask
            mask = operands[0].mask
            # NOTE(review): 'dividend' below is operands[1], i.e. the divisor
            numeral = operands[0].value
            dividend = operands[1].value
            if numeral & signmask:
                numeral = -(mask - numeral - 1)
            if dividend & signmask:
                dividend = -(mask - dividend - 1)
            if dividend == 0:
                result = 0
            else:
                result = math.trunc(Decimal(numeral) / Decimal(dividend))
            return BitVecConstant(size=expression.size, value=result, taint=expression.taint)
        return None

    def visit_BitVecConcat(self, expression, *operands):
        """Fold concatenation of constant bitvectors into one constant."""
        if all(isinstance(o, Constant) for o in operands):
            result = 0
            for o in operands:
                result <<= o.size
                result |= o.value
            return BitVecConstant(size=expression.size, value=result, taint=expression.taint)

    def visit_BitVecZeroExtend(self, expression, *operands):
        """Zero-extension of a constant keeps its value at the wider size."""
        if all(isinstance(o, Constant) for o in operands):
            return BitVecConstant(
                size=expression.size, value=operands[0].value, taint=expression.taint
            )

    def visit_BitVecSignExtend(self, expression, *operands):
        # A zero-width sign extension is the identity
        if expression.extend == 0:
            return operands[0]

    def visit_BitVecExtract(self, expression, *operands):
        """Fold extraction of bits [begining, end] from a constant."""
        if all(isinstance(o, Constant) for o in operands):
            value = operands[0].value
            begining = expression.begining
            end = expression.end
            value = value >> begining
            mask = (1 << (end - begining + 1)) - 1
            value = value & mask
            return BitVecConstant(size=expression.size, value=value, taint=expression.taint)

    def visit_BoolAnd(self, expression, a, b):
        # AND with a constant True operand reduces to the other operand
        if isinstance(a, Constant) and a.value == True:
            return b
        if isinstance(b, Constant) and b.value == True:
            return a

    def _visit_operation(self, expression, *operands):
        """constant folding, if all operands of an expression are a Constant do the math"""
        operation = self.operations.get(type(expression), None)
        if operation is not None and all(isinstance(o, Constant) for o in operands):
            value = operation(*(x.value for x in operands))
            if isinstance(expression, BitVec):
                return BitVecConstant(size=expression.size, value=value, taint=expression.taint)
            else:
                # NOTE(review): bare `isinstance` is a no-op; this was
                # probably meant to be `assert isinstance(expression, Bool)`
                isinstance(expression, Bool)
                return BoolConstant(value=value, taint=expression.taint)
        else:
            # Not foldable: rebuild only if some operand actually changed
            if any(operands[i] is not expression.operands[i] for i in range(len(operands))):
                expression = self._rebuild(expression, operands)
            return expression

    visit_ArraySelect = _visit_operation
    visit_ArrayOperation = _visit_operation
    visit_BoolOperation = _visit_operation
    visit_BitVecOperation = _visit_operation
@lru_cache(maxsize=128, typed=True)
def constant_folder(expression):
    """Return *expression* with constant subexpressions folded.

    Runs ConstantFolderSimplifier to a fixed point; results are memoized
    per expression via lru_cache (which also keeps recent expressions alive).
    """
    simp = ConstantFolderSimplifier()
    simp.visit(expression, use_fixed_point=True)
    return simp.result
class ArithmeticSimplifier(Visitor):
def __init__(self, parent=None, **kw):
super().__init__(**kw)
@staticmethod
def _same_constant(a, b):
return isinstance(a, Constant) and isinstance(b, Constant) and a.value == b.value or a is b
@staticmethod
def _changed(expression, operands):
if isinstance(expression, Constant) and len(operands) > 0:
return True
arity = len(operands)
return any(operands[i] is not expression.operands[i] for i in range(arity))
def _visit_operation(self, expression, *operands):
"""constant folding, if all operands of an expression are a Constant do the math"""
if all(isinstance(o, Constant) for o in operands):
expression = constant_folder(expression)
if self._changed(expression, operands):
expression = self._rebuild(expression, operands)
return expression
visit_ArrayOperation = _visit_operation
visit_BoolOperation = _visit_operation
visit_BitVecOperation = _visit_operation
def visit_BitVecZeroExtend(self, expression, *operands):
    """Rebuild a zero-extend only when its operand changed; otherwise keep
    the existing node."""
    if not self._changed(expression, operands):
        return expression
    return BitVecZeroExtend(
        size_dest=expression.size, operand=operands[0], taint=expression.taint
    )
def visit_BoolAnd(self, expression, *operands):
    """true AND x -> x (either side). Also merges an AND of equalities of
    adjacent extracts of the same two base values into a single wider
    extract equality. Returns None implicitly when no rule applies.
    """
    if isinstance(operands[0], Constant) and operands[0].value == True:
        return operands[1]
    if isinstance(operands[1], Constant) and operands[1].value == True:
        return operands[0]
    # AND ( EQ (EXTRACT(0,8, a), EXTRACT(0,8, b)), EQ (EXTRACT(8,16, a), EXTRACT(8,16 b)) ->
    # EQ(EXTRACT(0,16, a), EXTRACT(0,16, b))
    if isinstance(operands[0], BoolEqual) and isinstance(operands[1], BoolEqual):
        # Eq operands
        operand_0 = operands[0]
        operand_1 = operands[1]
        # Extract operands
        operand_0_0 = operand_0.operands[0]
        operand_0_1 = operand_0.operands[1]
        operand_1_0 = operand_1.operands[0]
        operand_1_1 = operand_1.operands[1]
        if (
            isinstance(operand_0_0, BitVecExtract)
            and isinstance(operand_0_1, BitVecExtract)
            and isinstance(operand_1_0, BitVecExtract)
            and isinstance(operand_1_1, BitVecExtract)
        ):
            # Both equalities compare the *same* base values over identical
            # bit ranges on each side.
            if (
                operand_0_0.value is operand_1_0.value
                and operand_0_1.value is operand_1_1.value
                and (operand_0_0.begining, operand_0_0.end)
                == (operand_0_1.begining, operand_0_1.end)
                and (operand_1_0.begining, operand_1_0.end)
                == (operand_1_1.begining, operand_1_1.end)
            ):
                # The two ranges must be contiguous, in either order.
                if ((operand_0_0.end + 1) == operand_1_0.begining) or (
                    operand_0_0.begining == (operand_1_0.end + 1)
                ):
                    value0 = operand_0_0.value
                    value1 = operand_0_1.value
                    beg = min(operand_0_0.begining, operand_1_0.begining)
                    end = max(operand_0_0.end, operand_1_0.end)
                    return BitVecExtract(
                        operand=value0, offset=beg, size=end - beg + 1
                    ) == BitVecExtract(operand=value1, offset=beg, size=end - beg + 1)
def visit_BoolNot(self, expression, *operands):
    """not(not x) -> x; implicit None otherwise (keeps the node)."""
    inner = operands[0]
    if isinstance(inner, BoolNot):
        return inner.operands[0]
def visit_BoolEqual(self, expression, *operands):
    """(EQ, ITE(cond, constant1, constant2), constant1) -> cond
    (EQ, ITE(cond, constant1, constant2), constant2) -> NOT cond
    (EQ (extract a, b, c) (extract a, b, c))
    """
    if isinstance(operands[0], BitVecITE) and isinstance(operands[1], Constant):
        if isinstance(operands[0].operands[1], Constant) and isinstance(
            operands[0].operands[2], Constant
        ):
            value1, value2, value3 = (
                operands[1].value,
                operands[0].operands[1].value,
                operands[0].operands[2].value,
            )
            if value1 == value2 and value1 != value3:
                return operands[0].operands[0]  # FIXME: this may break taint propagation
            elif value1 == value3 and value1 != value2:
                return BoolNot(value=operands[0].operands[0], taint=expression.taint)
    # x == x is trivially true (same node object).
    if operands[0] is operands[1]:
        return BoolConstant(value=True, taint=expression.taint)
    # Identical extracts of the same base value over the same bit range.
    if isinstance(operands[0], BitVecExtract) and isinstance(operands[1], BitVecExtract):
        if (
            operands[0].value is operands[1].value
            and operands[0].end == operands[1].end
            and operands[0].begining == operands[1].begining
        ):
            return BoolConstant(value=True, taint=expression.taint)
def visit_BoolOr(self, expression, a, b):
    """false | x -> x ; true | x -> true ; x | x -> x."""
    # Check each side symmetrically, `a` first (preserves original order).
    for this, other in ((a, b), (b, a)):
        if isinstance(this, Constant):
            if this.value == False:
                return other
            if this.value == True:
                return this
    if a is b:
        return a
def visit_BitVecITE(self, expression, *operands):
    """ITE with a constant condition folds to the chosen branch, merging
    the condition's taint into the result."""
    if isinstance(operands[0], Constant):
        if operands[0].value:
            result = operands[1]
        else:
            result = operands[2]
        # Copy before mutating taint so shared nodes are not altered
        # in place.
        new_taint = result._taint | operands[0].taint
        if result._taint != new_taint:
            result = copy.copy(result)
            result._taint = new_taint
        return result
    if self._changed(expression, operands):
        return BitVecITE(
            size=expression.size,
            condition=operands[0],
            true_value=operands[1],
            false_value=operands[2],
            taint=expression.taint,
        )
def visit_BitVecConcat(self, expression, *operands):
    """concat( extract(k1, 0, a), extract(sizeof(a)-k1, k1, a)) ==> a
    concat( extract(k1, beg, a), extract(end, k1, a)) ==> extract(beg, end, a)
    concat( x , extract(k1, beg, a), extract(end, k1, a), z) ==> concat( x , extract(k1, beg, a), extract(end, k1, a), z)
    """
    if len(operands) == 1:
        return operands[0]
    # Pass 1: merge adjacent pairs of extracts over the same base value.
    # Operands run most- to least-significant, so the pending extract
    # `last_o` must sit directly above the current one to merge.
    changed = False
    last_o = None
    new_operands = []
    for o in operands:
        if isinstance(o, BitVecExtract):
            if last_o is None:
                last_o = o
            else:
                if last_o.value is o.value and last_o.begining == o.end + 1:
                    last_o = BitVecExtract(
                        operand=o.value,
                        offset=o.begining,
                        size=last_o.end - o.begining + 1,
                        taint=expression.taint,
                    )
                    changed = True
                else:
                    new_operands.append(last_o)
                    last_o = o
        else:
            # Non-extract operand: flush the pending extract, keep `o`.
            if last_o is not None:
                new_operands.append(last_o)
                last_o = None
            new_operands.append(o)
    if last_o is not None:
        new_operands.append(last_o)
    if changed:
        return BitVecConcat(size_dest=expression.size, operands=tuple(new_operands))
    # Pass 2: if every operand extracts contiguous ranges of one value,
    # the whole concat is a single extract (or the value itself).
    op = operands[0]
    value = None
    end = None
    begining = None
    for o in operands:
        # If found a non BitVecExtract, do not apply
        if not isinstance(o, BitVecExtract):
            value = None
            break
        # Set the value for the first item
        if value is None:
            value = o.value
            begining = o.begining
            end = o.end
        else:
            # If concat of extracts of different values do not apply
            if value is not o.value:
                value = None
                break
            # If concat of non contiguous extracs do not apply
            if begining != o.end + 1:
                value = None
                break
            # update begining variable
            begining = o.begining
    if value is not None:
        if end + 1 != value.size or begining != 0:
            return BitVecExtract(
                operand=value, offset=begining, size=end - begining + 1, taint=expression.taint
            )
    # Full-width, zero-offset extracts reassemble `value` exactly
    # (value is None -> no simplification).
    return value
def visit_BitVecExtract(self, expression, *operands):
    """extract(sizeof(a), 0)(a) ==> a
    extract(16, 0)( concat(a,b,c,d) ) => concat(c, d)
    extract(m,M)(and/or/xor a b ) => and/or/xor((extract(m,M) a) (extract(m,M) b)
    """
    op = operands[0]
    begining = expression.begining
    end = expression.end
    size = end - begining + 1
    # extract(sizeof(a), 0)(a) ==> a
    if begining == 0 and end + 1 == op.size:
        return op
    elif isinstance(op, BitVecExtract):
        # Collapse nested extracts into one extract over the base value.
        return BitVecExtract(
            operand=op.value, offset=op.begining + begining, size=size, taint=expression.taint
        )
    elif isinstance(op, BitVecConcat):
        # Keep only the concat pieces (or sub-extracts of them) that
        # overlap the requested bit range. Operands are iterated from
        # least-significant (reversed) upward.
        new_operands = []
        for item in reversed(op.operands):
            if size == 0:
                assert expression.size == sum([x.size for x in new_operands])
                return BitVecConcat(
                    size_dest=expression.size,
                    operands=tuple(reversed(new_operands)),
                    taint=expression.taint,
                )
            if begining >= item.size:
                # skip the item
                begining -= item.size
            else:
                if begining == 0 and size == item.size:
                    new_operands.append(item)
                    size = 0
                else:
                    if size <= item.size - begining:
                        new_operands.append(
                            BitVecExtract(operand=item, offset=begining, size=size)
                        )
                        size = 0
                    else:
                        new_operands.append(
                            BitVecExtract(
                                operand=item, offset=begining, size=item.size - begining
                            )
                        )
                        size -= item.size - begining
                        begining = 0
    elif isinstance(op, BitVecConstant):
        # Bug fix: mask to the low `size` bits. The previous mask
        # `~(1 << size)` cleared only bit `size` and left every higher
        # bit set, so the extracted constant was not truncated.
        return BitVecConstant(size=size, value=(op.value >> begining) & ((1 << size) - 1))
    if isinstance(op, (BitVecAnd, BitVecOr, BitVecXor)):
        # Distribute the extract over bitwise operations.
        bitoperand_a, bitoperand_b = op.operands
        return op.__class__(
            a=BitVecExtract(operand=bitoperand_a, offset=begining, size=expression.size),
            b=BitVecExtract(operand=bitoperand_b, offset=begining, size=expression.size),
            taint=expression.taint,
        )
def visit_BitVecAdd(self, expression, *operands):
    """a + 0 ==> a ; 0 + a ==> a."""
    lhs, rhs = operands
    if isinstance(rhs, BitVecConstant) and rhs.value == 0:
        return lhs
    if isinstance(lhs, BitVecConstant) and lhs.value == 0:
        return rhs
def visit_BitVecSub(self, expression, *operands):
    """a - 0 ==> a
    (a + b) - b ==> a
    (b + a) - b ==> a
    a - a ==> 0
    """
    left = operands[0]
    right = operands[1]
    if isinstance(left, BitVecAdd):
        if self._same_constant(left.operands[0], right):
            return left.operands[1]
        elif self._same_constant(left.operands[1], right):
            return left.operands[0]
    elif isinstance(left, BitVecSub) and isinstance(right, Constant):
        # (x - c2) - c1 ==> x - (c2 + c1): fold chained constant
        # subtractions, merging taints.
        subleft = left.operands[0]
        subright = left.operands[1]
        if isinstance(subright, Constant):
            return BitVecSub(
                a=subleft,
                b=BitVecConstant(
                    size=subleft.size,
                    value=subright.value + right.value,
                    taint=subright.taint | right.taint,
                ),
            )
    elif isinstance(right, Constant) and right.value == 0:
        return left
    else:
        # If equality can not be computed because of symbolic
        # variables '==' will raise an exception
        try:
            if left == right:
                return BitVecConstant(size=left.size, value=0)
        except ExpressionEvalError:
            pass
def visit_BitVecOr(self, expression, *operands):
    """a | 0 => a
    0 | a => a
    0xffffffff | a => 0xffffffff
    a | 0xffffffff => 0xffffffff
    """
    left = operands[0]
    right = operands[1]
    if isinstance(right, BitVecConstant):
        if right.value == 0:
            return left
        elif right.value == left.mask:
            # OR with an all-ones constant absorbs everything.
            return right
        elif isinstance(left, BitVecOr):
            # (x | y) | ct => x | (y | ct): fold the constant inward.
            left_left = left.operands[0]
            left_right = left.operands[1]
            if isinstance(right, Constant):
                return BitVecOr(a=left_left, b=(left_right | right), taint=expression.taint)
    elif isinstance(left, BitVecConstant):
        # Normalize: move the constant to the right operand.
        return BitVecOr(a=right, b=left, taint=expression.taint)
def visit_BitVecAnd(self, expression, *operands):
    """ct & x => x & ct move constants to the right
    a & 0 => 0 remove zero
    a & 0xffffffff => a remove full mask
    (b & ct2) & ct => b & (ct&ct2) associative property
    (a & (b | c) => a&b | a&c distribute over |
    """
    left = operands[0]
    right = operands[1]
    if isinstance(right, BitVecConstant):
        if right.value == 0:
            return right
        elif right.value == right.mask:
            return left
        elif isinstance(left, BitVecAnd):
            # Fold nested constant: (x & y) & ct => x & (y & ct).
            left_left = left.operands[0]
            left_right = left.operands[1]
            if isinstance(right, Constant):
                return BitVecAnd(a=left_left, b=left_right & right, taint=expression.taint)
        elif isinstance(left, BitVecOr):
            # Distribute the constant over |.
            left_left = left.operands[0]
            left_right = left.operands[1]
            return BitVecOr(a=right & left_left, b=right & left_right, taint=expression.taint)
    elif isinstance(left, BitVecConstant):
        # Normalize: constant goes to the right operand.
        return BitVecAnd(a=right, b=left, taint=expression.taint)
def visit_BitVecShiftLeft(self, expression, *operands):
    """a << 0 => a remove zero
    a << ct => 0 if ct >= sizeof(a) remove big constant shift
    """
    left = operands[0]
    right = operands[1]
    if isinstance(right, BitVecConstant):
        if right.value == 0:
            return left
        elif right.value >= right.size:
            # Bug fix: shifting a bitvector left by its full width (or
            # more) always yields zero (SMT-LIB bvshl semantics). The
            # previous code incorrectly returned `left` unchanged.
            return BitVecConstant(size=left.size, value=0, taint=expression.taint)
def visit_ArraySelect(self, expression, *operands):
    """ArraySelect (ArrayStore((ArrayStore(x0,v0) ...),xn, vn), x0)
    -> v0
    """
    arr, index = operands
    if isinstance(arr, ArrayVariable):
        return self._visit_operation(expression, *operands)
    if isinstance(index, BitVecConstant):
        ival = index.value
        # props are slow and using them in tight loops should be avoided, esp when they offer no additional validation
        # arr._operands[1] = arr.index, arr._operands[0] = arr.array
        # Peel off stores at *other* concrete indices until we hit one at
        # `ival`, a symbolic-index store, or the base array.
        while (
            isinstance(arr, ArrayStore)
            and isinstance(arr._operands[1], BitVecConstant)
            and arr._operands[1]._value != ival
        ):
            arr = arr._operands[0]  # arr.array
    if (
        isinstance(index, BitVecConstant)
        and isinstance(arr, ArrayStore)
        and isinstance(arr.index, BitVecConstant)
        and arr.index.value == index.value
    ):
        # Found a concrete store at exactly this index.
        if arr.value is not None:
            return arr.value
    else:
        # Could not resolve to a single store; if stores were peeled off,
        # retry the select against the shallower array.
        if arr is not expression.array:
            out = arr.select(index)
            if out is not None:
                return arr.select(index)
    return self._visit_operation(expression, *operands)
def visit_Expression(self, expression, *operands):
    """Leaf fallback: non-Operation nodes pass through untouched."""
    assert len(operands) == 0
    assert not isinstance(expression, Operation)
    return expression
@lru_cache(maxsize=128, typed=True)
def arithmetic_simplify(expression):
    """Run ArithmeticSimplifier over `expression` to a fixed point.

    Memoized; expressions must be hashable.
    """
    simp = ArithmeticSimplifier()
    simp.visit(expression, use_fixed_point=True)
    return simp.result
def to_constant(expression):
    """
    Iff the expression can be simplified to a Constant get the actual concrete value.
    This discards/ignore any taint
    """
    value = simplify(expression)
    if isinstance(value, Expression) and value.taint:
        raise ValueError("Can not simplify tainted values to constant")
    if isinstance(value, Constant):
        return value.value
    elif isinstance(value, Array):
        if expression.index_max:
            # NOTE(review): the loop bound comes from the *original*
            # `expression` while elements are read from the simplified
            # `value` — presumably both share index_max; confirm.
            ba = bytearray()
            for i in range(expression.index_max):
                value_i = simplify(value[i])
                if not isinstance(value_i, Constant):
                    break
                ba.append(value_i.value)
            else:
                # Every element was concrete: return the raw bytes.
                return bytes(ba)
        return expression
    return value
@lru_cache(maxsize=128, typed=True)
def simplify(expression):
    """Simplify `expression` (currently arithmetic simplification only)."""
    expression = arithmetic_simplify(expression)
    return expression
class TranslatorSmtlib(Translator):
"""Simple visitor to translate an expression to its smtlib representation"""
unique = 0
unique_lock = threading.Lock()
def __init__(self, use_bindings=False, *args, **kw):
    """use_bindings: when True, long repeated subterms are emitted as
    SMT-LIB `let` bindings instead of being repeated inline."""
    assert "bindings" not in kw
    super().__init__(*args, **kw)
    self.use_bindings = use_bindings
    self._bindings_cache = {}  # binder-name cache used by _add_binding
    self._bindings = []  # ordered (name, expression, smtlib) triples
def _add_binding(self, expression, smtlib):
    """Return `smtlib` unchanged, or a fresh/cached `let` binder name.

    Short terms (<= 10 chars) and the bindings-disabled case pass
    through untouched; otherwise a unique name is allocated and the
    (name, expression, smtlib) triple is recorded for `result`.
    """
    if not self.use_bindings or len(smtlib) <= 10:
        return smtlib
    if smtlib in self._bindings_cache:
        return self._bindings_cache[smtlib]
    with TranslatorSmtlib.unique_lock:
        TranslatorSmtlib.unique += 1
        name = "a_%d" % TranslatorSmtlib.unique
    self._bindings.append((name, expression, smtlib))
    # Bug fix: the cache is looked up by `smtlib` above, so it must be
    # stored under `smtlib` too. The old code keyed the store by
    # `expression`, so the cache never hit and duplicate binders were
    # allocated for identical terms.
    self._bindings_cache[smtlib] = name
    return name
@property
def bindings(self):
    """List of (name, expression, smtlib) let-bindings created so far."""
    return self._bindings
# Maps expression node types to their SMT-LIB operator spelling.
# Parameterized operators (extend/extract) are %-format templates that
# _visit_operation fills in with the node's parameters.
translation_table = {
    BoolNot: "not",
    BoolEqual: "=",
    BoolAnd: "and",
    BoolOr: "or",
    BoolXor: "xor",
    BoolITE: "ite",
    BitVecAdd: "bvadd",
    BitVecSub: "bvsub",
    BitVecMul: "bvmul",
    BitVecDiv: "bvsdiv",
    BitVecUnsignedDiv: "bvudiv",
    BitVecMod: "bvsmod",
    BitVecRem: "bvsrem",
    BitVecUnsignedRem: "bvurem",
    BitVecShiftLeft: "bvshl",
    BitVecShiftRight: "bvlshr",
    # NOTE(review): "bvashl" is not a standard SMT-LIBv2 operator
    # (arithmetic and logical left shift coincide) — confirm the target
    # solver accepts it.
    BitVecArithmeticShiftLeft: "bvashl",
    BitVecArithmeticShiftRight: "bvashr",
    BitVecAnd: "bvand",
    BitVecOr: "bvor",
    BitVecXor: "bvxor",
    BitVecNot: "bvnot",
    BitVecNeg: "bvneg",
    LessThan: "bvslt",
    LessOrEqual: "bvsle",
    GreaterThan: "bvsgt",
    GreaterOrEqual: "bvsge",
    UnsignedLessThan: "bvult",
    UnsignedLessOrEqual: "bvule",
    UnsignedGreaterThan: "bvugt",
    UnsignedGreaterOrEqual: "bvuge",
    BitVecSignExtend: "(_ sign_extend %d)",
    BitVecZeroExtend: "(_ zero_extend %d)",
    BitVecExtract: "(_ extract %d %d)",
    BitVecConcat: "concat",
    BitVecITE: "ite",
    ArrayStore: "store",
    ArraySelect: "select",
}
def visit_BitVecConstant(self, expression):
    """Render a bitvector constant: #b form for 1-bit, #x hex otherwise."""
    assert isinstance(expression, BitVecConstant)
    masked = expression.value & expression.mask
    if expression.size == 1:
        return "#b%d" % masked
    return "#x%0*x" % (expression.size // 4, masked)
def visit_BoolConstant(self, expression):
    """Render a boolean constant as SMT-LIB "true"/"false".

    Replaces the fragile `x and "true" or "false"` idiom with a
    conditional expression (same behavior, clearer intent).
    """
    return "true" if expression.value else "false"
def _visit_variable(self, expression):
    """Variables translate to their declared name."""
    return expression.name

visit_ArrayVariable = _visit_variable
visit_BitVecVariable = _visit_variable
visit_BoolVariable = _visit_variable
def visit_ArraySelect(self, expression, *operands):
    """Render a select; a store-chain array term may be let-bound."""
    array_smt, index_smt = operands
    if isinstance(expression.array, ArrayStore):
        array_smt = self._add_binding(expression.array, array_smt)
    return "(select %s %s)" % (array_smt, index_smt)
def _visit_operation(self, expression, *operands):
    """Render a generic operation as an s-expression via translation_table.

    Extend/extract operators embed their numeric parameters in the
    operator token itself, hence the %-formatting.
    """
    operation = self.translation_table[type(expression)]
    if isinstance(expression, (BitVecSignExtend, BitVecZeroExtend)):
        operation = operation % expression.extend
    elif isinstance(expression, BitVecExtract):
        operation = operation % (expression.end, expression.begining)
    # Long operand terms may be replaced by let-binding names.
    operands = [self._add_binding(*x) for x in zip(expression.operands, operands)]
    return "(%s %s)" % (operation, " ".join(operands))

visit_ArrayOperation = _visit_operation
visit_BoolOperation = _visit_operation
visit_BitVecOperation = _visit_operation
@property
def result(self):
    """Final SMT-LIB text, wrapping accumulated let-bindings (most recent
    binding innermost) around the base translation."""
    output = super().result
    if self.use_bindings:
        for name, expr, smtlib in reversed(self._bindings):
            output = "( let ((%s %s)) %s )" % (name, smtlib, output)
    return output
def translate_to_smtlib(expression, **kwargs):
    """Translate `expression` to its SMT-LIB textual form.

    kwargs are forwarded to TranslatorSmtlib (e.g. use_bindings=True).
    """
    translator = TranslatorSmtlib(**kwargs)
    translator.visit(expression)
    return translator.result
class Replace(Visitor):
    """Simple visitor to replaces expressions"""

    def __init__(self, bindings=None, **kwargs):
        # bindings: mapping {variable -> replacement expression}; required.
        super().__init__(**kwargs)
        if bindings is None:
            raise ValueError("bindings needed in replace")
        self._replace_bindings = bindings

    def _visit_variable(self, expression):
        # Substitute a variable when a replacement is registered for it.
        if expression in self._replace_bindings:
            return self._replace_bindings[expression]
        return expression

    visit_ArrayVariable = _visit_variable
    visit_BitVecVariable = _visit_variable
    visit_BoolVariable = _visit_variable
def replace(expression, bindings):
    """Return `expression` with every variable in `bindings` substituted.

    Iterates to a fixed point so chained substitutions resolve fully.
    """
    if not bindings:
        return expression
    visitor = Replace(bindings)
    visitor.visit(expression, use_fixed_point=True)
    result_expression = visitor.result
    return result_expression
class ArraySelectSimplifier(Visitor):
    """Collects, in `stores`, the concrete value of every ArrayStore whose
    index equals the given target index."""

    class ExpressionNotSimple(RuntimeError):
        # Raised when a stored value is symbolic, so the array cannot be
        # summarized by concrete writes.
        pass

    def __init__(self, target_index, **kwargs):
        super().__init__(**kwargs)
        self._target_index = target_index
        self.stores = []

    def visit_ArrayStore(self, exp, target, where, what):
        # NOTE(review): assumes `where` is concrete (has .value); a
        # symbolic index would raise AttributeError here — confirm callers
        # only use this on concrete-index store chains.
        if not isinstance(what, BitVecConstant):
            raise self.ExpressionNotSimple
        if where.value == self._target_index:
            self.stores.append(what.value)
def simplify_array_select(array_exp):
    """Return the list of concrete values ever stored at array_exp's
    (constant) index.

    Raises ArraySelectSimplifier.ExpressionNotSimple when a symbolic
    store value is encountered.
    """
    assert isinstance(array_exp, ArraySelect)
    simplifier = ArraySelectSimplifier(array_exp.index.value)
    simplifier.visit(array_exp)
    return simplifier.stores
def get_variables(expression):
    """Return the variable declarations referenced by `expression`,
    unwrapping an ArrayProxy to its underlying array first."""
    if isinstance(expression, ArrayProxy):
        expression = expression.array
    visitor = GetDeclarations()
    visitor.visit(expression)
    return visitor.result
|
trailofbits/manticore
|
manticore/core/smtlib/visitors.py
|
Python
|
agpl-3.0
| 38,537
|
[
"VisIt"
] |
a3543cf1c448b3ab4b5fbeacdb31eef924cd668269d1db214bdb38a024d1455a
|
#
# ui: user interface module for graphical handling
# Copyright (C) 2012 cmiN
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Author: Cosmin Poieana <cmin764@yahoo.com>
from Tkinter import * # it's evil, I know ;[
from tkMessageBox import showinfo, showerror # for About and exceptions
from sys import platform
from xhosts.crawler import HTTPError # web searching can cause exceptions
# some constants
BWD = 6  # button width (Tk "width" units)
LWD = 36  # listbox width
if "linux" in platform:
    # best with linux X
    BWD = 4
    LWD = 30
SEP = "->"  # separator between source and destination
DST = "127.0.0.1"  # default destination (block)
class Gui(Frame):
def __init__(self, master=None, margin=10, core=None):
    """Create toplevel and grid the frame on it.

    master: parent Tk widget; margin: outer padding in pixels;
    core: the application engine providing the hosts/search API.
    """
    Frame.__init__(self, master)
    self.core = core  # the application engine
    self.lastKey = None  # last searched query
    self.master.title("xhosts")
    self.grid(padx=margin, pady=margin)
    self.widgets()
    self.reloadButton.invoke()  # press Reload
def widgets(self):
    """Create and grid other widgets."""
    # create widgets
    self.hostsBox = Listbox(self, width=LWD, height=5)
    self.hostsBox.bind("<<ListboxSelect>>", self.select_host)
    self.hostsScroll = Scrollbar(self, command=self.hostsBox.yview,
                                 orient=VERTICAL)
    self.hostsBox.config(yscrollcommand=self.hostsScroll.set)
    self.srcEntry = Entry(self, width=LWD / 2)
    self.destEntry = Entry(self, width=LWD / 2)
    self.about = Label(self, text="A\nb\no\nu\nt", bg="orange")
    self.about.bind("<Button-1>", self.about_message)
    self.reloadButton = Button(self, text="Reload", width=BWD,
                               command=self.reload_hosts)
    self.saveButton = Button(self, text="Save", width=BWD,
                             command=self.save_hosts)
    self.addButton = Button(self, text="Add", width=BWD,
                            command=self.add_host)
    self.removeButton = Button(self, text="Remove", width=BWD,
                               command=self.remove_host)
    self.searchEntry = Entry(self, width=2 * LWD / 3)
    self.searchButton = Button(self, text="Search", width=BWD,
                               command=self.search)
    self.searchBox = Listbox(self, height=5)
    self.searchBox.bind("<<ListboxSelect>>", self.select_site)
    self.searchScroll = Scrollbar(self, command=self.searchBox.yview,
                                  orient=VERTICAL)
    self.searchBox.config(yscrollcommand=self.searchScroll.set)
    # grid (place and show) widgets
    self.hostsBox.grid(row=0, column=0, columnspan=2, sticky=W + E)
    self.hostsScroll.grid(row=0, column=2, sticky=N + S)
    self.srcEntry.grid(row=1, column=0, sticky=W)
    self.destEntry.grid(row=1, column=1, sticky=W)
    self.about.grid(row=1, column=2, rowspan=3, sticky=N + S + W + E)
    self.reloadButton.grid(row=2, column=0, sticky=W)
    self.saveButton.grid(row=2, column=0, sticky=E)
    self.addButton.grid(row=2, column=1, sticky=W)
    self.removeButton.grid(row=2, column=1, sticky=E)
    self.searchEntry.grid(row=3, column=0, columnspan=2, sticky=W)
    self.searchButton.grid(row=3, column=1, sticky=E)
    self.searchBox.grid(row=4, column=0, columnspan=2, sticky=W + E)
    self.searchScroll.grid(row=4, column=2, sticky=N + S)
def about_message(self, event):
    """Show this when About is clicked."""
    showinfo("About", "Show and edit entries from hosts file, " \
             "also search for hostnames on the web. Coded by cmiN.\n\n" \
             "\t Visit rstcenter.com")
def __clear_entries(self):
    """Empty the src and dest entry."""
    self.srcEntry.delete(0, END)
    self.destEntry.delete(0, END)
def __get_selected_host(self):
    """Return selected host's index, or None when nothing is selected."""
    selection = self.hostsBox.curselection()
    return selection[0] if selection else None
def __get_selected_site(self):
    """Return selected site's index, or None when nothing is selected."""
    selection = self.searchBox.curselection()
    return selection[0] if selection else None
def select_host(self, event):
    """Display the selected host in the entries below."""
    index = self.__get_selected_host()
    # NOTE(review): index may be None if the event fires with nothing
    # selected; Listbox.get(None) would then misbehave — confirm.
    item = self.hostsBox.get(index).split(SEP)  # split item
    self.__clear_entries()  # in src and dest
    self.srcEntry.insert(0, item[0].strip())
    self.destEntry.insert(0, item[1].strip())
def reload_hosts(self):
    """Reload and display hosts.

    Clears the entry fields and the listbox, then re-inserts one
    "src -> dest" row per entry from the core.
    """
    self.__clear_entries()
    self.hostsBox.delete(0, END)
    for src, dest in self.core.list_entries():
        # Clearer than the old hybrid '"%s {} %s".format(SEP) % item'
        # idiom; output string is identical.
        self.hostsBox.insert(END, "%s %s %s" % (src, SEP, dest))
def save_hosts(self):
    """Save the current entries to hosts file."""
    try:
        self.core.write_entries()
    except IOError as err:
        # Writing /etc/hosts (or equivalent) commonly fails on permissions.
        showerror("IOError", err)
def add_host(self):
    """Add host (from src and entry) to memory and listbox."""
    src = self.srcEntry.get().strip()
    dest = self.destEntry.get().strip()
    if not bool(src) or not bool(dest):
        return  # empty src or dest
    rcode = self.core.add_entry(src, dest)  # add to memory
    # .format fills {} with SEP, then % fills the %s placeholders.
    item = "%s {} %s".format(SEP) % (src, dest)
    if rcode == 1:  # added
        self.hostsBox.insert(END, item)
    else:  # replaced
        # find and replace in the listbox too
        # (xrange: this module targets Python 2 — see Tkinter import)
        for index in xrange(self.hostsBox.size()):
            crt = self.hostsBox.get(index).split(SEP)
            if src == crt[0].strip():
                # replace the item
                self.hostsBox.delete(index)
                self.hostsBox.insert(index, item)
                break
def remove_host(self):
    """Remove host from both memory and listbox."""
    index = self.__get_selected_host()  # None when nothing is selected
    # Bug fix: compare against None explicitly. Depending on the Tk
    # version the index for the first row can be the integer 0, and a
    # bare `if index:` would wrongly skip removing item 0.
    if index is not None:
        item = self.hostsBox.get(index).split(SEP)
        self.core.remove_entry(item[0].strip())
        self.hostsBox.delete(index)
def search(self):
    """Return next results if the same key is used
    or start from the beginning if the key is changed.
    """
    key = self.searchEntry.get().strip()
    if key != self.lastKey:
        # reset the search for the new key
        self.lastKey = key  # also start from 0
        self.core.set_key(key)  # with first 2 results
    # Always clear before re-inserting: search_sites() returns the
    # accumulated results for the current key.
    self.searchBox.delete(0, END)  # clear
    try:
        sites = self.core.search_sites()
    except HTTPError as err:
        showerror("HTTPError", err)
        return
    for site in sites:  # insert them (contains the previous searches)
        self.searchBox.insert(END, site)
def select_site(self, event):
    """Called when an entry from the sites's listbox is selected."""
    index = self.__get_selected_site()
    # NOTE(review): index may be None with an empty selection — confirm
    # Tk never fires the event in that state.
    site = self.searchBox.get(index)  # get selected site string
    self.__clear_entries()
    self.srcEntry.insert(0, site)
    self.destEntry.insert(0, DST)  # pre-fill the blocking destination
|
cmin764/xhosts
|
xhosts/ui.py
|
Python
|
gpl-3.0
| 8,005
|
[
"VisIt"
] |
87d9391bb9d267bc84693584cbde37a78baf4b9018cac7a91b1b89531994d5aa
|
"""
pyaircraft.io.read_p3_flight
=========================
This is a grouping of scripts designed to process NOAA P-3
flight level data recorded during flights and put into NetCDF
format by NOAA AOC.
Created by Nick Guy.
"""
# NOTES:: This has only been tested with DYNAMO data files, versions
# may change and another function may be needed.
# HISTORY::
# 8 Jan 2014 - Nick Guy. NRC, NOAA/NSSL (nick.guy@noaa.gov)
# Converted NCL functions below to Python
# FUNCTIONS::
# flight_level_variable - Read in a variable from flight level NetCDF
# flight_track - Read in data to for flight track
#-------------------------------------------------------------------
# Load the needed packages
from scipy.io import netcdf
from netCDF4 import Dataset,num2date
import json
import numpy as np
import pytz
#-------------------------------------------------------------------
# Define various constants that may be used for calculations
#===============================================================
# BEGIN FUNCTIONS
#===============================================================
def flight_level_variable(fname, Rec):
    """Read one variable from a NOAA AOC P-3 flight-level NetCDF file.

    INPUT::
      fname = Filename [string]
      Rec   = Variable name to be pulled out [string]
    OUTPUT::
      VarOut = array containing variable data
    USAGE::
      Lat = flight_level_variable('P3.nc', 'LatGPS.3')

    NOTES::
      Typical variables include (not a full list):
        LonGPS.3 / LatGPS.3 / AltGPS.3 = Novatel GPS position
        AltPaADDU.1 = Pressure altitude [m]
        TA.1 / TD.1 = Ambient / dewpoint temperature [C]
        WS.1 / WD.1 = Wind speed [m/s] / direction [deg]
        HUM_REL.1 / HUM_SPEC.1 = Relative [%] / specific [g/kg] humidity
    """
    # Read the NetCDF
    ncFile = netcdf.netcdf_file(fname, 'r')
    # Bug fix: look up the *requested* record name. The old code indexed
    # with the literal string 'Rec', so every call raised KeyError unless
    # a variable literally named "Rec" existed in the file.
    VarOut = ncFile.variables[Rec][:]
    # NOTE(review): no missing-value masking is applied here; earlier
    # commented-out code attempted masked_values on _FillValue /
    # missing_value attributes. Consider np.ma.masked_invalid(VarOut)
    # if NaNs are expected in these files.
    return VarOut
#**====================================================
def flight_track(fname):
    """Read in data from NetCDF file containing P3 flight level data created
    by NOAA AOC. Pull out the needed variables for flight track info.

    INPUT::
      fname = Filename [string]
    OUTPUT::
      Lat  = Aircraft latitude (masked where non-finite)
      Lon  = Aircraft longitude (masked where non-finite)
      Alt  = Aircraft altitude (masked where non-finite)
      PAlt = Aircraft pressure altitude (masked where non-finite)
      Time = Aircraft time array (naive datetimes, nominally UTC)
    USAGE::
      Lat, Lon, Alt, PAlt, Time = flight_track(fname)
    """
    # Read the NetCDF
    ncFile = Dataset(fname, 'r')
    # Pull out each variable
    Lat = ncFile.variables['LatGPS.3'][:]
    Lon = ncFile.variables['LonGPS.3'][:]
    Alt = ncFile.variables['AltGPS.3'][:]
    PAlt = ncFile.variables['AltPaADDU.1'][:]
    # Pull out the start time
    StartTime = ncFile.StartTime
    # Create a nominal 1 Hz time base starting at StartTime (epoch secs).
    # NOTE(review): linspace over len(Lat) points spans len(Lat) seconds,
    # giving a step slightly above 1 s — confirm intended sampling.
    TimeSec = np.linspace(StartTime, StartTime + len(Lat), len(Lat))
    Time_unaware = num2date(TimeSec, 'seconds since 1970-01-01 00:00:00+0:00')
    Time = Time_unaware  # .replace(tzinfo=pytz.UTC)
    # Bug fix: np.ma.masked_invalid returns a *new* masked array; the old
    # code discarded the results, so no masking actually happened.
    Lat = np.ma.masked_invalid(Lat)
    Lon = np.ma.masked_invalid(Lon)
    Alt = np.ma.masked_invalid(Alt)
    PAlt = np.ma.masked_invalid(PAlt)
    return Lat, Lon, Alt, PAlt, Time
|
nguy/pyparticleprobe
|
pyparticleprobe/io/read_p3_flight.py
|
Python
|
lgpl-2.1
| 4,960
|
[
"NetCDF"
] |
cc8056a53246a646f51eaec9e488249e0c3f8cec29487da369b951a5ec49d643
|
#!/usr/bin/env python
import numpy
import itertools
from pymatgen.core.lattice import Lattice
from pymatgen.core.operations import SymmOp
from pymatgen.core.structure import Structure
from crystal import fillcell, tikz_atoms
def trilayer(doped = None):
    """Build a cubic Si supercell with two Mn planes (a "trilayer").

    doped: optional species symbol; when given, the Si site at fractional
    coordinates [0.5, 0.0, 0.5] is replaced by that species.
    """
    a = 5.43  # Si conventional lattice constant [Angstrom]
    fcc = Lattice([[a/2,a/2,0],[a/2,0,a/2],[0,a/2,a/2]])
    trilayer = Structure(fcc,['Si']*2,[[0.00,0.00,0.00],[0.25,0.25,0.25]])
    # Make the cell cubic
    trilayer.make_supercell([[1,1,-1],[1,-1,1],[-1,1,1]])
    trilayer.make_supercell([[1,1,0],[1,-1,0],[0,0,4]])
    # Rotate the cell
    rt = 0.70710678118654746  # sqrt(2)/2: 45-degree rotation about z
    symmop = SymmOp.from_rotation_and_translation([[rt,rt,0],[rt,-rt,0],[0,0,1]])
    trilayer.apply_operation(symmop)
    if doped is not None:
        # Replace the Si atom closest to the target fractional position.
        frac_coords = numpy.array([0.5,0.0,0.5])
        for i,atom in enumerate(trilayer):
            if numpy.linalg.norm(atom.frac_coords-frac_coords) < 0.001:
                trilayer.replace(i,doped,frac_coords)
    # Insert Mn atoms on two z-planes of the supercell.
    for z in [0.375,0.625]:
        for xy in [0.00,0.50]:
            trilayer.append('Mn',[xy,xy,z])
    return trilayer
# Build the undoped structure, fill the unit cell with equivalent sites,
# compute Si-Si bonds, and emit a TikZ picture of the result.
atoms = trilayer(doped=None)
atoms_full = fillcell(atoms)
bondatoms = []
for sitei,sitej in itertools.combinations(atoms_full,2):
    # Bond when within 125% of the summed atomic radii; Mn is excluded.
    radius = sitei.specie.atomic_radius + sitej.specie.atomic_radius
    bondlength = sitei.distance_from_point(sitej.coords)
    if bondlength <= 1.25 * radius:
        if sitei.specie.symbol != 'Mn' and sitej.specie.symbol != 'Mn':
            bondatoms.append((sitei,sitej))
tikz_atoms(atoms_full, bondatoms, drawcell = True)
|
ldamewood/figures
|
scripts/trilayer.py
|
Python
|
mit
| 1,564
|
[
"CRYSTAL",
"pymatgen"
] |
c7e84fe12f1511e8f4e0febf14f1fcbec13fcd6252e2760edf987881eb2d52e3
|
#!/usr/bin/env python3
"""
Template by pypi-mobans
"""
import os
import sys
import codecs
import locale
import platform
from shutil import rmtree
from setuptools import Command, setup, find_packages
# Work around mbcs bug in distutils.
# http://bugs.python.org/issue10945
# This work around is only if a project supports Python < 3.4
# Work around for locale not being set
try:
lc = locale.getlocale()
pf = platform.system()
if pf != "Windows" and lc == (None, None):
locale.setlocale(locale.LC_ALL, "C.UTF-8")
except (ValueError, UnicodeError, locale.Error):
locale.setlocale(locale.LC_ALL, "en_US.UTF-8")
NAME = "pyexcel-ods"
AUTHOR = "chfw"
VERSION = "0.6.0"
EMAIL = "info@pyexcel.org"
LICENSE = "New BSD"
DESCRIPTION = (
"A wrapper library to read, manipulate and write data in ods format"
)
URL = "https://github.com/pyexcel/pyexcel-ods"
DOWNLOAD_URL = "%s/archive/0.6.0.tar.gz" % URL
FILES = ["README.rst", "CHANGELOG.rst"]
KEYWORDS = [
"python",
]
CLASSIFIERS = [
"Topic :: Software Development :: Libraries",
"Programming Language :: Python",
"Intended Audience :: Developers",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
]
PYTHON_REQUIRES = ">=3.6"
INSTALL_REQUIRES = [
"pyexcel-io>=0.6.2",
"odfpy>=1.3.5",
]
SETUP_COMMANDS = {}
PACKAGES = find_packages(exclude=["ez_setup", "examples", "tests", "tests.*"])
EXTRAS_REQUIRE = {
}
# You do not need to read beyond this line
PUBLISH_COMMAND = "{0} setup.py sdist bdist_wheel upload -r pypi".format(sys.executable)
GS_COMMAND = ("gs pyexcel-ods v0.6.0 " +
"Find 0.6.0 in changelog for more details")
NO_GS_MESSAGE = ("Automatic github release is disabled. " +
"Please install gease to enable it.")
UPLOAD_FAILED_MSG = (
'Upload failed. please run "%s" yourself.' % PUBLISH_COMMAND)
HERE = os.path.abspath(os.path.dirname(__file__))
class PublishCommand(Command):
    """Build and publish the package on github and pypi.

    Invoked as ``python setup.py publish``: removes stale build
    artefacts, optionally creates a github release via the ``gease``
    tool, then uploads an sdist and a wheel to pypi.
    """
    # Shown by ``python setup.py --help-commands``.
    description = "Build and publish the package on github and pypi"
    # This command accepts no command-line options.
    user_options = []
    @staticmethod
    def status(s):
        """Prints things in bold."""
        # ANSI escapes: "\033[1m" turns bold on, "\033[0m" resets.
        print("\033[1m{0}\033[0m".format(s))
    def initialize_options(self):
        # Required by the distutils Command interface; nothing to set up.
        pass
    def finalize_options(self):
        # Required by the distutils Command interface; nothing to validate.
        pass
    def run(self):
        try:
            self.status("Removing previous builds...")
            # NOTE(review): all three removals share one try block, so if an
            # early rmtree raises OSError the later directories are skipped.
            rmtree(os.path.join(HERE, "dist"))
            rmtree(os.path.join(HERE, "build"))
            rmtree(os.path.join(HERE, "pyexcel_ods.egg-info"))
        except OSError:
            # Directories may simply not exist yet; that is fine.
            pass
        self.status("Building Source and Wheel (universal) distribution...")
        run_status = True
        if has_gease():
            # Create the github release first; a failure skips the upload.
            run_status = os.system(GS_COMMAND) == 0
        else:
            self.status(NO_GS_MESSAGE)
        if run_status:
            if os.system(PUBLISH_COMMAND) != 0:
                self.status(UPLOAD_FAILED_MSG)
        sys.exit()
# Register the custom command so ``python setup.py publish`` works.
SETUP_COMMANDS.update({
    "publish": PublishCommand
})
def has_gease():
    """Return True when the optional github-release helper is importable.

    Visit http://github.com/moremoban/gease for more info.
    """
    try:
        import gease  # noqa
    except ImportError:
        return False
    return True
def read_files(*files):
    """Concatenate the given files, each followed by a newline, into one string."""
    return "".join(read(single_file) + "\n" for single_file in files)
def read(afile):
    """Return *afile* (relative to this directory) with test code stripped."""
    full_path = os.path.join(HERE, afile)
    with codecs.open(full_path, "r", "utf-8") as opened_file:
        return "".join(filter_out_test_code(opened_file))
def filter_out_test_code(file_handle):
    """Yield lines of *file_handle*, dropping testcode blocks and Sphinx markers.

    A ``.. testcode:`` directive starts a block; its indented body and any
    blank lines inside it are skipped. Outside such blocks, lines containing
    ``|version|`` or ``|today|`` substitutions are dropped as well.
    """
    in_test_block = False
    for line in file_handle.readlines():
        if line.startswith(".. testcode:"):
            in_test_block = True
            continue
        if in_test_block:
            # Indented or blank lines belong to the test block: skip them.
            if line.startswith(" ") or not line.strip():
                continue
            # First unindented, non-empty line ends the block and is kept.
            in_test_block = False
            yield line
        elif "|version|" not in line and "|today|" not in line:
            yield line
if __name__ == "__main__":
    # Hand all metadata collected above to setuptools.
    setup(
        test_suite="tests",
        name=NAME,
        author=AUTHOR,
        version=VERSION,
        author_email=EMAIL,
        description=DESCRIPTION,
        url=URL,
        download_url=DOWNLOAD_URL,
        # Long description is README + CHANGELOG with test code stripped.
        long_description=read_files(*FILES),
        license=LICENSE,
        keywords=KEYWORDS,
        python_requires=PYTHON_REQUIRES,
        extras_require=EXTRAS_REQUIRE,
        tests_require=["nose"],
        install_requires=INSTALL_REQUIRES,
        packages=PACKAGES,
        include_package_data=True,
        zip_safe=False,
        classifiers=CLASSIFIERS,
        cmdclass=SETUP_COMMANDS
    )
|
chfw/pyexcel-ods
|
setup.py
|
Python
|
bsd-3-clause
| 5,214
|
[
"VisIt"
] |
938f3fbb15af7e34e78490fabfe40110fd1b53ce97993e755ab43f08b928d620
|
"""Emojione autogen.
Generated from emojione source. Do not edit by hand.
MIT license.
Copyright (c) http://www.emojione.com
"""
version = "v2.2.7"
name = "emojione"
emoji = {
":100:": {
"category": "symbols",
"name": "hundred points symbol",
"unicode": "1f4af"
},
":1234:": {
"category": "symbols",
"name": "input symbol for numbers",
"unicode": "1f522"
},
":8ball:": {
"category": "activity",
"name": "billiards",
"unicode": "1f3b1"
},
":a:": {
"category": "symbols",
"name": "negative squared latin capital letter a",
"unicode": "1f170"
},
":ab:": {
"category": "symbols",
"name": "negative squared ab",
"unicode": "1f18e"
},
":abc:": {
"category": "symbols",
"name": "input symbol for latin letters",
"unicode": "1f524"
},
":abcd:": {
"category": "symbols",
"name": "input symbol for latin small letters",
"unicode": "1f521"
},
":accept:": {
"category": "symbols",
"name": "circled ideograph accept",
"unicode": "1f251"
},
":aerial_tramway:": {
"category": "travel",
"name": "aerial tramway",
"unicode": "1f6a1"
},
":airplane:": {
"category": "travel",
"name": "airplane",
"unicode": "2708",
"unicode_alt": "2708-fe0f"
},
":airplane_arriving:": {
"category": "travel",
"name": "airplane arriving",
"unicode": "1f6ec"
},
":airplane_departure:": {
"category": "travel",
"name": "airplane departure",
"unicode": "1f6eb"
},
":airplane_small:": {
"category": "travel",
"name": "small airplane",
"unicode": "1f6e9",
"unicode_alt": "1f6e9-fe0f"
},
":alarm_clock:": {
"category": "objects",
"name": "alarm clock",
"unicode": "23f0"
},
":alembic:": {
"category": "objects",
"name": "alembic",
"unicode": "2697",
"unicode_alt": "2697-fe0f"
},
":alien:": {
"category": "people",
"name": "extraterrestrial alien",
"unicode": "1f47d"
},
":ambulance:": {
"category": "travel",
"name": "ambulance",
"unicode": "1f691"
},
":amphora:": {
"category": "objects",
"name": "amphora",
"unicode": "1f3fa"
},
":anchor:": {
"category": "travel",
"name": "anchor",
"unicode": "2693",
"unicode_alt": "2693-fe0f"
},
":angel:": {
"category": "people",
"name": "baby angel",
"unicode": "1f47c"
},
":angel_tone1:": {
"category": "people",
"name": "baby angel tone 1",
"unicode": "1f47c-1f3fb"
},
":angel_tone2:": {
"category": "people",
"name": "baby angel tone 2",
"unicode": "1f47c-1f3fc"
},
":angel_tone3:": {
"category": "people",
"name": "baby angel tone 3",
"unicode": "1f47c-1f3fd"
},
":angel_tone4:": {
"category": "people",
"name": "baby angel tone 4",
"unicode": "1f47c-1f3fe"
},
":angel_tone5:": {
"category": "people",
"name": "baby angel tone 5",
"unicode": "1f47c-1f3ff"
},
":anger:": {
"category": "symbols",
"name": "anger symbol",
"unicode": "1f4a2"
},
":anger_right:": {
"category": "symbols",
"name": "right anger bubble",
"unicode": "1f5ef",
"unicode_alt": "1f5ef-fe0f"
},
":angry:": {
"category": "people",
"name": "angry face",
"unicode": "1f620"
},
":anguished:": {
"category": "people",
"name": "anguished face",
"unicode": "1f627"
},
":ant:": {
"category": "nature",
"name": "ant",
"unicode": "1f41c"
},
":apple:": {
"category": "food",
"name": "red apple",
"unicode": "1f34e"
},
":aquarius:": {
"category": "symbols",
"name": "aquarius",
"unicode": "2652",
"unicode_alt": "2652-fe0f"
},
":aries:": {
"category": "symbols",
"name": "aries",
"unicode": "2648",
"unicode_alt": "2648-fe0f"
},
":arrow_backward:": {
"category": "symbols",
"name": "black left-pointing triangle",
"unicode": "25c0",
"unicode_alt": "25c0-fe0f"
},
":arrow_double_down:": {
"category": "symbols",
"name": "black down-pointing double triangle",
"unicode": "23ec"
},
":arrow_double_up:": {
"category": "symbols",
"name": "black up-pointing double triangle",
"unicode": "23eb"
},
":arrow_down:": {
"category": "symbols",
"name": "downwards black arrow",
"unicode": "2b07",
"unicode_alt": "2b07-fe0f"
},
":arrow_down_small:": {
"category": "symbols",
"name": "down-pointing small red triangle",
"unicode": "1f53d"
},
":arrow_forward:": {
"category": "symbols",
"name": "black right-pointing triangle",
"unicode": "25b6",
"unicode_alt": "25b6-fe0f"
},
":arrow_heading_down:": {
"category": "symbols",
"name": "arrow pointing rightwards then curving downwards",
"unicode": "2935",
"unicode_alt": "2935-fe0f"
},
":arrow_heading_up:": {
"category": "symbols",
"name": "arrow pointing rightwards then curving upwards",
"unicode": "2934",
"unicode_alt": "2934-fe0f"
},
":arrow_left:": {
"category": "symbols",
"name": "leftwards black arrow",
"unicode": "2b05",
"unicode_alt": "2b05-fe0f"
},
":arrow_lower_left:": {
"category": "symbols",
"name": "south west arrow",
"unicode": "2199",
"unicode_alt": "2199-fe0f"
},
":arrow_lower_right:": {
"category": "symbols",
"name": "south east arrow",
"unicode": "2198",
"unicode_alt": "2198-fe0f"
},
":arrow_right:": {
"category": "symbols",
"name": "black rightwards arrow",
"unicode": "27a1",
"unicode_alt": "27a1-fe0f"
},
":arrow_right_hook:": {
"category": "symbols",
"name": "rightwards arrow with hook",
"unicode": "21aa",
"unicode_alt": "21aa-fe0f"
},
":arrow_up:": {
"category": "symbols",
"name": "upwards black arrow",
"unicode": "2b06",
"unicode_alt": "2b06-fe0f"
},
":arrow_up_down:": {
"category": "symbols",
"name": "up down arrow",
"unicode": "2195",
"unicode_alt": "2195-fe0f"
},
":arrow_up_small:": {
"category": "symbols",
"name": "up-pointing small red triangle",
"unicode": "1f53c"
},
":arrow_upper_left:": {
"category": "symbols",
"name": "north west arrow",
"unicode": "2196",
"unicode_alt": "2196-fe0f"
},
":arrow_upper_right:": {
"category": "symbols",
"name": "north east arrow",
"unicode": "2197",
"unicode_alt": "2197-fe0f"
},
":arrows_clockwise:": {
"category": "symbols",
"name": "clockwise downwards and upwards open circle arrows",
"unicode": "1f503"
},
":arrows_counterclockwise:": {
"category": "symbols",
"name": "anticlockwise downwards and upwards open circle arrows",
"unicode": "1f504"
},
":art:": {
"category": "activity",
"name": "artist palette",
"unicode": "1f3a8"
},
":articulated_lorry:": {
"category": "travel",
"name": "articulated lorry",
"unicode": "1f69b"
},
":asterisk:": {
"category": "symbols",
"name": "keycap asterisk",
"unicode": "002a-20e3",
"unicode_alt": "002a-fe0f-20e3"
},
":astonished:": {
"category": "people",
"name": "astonished face",
"unicode": "1f632"
},
":athletic_shoe:": {
"category": "people",
"name": "athletic shoe",
"unicode": "1f45f"
},
":atm:": {
"category": "symbols",
"name": "automated teller machine",
"unicode": "1f3e7"
},
":atom:": {
"category": "symbols",
"name": "atom symbol",
"unicode": "269b",
"unicode_alt": "269b-fe0f"
},
":avocado:": {
"category": "food",
"name": "avocado",
"unicode": "1f951"
},
":b:": {
"category": "symbols",
"name": "negative squared latin capital letter b",
"unicode": "1f171"
},
":baby:": {
"category": "people",
"name": "baby",
"unicode": "1f476"
},
":baby_bottle:": {
"category": "food",
"name": "baby bottle",
"unicode": "1f37c"
},
":baby_chick:": {
"category": "nature",
"name": "baby chick",
"unicode": "1f424"
},
":baby_symbol:": {
"category": "symbols",
"name": "baby symbol",
"unicode": "1f6bc"
},
":baby_tone1:": {
"category": "people",
"name": "baby tone 1",
"unicode": "1f476-1f3fb"
},
":baby_tone2:": {
"category": "people",
"name": "baby tone 2",
"unicode": "1f476-1f3fc"
},
":baby_tone3:": {
"category": "people",
"name": "baby tone 3",
"unicode": "1f476-1f3fd"
},
":baby_tone4:": {
"category": "people",
"name": "baby tone 4",
"unicode": "1f476-1f3fe"
},
":baby_tone5:": {
"category": "people",
"name": "baby tone 5",
"unicode": "1f476-1f3ff"
},
":back:": {
"category": "symbols",
"name": "back with leftwards arrow above",
"unicode": "1f519"
},
":bacon:": {
"category": "food",
"name": "bacon",
"unicode": "1f953"
},
":badminton:": {
"category": "activity",
"name": "badminton racquet",
"unicode": "1f3f8"
},
":baggage_claim:": {
"category": "symbols",
"name": "baggage claim",
"unicode": "1f6c4"
},
":balloon:": {
"category": "objects",
"name": "balloon",
"unicode": "1f388"
},
":ballot_box:": {
"category": "objects",
"name": "ballot box with ballot",
"unicode": "1f5f3",
"unicode_alt": "1f5f3-fe0f"
},
":ballot_box_with_check:": {
"category": "symbols",
"name": "ballot box with check",
"unicode": "2611",
"unicode_alt": "2611-fe0f"
},
":bamboo:": {
"category": "nature",
"name": "pine decoration",
"unicode": "1f38d"
},
":banana:": {
"category": "food",
"name": "banana",
"unicode": "1f34c"
},
":bangbang:": {
"category": "symbols",
"name": "double exclamation mark",
"unicode": "203c",
"unicode_alt": "203c-fe0f"
},
":bank:": {
"category": "travel",
"name": "bank",
"unicode": "1f3e6"
},
":bar_chart:": {
"category": "objects",
"name": "bar chart",
"unicode": "1f4ca"
},
":barber:": {
"category": "objects",
"name": "barber pole",
"unicode": "1f488"
},
":baseball:": {
"category": "activity",
"name": "baseball",
"unicode": "26be",
"unicode_alt": "26be-fe0f"
},
":basketball:": {
"category": "activity",
"name": "basketball and hoop",
"unicode": "1f3c0"
},
":basketball_player:": {
"category": "activity",
"name": "person with ball",
"unicode": "26f9",
"unicode_alt": "26f9-fe0f"
},
":basketball_player_tone1:": {
"category": "activity",
"name": "person with ball tone 1",
"unicode": "26f9-1f3fb"
},
":basketball_player_tone2:": {
"category": "activity",
"name": "person with ball tone 2",
"unicode": "26f9-1f3fc"
},
":basketball_player_tone3:": {
"category": "activity",
"name": "person with ball tone 3",
"unicode": "26f9-1f3fd"
},
":basketball_player_tone4:": {
"category": "activity",
"name": "person with ball tone 4",
"unicode": "26f9-1f3fe"
},
":basketball_player_tone5:": {
"category": "activity",
"name": "person with ball tone 5",
"unicode": "26f9-1f3ff"
},
":bat:": {
"category": "nature",
"name": "bat",
"unicode": "1f987"
},
":bath:": {
"category": "activity",
"name": "bath",
"unicode": "1f6c0"
},
":bath_tone1:": {
"category": "activity",
"name": "bath tone 1",
"unicode": "1f6c0-1f3fb"
},
":bath_tone2:": {
"category": "activity",
"name": "bath tone 2",
"unicode": "1f6c0-1f3fc"
},
":bath_tone3:": {
"category": "activity",
"name": "bath tone 3",
"unicode": "1f6c0-1f3fd"
},
":bath_tone4:": {
"category": "activity",
"name": "bath tone 4",
"unicode": "1f6c0-1f3fe"
},
":bath_tone5:": {
"category": "activity",
"name": "bath tone 5",
"unicode": "1f6c0-1f3ff"
},
":bathtub:": {
"category": "objects",
"name": "bathtub",
"unicode": "1f6c1"
},
":battery:": {
"category": "objects",
"name": "battery",
"unicode": "1f50b"
},
":beach:": {
"category": "travel",
"name": "beach with umbrella",
"unicode": "1f3d6",
"unicode_alt": "1f3d6-fe0f"
},
":beach_umbrella:": {
"category": "objects",
"name": "umbrella on ground",
"unicode": "26f1",
"unicode_alt": "26f1-fe0f"
},
":bear:": {
"category": "nature",
"name": "bear face",
"unicode": "1f43b"
},
":bed:": {
"category": "objects",
"name": "bed",
"unicode": "1f6cf",
"unicode_alt": "1f6cf-fe0f"
},
":bee:": {
"category": "nature",
"name": "honeybee",
"unicode": "1f41d"
},
":beer:": {
"category": "food",
"name": "beer mug",
"unicode": "1f37a"
},
":beers:": {
"category": "food",
"name": "clinking beer mugs",
"unicode": "1f37b"
},
":beetle:": {
"category": "nature",
"name": "lady beetle",
"unicode": "1f41e"
},
":beginner:": {
"category": "symbols",
"name": "japanese symbol for beginner",
"unicode": "1f530"
},
":bell:": {
"category": "symbols",
"name": "bell",
"unicode": "1f514"
},
":bellhop:": {
"category": "objects",
"name": "bellhop bell",
"unicode": "1f6ce",
"unicode_alt": "1f6ce-fe0f"
},
":bento:": {
"category": "food",
"name": "bento box",
"unicode": "1f371"
},
":bicyclist:": {
"category": "activity",
"name": "bicyclist",
"unicode": "1f6b4"
},
":bicyclist_tone1:": {
"category": "activity",
"name": "bicyclist tone 1",
"unicode": "1f6b4-1f3fb"
},
":bicyclist_tone2:": {
"category": "activity",
"name": "bicyclist tone 2",
"unicode": "1f6b4-1f3fc"
},
":bicyclist_tone3:": {
"category": "activity",
"name": "bicyclist tone 3",
"unicode": "1f6b4-1f3fd"
},
":bicyclist_tone4:": {
"category": "activity",
"name": "bicyclist tone 4",
"unicode": "1f6b4-1f3fe"
},
":bicyclist_tone5:": {
"category": "activity",
"name": "bicyclist tone 5",
"unicode": "1f6b4-1f3ff"
},
":bike:": {
"category": "travel",
"name": "bicycle",
"unicode": "1f6b2"
},
":bikini:": {
"category": "people",
"name": "bikini",
"unicode": "1f459"
},
":biohazard:": {
"category": "symbols",
"name": "biohazard sign",
"unicode": "2623",
"unicode_alt": "2623-fe0f"
},
":bird:": {
"category": "nature",
"name": "bird",
"unicode": "1f426"
},
":birthday:": {
"category": "food",
"name": "birthday cake",
"unicode": "1f382"
},
":black_circle:": {
"category": "symbols",
"name": "black circle",
"unicode": "26ab",
"unicode_alt": "26ab-fe0f"
},
":black_heart:": {
"category": "symbols",
"name": "black heart",
"unicode": "1f5a4"
},
":black_joker:": {
"category": "symbols",
"name": "playing card black joker",
"unicode": "1f0cf"
},
":black_large_square:": {
"category": "symbols",
"name": "black large square",
"unicode": "2b1b",
"unicode_alt": "2b1b-fe0f"
},
":black_medium_small_square:": {
"category": "symbols",
"name": "black medium small square",
"unicode": "25fe",
"unicode_alt": "25fe-fe0f"
},
":black_medium_square:": {
"category": "symbols",
"name": "black medium square",
"unicode": "25fc",
"unicode_alt": "25fc-fe0f"
},
":black_nib:": {
"category": "objects",
"name": "black nib",
"unicode": "2712",
"unicode_alt": "2712-fe0f"
},
":black_small_square:": {
"category": "symbols",
"name": "black small square",
"unicode": "25aa",
"unicode_alt": "25aa-fe0f"
},
":black_square_button:": {
"category": "symbols",
"name": "black square button",
"unicode": "1f532"
},
":blossom:": {
"category": "nature",
"name": "blossom",
"unicode": "1f33c"
},
":blowfish:": {
"category": "nature",
"name": "blowfish",
"unicode": "1f421"
},
":blue_book:": {
"category": "objects",
"name": "blue book",
"unicode": "1f4d8"
},
":blue_car:": {
"category": "travel",
"name": "recreational vehicle",
"unicode": "1f699"
},
":blue_circle:": {
"category": "symbols",
"name": "blue circle",
"unicode": "1f535"
},
":blue_heart:": {
"category": "symbols",
"name": "blue heart",
"unicode": "1f499"
},
":blush:": {
"category": "people",
"name": "smiling face with smiling eyes",
"unicode": "1f60a"
},
":boar:": {
"category": "nature",
"name": "boar",
"unicode": "1f417"
},
":bomb:": {
"category": "objects",
"name": "bomb",
"unicode": "1f4a3"
},
":book:": {
"category": "objects",
"name": "open book",
"unicode": "1f4d6"
},
":bookmark:": {
"category": "objects",
"name": "bookmark",
"unicode": "1f516"
},
":bookmark_tabs:": {
"category": "objects",
"name": "bookmark tabs",
"unicode": "1f4d1"
},
":books:": {
"category": "objects",
"name": "books",
"unicode": "1f4da"
},
":boom:": {
"category": "symbols",
"name": "collision symbol",
"unicode": "1f4a5"
},
":boot:": {
"category": "people",
"name": "womans boots",
"unicode": "1f462"
},
":bouquet:": {
"category": "nature",
"name": "bouquet",
"unicode": "1f490"
},
":bow:": {
"category": "people",
"name": "person bowing deeply",
"unicode": "1f647"
},
":bow_and_arrow:": {
"category": "activity",
"name": "bow and arrow",
"unicode": "1f3f9"
},
":bow_tone1:": {
"category": "people",
"name": "person bowing deeply tone 1",
"unicode": "1f647-1f3fb"
},
":bow_tone2:": {
"category": "people",
"name": "person bowing deeply tone 2",
"unicode": "1f647-1f3fc"
},
":bow_tone3:": {
"category": "people",
"name": "person bowing deeply tone 3",
"unicode": "1f647-1f3fd"
},
":bow_tone4:": {
"category": "people",
"name": "person bowing deeply tone 4",
"unicode": "1f647-1f3fe"
},
":bow_tone5:": {
"category": "people",
"name": "person bowing deeply tone 5",
"unicode": "1f647-1f3ff"
},
":bowling:": {
"category": "activity",
"name": "bowling",
"unicode": "1f3b3"
},
":boxing_glove:": {
"category": "activity",
"name": "boxing glove",
"unicode": "1f94a"
},
":boy:": {
"category": "people",
"name": "boy",
"unicode": "1f466"
},
":boy_tone1:": {
"category": "people",
"name": "boy tone 1",
"unicode": "1f466-1f3fb"
},
":boy_tone2:": {
"category": "people",
"name": "boy tone 2",
"unicode": "1f466-1f3fc"
},
":boy_tone3:": {
"category": "people",
"name": "boy tone 3",
"unicode": "1f466-1f3fd"
},
":boy_tone4:": {
"category": "people",
"name": "boy tone 4",
"unicode": "1f466-1f3fe"
},
":boy_tone5:": {
"category": "people",
"name": "boy tone 5",
"unicode": "1f466-1f3ff"
},
":bread:": {
"category": "food",
"name": "bread",
"unicode": "1f35e"
},
":bride_with_veil:": {
"category": "people",
"name": "bride with veil",
"unicode": "1f470"
},
":bride_with_veil_tone1:": {
"category": "people",
"name": "bride with veil tone 1",
"unicode": "1f470-1f3fb"
},
":bride_with_veil_tone2:": {
"category": "people",
"name": "bride with veil tone 2",
"unicode": "1f470-1f3fc"
},
":bride_with_veil_tone3:": {
"category": "people",
"name": "bride with veil tone 3",
"unicode": "1f470-1f3fd"
},
":bride_with_veil_tone4:": {
"category": "people",
"name": "bride with veil tone 4",
"unicode": "1f470-1f3fe"
},
":bride_with_veil_tone5:": {
"category": "people",
"name": "bride with veil tone 5",
"unicode": "1f470-1f3ff"
},
":bridge_at_night:": {
"category": "travel",
"name": "bridge at night",
"unicode": "1f309"
},
":briefcase:": {
"category": "people",
"name": "briefcase",
"unicode": "1f4bc"
},
":broken_heart:": {
"category": "symbols",
"name": "broken heart",
"unicode": "1f494"
},
":bug:": {
"category": "nature",
"name": "bug",
"unicode": "1f41b"
},
":bulb:": {
"category": "objects",
"name": "electric light bulb",
"unicode": "1f4a1"
},
":bullettrain_front:": {
"category": "travel",
"name": "high-speed train with bullet nose",
"unicode": "1f685"
},
":bullettrain_side:": {
"category": "travel",
"name": "high-speed train",
"unicode": "1f684"
},
":burrito:": {
"category": "food",
"name": "burrito",
"unicode": "1f32f"
},
":bus:": {
"category": "travel",
"name": "bus",
"unicode": "1f68c"
},
":busstop:": {
"category": "travel",
"name": "bus stop",
"unicode": "1f68f"
},
":bust_in_silhouette:": {
"category": "people",
"name": "bust in silhouette",
"unicode": "1f464"
},
":busts_in_silhouette:": {
"category": "people",
"name": "busts in silhouette",
"unicode": "1f465"
},
":butterfly:": {
"category": "nature",
"name": "butterfly",
"unicode": "1f98b"
},
":cactus:": {
"category": "nature",
"name": "cactus",
"unicode": "1f335"
},
":cake:": {
"category": "food",
"name": "shortcake",
"unicode": "1f370"
},
":calendar:": {
"category": "objects",
"name": "tear-off calendar",
"unicode": "1f4c6"
},
":calendar_spiral:": {
"category": "objects",
"name": "spiral calendar pad",
"unicode": "1f5d3",
"unicode_alt": "1f5d3-fe0f"
},
":call_me:": {
"category": "people",
"name": "call me hand",
"unicode": "1f919"
},
":call_me_tone1:": {
"category": "people",
"name": "call me hand tone 1",
"unicode": "1f919-1f3fb"
},
":call_me_tone2:": {
"category": "people",
"name": "call me hand tone 2",
"unicode": "1f919-1f3fc"
},
":call_me_tone3:": {
"category": "people",
"name": "call me hand tone 3",
"unicode": "1f919-1f3fd"
},
":call_me_tone4:": {
"category": "people",
"name": "call me hand tone 4",
"unicode": "1f919-1f3fe"
},
":call_me_tone5:": {
"category": "people",
"name": "call me hand tone 5",
"unicode": "1f919-1f3ff"
},
":calling:": {
"category": "objects",
"name": "mobile phone with rightwards arrow at left",
"unicode": "1f4f2"
},
":camel:": {
"category": "nature",
"name": "bactrian camel",
"unicode": "1f42b"
},
":camera:": {
"category": "objects",
"name": "camera",
"unicode": "1f4f7"
},
":camera_with_flash:": {
"category": "objects",
"name": "camera with flash",
"unicode": "1f4f8"
},
":camping:": {
"category": "travel",
"name": "camping",
"unicode": "1f3d5",
"unicode_alt": "1f3d5-fe0f"
},
":cancer:": {
"category": "symbols",
"name": "cancer",
"unicode": "264b",
"unicode_alt": "264b-fe0f"
},
":candle:": {
"category": "objects",
"name": "candle",
"unicode": "1f56f",
"unicode_alt": "1f56f-fe0f"
},
":candy:": {
"category": "food",
"name": "candy",
"unicode": "1f36c"
},
":canoe:": {
"category": "travel",
"name": "canoe",
"unicode": "1f6f6"
},
":capital_abcd:": {
"category": "symbols",
"name": "input symbol for latin capital letters",
"unicode": "1f520"
},
":capricorn:": {
"category": "symbols",
"name": "capricorn",
"unicode": "2651",
"unicode_alt": "2651-fe0f"
},
":card_box:": {
"category": "objects",
"name": "card file box",
"unicode": "1f5c3",
"unicode_alt": "1f5c3-fe0f"
},
":card_index:": {
"category": "objects",
"name": "card index",
"unicode": "1f4c7"
},
":carousel_horse:": {
"category": "travel",
"name": "carousel horse",
"unicode": "1f3a0"
},
":carrot:": {
"category": "food",
"name": "carrot",
"unicode": "1f955"
},
":cartwheel:": {
"category": "activity",
"name": "person doing cartwheel",
"unicode": "1f938"
},
":cartwheel_tone1:": {
"category": "activity",
"name": "person doing cartwheel tone 1",
"unicode": "1f938-1f3fb"
},
":cartwheel_tone2:": {
"category": "activity",
"name": "person doing cartwheel tone 2",
"unicode": "1f938-1f3fc"
},
":cartwheel_tone3:": {
"category": "activity",
"name": "person doing cartwheel tone 3",
"unicode": "1f938-1f3fd"
},
":cartwheel_tone4:": {
"category": "activity",
"name": "person doing cartwheel tone 4",
"unicode": "1f938-1f3fe"
},
":cartwheel_tone5:": {
"category": "activity",
"name": "person doing cartwheel tone 5",
"unicode": "1f938-1f3ff"
},
":cat2:": {
"category": "nature",
"name": "cat",
"unicode": "1f408"
},
":cat:": {
"category": "nature",
"name": "cat face",
"unicode": "1f431"
},
":cd:": {
"category": "objects",
"name": "optical disc",
"unicode": "1f4bf"
},
":chains:": {
"category": "objects",
"name": "chains",
"unicode": "26d3",
"unicode_alt": "26d3-fe0f"
},
":champagne:": {
"category": "food",
"name": "bottle with popping cork",
"unicode": "1f37e"
},
":champagne_glass:": {
"category": "food",
"name": "clinking glasses",
"unicode": "1f942"
},
":chart:": {
"category": "symbols",
"name": "chart with upwards trend and yen sign",
"unicode": "1f4b9"
},
":chart_with_downwards_trend:": {
"category": "objects",
"name": "chart with downwards trend",
"unicode": "1f4c9"
},
":chart_with_upwards_trend:": {
"category": "objects",
"name": "chart with upwards trend",
"unicode": "1f4c8"
},
":checkered_flag:": {
"category": "travel",
"name": "chequered flag",
"unicode": "1f3c1"
},
":cheese:": {
"category": "food",
"name": "cheese wedge",
"unicode": "1f9c0"
},
":cherries:": {
"category": "food",
"name": "cherries",
"unicode": "1f352"
},
":cherry_blossom:": {
"category": "nature",
"name": "cherry blossom",
"unicode": "1f338"
},
":chestnut:": {
"category": "nature",
"name": "chestnut",
"unicode": "1f330"
},
":chicken:": {
"category": "nature",
"name": "chicken",
"unicode": "1f414"
},
":children_crossing:": {
"category": "symbols",
"name": "children crossing",
"unicode": "1f6b8"
},
":chipmunk:": {
"category": "nature",
"name": "chipmunk",
"unicode": "1f43f",
"unicode_alt": "1f43f-fe0f"
},
":chocolate_bar:": {
"category": "food",
"name": "chocolate bar",
"unicode": "1f36b"
},
":christmas_tree:": {
"category": "nature",
"name": "christmas tree",
"unicode": "1f384"
},
":church:": {
"category": "travel",
"name": "church",
"unicode": "26ea",
"unicode_alt": "26ea-fe0f"
},
":cinema:": {
"category": "symbols",
"name": "cinema",
"unicode": "1f3a6"
},
":circus_tent:": {
"category": "activity",
"name": "circus tent",
"unicode": "1f3aa"
},
":city_dusk:": {
"category": "travel",
"name": "cityscape at dusk",
"unicode": "1f306"
},
":city_sunset:": {
"category": "travel",
"name": "sunset over buildings",
"unicode": "1f307"
},
":cityscape:": {
"category": "travel",
"name": "cityscape",
"unicode": "1f3d9",
"unicode_alt": "1f3d9-fe0f"
},
":cl:": {
"category": "symbols",
"name": "squared cl",
"unicode": "1f191"
},
":clap:": {
"category": "people",
"name": "clapping hands sign",
"unicode": "1f44f"
},
":clap_tone1:": {
"category": "people",
"name": "clapping hands sign tone 1",
"unicode": "1f44f-1f3fb"
},
":clap_tone2:": {
"category": "people",
"name": "clapping hands sign tone 2",
"unicode": "1f44f-1f3fc"
},
":clap_tone3:": {
"category": "people",
"name": "clapping hands sign tone 3",
"unicode": "1f44f-1f3fd"
},
":clap_tone4:": {
"category": "people",
"name": "clapping hands sign tone 4",
"unicode": "1f44f-1f3fe"
},
":clap_tone5:": {
"category": "people",
"name": "clapping hands sign tone 5",
"unicode": "1f44f-1f3ff"
},
":clapper:": {
"category": "activity",
"name": "clapper board",
"unicode": "1f3ac"
},
":classical_building:": {
"category": "travel",
"name": "classical building",
"unicode": "1f3db",
"unicode_alt": "1f3db-fe0f"
},
":clipboard:": {
"category": "objects",
"name": "clipboard",
"unicode": "1f4cb"
},
":clock1030:": {
"category": "symbols",
"name": "clock face ten-thirty",
"unicode": "1f565"
},
":clock10:": {
"category": "symbols",
"name": "clock face ten oclock",
"unicode": "1f559"
},
":clock1130:": {
"category": "symbols",
"name": "clock face eleven-thirty",
"unicode": "1f566"
},
":clock11:": {
"category": "symbols",
"name": "clock face eleven oclock",
"unicode": "1f55a"
},
":clock1230:": {
"category": "symbols",
"name": "clock face twelve-thirty",
"unicode": "1f567"
},
":clock12:": {
"category": "symbols",
"name": "clock face twelve oclock",
"unicode": "1f55b"
},
":clock130:": {
"category": "symbols",
"name": "clock face one-thirty",
"unicode": "1f55c"
},
":clock1:": {
"category": "symbols",
"name": "clock face one oclock",
"unicode": "1f550"
},
":clock230:": {
"category": "symbols",
"name": "clock face two-thirty",
"unicode": "1f55d"
},
":clock2:": {
"category": "symbols",
"name": "clock face two oclock",
"unicode": "1f551"
},
":clock330:": {
"category": "symbols",
"name": "clock face three-thirty",
"unicode": "1f55e"
},
":clock3:": {
"category": "symbols",
"name": "clock face three oclock",
"unicode": "1f552"
},
":clock430:": {
"category": "symbols",
"name": "clock face four-thirty",
"unicode": "1f55f"
},
":clock4:": {
"category": "symbols",
"name": "clock face four oclock",
"unicode": "1f553"
},
":clock530:": {
"category": "symbols",
"name": "clock face five-thirty",
"unicode": "1f560"
},
":clock5:": {
"category": "symbols",
"name": "clock face five oclock",
"unicode": "1f554"
},
":clock630:": {
"category": "symbols",
"name": "clock face six-thirty",
"unicode": "1f561"
},
":clock6:": {
"category": "symbols",
"name": "clock face six oclock",
"unicode": "1f555"
},
":clock730:": {
"category": "symbols",
"name": "clock face seven-thirty",
"unicode": "1f562"
},
":clock7:": {
"category": "symbols",
"name": "clock face seven oclock",
"unicode": "1f556"
},
":clock830:": {
"category": "symbols",
"name": "clock face eight-thirty",
"unicode": "1f563"
},
":clock8:": {
"category": "symbols",
"name": "clock face eight oclock",
"unicode": "1f557"
},
":clock930:": {
"category": "symbols",
"name": "clock face nine-thirty",
"unicode": "1f564"
},
":clock9:": {
"category": "symbols",
"name": "clock face nine oclock",
"unicode": "1f558"
},
":clock:": {
"category": "objects",
"name": "mantlepiece clock",
"unicode": "1f570",
"unicode_alt": "1f570-fe0f"
},
":closed_book:": {
"category": "objects",
"name": "closed book",
"unicode": "1f4d5"
},
":closed_lock_with_key:": {
"category": "objects",
"name": "closed lock with key",
"unicode": "1f510"
},
":closed_umbrella:": {
"category": "people",
"name": "closed umbrella",
"unicode": "1f302"
},
":cloud:": {
"category": "nature",
"name": "cloud",
"unicode": "2601",
"unicode_alt": "2601-fe0f"
},
":cloud_lightning:": {
"category": "nature",
"name": "cloud with lightning",
"unicode": "1f329",
"unicode_alt": "1f329-fe0f"
},
":cloud_rain:": {
"category": "nature",
"name": "cloud with rain",
"unicode": "1f327",
"unicode_alt": "1f327-fe0f"
},
":cloud_snow:": {
"category": "nature",
"name": "cloud with snow",
"unicode": "1f328",
"unicode_alt": "1f328-fe0f"
},
":cloud_tornado:": {
"category": "nature",
"name": "cloud with tornado",
"unicode": "1f32a",
"unicode_alt": "1f32a-fe0f"
},
":clown:": {
"category": "people",
"name": "clown face",
"unicode": "1f921"
},
":clubs:": {
"category": "symbols",
"name": "black club suit",
"unicode": "2663",
"unicode_alt": "2663-fe0f"
},
":cocktail:": {
"category": "food",
"name": "cocktail glass",
"unicode": "1f378"
},
":coffee:": {
"category": "food",
"name": "hot beverage",
"unicode": "2615",
"unicode_alt": "2615-fe0f"
},
":coffin:": {
"category": "objects",
"name": "coffin",
"unicode": "26b0",
"unicode_alt": "26b0-fe0f"
},
":cold_sweat:": {
"category": "people",
"name": "face with open mouth and cold sweat",
"unicode": "1f630"
},
":comet:": {
"category": "nature",
"name": "comet",
"unicode": "2604",
"unicode_alt": "2604-fe0f"
},
":compression:": {
"category": "objects",
"name": "compression",
"unicode": "1f5dc",
"unicode_alt": "1f5dc-fe0f"
},
":computer:": {
"category": "objects",
"name": "personal computer",
"unicode": "1f4bb"
},
":confetti_ball:": {
"category": "objects",
"name": "confetti ball",
"unicode": "1f38a"
},
":confounded:": {
"category": "people",
"name": "confounded face",
"unicode": "1f616"
},
":confused:": {
"category": "people",
"name": "confused face",
"unicode": "1f615"
},
":congratulations:": {
"category": "symbols",
"name": "circled ideograph congratulation",
"unicode": "3297",
"unicode_alt": "3297-fe0f"
},
":construction:": {
"category": "travel",
"name": "construction sign",
"unicode": "1f6a7"
},
":construction_site:": {
"category": "travel",
"name": "building construction",
"unicode": "1f3d7",
"unicode_alt": "1f3d7-fe0f"
},
":construction_worker:": {
"category": "people",
"name": "construction worker",
"unicode": "1f477"
},
":construction_worker_tone1:": {
"category": "people",
"name": "construction worker tone 1",
"unicode": "1f477-1f3fb"
},
":construction_worker_tone2:": {
"category": "people",
"name": "construction worker tone 2",
"unicode": "1f477-1f3fc"
},
":construction_worker_tone3:": {
"category": "people",
"name": "construction worker tone 3",
"unicode": "1f477-1f3fd"
},
":construction_worker_tone4:": {
"category": "people",
"name": "construction worker tone 4",
"unicode": "1f477-1f3fe"
},
":construction_worker_tone5:": {
"category": "people",
"name": "construction worker tone 5",
"unicode": "1f477-1f3ff"
},
":control_knobs:": {
"category": "objects",
"name": "control knobs",
"unicode": "1f39b",
"unicode_alt": "1f39b-fe0f"
},
":convenience_store:": {
"category": "travel",
"name": "convenience store",
"unicode": "1f3ea"
},
":cookie:": {
"category": "food",
"name": "cookie",
"unicode": "1f36a"
},
":cooking:": {
"category": "food",
"name": "cooking",
"unicode": "1f373"
},
":cool:": {
"category": "symbols",
"name": "squared cool",
"unicode": "1f192"
},
":cop:": {
"category": "people",
"name": "police officer",
"unicode": "1f46e"
},
":cop_tone1:": {
"category": "people",
"name": "police officer tone 1",
"unicode": "1f46e-1f3fb"
},
":cop_tone2:": {
"category": "people",
"name": "police officer tone 2",
"unicode": "1f46e-1f3fc"
},
":cop_tone3:": {
"category": "people",
"name": "police officer tone 3",
"unicode": "1f46e-1f3fd"
},
":cop_tone4:": {
"category": "people",
"name": "police officer tone 4",
"unicode": "1f46e-1f3fe"
},
":cop_tone5:": {
"category": "people",
"name": "police officer tone 5",
"unicode": "1f46e-1f3ff"
},
":copyright:": {
"category": "symbols",
"name": "copyright sign",
"unicode": "00a9",
"unicode_alt": "00a9-fe0f"
},
":corn:": {
"category": "food",
"name": "ear of maize",
"unicode": "1f33d"
},
":couch:": {
"category": "objects",
"name": "couch and lamp",
"unicode": "1f6cb",
"unicode_alt": "1f6cb-fe0f"
},
":couple:": {
"category": "people",
"name": "man and woman holding hands",
"unicode": "1f46b"
},
":couple_mm:": {
"category": "people",
"name": "couple (man,man)",
"unicode": "1f468-2764-1f468",
"unicode_alt": "1f468-200d-2764-fe0f-200d-1f468"
},
":couple_with_heart:": {
"category": "people",
"name": "couple with heart",
"unicode": "1f491"
},
":couple_ww:": {
"category": "people",
"name": "couple (woman,woman)",
"unicode": "1f469-2764-1f469",
"unicode_alt": "1f469-200d-2764-fe0f-200d-1f469"
},
":couplekiss:": {
"category": "people",
"name": "kiss",
"unicode": "1f48f"
},
":cow2:": {
"category": "nature",
"name": "cow",
"unicode": "1f404"
},
":cow:": {
"category": "nature",
"name": "cow face",
"unicode": "1f42e"
},
":cowboy:": {
"category": "people",
"name": "face with cowboy hat",
"unicode": "1f920"
},
":crab:": {
"category": "nature",
"name": "crab",
"unicode": "1f980"
},
":crayon:": {
"category": "objects",
"name": "lower left crayon",
"unicode": "1f58d",
"unicode_alt": "1f58d-fe0f"
},
":credit_card:": {
"category": "objects",
"name": "credit card",
"unicode": "1f4b3"
},
":crescent_moon:": {
"category": "nature",
"name": "crescent moon",
"unicode": "1f319"
},
":cricket:": {
"category": "activity",
"name": "cricket bat and ball",
"unicode": "1f3cf"
},
":crocodile:": {
"category": "nature",
"name": "crocodile",
"unicode": "1f40a"
},
":croissant:": {
"category": "food",
"name": "croissant",
"unicode": "1f950"
},
":cross:": {
"category": "symbols",
"name": "latin cross",
"unicode": "271d",
"unicode_alt": "271d-fe0f"
},
":crossed_flags:": {
"category": "objects",
"name": "crossed flags",
"unicode": "1f38c"
},
":crossed_swords:": {
"category": "objects",
"name": "crossed swords",
"unicode": "2694",
"unicode_alt": "2694-fe0f"
},
":crown:": {
"category": "people",
"name": "crown",
"unicode": "1f451"
},
":cruise_ship:": {
"category": "travel",
"name": "passenger ship",
"unicode": "1f6f3",
"unicode_alt": "1f6f3-fe0f"
},
":cry:": {
"category": "people",
"name": "crying face",
"unicode": "1f622"
},
":crying_cat_face:": {
"category": "people",
"name": "crying cat face",
"unicode": "1f63f"
},
":crystal_ball:": {
"category": "objects",
"name": "crystal ball",
"unicode": "1f52e"
},
":cucumber:": {
"category": "food",
"name": "cucumber",
"unicode": "1f952"
},
":cupid:": {
"category": "symbols",
"name": "heart with arrow",
"unicode": "1f498"
},
":curly_loop:": {
"category": "symbols",
"name": "curly loop",
"unicode": "27b0"
},
":currency_exchange:": {
"category": "symbols",
"name": "currency exchange",
"unicode": "1f4b1"
},
":curry:": {
"category": "food",
"name": "curry and rice",
"unicode": "1f35b"
},
":custard:": {
"category": "food",
"name": "custard",
"unicode": "1f36e"
},
":customs:": {
"category": "symbols",
"name": "customs",
"unicode": "1f6c3"
},
":cyclone:": {
"category": "symbols",
"name": "cyclone",
"unicode": "1f300"
},
":dagger:": {
"category": "objects",
"name": "dagger knife",
"unicode": "1f5e1",
"unicode_alt": "1f5e1-fe0f"
},
":dancer:": {
"category": "people",
"name": "dancer",
"unicode": "1f483"
},
":dancer_tone1:": {
"category": "people",
"name": "dancer tone 1",
"unicode": "1f483-1f3fb"
},
":dancer_tone2:": {
"category": "people",
"name": "dancer tone 2",
"unicode": "1f483-1f3fc"
},
":dancer_tone3:": {
"category": "people",
"name": "dancer tone 3",
"unicode": "1f483-1f3fd"
},
":dancer_tone4:": {
"category": "people",
"name": "dancer tone 4",
"unicode": "1f483-1f3fe"
},
":dancer_tone5:": {
"category": "people",
"name": "dancer tone 5",
"unicode": "1f483-1f3ff"
},
":dancers:": {
"category": "people",
"name": "woman with bunny ears",
"unicode": "1f46f"
},
":dango:": {
"category": "food",
"name": "dango",
"unicode": "1f361"
},
":dark_sunglasses:": {
"category": "people",
"name": "dark sunglasses",
"unicode": "1f576",
"unicode_alt": "1f576-fe0f"
},
":dart:": {
"category": "activity",
"name": "direct hit",
"unicode": "1f3af"
},
":dash:": {
"category": "nature",
"name": "dash symbol",
"unicode": "1f4a8"
},
":date:": {
"category": "objects",
"name": "calendar",
"unicode": "1f4c5"
},
":deciduous_tree:": {
"category": "nature",
"name": "deciduous tree",
"unicode": "1f333"
},
":deer:": {
"category": "nature",
"name": "deer",
"unicode": "1f98c"
},
":department_store:": {
"category": "travel",
"name": "department store",
"unicode": "1f3ec"
},
":desert:": {
"category": "travel",
"name": "desert",
"unicode": "1f3dc",
"unicode_alt": "1f3dc-fe0f"
},
":desktop:": {
"category": "objects",
"name": "desktop computer",
"unicode": "1f5a5",
"unicode_alt": "1f5a5-fe0f"
},
":diamond_shape_with_a_dot_inside:": {
"category": "symbols",
"name": "diamond shape with a dot inside",
"unicode": "1f4a0"
},
":diamonds:": {
"category": "symbols",
"name": "black diamond suit",
"unicode": "2666",
"unicode_alt": "2666-fe0f"
},
":disappointed:": {
"category": "people",
"name": "disappointed face",
"unicode": "1f61e"
},
":disappointed_relieved:": {
"category": "people",
"name": "disappointed but relieved face",
"unicode": "1f625"
},
":dividers:": {
"category": "objects",
"name": "card index dividers",
"unicode": "1f5c2",
"unicode_alt": "1f5c2-fe0f"
},
":dizzy:": {
"category": "symbols",
"name": "dizzy symbol",
"unicode": "1f4ab"
},
":dizzy_face:": {
"category": "people",
"name": "dizzy face",
"unicode": "1f635"
},
":do_not_litter:": {
"category": "symbols",
"name": "do not litter symbol",
"unicode": "1f6af"
},
":dog2:": {
"category": "nature",
"name": "dog",
"unicode": "1f415"
},
":dog:": {
"category": "nature",
"name": "dog face",
"unicode": "1f436"
},
":dollar:": {
"category": "objects",
"name": "banknote with dollar sign",
"unicode": "1f4b5"
},
":dolls:": {
"category": "objects",
"name": "japanese dolls",
"unicode": "1f38e"
},
":dolphin:": {
"category": "nature",
"name": "dolphin",
"unicode": "1f42c"
},
":door:": {
"category": "objects",
"name": "door",
"unicode": "1f6aa"
},
":doughnut:": {
"category": "food",
"name": "doughnut",
"unicode": "1f369"
},
":dove:": {
"category": "nature",
"name": "dove of peace",
"unicode": "1f54a",
"unicode_alt": "1f54a-fe0f"
},
":dragon:": {
"category": "nature",
"name": "dragon",
"unicode": "1f409"
},
":dragon_face:": {
"category": "nature",
"name": "dragon face",
"unicode": "1f432"
},
":dress:": {
"category": "people",
"name": "dress",
"unicode": "1f457"
},
":dromedary_camel:": {
"category": "nature",
"name": "dromedary camel",
"unicode": "1f42a"
},
":drooling_face:": {
"category": "people",
"name": "drooling face",
"unicode": "1f924"
},
":droplet:": {
"category": "nature",
"name": "droplet",
"unicode": "1f4a7"
},
":drum:": {
"category": "activity",
"name": "drum with drumsticks",
"unicode": "1f941"
},
":duck:": {
"category": "nature",
"name": "duck",
"unicode": "1f986"
},
":dvd:": {
"category": "objects",
"name": "dvd",
"unicode": "1f4c0"
},
":e-mail:": {
"category": "objects",
"name": "e-mail symbol",
"unicode": "1f4e7"
},
":eagle:": {
"category": "nature",
"name": "eagle",
"unicode": "1f985"
},
":ear:": {
"category": "people",
"name": "ear",
"unicode": "1f442"
},
":ear_of_rice:": {
"category": "nature",
"name": "ear of rice",
"unicode": "1f33e"
},
":ear_tone1:": {
"category": "people",
"name": "ear tone 1",
"unicode": "1f442-1f3fb"
},
":ear_tone2:": {
"category": "people",
"name": "ear tone 2",
"unicode": "1f442-1f3fc"
},
":ear_tone3:": {
"category": "people",
"name": "ear tone 3",
"unicode": "1f442-1f3fd"
},
":ear_tone4:": {
"category": "people",
"name": "ear tone 4",
"unicode": "1f442-1f3fe"
},
":ear_tone5:": {
"category": "people",
"name": "ear tone 5",
"unicode": "1f442-1f3ff"
},
":earth_africa:": {
"category": "nature",
"name": "earth globe europe-africa",
"unicode": "1f30d"
},
":earth_americas:": {
"category": "nature",
"name": "earth globe americas",
"unicode": "1f30e"
},
":earth_asia:": {
"category": "nature",
"name": "earth globe asia-australia",
"unicode": "1f30f"
},
":egg:": {
"category": "food",
"name": "egg",
"unicode": "1f95a"
},
":eggplant:": {
"category": "food",
"name": "aubergine",
"unicode": "1f346"
},
":eight:": {
"category": "symbols",
"name": "keycap digit eight",
"unicode": "0038-20e3",
"unicode_alt": "0038-fe0f-20e3"
},
":eight_pointed_black_star:": {
"category": "symbols",
"name": "eight pointed black star",
"unicode": "2734",
"unicode_alt": "2734-fe0f"
},
":eight_spoked_asterisk:": {
"category": "symbols",
"name": "eight spoked asterisk",
"unicode": "2733",
"unicode_alt": "2733-fe0f"
},
":eject:": {
"category": "symbols",
"name": "eject symbol",
"unicode": "23cf",
"unicode_alt": "23cf-fe0f"
},
":electric_plug:": {
"category": "objects",
"name": "electric plug",
"unicode": "1f50c"
},
":elephant:": {
"category": "nature",
"name": "elephant",
"unicode": "1f418"
},
":end:": {
"category": "symbols",
"name": "end with leftwards arrow above",
"unicode": "1f51a"
},
":envelope:": {
"category": "objects",
"name": "envelope",
"unicode": "2709",
"unicode_alt": "2709-fe0f"
},
":envelope_with_arrow:": {
"category": "objects",
"name": "envelope with downwards arrow above",
"unicode": "1f4e9"
},
":euro:": {
"category": "objects",
"name": "banknote with euro sign",
"unicode": "1f4b6"
},
":european_castle:": {
"category": "travel",
"name": "european castle",
"unicode": "1f3f0"
},
":european_post_office:": {
"category": "travel",
"name": "european post office",
"unicode": "1f3e4"
},
":evergreen_tree:": {
"category": "nature",
"name": "evergreen tree",
"unicode": "1f332"
},
":exclamation:": {
"category": "symbols",
"name": "heavy exclamation mark symbol",
"unicode": "2757",
"unicode_alt": "2757-fe0f"
},
":expressionless:": {
"category": "people",
"name": "expressionless face",
"unicode": "1f611"
},
":eye:": {
"category": "people",
"name": "eye",
"unicode": "1f441",
"unicode_alt": "1f441-fe0f"
},
":eye_in_speech_bubble:": {
"category": "symbols",
"name": "eye in speech bubble",
"unicode": "1f441-1f5e8",
"unicode_alt": "1f441-200d-1f5e8"
},
":eyeglasses:": {
"category": "people",
"name": "eyeglasses",
"unicode": "1f453"
},
":eyes:": {
"category": "people",
"name": "eyes",
"unicode": "1f440"
},
":face_palm:": {
"category": "people",
"name": "face palm",
"unicode": "1f926"
},
":face_palm_tone1:": {
"category": "people",
"name": "face palm tone 1",
"unicode": "1f926-1f3fb"
},
":face_palm_tone2:": {
"category": "people",
"name": "face palm tone 2",
"unicode": "1f926-1f3fc"
},
":face_palm_tone3:": {
"category": "people",
"name": "face palm tone 3",
"unicode": "1f926-1f3fd"
},
":face_palm_tone4:": {
"category": "people",
"name": "face palm tone 4",
"unicode": "1f926-1f3fe"
},
":face_palm_tone5:": {
"category": "people",
"name": "face palm tone 5",
"unicode": "1f926-1f3ff"
},
":factory:": {
"category": "travel",
"name": "factory",
"unicode": "1f3ed"
},
":fallen_leaf:": {
"category": "nature",
"name": "fallen leaf",
"unicode": "1f342"
},
":family:": {
"category": "people",
"name": "family",
"unicode": "1f46a"
},
":family_mmb:": {
"category": "people",
"name": "family (man,man,boy)",
"unicode": "1f468-1f468-1f466",
"unicode_alt": "1f468-200d-1f468-200d-1f466"
},
":family_mmbb:": {
"category": "people",
"name": "family (man,man,boy,boy)",
"unicode": "1f468-1f468-1f466-1f466",
"unicode_alt": "1f468-200d-1f468-200d-1f466-200d-1f466"
},
":family_mmg:": {
"category": "people",
"name": "family (man,man,girl)",
"unicode": "1f468-1f468-1f467",
"unicode_alt": "1f468-200d-1f468-200d-1f467"
},
":family_mmgb:": {
"category": "people",
"name": "family (man,man,girl,boy)",
"unicode": "1f468-1f468-1f467-1f466",
"unicode_alt": "1f468-200d-1f468-200d-1f467-200d-1f466"
},
":family_mmgg:": {
"category": "people",
"name": "family (man,man,girl,girl)",
"unicode": "1f468-1f468-1f467-1f467",
"unicode_alt": "1f468-200d-1f468-200d-1f467-200d-1f467"
},
":family_mwbb:": {
"category": "people",
"name": "family (man,woman,boy,boy)",
"unicode": "1f468-1f469-1f466-1f466",
"unicode_alt": "1f468-200d-1f469-200d-1f466-200d-1f466"
},
":family_mwg:": {
"category": "people",
"name": "family (man,woman,girl)",
"unicode": "1f468-1f469-1f467",
"unicode_alt": "1f468-200d-1f469-200d-1f467"
},
":family_mwgb:": {
"category": "people",
"name": "family (man,woman,girl,boy)",
"unicode": "1f468-1f469-1f467-1f466",
"unicode_alt": "1f468-200d-1f469-200d-1f467-200d-1f466"
},
":family_mwgg:": {
"category": "people",
"name": "family (man,woman,girl,girl)",
"unicode": "1f468-1f469-1f467-1f467",
"unicode_alt": "1f468-200d-1f469-200d-1f467-200d-1f467"
},
":family_wwb:": {
"category": "people",
"name": "family (woman,woman,boy)",
"unicode": "1f469-1f469-1f466",
"unicode_alt": "1f469-200d-1f469-200d-1f466"
},
":family_wwbb:": {
"category": "people",
"name": "family (woman,woman,boy,boy)",
"unicode": "1f469-1f469-1f466-1f466",
"unicode_alt": "1f469-200d-1f469-200d-1f466-200d-1f466"
},
":family_wwg:": {
"category": "people",
"name": "family (woman,woman,girl)",
"unicode": "1f469-1f469-1f467",
"unicode_alt": "1f469-200d-1f469-200d-1f467"
},
":family_wwgb:": {
"category": "people",
"name": "family (woman,woman,girl,boy)",
"unicode": "1f469-1f469-1f467-1f466",
"unicode_alt": "1f469-200d-1f469-200d-1f467-200d-1f466"
},
":family_wwgg:": {
"category": "people",
"name": "family (woman,woman,girl,girl)",
"unicode": "1f469-1f469-1f467-1f467",
"unicode_alt": "1f469-200d-1f469-200d-1f467-200d-1f467"
},
":fast_forward:": {
"category": "symbols",
"name": "black right-pointing double triangle",
"unicode": "23e9"
},
":fax:": {
"category": "objects",
"name": "fax machine",
"unicode": "1f4e0"
},
":fearful:": {
"category": "people",
"name": "fearful face",
"unicode": "1f628"
},
":feet:": {
"category": "nature",
"name": "paw prints",
"unicode": "1f43e"
},
":fencer:": {
"category": "activity",
"name": "fencer",
"unicode": "1f93a"
},
":ferris_wheel:": {
"category": "travel",
"name": "ferris wheel",
"unicode": "1f3a1"
},
":ferry:": {
"category": "travel",
"name": "ferry",
"unicode": "26f4",
"unicode_alt": "26f4-fe0f"
},
":field_hockey:": {
"category": "activity",
"name": "field hockey stick and ball",
"unicode": "1f3d1"
},
":file_cabinet:": {
"category": "objects",
"name": "file cabinet",
"unicode": "1f5c4",
"unicode_alt": "1f5c4-fe0f"
},
":file_folder:": {
"category": "objects",
"name": "file folder",
"unicode": "1f4c1"
},
":film_frames:": {
"category": "objects",
"name": "film frames",
"unicode": "1f39e",
"unicode_alt": "1f39e-fe0f"
},
":fingers_crossed:": {
"category": "people",
"name": "hand with index and middle fingers crossed",
"unicode": "1f91e"
},
":fingers_crossed_tone1:": {
"category": "people",
"name": "hand with index and middle fingers crossed tone 1",
"unicode": "1f91e-1f3fb"
},
":fingers_crossed_tone2:": {
"category": "people",
"name": "hand with index and middle fingers crossed tone 2",
"unicode": "1f91e-1f3fc"
},
":fingers_crossed_tone3:": {
"category": "people",
"name": "hand with index and middle fingers crossed tone 3",
"unicode": "1f91e-1f3fd"
},
":fingers_crossed_tone4:": {
"category": "people",
"name": "hand with index and middle fingers crossed tone 4",
"unicode": "1f91e-1f3fe"
},
":fingers_crossed_tone5:": {
"category": "people",
"name": "hand with index and middle fingers crossed tone 5",
"unicode": "1f91e-1f3ff"
},
":fire:": {
"category": "nature",
"name": "fire",
"unicode": "1f525"
},
":fire_engine:": {
"category": "travel",
"name": "fire engine",
"unicode": "1f692"
},
":fireworks:": {
"category": "travel",
"name": "fireworks",
"unicode": "1f386"
},
":first_place:": {
"category": "activity",
"name": "first place medal",
"unicode": "1f947"
},
":first_quarter_moon:": {
"category": "nature",
"name": "first quarter moon symbol",
"unicode": "1f313"
},
":first_quarter_moon_with_face:": {
"category": "nature",
"name": "first quarter moon with face",
"unicode": "1f31b"
},
":fish:": {
"category": "nature",
"name": "fish",
"unicode": "1f41f"
},
":fish_cake:": {
"category": "food",
"name": "fish cake with swirl design",
"unicode": "1f365"
},
":fishing_pole_and_fish:": {
"category": "activity",
"name": "fishing pole and fish",
"unicode": "1f3a3"
},
":fist:": {
"category": "people",
"name": "raised fist",
"unicode": "270a"
},
":fist_tone1:": {
"category": "people",
"name": "raised fist tone 1",
"unicode": "270a-1f3fb"
},
":fist_tone2:": {
"category": "people",
"name": "raised fist tone 2",
"unicode": "270a-1f3fc"
},
":fist_tone3:": {
"category": "people",
"name": "raised fist tone 3",
"unicode": "270a-1f3fd"
},
":fist_tone4:": {
"category": "people",
"name": "raised fist tone 4",
"unicode": "270a-1f3fe"
},
":fist_tone5:": {
"category": "people",
"name": "raised fist tone 5",
"unicode": "270a-1f3ff"
},
":five:": {
"category": "symbols",
"name": "keycap digit five",
"unicode": "0035-20e3",
"unicode_alt": "0035-fe0f-20e3"
},
":flag_ac:": {
"category": "flags",
"name": "ascension",
"unicode": "1f1e6-1f1e8"
},
":flag_ad:": {
"category": "flags",
"name": "andorra",
"unicode": "1f1e6-1f1e9"
},
":flag_ae:": {
"category": "flags",
"name": "the united arab emirates",
"unicode": "1f1e6-1f1ea"
},
":flag_af:": {
"category": "flags",
"name": "afghanistan",
"unicode": "1f1e6-1f1eb"
},
":flag_ag:": {
"category": "flags",
"name": "antigua and barbuda",
"unicode": "1f1e6-1f1ec"
},
":flag_ai:": {
"category": "flags",
"name": "anguilla",
"unicode": "1f1e6-1f1ee"
},
":flag_al:": {
"category": "flags",
"name": "albania",
"unicode": "1f1e6-1f1f1"
},
":flag_am:": {
"category": "flags",
"name": "armenia",
"unicode": "1f1e6-1f1f2"
},
":flag_ao:": {
"category": "flags",
"name": "angola",
"unicode": "1f1e6-1f1f4"
},
":flag_aq:": {
"category": "flags",
"name": "antarctica",
"unicode": "1f1e6-1f1f6"
},
":flag_ar:": {
"category": "flags",
"name": "argentina",
"unicode": "1f1e6-1f1f7"
},
":flag_as:": {
"category": "flags",
"name": "american samoa",
"unicode": "1f1e6-1f1f8"
},
":flag_at:": {
"category": "flags",
"name": "austria",
"unicode": "1f1e6-1f1f9"
},
":flag_au:": {
"category": "flags",
"name": "australia",
"unicode": "1f1e6-1f1fa"
},
":flag_aw:": {
"category": "flags",
"name": "aruba",
"unicode": "1f1e6-1f1fc"
},
":flag_ax:": {
"category": "flags",
"name": "\u00e5land islands",
"unicode": "1f1e6-1f1fd"
},
":flag_az:": {
"category": "flags",
"name": "azerbaijan",
"unicode": "1f1e6-1f1ff"
},
":flag_ba:": {
"category": "flags",
"name": "bosnia and herzegovina",
"unicode": "1f1e7-1f1e6"
},
":flag_bb:": {
"category": "flags",
"name": "barbados",
"unicode": "1f1e7-1f1e7"
},
":flag_bd:": {
"category": "flags",
"name": "bangladesh",
"unicode": "1f1e7-1f1e9"
},
":flag_be:": {
"category": "flags",
"name": "belgium",
"unicode": "1f1e7-1f1ea"
},
":flag_bf:": {
"category": "flags",
"name": "burkina faso",
"unicode": "1f1e7-1f1eb"
},
":flag_bg:": {
"category": "flags",
"name": "bulgaria",
"unicode": "1f1e7-1f1ec"
},
":flag_bh:": {
"category": "flags",
"name": "bahrain",
"unicode": "1f1e7-1f1ed"
},
":flag_bi:": {
"category": "flags",
"name": "burundi",
"unicode": "1f1e7-1f1ee"
},
":flag_bj:": {
"category": "flags",
"name": "benin",
"unicode": "1f1e7-1f1ef"
},
":flag_bl:": {
"category": "flags",
"name": "saint barth\u00e9lemy",
"unicode": "1f1e7-1f1f1"
},
":flag_black:": {
"category": "objects",
"name": "waving black flag",
"unicode": "1f3f4"
},
":flag_bm:": {
"category": "flags",
"name": "bermuda",
"unicode": "1f1e7-1f1f2"
},
":flag_bn:": {
"category": "flags",
"name": "brunei",
"unicode": "1f1e7-1f1f3"
},
":flag_bo:": {
"category": "flags",
"name": "bolivia",
"unicode": "1f1e7-1f1f4"
},
":flag_bq:": {
"category": "flags",
"name": "caribbean netherlands",
"unicode": "1f1e7-1f1f6"
},
":flag_br:": {
"category": "flags",
"name": "brazil",
"unicode": "1f1e7-1f1f7"
},
":flag_bs:": {
"category": "flags",
"name": "the bahamas",
"unicode": "1f1e7-1f1f8"
},
":flag_bt:": {
"category": "flags",
"name": "bhutan",
"unicode": "1f1e7-1f1f9"
},
":flag_bv:": {
"category": "flags",
"name": "bouvet island",
"unicode": "1f1e7-1f1fb"
},
":flag_bw:": {
"category": "flags",
"name": "botswana",
"unicode": "1f1e7-1f1fc"
},
":flag_by:": {
"category": "flags",
"name": "belarus",
"unicode": "1f1e7-1f1fe"
},
":flag_bz:": {
"category": "flags",
"name": "belize",
"unicode": "1f1e7-1f1ff"
},
":flag_ca:": {
"category": "flags",
"name": "canada",
"unicode": "1f1e8-1f1e6"
},
":flag_cc:": {
"category": "flags",
"name": "cocos (keeling) islands",
"unicode": "1f1e8-1f1e8"
},
":flag_cd:": {
"category": "flags",
"name": "the democratic republic of the congo",
"unicode": "1f1e8-1f1e9"
},
":flag_cf:": {
"category": "flags",
"name": "central african republic",
"unicode": "1f1e8-1f1eb"
},
":flag_cg:": {
"category": "flags",
"name": "the republic of the congo",
"unicode": "1f1e8-1f1ec"
},
":flag_ch:": {
"category": "flags",
"name": "switzerland",
"unicode": "1f1e8-1f1ed"
},
":flag_ci:": {
"category": "flags",
"name": "c\u00f4te d\u2019ivoire",
"unicode": "1f1e8-1f1ee"
},
":flag_ck:": {
"category": "flags",
"name": "cook islands",
"unicode": "1f1e8-1f1f0"
},
":flag_cl:": {
"category": "flags",
"name": "chile",
"unicode": "1f1e8-1f1f1"
},
":flag_cm:": {
"category": "flags",
"name": "cameroon",
"unicode": "1f1e8-1f1f2"
},
":flag_cn:": {
"category": "flags",
"name": "china",
"unicode": "1f1e8-1f1f3"
},
":flag_co:": {
"category": "flags",
"name": "colombia",
"unicode": "1f1e8-1f1f4"
},
":flag_cp:": {
"category": "flags",
"name": "clipperton island",
"unicode": "1f1e8-1f1f5"
},
":flag_cr:": {
"category": "flags",
"name": "costa rica",
"unicode": "1f1e8-1f1f7"
},
":flag_cu:": {
"category": "flags",
"name": "cuba",
"unicode": "1f1e8-1f1fa"
},
":flag_cv:": {
"category": "flags",
"name": "cape verde",
"unicode": "1f1e8-1f1fb"
},
":flag_cw:": {
"category": "flags",
"name": "cura\u00e7ao",
"unicode": "1f1e8-1f1fc"
},
":flag_cx:": {
"category": "flags",
"name": "christmas island",
"unicode": "1f1e8-1f1fd"
},
":flag_cy:": {
"category": "flags",
"name": "cyprus",
"unicode": "1f1e8-1f1fe"
},
":flag_cz:": {
"category": "flags",
"name": "the czech republic",
"unicode": "1f1e8-1f1ff"
},
":flag_de:": {
"category": "flags",
"name": "germany",
"unicode": "1f1e9-1f1ea"
},
":flag_dg:": {
"category": "flags",
"name": "diego garcia",
"unicode": "1f1e9-1f1ec"
},
":flag_dj:": {
"category": "flags",
"name": "djibouti",
"unicode": "1f1e9-1f1ef"
},
":flag_dk:": {
"category": "flags",
"name": "denmark",
"unicode": "1f1e9-1f1f0"
},
":flag_dm:": {
"category": "flags",
"name": "dominica",
"unicode": "1f1e9-1f1f2"
},
":flag_do:": {
"category": "flags",
"name": "the dominican republic",
"unicode": "1f1e9-1f1f4"
},
":flag_dz:": {
"category": "flags",
"name": "algeria",
"unicode": "1f1e9-1f1ff"
},
":flag_ea:": {
"category": "flags",
"name": "ceuta, melilla",
"unicode": "1f1ea-1f1e6"
},
":flag_ec:": {
"category": "flags",
"name": "ecuador",
"unicode": "1f1ea-1f1e8"
},
":flag_ee:": {
"category": "flags",
"name": "estonia",
"unicode": "1f1ea-1f1ea"
},
":flag_eg:": {
"category": "flags",
"name": "egypt",
"unicode": "1f1ea-1f1ec"
},
":flag_eh:": {
"category": "flags",
"name": "western sahara",
"unicode": "1f1ea-1f1ed"
},
":flag_er:": {
"category": "flags",
"name": "eritrea",
"unicode": "1f1ea-1f1f7"
},
":flag_es:": {
"category": "flags",
"name": "spain",
"unicode": "1f1ea-1f1f8"
},
":flag_et:": {
"category": "flags",
"name": "ethiopia",
"unicode": "1f1ea-1f1f9"
},
":flag_eu:": {
"category": "flags",
"name": "european union",
"unicode": "1f1ea-1f1fa"
},
":flag_fi:": {
"category": "flags",
"name": "finland",
"unicode": "1f1eb-1f1ee"
},
":flag_fj:": {
"category": "flags",
"name": "fiji",
"unicode": "1f1eb-1f1ef"
},
":flag_fk:": {
"category": "flags",
"name": "falkland islands",
"unicode": "1f1eb-1f1f0"
},
":flag_fm:": {
"category": "flags",
"name": "micronesia",
"unicode": "1f1eb-1f1f2"
},
":flag_fo:": {
"category": "flags",
"name": "faroe islands",
"unicode": "1f1eb-1f1f4"
},
":flag_fr:": {
"category": "flags",
"name": "france",
"unicode": "1f1eb-1f1f7"
},
":flag_ga:": {
"category": "flags",
"name": "gabon",
"unicode": "1f1ec-1f1e6"
},
":flag_gb:": {
"category": "flags",
"name": "great britain",
"unicode": "1f1ec-1f1e7"
},
":flag_gd:": {
"category": "flags",
"name": "grenada",
"unicode": "1f1ec-1f1e9"
},
":flag_ge:": {
"category": "flags",
"name": "georgia",
"unicode": "1f1ec-1f1ea"
},
":flag_gf:": {
"category": "flags",
"name": "french guiana",
"unicode": "1f1ec-1f1eb"
},
":flag_gg:": {
"category": "flags",
"name": "guernsey",
"unicode": "1f1ec-1f1ec"
},
":flag_gh:": {
"category": "flags",
"name": "ghana",
"unicode": "1f1ec-1f1ed"
},
":flag_gi:": {
"category": "flags",
"name": "gibraltar",
"unicode": "1f1ec-1f1ee"
},
":flag_gl:": {
"category": "flags",
"name": "greenland",
"unicode": "1f1ec-1f1f1"
},
":flag_gm:": {
"category": "flags",
"name": "the gambia",
"unicode": "1f1ec-1f1f2"
},
":flag_gn:": {
"category": "flags",
"name": "guinea",
"unicode": "1f1ec-1f1f3"
},
":flag_gp:": {
"category": "flags",
"name": "guadeloupe",
"unicode": "1f1ec-1f1f5"
},
":flag_gq:": {
"category": "flags",
"name": "equatorial guinea",
"unicode": "1f1ec-1f1f6"
},
":flag_gr:": {
"category": "flags",
"name": "greece",
"unicode": "1f1ec-1f1f7"
},
":flag_gs:": {
"category": "flags",
"name": "south georgia",
"unicode": "1f1ec-1f1f8"
},
":flag_gt:": {
"category": "flags",
"name": "guatemala",
"unicode": "1f1ec-1f1f9"
},
":flag_gu:": {
"category": "flags",
"name": "guam",
"unicode": "1f1ec-1f1fa"
},
":flag_gw:": {
"category": "flags",
"name": "guinea-bissau",
"unicode": "1f1ec-1f1fc"
},
":flag_gy:": {
"category": "flags",
"name": "guyana",
"unicode": "1f1ec-1f1fe"
},
":flag_hk:": {
"category": "flags",
"name": "hong kong",
"unicode": "1f1ed-1f1f0"
},
":flag_hm:": {
"category": "flags",
"name": "heard island and mcdonald islands",
"unicode": "1f1ed-1f1f2"
},
":flag_hn:": {
"category": "flags",
"name": "honduras",
"unicode": "1f1ed-1f1f3"
},
":flag_hr:": {
"category": "flags",
"name": "croatia",
"unicode": "1f1ed-1f1f7"
},
":flag_ht:": {
"category": "flags",
"name": "haiti",
"unicode": "1f1ed-1f1f9"
},
":flag_hu:": {
"category": "flags",
"name": "hungary",
"unicode": "1f1ed-1f1fa"
},
":flag_ic:": {
"category": "flags",
"name": "canary islands",
"unicode": "1f1ee-1f1e8"
},
":flag_id:": {
"category": "flags",
"name": "indonesia",
"unicode": "1f1ee-1f1e9"
},
":flag_ie:": {
"category": "flags",
"name": "ireland",
"unicode": "1f1ee-1f1ea"
},
":flag_il:": {
"category": "flags",
"name": "israel",
"unicode": "1f1ee-1f1f1"
},
":flag_im:": {
"category": "flags",
"name": "isle of man",
"unicode": "1f1ee-1f1f2"
},
":flag_in:": {
"category": "flags",
"name": "india",
"unicode": "1f1ee-1f1f3"
},
":flag_io:": {
"category": "flags",
"name": "british indian ocean territory",
"unicode": "1f1ee-1f1f4"
},
":flag_iq:": {
"category": "flags",
"name": "iraq",
"unicode": "1f1ee-1f1f6"
},
":flag_ir:": {
"category": "flags",
"name": "iran",
"unicode": "1f1ee-1f1f7"
},
":flag_is:": {
"category": "flags",
"name": "iceland",
"unicode": "1f1ee-1f1f8"
},
":flag_it:": {
"category": "flags",
"name": "italy",
"unicode": "1f1ee-1f1f9"
},
":flag_je:": {
"category": "flags",
"name": "jersey",
"unicode": "1f1ef-1f1ea"
},
":flag_jm:": {
"category": "flags",
"name": "jamaica",
"unicode": "1f1ef-1f1f2"
},
":flag_jo:": {
"category": "flags",
"name": "jordan",
"unicode": "1f1ef-1f1f4"
},
":flag_jp:": {
"category": "flags",
"name": "japan",
"unicode": "1f1ef-1f1f5"
},
":flag_ke:": {
"category": "flags",
"name": "kenya",
"unicode": "1f1f0-1f1ea"
},
":flag_kg:": {
"category": "flags",
"name": "kyrgyzstan",
"unicode": "1f1f0-1f1ec"
},
":flag_kh:": {
"category": "flags",
"name": "cambodia",
"unicode": "1f1f0-1f1ed"
},
":flag_ki:": {
"category": "flags",
"name": "kiribati",
"unicode": "1f1f0-1f1ee"
},
":flag_km:": {
"category": "flags",
"name": "the comoros",
"unicode": "1f1f0-1f1f2"
},
":flag_kn:": {
"category": "flags",
"name": "saint kitts and nevis",
"unicode": "1f1f0-1f1f3"
},
":flag_kp:": {
"category": "flags",
"name": "north korea",
"unicode": "1f1f0-1f1f5"
},
":flag_kr:": {
"category": "flags",
"name": "korea",
"unicode": "1f1f0-1f1f7"
},
":flag_kw:": {
"category": "flags",
"name": "kuwait",
"unicode": "1f1f0-1f1fc"
},
":flag_ky:": {
"category": "flags",
"name": "cayman islands",
"unicode": "1f1f0-1f1fe"
},
":flag_kz:": {
"category": "flags",
"name": "kazakhstan",
"unicode": "1f1f0-1f1ff"
},
":flag_la:": {
"category": "flags",
"name": "laos",
"unicode": "1f1f1-1f1e6"
},
":flag_lb:": {
"category": "flags",
"name": "lebanon",
"unicode": "1f1f1-1f1e7"
},
":flag_lc:": {
"category": "flags",
"name": "saint lucia",
"unicode": "1f1f1-1f1e8"
},
":flag_li:": {
"category": "flags",
"name": "liechtenstein",
"unicode": "1f1f1-1f1ee"
},
":flag_lk:": {
"category": "flags",
"name": "sri lanka",
"unicode": "1f1f1-1f1f0"
},
":flag_lr:": {
"category": "flags",
"name": "liberia",
"unicode": "1f1f1-1f1f7"
},
":flag_ls:": {
"category": "flags",
"name": "lesotho",
"unicode": "1f1f1-1f1f8"
},
":flag_lt:": {
"category": "flags",
"name": "lithuania",
"unicode": "1f1f1-1f1f9"
},
":flag_lu:": {
"category": "flags",
"name": "luxembourg",
"unicode": "1f1f1-1f1fa"
},
":flag_lv:": {
"category": "flags",
"name": "latvia",
"unicode": "1f1f1-1f1fb"
},
":flag_ly:": {
"category": "flags",
"name": "libya",
"unicode": "1f1f1-1f1fe"
},
":flag_ma:": {
"category": "flags",
"name": "morocco",
"unicode": "1f1f2-1f1e6"
},
":flag_mc:": {
"category": "flags",
"name": "monaco",
"unicode": "1f1f2-1f1e8"
},
":flag_md:": {
"category": "flags",
"name": "moldova",
"unicode": "1f1f2-1f1e9"
},
":flag_me:": {
"category": "flags",
"name": "montenegro",
"unicode": "1f1f2-1f1ea"
},
":flag_mf:": {
"category": "flags",
"name": "saint martin",
"unicode": "1f1f2-1f1eb"
},
":flag_mg:": {
"category": "flags",
"name": "madagascar",
"unicode": "1f1f2-1f1ec"
},
":flag_mh:": {
"category": "flags",
"name": "the marshall islands",
"unicode": "1f1f2-1f1ed"
},
":flag_mk:": {
"category": "flags",
"name": "macedonia",
"unicode": "1f1f2-1f1f0"
},
":flag_ml:": {
"category": "flags",
"name": "mali",
"unicode": "1f1f2-1f1f1"
},
":flag_mm:": {
"category": "flags",
"name": "myanmar",
"unicode": "1f1f2-1f1f2"
},
":flag_mn:": {
"category": "flags",
"name": "mongolia",
"unicode": "1f1f2-1f1f3"
},
":flag_mo:": {
"category": "flags",
"name": "macau",
"unicode": "1f1f2-1f1f4"
},
":flag_mp:": {
"category": "flags",
"name": "northern mariana islands",
"unicode": "1f1f2-1f1f5"
},
":flag_mq:": {
"category": "flags",
"name": "martinique",
"unicode": "1f1f2-1f1f6"
},
":flag_mr:": {
"category": "flags",
"name": "mauritania",
"unicode": "1f1f2-1f1f7"
},
":flag_ms:": {
"category": "flags",
"name": "montserrat",
"unicode": "1f1f2-1f1f8"
},
":flag_mt:": {
"category": "flags",
"name": "malta",
"unicode": "1f1f2-1f1f9"
},
":flag_mu:": {
"category": "flags",
"name": "mauritius",
"unicode": "1f1f2-1f1fa"
},
":flag_mv:": {
"category": "flags",
"name": "maldives",
"unicode": "1f1f2-1f1fb"
},
":flag_mw:": {
"category": "flags",
"name": "malawi",
"unicode": "1f1f2-1f1fc"
},
":flag_mx:": {
"category": "flags",
"name": "mexico",
"unicode": "1f1f2-1f1fd"
},
":flag_my:": {
"category": "flags",
"name": "malaysia",
"unicode": "1f1f2-1f1fe"
},
":flag_mz:": {
"category": "flags",
"name": "mozambique",
"unicode": "1f1f2-1f1ff"
},
":flag_na:": {
"category": "flags",
"name": "namibia",
"unicode": "1f1f3-1f1e6"
},
":flag_nc:": {
"category": "flags",
"name": "new caledonia",
"unicode": "1f1f3-1f1e8"
},
":flag_ne:": {
"category": "flags",
"name": "niger",
"unicode": "1f1f3-1f1ea"
},
":flag_nf:": {
"category": "flags",
"name": "norfolk island",
"unicode": "1f1f3-1f1eb"
},
":flag_ng:": {
"category": "flags",
"name": "nigeria",
"unicode": "1f1f3-1f1ec"
},
":flag_ni:": {
"category": "flags",
"name": "nicaragua",
"unicode": "1f1f3-1f1ee"
},
":flag_nl:": {
"category": "flags",
"name": "the netherlands",
"unicode": "1f1f3-1f1f1"
},
":flag_no:": {
"category": "flags",
"name": "norway",
"unicode": "1f1f3-1f1f4"
},
":flag_np:": {
"category": "flags",
"name": "nepal",
"unicode": "1f1f3-1f1f5"
},
":flag_nr:": {
"category": "flags",
"name": "nauru",
"unicode": "1f1f3-1f1f7"
},
":flag_nu:": {
"category": "flags",
"name": "niue",
"unicode": "1f1f3-1f1fa"
},
":flag_nz:": {
"category": "flags",
"name": "new zealand",
"unicode": "1f1f3-1f1ff"
},
":flag_om:": {
"category": "flags",
"name": "oman",
"unicode": "1f1f4-1f1f2"
},
":flag_pa:": {
"category": "flags",
"name": "panama",
"unicode": "1f1f5-1f1e6"
},
":flag_pe:": {
"category": "flags",
"name": "peru",
"unicode": "1f1f5-1f1ea"
},
":flag_pf:": {
"category": "flags",
"name": "french polynesia",
"unicode": "1f1f5-1f1eb"
},
":flag_pg:": {
"category": "flags",
"name": "papua new guinea",
"unicode": "1f1f5-1f1ec"
},
":flag_ph:": {
"category": "flags",
"name": "the philippines",
"unicode": "1f1f5-1f1ed"
},
":flag_pk:": {
"category": "flags",
"name": "pakistan",
"unicode": "1f1f5-1f1f0"
},
":flag_pl:": {
"category": "flags",
"name": "poland",
"unicode": "1f1f5-1f1f1"
},
":flag_pm:": {
"category": "flags",
"name": "saint pierre and miquelon",
"unicode": "1f1f5-1f1f2"
},
":flag_pn:": {
"category": "flags",
"name": "pitcairn",
"unicode": "1f1f5-1f1f3"
},
":flag_pr:": {
"category": "flags",
"name": "puerto rico",
"unicode": "1f1f5-1f1f7"
},
":flag_ps:": {
"category": "flags",
"name": "palestinian authority",
"unicode": "1f1f5-1f1f8"
},
":flag_pt:": {
"category": "flags",
"name": "portugal",
"unicode": "1f1f5-1f1f9"
},
":flag_pw:": {
"category": "flags",
"name": "palau",
"unicode": "1f1f5-1f1fc"
},
":flag_py:": {
"category": "flags",
"name": "paraguay",
"unicode": "1f1f5-1f1fe"
},
":flag_qa:": {
"category": "flags",
"name": "qatar",
"unicode": "1f1f6-1f1e6"
},
":flag_re:": {
"category": "flags",
"name": "r\u00e9union",
"unicode": "1f1f7-1f1ea"
},
":flag_ro:": {
"category": "flags",
"name": "romania",
"unicode": "1f1f7-1f1f4"
},
":flag_rs:": {
"category": "flags",
"name": "serbia",
"unicode": "1f1f7-1f1f8"
},
":flag_ru:": {
"category": "flags",
"name": "russia",
"unicode": "1f1f7-1f1fa"
},
":flag_rw:": {
"category": "flags",
"name": "rwanda",
"unicode": "1f1f7-1f1fc"
},
":flag_sa:": {
"category": "flags",
"name": "saudi arabia",
"unicode": "1f1f8-1f1e6"
},
":flag_sb:": {
"category": "flags",
"name": "the solomon islands",
"unicode": "1f1f8-1f1e7"
},
":flag_sc:": {
"category": "flags",
"name": "the seychelles",
"unicode": "1f1f8-1f1e8"
},
":flag_sd:": {
"category": "flags",
"name": "sudan",
"unicode": "1f1f8-1f1e9"
},
":flag_se:": {
"category": "flags",
"name": "sweden",
"unicode": "1f1f8-1f1ea"
},
":flag_sg:": {
"category": "flags",
"name": "singapore",
"unicode": "1f1f8-1f1ec"
},
":flag_sh:": {
"category": "flags",
"name": "saint helena",
"unicode": "1f1f8-1f1ed"
},
":flag_si:": {
"category": "flags",
"name": "slovenia",
"unicode": "1f1f8-1f1ee"
},
":flag_sj:": {
"category": "flags",
"name": "svalbard and jan mayen",
"unicode": "1f1f8-1f1ef"
},
":flag_sk:": {
"category": "flags",
"name": "slovakia",
"unicode": "1f1f8-1f1f0"
},
":flag_sl:": {
"category": "flags",
"name": "sierra leone",
"unicode": "1f1f8-1f1f1"
},
":flag_sm:": {
"category": "flags",
"name": "san marino",
"unicode": "1f1f8-1f1f2"
},
":flag_sn:": {
"category": "flags",
"name": "senegal",
"unicode": "1f1f8-1f1f3"
},
":flag_so:": {
"category": "flags",
"name": "somalia",
"unicode": "1f1f8-1f1f4"
},
":flag_sr:": {
"category": "flags",
"name": "suriname",
"unicode": "1f1f8-1f1f7"
},
":flag_ss:": {
"category": "flags",
"name": "south sudan",
"unicode": "1f1f8-1f1f8"
},
":flag_st:": {
"category": "flags",
"name": "s\u00e3o tom\u00e9 and pr\u00edncipe",
"unicode": "1f1f8-1f1f9"
},
":flag_sv:": {
"category": "flags",
"name": "el salvador",
"unicode": "1f1f8-1f1fb"
},
":flag_sx:": {
"category": "flags",
"name": "sint maarten",
"unicode": "1f1f8-1f1fd"
},
":flag_sy:": {
"category": "flags",
"name": "syria",
"unicode": "1f1f8-1f1fe"
},
":flag_sz:": {
"category": "flags",
"name": "swaziland",
"unicode": "1f1f8-1f1ff"
},
":flag_ta:": {
"category": "flags",
"name": "tristan da cunha",
"unicode": "1f1f9-1f1e6"
},
":flag_tc:": {
"category": "flags",
"name": "turks and caicos islands",
"unicode": "1f1f9-1f1e8"
},
":flag_td:": {
"category": "flags",
"name": "chad",
"unicode": "1f1f9-1f1e9"
},
":flag_tf:": {
"category": "flags",
"name": "french southern territories",
"unicode": "1f1f9-1f1eb"
},
":flag_tg:": {
"category": "flags",
"name": "togo",
"unicode": "1f1f9-1f1ec"
},
":flag_th:": {
"category": "flags",
"name": "thailand",
"unicode": "1f1f9-1f1ed"
},
":flag_tj:": {
"category": "flags",
"name": "tajikistan",
"unicode": "1f1f9-1f1ef"
},
":flag_tk:": {
"category": "flags",
"name": "tokelau",
"unicode": "1f1f9-1f1f0"
},
":flag_tl:": {
"category": "flags",
"name": "timor-leste",
"unicode": "1f1f9-1f1f1"
},
":flag_tm:": {
"category": "flags",
"name": "turkmenistan",
"unicode": "1f1f9-1f1f2"
},
":flag_tn:": {
"category": "flags",
"name": "tunisia",
"unicode": "1f1f9-1f1f3"
},
":flag_to:": {
"category": "flags",
"name": "tonga",
"unicode": "1f1f9-1f1f4"
},
":flag_tr:": {
"category": "flags",
"name": "turkey",
"unicode": "1f1f9-1f1f7"
},
":flag_tt:": {
"category": "flags",
"name": "trinidad and tobago",
"unicode": "1f1f9-1f1f9"
},
":flag_tv:": {
"category": "flags",
"name": "tuvalu",
"unicode": "1f1f9-1f1fb"
},
":flag_tw:": {
"category": "flags",
"name": "the republic of china",
"unicode": "1f1f9-1f1fc"
},
":flag_tz:": {
"category": "flags",
"name": "tanzania",
"unicode": "1f1f9-1f1ff"
},
":flag_ua:": {
"category": "flags",
"name": "ukraine",
"unicode": "1f1fa-1f1e6"
},
":flag_ug:": {
"category": "flags",
"name": "uganda",
"unicode": "1f1fa-1f1ec"
},
":flag_um:": {
"category": "flags",
"name": "united states minor outlying islands",
"unicode": "1f1fa-1f1f2"
},
":flag_us:": {
"category": "flags",
"name": "united states",
"unicode": "1f1fa-1f1f8"
},
":flag_uy:": {
"category": "flags",
"name": "uruguay",
"unicode": "1f1fa-1f1fe"
},
":flag_uz:": {
"category": "flags",
"name": "uzbekistan",
"unicode": "1f1fa-1f1ff"
},
":flag_va:": {
"category": "flags",
"name": "the vatican city",
"unicode": "1f1fb-1f1e6"
},
":flag_vc:": {
"category": "flags",
"name": "saint vincent and the grenadines",
"unicode": "1f1fb-1f1e8"
},
":flag_ve:": {
"category": "flags",
"name": "venezuela",
"unicode": "1f1fb-1f1ea"
},
":flag_vg:": {
"category": "flags",
"name": "british virgin islands",
"unicode": "1f1fb-1f1ec"
},
":flag_vi:": {
"category": "flags",
"name": "u.s. virgin islands",
"unicode": "1f1fb-1f1ee"
},
":flag_vn:": {
"category": "flags",
"name": "vietnam",
"unicode": "1f1fb-1f1f3"
},
":flag_vu:": {
"category": "flags",
"name": "vanuatu",
"unicode": "1f1fb-1f1fa"
},
":flag_wf:": {
"category": "flags",
"name": "wallis and futuna",
"unicode": "1f1fc-1f1eb"
},
":flag_white:": {
"category": "objects",
"name": "waving white flag",
"unicode": "1f3f3",
"unicode_alt": "1f3f3-fe0f"
},
":flag_ws:": {
"category": "flags",
"name": "samoa",
"unicode": "1f1fc-1f1f8"
},
":flag_xk:": {
"category": "flags",
"name": "kosovo",
"unicode": "1f1fd-1f1f0"
},
":flag_ye:": {
"category": "flags",
"name": "yemen",
"unicode": "1f1fe-1f1ea"
},
":flag_yt:": {
"category": "flags",
"name": "mayotte",
"unicode": "1f1fe-1f1f9"
},
":flag_za:": {
"category": "flags",
"name": "south africa",
"unicode": "1f1ff-1f1e6"
},
":flag_zm:": {
"category": "flags",
"name": "zambia",
"unicode": "1f1ff-1f1f2"
},
":flag_zw:": {
"category": "flags",
"name": "zimbabwe",
"unicode": "1f1ff-1f1fc"
},
":flags:": {
"category": "objects",
"name": "carp streamer",
"unicode": "1f38f"
},
":flashlight:": {
"category": "objects",
"name": "electric torch",
"unicode": "1f526"
},
":fleur-de-lis:": {
"category": "symbols",
"name": "fleur-de-lis",
"unicode": "269c",
"unicode_alt": "269c-fe0f"
},
":floppy_disk:": {
"category": "objects",
"name": "floppy disk",
"unicode": "1f4be"
},
":flower_playing_cards:": {
"category": "symbols",
"name": "flower playing cards",
"unicode": "1f3b4"
},
":flushed:": {
"category": "people",
"name": "flushed face",
"unicode": "1f633"
},
":fog:": {
"category": "nature",
"name": "fog",
"unicode": "1f32b",
"unicode_alt": "1f32b-fe0f"
},
":foggy:": {
"category": "travel",
"name": "foggy",
"unicode": "1f301"
},
":football:": {
"category": "activity",
"name": "american football",
"unicode": "1f3c8"
},
":footprints:": {
"category": "people",
"name": "footprints",
"unicode": "1f463"
},
":fork_and_knife:": {
"category": "food",
"name": "fork and knife",
"unicode": "1f374"
},
":fork_knife_plate:": {
"category": "food",
"name": "fork and knife with plate",
"unicode": "1f37d",
"unicode_alt": "1f37d-fe0f"
},
":fountain:": {
"category": "travel",
"name": "fountain",
"unicode": "26f2",
"unicode_alt": "26f2-fe0f"
},
":four:": {
"category": "symbols",
"name": "keycap digit four",
"unicode": "0034-20e3",
"unicode_alt": "0034-fe0f-20e3"
},
":four_leaf_clover:": {
"category": "nature",
"name": "four leaf clover",
"unicode": "1f340"
},
":fox:": {
"category": "nature",
"name": "fox face",
"unicode": "1f98a"
},
":frame_photo:": {
"category": "objects",
"name": "frame with picture",
"unicode": "1f5bc",
"unicode_alt": "1f5bc-fe0f"
},
":free:": {
"category": "symbols",
"name": "squared free",
"unicode": "1f193"
},
":french_bread:": {
"category": "food",
"name": "baguette bread",
"unicode": "1f956"
},
":fried_shrimp:": {
"category": "food",
"name": "fried shrimp",
"unicode": "1f364"
},
":fries:": {
"category": "food",
"name": "french fries",
"unicode": "1f35f"
},
":frog:": {
"category": "nature",
"name": "frog face",
"unicode": "1f438"
},
":frowning2:": {
"category": "people",
"name": "white frowning face",
"unicode": "2639",
"unicode_alt": "2639-fe0f"
},
":frowning:": {
"category": "people",
"name": "frowning face with open mouth",
"unicode": "1f626"
},
":fuelpump:": {
"category": "travel",
"name": "fuel pump",
"unicode": "26fd",
"unicode_alt": "26fd-fe0f"
},
":full_moon:": {
"category": "nature",
"name": "full moon symbol",
"unicode": "1f315"
},
":full_moon_with_face:": {
"category": "nature",
"name": "full moon with face",
"unicode": "1f31d"
},
":game_die:": {
"category": "activity",
"name": "game die",
"unicode": "1f3b2"
},
":gear:": {
"category": "objects",
"name": "gear",
"unicode": "2699",
"unicode_alt": "2699-fe0f"
},
":gem:": {
"category": "objects",
"name": "gem stone",
"unicode": "1f48e"
},
":gemini:": {
"category": "symbols",
"name": "gemini",
"unicode": "264a",
"unicode_alt": "264a-fe0f"
},
":ghost:": {
"category": "people",
"name": "ghost",
"unicode": "1f47b"
},
":gift:": {
"category": "objects",
"name": "wrapped present",
"unicode": "1f381"
},
":gift_heart:": {
"category": "symbols",
"name": "heart with ribbon",
"unicode": "1f49d"
},
":girl:": {
"category": "people",
"name": "girl",
"unicode": "1f467"
},
":girl_tone1:": {
"category": "people",
"name": "girl tone 1",
"unicode": "1f467-1f3fb"
},
":girl_tone2:": {
"category": "people",
"name": "girl tone 2",
"unicode": "1f467-1f3fc"
},
":girl_tone3:": {
"category": "people",
"name": "girl tone 3",
"unicode": "1f467-1f3fd"
},
":girl_tone4:": {
"category": "people",
"name": "girl tone 4",
"unicode": "1f467-1f3fe"
},
":girl_tone5:": {
"category": "people",
"name": "girl tone 5",
"unicode": "1f467-1f3ff"
},
":globe_with_meridians:": {
"category": "symbols",
"name": "globe with meridians",
"unicode": "1f310"
},
":goal:": {
"category": "activity",
"name": "goal net",
"unicode": "1f945"
},
":goat:": {
"category": "nature",
"name": "goat",
"unicode": "1f410"
},
":golf:": {
"category": "activity",
"name": "flag in hole",
"unicode": "26f3",
"unicode_alt": "26f3-fe0f"
},
":golfer:": {
"category": "activity",
"name": "golfer",
"unicode": "1f3cc",
"unicode_alt": "1f3cc-fe0f"
},
":gorilla:": {
"category": "nature",
"name": "gorilla",
"unicode": "1f98d"
},
":grapes:": {
"category": "food",
"name": "grapes",
"unicode": "1f347"
},
":green_apple:": {
"category": "food",
"name": "green apple",
"unicode": "1f34f"
},
":green_book:": {
"category": "objects",
"name": "green book",
"unicode": "1f4d7"
},
":green_heart:": {
"category": "symbols",
"name": "green heart",
"unicode": "1f49a"
},
":grey_exclamation:": {
"category": "symbols",
"name": "white exclamation mark ornament",
"unicode": "2755"
},
":grey_question:": {
"category": "symbols",
"name": "white question mark ornament",
"unicode": "2754"
},
":grimacing:": {
"category": "people",
"name": "grimacing face",
"unicode": "1f62c"
},
":grin:": {
"category": "people",
"name": "grinning face with smiling eyes",
"unicode": "1f601"
},
":grinning:": {
"category": "people",
"name": "grinning face",
"unicode": "1f600"
},
":guardsman:": {
"category": "people",
"name": "guardsman",
"unicode": "1f482"
},
":guardsman_tone1:": {
"category": "people",
"name": "guardsman tone 1",
"unicode": "1f482-1f3fb"
},
":guardsman_tone2:": {
"category": "people",
"name": "guardsman tone 2",
"unicode": "1f482-1f3fc"
},
":guardsman_tone3:": {
"category": "people",
"name": "guardsman tone 3",
"unicode": "1f482-1f3fd"
},
":guardsman_tone4:": {
"category": "people",
"name": "guardsman tone 4",
"unicode": "1f482-1f3fe"
},
":guardsman_tone5:": {
"category": "people",
"name": "guardsman tone 5",
"unicode": "1f482-1f3ff"
},
":guitar:": {
"category": "activity",
"name": "guitar",
"unicode": "1f3b8"
},
":gun:": {
"category": "objects",
"name": "pistol",
"unicode": "1f52b"
},
":haircut:": {
"category": "people",
"name": "haircut",
"unicode": "1f487"
},
":haircut_tone1:": {
"category": "people",
"name": "haircut tone 1",
"unicode": "1f487-1f3fb"
},
":haircut_tone2:": {
"category": "people",
"name": "haircut tone 2",
"unicode": "1f487-1f3fc"
},
":haircut_tone3:": {
"category": "people",
"name": "haircut tone 3",
"unicode": "1f487-1f3fd"
},
":haircut_tone4:": {
"category": "people",
"name": "haircut tone 4",
"unicode": "1f487-1f3fe"
},
":haircut_tone5:": {
"category": "people",
"name": "haircut tone 5",
"unicode": "1f487-1f3ff"
},
":hamburger:": {
"category": "food",
"name": "hamburger",
"unicode": "1f354"
},
":hammer:": {
"category": "objects",
"name": "hammer",
"unicode": "1f528"
},
":hammer_pick:": {
"category": "objects",
"name": "hammer and pick",
"unicode": "2692",
"unicode_alt": "2692-fe0f"
},
":hamster:": {
"category": "nature",
"name": "hamster face",
"unicode": "1f439"
},
":hand_splayed:": {
"category": "people",
"name": "raised hand with fingers splayed",
"unicode": "1f590",
"unicode_alt": "1f590-fe0f"
},
":hand_splayed_tone1:": {
"category": "people",
"name": "raised hand with fingers splayed tone 1",
"unicode": "1f590-1f3fb"
},
":hand_splayed_tone2:": {
"category": "people",
"name": "raised hand with fingers splayed tone 2",
"unicode": "1f590-1f3fc"
},
":hand_splayed_tone3:": {
"category": "people",
"name": "raised hand with fingers splayed tone 3",
"unicode": "1f590-1f3fd"
},
":hand_splayed_tone4:": {
"category": "people",
"name": "raised hand with fingers splayed tone 4",
"unicode": "1f590-1f3fe"
},
":hand_splayed_tone5:": {
"category": "people",
"name": "raised hand with fingers splayed tone 5",
"unicode": "1f590-1f3ff"
},
":handbag:": {
"category": "people",
"name": "handbag",
"unicode": "1f45c"
},
":handball:": {
"category": "activity",
"name": "handball",
"unicode": "1f93e"
},
":handball_tone1:": {
"category": "activity",
"name": "handball tone 1",
"unicode": "1f93e-1f3fb"
},
":handball_tone2:": {
"category": "activity",
"name": "handball tone 2",
"unicode": "1f93e-1f3fc"
},
":handball_tone3:": {
"category": "activity",
"name": "handball tone 3",
"unicode": "1f93e-1f3fd"
},
":handball_tone4:": {
"category": "activity",
"name": "handball tone 4",
"unicode": "1f93e-1f3fe"
},
":handball_tone5:": {
"category": "activity",
"name": "handball tone 5",
"unicode": "1f93e-1f3ff"
},
":handshake:": {
"category": "people",
"name": "handshake",
"unicode": "1f91d"
},
":handshake_tone1:": {
"category": "people",
"name": "handshake tone 1",
"unicode": "1f91d-1f3fb"
},
":handshake_tone2:": {
"category": "people",
"name": "handshake tone 2",
"unicode": "1f91d-1f3fc"
},
":handshake_tone3:": {
"category": "people",
"name": "handshake tone 3",
"unicode": "1f91d-1f3fd"
},
":handshake_tone4:": {
"category": "people",
"name": "handshake tone 4",
"unicode": "1f91d-1f3fe"
},
":handshake_tone5:": {
"category": "people",
"name": "handshake tone 5",
"unicode": "1f91d-1f3ff"
},
":hash:": {
"category": "symbols",
"name": "keycap number sign",
"unicode": "0023-20e3",
"unicode_alt": "0023-fe0f-20e3"
},
":hatched_chick:": {
"category": "nature",
"name": "front-facing baby chick",
"unicode": "1f425"
},
":hatching_chick:": {
"category": "nature",
"name": "hatching chick",
"unicode": "1f423"
},
":head_bandage:": {
"category": "people",
"name": "face with head-bandage",
"unicode": "1f915"
},
":headphones:": {
"category": "activity",
"name": "headphone",
"unicode": "1f3a7"
},
":hear_no_evil:": {
"category": "nature",
"name": "hear-no-evil monkey",
"unicode": "1f649"
},
":heart:": {
"category": "symbols",
"name": "heavy black heart",
"unicode": "2764",
"unicode_alt": "2764-fe0f"
},
":heart_decoration:": {
"category": "symbols",
"name": "heart decoration",
"unicode": "1f49f"
},
":heart_exclamation:": {
"category": "symbols",
"name": "heavy heart exclamation mark ornament",
"unicode": "2763",
"unicode_alt": "2763-fe0f"
},
":heart_eyes:": {
"category": "people",
"name": "smiling face with heart-shaped eyes",
"unicode": "1f60d"
},
":heart_eyes_cat:": {
"category": "people",
"name": "smiling cat face with heart-shaped eyes",
"unicode": "1f63b"
},
":heartbeat:": {
"category": "symbols",
"name": "beating heart",
"unicode": "1f493"
},
":heartpulse:": {
"category": "symbols",
"name": "growing heart",
"unicode": "1f497"
},
":hearts:": {
"category": "symbols",
"name": "black heart suit",
"unicode": "2665",
"unicode_alt": "2665-fe0f"
},
":heavy_check_mark:": {
"category": "symbols",
"name": "heavy check mark",
"unicode": "2714",
"unicode_alt": "2714-fe0f"
},
":heavy_division_sign:": {
"category": "symbols",
"name": "heavy division sign",
"unicode": "2797"
},
":heavy_dollar_sign:": {
"category": "symbols",
"name": "heavy dollar sign",
"unicode": "1f4b2"
},
":heavy_minus_sign:": {
"category": "symbols",
"name": "heavy minus sign",
"unicode": "2796"
},
":heavy_multiplication_x:": {
"category": "symbols",
"name": "heavy multiplication x",
"unicode": "2716",
"unicode_alt": "2716-fe0f"
},
":heavy_plus_sign:": {
"category": "symbols",
"name": "heavy plus sign",
"unicode": "2795"
},
":helicopter:": {
"category": "travel",
"name": "helicopter",
"unicode": "1f681"
},
":helmet_with_cross:": {
"category": "people",
"name": "helmet with white cross",
"unicode": "26d1",
"unicode_alt": "26d1-fe0f"
},
":herb:": {
"category": "nature",
"name": "herb",
"unicode": "1f33f"
},
":hibiscus:": {
"category": "nature",
"name": "hibiscus",
"unicode": "1f33a"
},
":high_brightness:": {
"category": "symbols",
"name": "high brightness symbol",
"unicode": "1f506"
},
":high_heel:": {
"category": "people",
"name": "high-heeled shoe",
"unicode": "1f460"
},
":hockey:": {
"category": "activity",
"name": "ice hockey stick and puck",
"unicode": "1f3d2"
},
":hole:": {
"category": "objects",
"name": "hole",
"unicode": "1f573",
"unicode_alt": "1f573-fe0f"
},
":homes:": {
"category": "travel",
"name": "house buildings",
"unicode": "1f3d8",
"unicode_alt": "1f3d8-fe0f"
},
":honey_pot:": {
"category": "food",
"name": "honey pot",
"unicode": "1f36f"
},
":horse:": {
"category": "nature",
"name": "horse face",
"unicode": "1f434"
},
":horse_racing:": {
"category": "activity",
"name": "horse racing",
"unicode": "1f3c7"
},
":horse_racing_tone1:": {
"category": "activity",
"name": "horse racing tone 1",
"unicode": "1f3c7-1f3fb"
},
":horse_racing_tone2:": {
"category": "activity",
"name": "horse racing tone 2",
"unicode": "1f3c7-1f3fc"
},
":horse_racing_tone3:": {
"category": "activity",
"name": "horse racing tone 3",
"unicode": "1f3c7-1f3fd"
},
":horse_racing_tone4:": {
"category": "activity",
"name": "horse racing tone 4",
"unicode": "1f3c7-1f3fe"
},
":horse_racing_tone5:": {
"category": "activity",
"name": "horse racing tone 5",
"unicode": "1f3c7-1f3ff"
},
":hospital:": {
"category": "travel",
"name": "hospital",
"unicode": "1f3e5"
},
":hot_pepper:": {
"category": "food",
"name": "hot pepper",
"unicode": "1f336",
"unicode_alt": "1f336-fe0f"
},
":hotdog:": {
"category": "food",
"name": "hot dog",
"unicode": "1f32d"
},
":hotel:": {
"category": "travel",
"name": "hotel",
"unicode": "1f3e8"
},
":hotsprings:": {
"category": "symbols",
"name": "hot springs",
"unicode": "2668",
"unicode_alt": "2668-fe0f"
},
":hourglass:": {
"category": "objects",
"name": "hourglass",
"unicode": "231b",
"unicode_alt": "231b-fe0f"
},
":hourglass_flowing_sand:": {
"category": "objects",
"name": "hourglass with flowing sand",
"unicode": "23f3"
},
":house:": {
"category": "travel",
"name": "house building",
"unicode": "1f3e0"
},
":house_abandoned:": {
"category": "travel",
"name": "derelict house building",
"unicode": "1f3da",
"unicode_alt": "1f3da-fe0f"
},
":house_with_garden:": {
"category": "travel",
"name": "house with garden",
"unicode": "1f3e1"
},
":hugging:": {
"category": "people",
"name": "hugging face",
"unicode": "1f917"
},
":hushed:": {
"category": "people",
"name": "hushed face",
"unicode": "1f62f"
},
":ice_cream:": {
"category": "food",
"name": "ice cream",
"unicode": "1f368"
},
":ice_skate:": {
"category": "activity",
"name": "ice skate",
"unicode": "26f8",
"unicode_alt": "26f8-fe0f"
},
":icecream:": {
"category": "food",
"name": "soft ice cream",
"unicode": "1f366"
},
":id:": {
"category": "symbols",
"name": "squared id",
"unicode": "1f194"
},
":ideograph_advantage:": {
"category": "symbols",
"name": "circled ideograph advantage",
"unicode": "1f250"
},
":imp:": {
"category": "people",
"name": "imp",
"unicode": "1f47f"
},
":inbox_tray:": {
"category": "objects",
"name": "inbox tray",
"unicode": "1f4e5"
},
":incoming_envelope:": {
"category": "objects",
"name": "incoming envelope",
"unicode": "1f4e8"
},
":information_desk_person:": {
"category": "people",
"name": "information desk person",
"unicode": "1f481"
},
":information_desk_person_tone1:": {
"category": "people",
"name": "information desk person tone 1",
"unicode": "1f481-1f3fb"
},
":information_desk_person_tone2:": {
"category": "people",
"name": "information desk person tone 2",
"unicode": "1f481-1f3fc"
},
":information_desk_person_tone3:": {
"category": "people",
"name": "information desk person tone 3",
"unicode": "1f481-1f3fd"
},
":information_desk_person_tone4:": {
"category": "people",
"name": "information desk person tone 4",
"unicode": "1f481-1f3fe"
},
":information_desk_person_tone5:": {
"category": "people",
"name": "information desk person tone 5",
"unicode": "1f481-1f3ff"
},
":information_source:": {
"category": "symbols",
"name": "information source",
"unicode": "2139",
"unicode_alt": "2139-fe0f"
},
":innocent:": {
"category": "people",
"name": "smiling face with halo",
"unicode": "1f607"
},
":interrobang:": {
"category": "symbols",
"name": "exclamation question mark",
"unicode": "2049",
"unicode_alt": "2049-fe0f"
},
":iphone:": {
"category": "objects",
"name": "mobile phone",
"unicode": "1f4f1"
},
":island:": {
"category": "travel",
"name": "desert island",
"unicode": "1f3dd",
"unicode_alt": "1f3dd-fe0f"
},
":izakaya_lantern:": {
"category": "objects",
"name": "izakaya lantern",
"unicode": "1f3ee"
},
":jack_o_lantern:": {
"category": "nature",
"name": "jack-o-lantern",
"unicode": "1f383"
},
":japan:": {
"category": "travel",
"name": "silhouette of japan",
"unicode": "1f5fe"
},
":japanese_castle:": {
"category": "travel",
"name": "japanese castle",
"unicode": "1f3ef"
},
":japanese_goblin:": {
"category": "people",
"name": "japanese goblin",
"unicode": "1f47a"
},
":japanese_ogre:": {
"category": "people",
"name": "japanese ogre",
"unicode": "1f479"
},
":jeans:": {
"category": "people",
"name": "jeans",
"unicode": "1f456"
},
":joy:": {
"category": "people",
"name": "face with tears of joy",
"unicode": "1f602"
},
":joy_cat:": {
"category": "people",
"name": "cat face with tears of joy",
"unicode": "1f639"
},
":joystick:": {
"category": "objects",
"name": "joystick",
"unicode": "1f579",
"unicode_alt": "1f579-fe0f"
},
":juggling:": {
"category": "activity",
"name": "juggling",
"unicode": "1f939"
},
":juggling_tone1:": {
"category": "activity",
"name": "juggling tone 1",
"unicode": "1f939-1f3fb"
},
":juggling_tone2:": {
"category": "activity",
"name": "juggling tone 2",
"unicode": "1f939-1f3fc"
},
":juggling_tone3:": {
"category": "activity",
"name": "juggling tone 3",
"unicode": "1f939-1f3fd"
},
":juggling_tone4:": {
"category": "activity",
"name": "juggling tone 4",
"unicode": "1f939-1f3fe"
},
":juggling_tone5:": {
"category": "activity",
"name": "juggling tone 5",
"unicode": "1f939-1f3ff"
},
":kaaba:": {
"category": "travel",
"name": "kaaba",
"unicode": "1f54b"
},
":key2:": {
"category": "objects",
"name": "old key",
"unicode": "1f5dd",
"unicode_alt": "1f5dd-fe0f"
},
":key:": {
"category": "objects",
"name": "key",
"unicode": "1f511"
},
":keyboard:": {
"category": "objects",
"name": "keyboard",
"unicode": "2328",
"unicode_alt": "2328-fe0f"
},
":keycap_ten:": {
"category": "symbols",
"name": "keycap ten",
"unicode": "1f51f"
},
":kimono:": {
"category": "people",
"name": "kimono",
"unicode": "1f458"
},
":kiss:": {
"category": "people",
"name": "kiss mark",
"unicode": "1f48b"
},
":kiss_mm:": {
"category": "people",
"name": "kiss (man,man)",
"unicode": "1f468-2764-1f48b-1f468",
"unicode_alt": "1f468-200d-2764-fe0f-200d-1f48b-200d-1f468"
},
":kiss_ww:": {
"category": "people",
"name": "kiss (woman,woman)",
"unicode": "1f469-2764-1f48b-1f469",
"unicode_alt": "1f469-200d-2764-fe0f-200d-1f48b-200d-1f469"
},
":kissing:": {
"category": "people",
"name": "kissing face",
"unicode": "1f617"
},
":kissing_cat:": {
"category": "people",
"name": "kissing cat face with closed eyes",
"unicode": "1f63d"
},
":kissing_closed_eyes:": {
"category": "people",
"name": "kissing face with closed eyes",
"unicode": "1f61a"
},
":kissing_heart:": {
"category": "people",
"name": "face throwing a kiss",
"unicode": "1f618"
},
":kissing_smiling_eyes:": {
"category": "people",
"name": "kissing face with smiling eyes",
"unicode": "1f619"
},
":kiwi:": {
"category": "food",
"name": "kiwifruit",
"unicode": "1f95d"
},
":knife:": {
"category": "objects",
"name": "hocho",
"unicode": "1f52a"
},
":koala:": {
"category": "nature",
"name": "koala",
"unicode": "1f428"
},
":koko:": {
"category": "symbols",
"name": "squared katakana koko",
"unicode": "1f201"
},
":label:": {
"category": "objects",
"name": "label",
"unicode": "1f3f7",
"unicode_alt": "1f3f7-fe0f"
},
":large_blue_diamond:": {
"category": "symbols",
"name": "large blue diamond",
"unicode": "1f537"
},
":large_orange_diamond:": {
"category": "symbols",
"name": "large orange diamond",
"unicode": "1f536"
},
":last_quarter_moon:": {
"category": "nature",
"name": "last quarter moon symbol",
"unicode": "1f317"
},
":last_quarter_moon_with_face:": {
"category": "nature",
"name": "last quarter moon with face",
"unicode": "1f31c"
},
":laughing:": {
"category": "people",
"name": "smiling face with open mouth and tightly-closed eyes",
"unicode": "1f606"
},
":leaves:": {
"category": "nature",
"name": "leaf fluttering in wind",
"unicode": "1f343"
},
":ledger:": {
"category": "objects",
"name": "ledger",
"unicode": "1f4d2"
},
":left_facing_fist:": {
"category": "people",
"name": "left-facing fist",
"unicode": "1f91b"
},
":left_facing_fist_tone1:": {
"category": "people",
"name": "left facing fist tone 1",
"unicode": "1f91b-1f3fb"
},
":left_facing_fist_tone2:": {
"category": "people",
"name": "left facing fist tone 2",
"unicode": "1f91b-1f3fc"
},
":left_facing_fist_tone3:": {
"category": "people",
"name": "left facing fist tone 3",
"unicode": "1f91b-1f3fd"
},
":left_facing_fist_tone4:": {
"category": "people",
"name": "left facing fist tone 4",
"unicode": "1f91b-1f3fe"
},
":left_facing_fist_tone5:": {
"category": "people",
"name": "left facing fist tone 5",
"unicode": "1f91b-1f3ff"
},
":left_luggage:": {
"category": "symbols",
"name": "left luggage",
"unicode": "1f6c5"
},
":left_right_arrow:": {
"category": "symbols",
"name": "left right arrow",
"unicode": "2194",
"unicode_alt": "2194-fe0f"
},
":leftwards_arrow_with_hook:": {
"category": "symbols",
"name": "leftwards arrow with hook",
"unicode": "21a9",
"unicode_alt": "21a9-fe0f"
},
":lemon:": {
"category": "food",
"name": "lemon",
"unicode": "1f34b"
},
":leo:": {
"category": "symbols",
"name": "leo",
"unicode": "264c",
"unicode_alt": "264c-fe0f"
},
":leopard:": {
"category": "nature",
"name": "leopard",
"unicode": "1f406"
},
":level_slider:": {
"category": "objects",
"name": "level slider",
"unicode": "1f39a",
"unicode_alt": "1f39a-fe0f"
},
":levitate:": {
"category": "activity",
"name": "man in business suit levitating",
"unicode": "1f574",
"unicode_alt": "1f574-fe0f"
},
":libra:": {
"category": "symbols",
"name": "libra",
"unicode": "264e",
"unicode_alt": "264e-fe0f"
},
":lifter:": {
"category": "activity",
"name": "weight lifter",
"unicode": "1f3cb",
"unicode_alt": "1f3cb-fe0f"
},
":lifter_tone1:": {
"category": "activity",
"name": "weight lifter tone 1",
"unicode": "1f3cb-1f3fb"
},
":lifter_tone2:": {
"category": "activity",
"name": "weight lifter tone 2",
"unicode": "1f3cb-1f3fc"
},
":lifter_tone3:": {
"category": "activity",
"name": "weight lifter tone 3",
"unicode": "1f3cb-1f3fd"
},
":lifter_tone4:": {
"category": "activity",
"name": "weight lifter tone 4",
"unicode": "1f3cb-1f3fe"
},
":lifter_tone5:": {
"category": "activity",
"name": "weight lifter tone 5",
"unicode": "1f3cb-1f3ff"
},
":light_rail:": {
"category": "travel",
"name": "light rail",
"unicode": "1f688"
},
":link:": {
"category": "objects",
"name": "link symbol",
"unicode": "1f517"
},
":lion_face:": {
"category": "nature",
"name": "lion face",
"unicode": "1f981"
},
":lips:": {
"category": "people",
"name": "mouth",
"unicode": "1f444"
},
":lipstick:": {
"category": "people",
"name": "lipstick",
"unicode": "1f484"
},
":lizard:": {
"category": "nature",
"name": "lizard",
"unicode": "1f98e"
},
":lock:": {
"category": "objects",
"name": "lock",
"unicode": "1f512"
},
":lock_with_ink_pen:": {
"category": "objects",
"name": "lock with ink pen",
"unicode": "1f50f"
},
":lollipop:": {
"category": "food",
"name": "lollipop",
"unicode": "1f36d"
},
":loop:": {
"category": "symbols",
"name": "double curly loop",
"unicode": "27bf"
},
":loud_sound:": {
"category": "symbols",
"name": "speaker with three sound waves",
"unicode": "1f50a"
},
":loudspeaker:": {
"category": "symbols",
"name": "public address loudspeaker",
"unicode": "1f4e2"
},
":love_hotel:": {
"category": "travel",
"name": "love hotel",
"unicode": "1f3e9"
},
":love_letter:": {
"category": "objects",
"name": "love letter",
"unicode": "1f48c"
},
":low_brightness:": {
"category": "symbols",
"name": "low brightness symbol",
"unicode": "1f505"
},
":lying_face:": {
"category": "people",
"name": "lying face",
"unicode": "1f925"
},
":m:": {
"category": "symbols",
"name": "circled latin capital letter m",
"unicode": "24c2",
"unicode_alt": "24c2-fe0f"
},
":mag:": {
"category": "objects",
"name": "left-pointing magnifying glass",
"unicode": "1f50d"
},
":mag_right:": {
"category": "objects",
"name": "right-pointing magnifying glass",
"unicode": "1f50e"
},
":mahjong:": {
"category": "symbols",
"name": "mahjong tile red dragon",
"unicode": "1f004",
"unicode_alt": "1f004-fe0f"
},
":mailbox:": {
"category": "objects",
"name": "closed mailbox with raised flag",
"unicode": "1f4eb"
},
":mailbox_closed:": {
"category": "objects",
"name": "closed mailbox with lowered flag",
"unicode": "1f4ea"
},
":mailbox_with_mail:": {
"category": "objects",
"name": "open mailbox with raised flag",
"unicode": "1f4ec"
},
":mailbox_with_no_mail:": {
"category": "objects",
"name": "open mailbox with lowered flag",
"unicode": "1f4ed"
},
":man:": {
"category": "people",
"name": "man",
"unicode": "1f468"
},
":man_dancing:": {
"category": "people",
"name": "man dancing",
"unicode": "1f57a"
},
":man_dancing_tone1:": {
"category": "people",
"name": "man dancing tone 1",
"unicode": "1f57a-1f3fb"
},
":man_dancing_tone2:": {
"category": "people",
"name": "man dancing tone 2",
"unicode": "1f57a-1f3fc"
},
":man_dancing_tone3:": {
"category": "people",
"name": "man dancing tone 3",
"unicode": "1f57a-1f3fd"
},
":man_dancing_tone4:": {
"category": "people",
"name": "man dancing tone 4",
"unicode": "1f57a-1f3fe"
},
":man_dancing_tone5:": {
"category": "people",
"name": "man dancing tone 5",
"unicode": "1f57a-1f3ff"
},
":man_in_tuxedo:": {
"category": "people",
"name": "man in tuxedo",
"unicode": "1f935"
},
":man_in_tuxedo_tone1:": {
"category": "people",
"name": "man in tuxedo tone 1",
"unicode": "1f935-1f3fb"
},
":man_in_tuxedo_tone2:": {
"category": "people",
"name": "man in tuxedo tone 2",
"unicode": "1f935-1f3fc"
},
":man_in_tuxedo_tone3:": {
"category": "people",
"name": "man in tuxedo tone 3",
"unicode": "1f935-1f3fd"
},
":man_in_tuxedo_tone4:": {
"category": "people",
"name": "man in tuxedo tone 4",
"unicode": "1f935-1f3fe"
},
":man_in_tuxedo_tone5:": {
"category": "people",
"name": "man in tuxedo tone 5",
"unicode": "1f935-1f3ff"
},
":man_tone1:": {
"category": "people",
"name": "man tone 1",
"unicode": "1f468-1f3fb"
},
":man_tone2:": {
"category": "people",
"name": "man tone 2",
"unicode": "1f468-1f3fc"
},
":man_tone3:": {
"category": "people",
"name": "man tone 3",
"unicode": "1f468-1f3fd"
},
":man_tone4:": {
"category": "people",
"name": "man tone 4",
"unicode": "1f468-1f3fe"
},
":man_tone5:": {
"category": "people",
"name": "man tone 5",
"unicode": "1f468-1f3ff"
},
":man_with_gua_pi_mao:": {
"category": "people",
"name": "man with gua pi mao",
"unicode": "1f472"
},
":man_with_gua_pi_mao_tone1:": {
"category": "people",
"name": "man with gua pi mao tone 1",
"unicode": "1f472-1f3fb"
},
":man_with_gua_pi_mao_tone2:": {
"category": "people",
"name": "man with gua pi mao tone 2",
"unicode": "1f472-1f3fc"
},
":man_with_gua_pi_mao_tone3:": {
"category": "people",
"name": "man with gua pi mao tone 3",
"unicode": "1f472-1f3fd"
},
":man_with_gua_pi_mao_tone4:": {
"category": "people",
"name": "man with gua pi mao tone 4",
"unicode": "1f472-1f3fe"
},
":man_with_gua_pi_mao_tone5:": {
"category": "people",
"name": "man with gua pi mao tone 5",
"unicode": "1f472-1f3ff"
},
":man_with_turban:": {
"category": "people",
"name": "man with turban",
"unicode": "1f473"
},
":man_with_turban_tone1:": {
"category": "people",
"name": "man with turban tone 1",
"unicode": "1f473-1f3fb"
},
":man_with_turban_tone2:": {
"category": "people",
"name": "man with turban tone 2",
"unicode": "1f473-1f3fc"
},
":man_with_turban_tone3:": {
"category": "people",
"name": "man with turban tone 3",
"unicode": "1f473-1f3fd"
},
":man_with_turban_tone4:": {
"category": "people",
"name": "man with turban tone 4",
"unicode": "1f473-1f3fe"
},
":man_with_turban_tone5:": {
"category": "people",
"name": "man with turban tone 5",
"unicode": "1f473-1f3ff"
},
":mans_shoe:": {
"category": "people",
"name": "mans shoe",
"unicode": "1f45e"
},
":map:": {
"category": "objects",
"name": "world map",
"unicode": "1f5fa",
"unicode_alt": "1f5fa-fe0f"
},
":maple_leaf:": {
"category": "nature",
"name": "maple leaf",
"unicode": "1f341"
},
":martial_arts_uniform:": {
"category": "activity",
"name": "martial arts uniform",
"unicode": "1f94b"
},
":mask:": {
"category": "people",
"name": "face with medical mask",
"unicode": "1f637"
},
":massage:": {
"category": "people",
"name": "face massage",
"unicode": "1f486"
},
":massage_tone1:": {
"category": "people",
"name": "face massage tone 1",
"unicode": "1f486-1f3fb"
},
":massage_tone2:": {
"category": "people",
"name": "face massage tone 2",
"unicode": "1f486-1f3fc"
},
":massage_tone3:": {
"category": "people",
"name": "face massage tone 3",
"unicode": "1f486-1f3fd"
},
":massage_tone4:": {
"category": "people",
"name": "face massage tone 4",
"unicode": "1f486-1f3fe"
},
":massage_tone5:": {
"category": "people",
"name": "face massage tone 5",
"unicode": "1f486-1f3ff"
},
":meat_on_bone:": {
"category": "food",
"name": "meat on bone",
"unicode": "1f356"
},
":medal:": {
"category": "activity",
"name": "sports medal",
"unicode": "1f3c5"
},
":mega:": {
"category": "symbols",
"name": "cheering megaphone",
"unicode": "1f4e3"
},
":melon:": {
"category": "food",
"name": "melon",
"unicode": "1f348"
},
":menorah:": {
"category": "symbols",
"name": "menorah with nine branches",
"unicode": "1f54e"
},
":mens:": {
"category": "symbols",
"name": "mens symbol",
"unicode": "1f6b9"
},
":metal:": {
"category": "people",
"name": "sign of the horns",
"unicode": "1f918"
},
":metal_tone1:": {
"category": "people",
"name": "sign of the horns tone 1",
"unicode": "1f918-1f3fb"
},
":metal_tone2:": {
"category": "people",
"name": "sign of the horns tone 2",
"unicode": "1f918-1f3fc"
},
":metal_tone3:": {
"category": "people",
"name": "sign of the horns tone 3",
"unicode": "1f918-1f3fd"
},
":metal_tone4:": {
"category": "people",
"name": "sign of the horns tone 4",
"unicode": "1f918-1f3fe"
},
":metal_tone5:": {
"category": "people",
"name": "sign of the horns tone 5",
"unicode": "1f918-1f3ff"
},
":metro:": {
"category": "travel",
"name": "metro",
"unicode": "1f687"
},
":microphone2:": {
"category": "objects",
"name": "studio microphone",
"unicode": "1f399",
"unicode_alt": "1f399-fe0f"
},
":microphone:": {
"category": "activity",
"name": "microphone",
"unicode": "1f3a4"
},
":microscope:": {
"category": "objects",
"name": "microscope",
"unicode": "1f52c"
},
":middle_finger:": {
"category": "people",
"name": "reversed hand with middle finger extended",
"unicode": "1f595"
},
":middle_finger_tone1:": {
"category": "people",
"name": "reversed hand with middle finger extended tone 1",
"unicode": "1f595-1f3fb"
},
":middle_finger_tone2:": {
"category": "people",
"name": "reversed hand with middle finger extended tone 2",
"unicode": "1f595-1f3fc"
},
":middle_finger_tone3:": {
"category": "people",
"name": "reversed hand with middle finger extended tone 3",
"unicode": "1f595-1f3fd"
},
":middle_finger_tone4:": {
"category": "people",
"name": "reversed hand with middle finger extended tone 4",
"unicode": "1f595-1f3fe"
},
":middle_finger_tone5:": {
"category": "people",
"name": "reversed hand with middle finger extended tone 5",
"unicode": "1f595-1f3ff"
},
":military_medal:": {
"category": "activity",
"name": "military medal",
"unicode": "1f396",
"unicode_alt": "1f396-fe0f"
},
":milk:": {
"category": "food",
"name": "glass of milk",
"unicode": "1f95b"
},
":milky_way:": {
"category": "travel",
"name": "milky way",
"unicode": "1f30c"
},
":minibus:": {
"category": "travel",
"name": "minibus",
"unicode": "1f690"
},
":minidisc:": {
"category": "objects",
"name": "minidisc",
"unicode": "1f4bd"
},
":mobile_phone_off:": {
"category": "symbols",
"name": "mobile phone off",
"unicode": "1f4f4"
},
":money_mouth:": {
"category": "people",
"name": "money-mouth face",
"unicode": "1f911"
},
":money_with_wings:": {
"category": "objects",
"name": "money with wings",
"unicode": "1f4b8"
},
":moneybag:": {
"category": "objects",
"name": "money bag",
"unicode": "1f4b0"
},
":monkey:": {
"category": "nature",
"name": "monkey",
"unicode": "1f412"
},
":monkey_face:": {
"category": "nature",
"name": "monkey face",
"unicode": "1f435"
},
":monorail:": {
"category": "travel",
"name": "monorail",
"unicode": "1f69d"
},
":mortar_board:": {
"category": "people",
"name": "graduation cap",
"unicode": "1f393"
},
":mosque:": {
"category": "travel",
"name": "mosque",
"unicode": "1f54c"
},
":motor_scooter:": {
"category": "travel",
"name": "motor scooter",
"unicode": "1f6f5"
},
":motorboat:": {
"category": "travel",
"name": "motorboat",
"unicode": "1f6e5",
"unicode_alt": "1f6e5-fe0f"
},
":motorcycle:": {
"category": "travel",
"name": "racing motorcycle",
"unicode": "1f3cd",
"unicode_alt": "1f3cd-fe0f"
},
":motorway:": {
"category": "travel",
"name": "motorway",
"unicode": "1f6e3",
"unicode_alt": "1f6e3-fe0f"
},
":mount_fuji:": {
"category": "travel",
"name": "mount fuji",
"unicode": "1f5fb"
},
":mountain:": {
"category": "travel",
"name": "mountain",
"unicode": "26f0",
"unicode_alt": "26f0-fe0f"
},
":mountain_bicyclist:": {
"category": "activity",
"name": "mountain bicyclist",
"unicode": "1f6b5"
},
":mountain_bicyclist_tone1:": {
"category": "activity",
"name": "mountain bicyclist tone 1",
"unicode": "1f6b5-1f3fb"
},
":mountain_bicyclist_tone2:": {
"category": "activity",
"name": "mountain bicyclist tone 2",
"unicode": "1f6b5-1f3fc"
},
":mountain_bicyclist_tone3:": {
"category": "activity",
"name": "mountain bicyclist tone 3",
"unicode": "1f6b5-1f3fd"
},
":mountain_bicyclist_tone4:": {
"category": "activity",
"name": "mountain bicyclist tone 4",
"unicode": "1f6b5-1f3fe"
},
":mountain_bicyclist_tone5:": {
"category": "activity",
"name": "mountain bicyclist tone 5",
"unicode": "1f6b5-1f3ff"
},
":mountain_cableway:": {
"category": "travel",
"name": "mountain cableway",
"unicode": "1f6a0"
},
":mountain_railway:": {
"category": "travel",
"name": "mountain railway",
"unicode": "1f69e"
},
":mountain_snow:": {
"category": "travel",
"name": "snow capped mountain",
"unicode": "1f3d4",
"unicode_alt": "1f3d4-fe0f"
},
":mouse2:": {
"category": "nature",
"name": "mouse",
"unicode": "1f401"
},
":mouse:": {
"category": "nature",
"name": "mouse face",
"unicode": "1f42d"
},
":mouse_three_button:": {
"category": "objects",
"name": "three button mouse",
"unicode": "1f5b1",
"unicode_alt": "1f5b1-fe0f"
},
":movie_camera:": {
"category": "objects",
"name": "movie camera",
"unicode": "1f3a5"
},
":moyai:": {
"category": "objects",
"name": "moyai",
"unicode": "1f5ff"
},
":mrs_claus:": {
"category": "people",
"name": "mother christmas",
"unicode": "1f936"
},
":mrs_claus_tone1:": {
"category": "people",
"name": "mother christmas tone 1",
"unicode": "1f936-1f3fb"
},
":mrs_claus_tone2:": {
"category": "people",
"name": "mother christmas tone 2",
"unicode": "1f936-1f3fc"
},
":mrs_claus_tone3:": {
"category": "people",
"name": "mother christmas tone 3",
"unicode": "1f936-1f3fd"
},
":mrs_claus_tone4:": {
"category": "people",
"name": "mother christmas tone 4",
"unicode": "1f936-1f3fe"
},
":mrs_claus_tone5:": {
"category": "people",
"name": "mother christmas tone 5",
"unicode": "1f936-1f3ff"
},
":muscle:": {
"category": "people",
"name": "flexed biceps",
"unicode": "1f4aa"
},
":muscle_tone1:": {
"category": "people",
"name": "flexed biceps tone 1",
"unicode": "1f4aa-1f3fb"
},
":muscle_tone2:": {
"category": "people",
"name": "flexed biceps tone 2",
"unicode": "1f4aa-1f3fc"
},
":muscle_tone3:": {
"category": "people",
"name": "flexed biceps tone 3",
"unicode": "1f4aa-1f3fd"
},
":muscle_tone4:": {
"category": "people",
"name": "flexed biceps tone 4",
"unicode": "1f4aa-1f3fe"
},
":muscle_tone5:": {
"category": "people",
"name": "flexed biceps tone 5",
"unicode": "1f4aa-1f3ff"
},
":mushroom:": {
"category": "nature",
"name": "mushroom",
"unicode": "1f344"
},
":musical_keyboard:": {
"category": "activity",
"name": "musical keyboard",
"unicode": "1f3b9"
},
":musical_note:": {
"category": "symbols",
"name": "musical note",
"unicode": "1f3b5"
},
":musical_score:": {
"category": "activity",
"name": "musical score",
"unicode": "1f3bc"
},
":mute:": {
"category": "symbols",
"name": "speaker with cancellation stroke",
"unicode": "1f507"
},
":nail_care:": {
"category": "people",
"name": "nail polish",
"unicode": "1f485"
},
":nail_care_tone1:": {
"category": "people",
"name": "nail polish tone 1",
"unicode": "1f485-1f3fb"
},
":nail_care_tone2:": {
"category": "people",
"name": "nail polish tone 2",
"unicode": "1f485-1f3fc"
},
":nail_care_tone3:": {
"category": "people",
"name": "nail polish tone 3",
"unicode": "1f485-1f3fd"
},
":nail_care_tone4:": {
"category": "people",
"name": "nail polish tone 4",
"unicode": "1f485-1f3fe"
},
":nail_care_tone5:": {
"category": "people",
"name": "nail polish tone 5",
"unicode": "1f485-1f3ff"
},
":name_badge:": {
"category": "symbols",
"name": "name badge",
"unicode": "1f4db"
},
":nauseated_face:": {
"category": "people",
"name": "nauseated face",
"unicode": "1f922"
},
":necktie:": {
"category": "people",
"name": "necktie",
"unicode": "1f454"
},
":negative_squared_cross_mark:": {
"category": "symbols",
"name": "negative squared cross mark",
"unicode": "274e"
},
":nerd:": {
"category": "people",
"name": "nerd face",
"unicode": "1f913"
},
":neutral_face:": {
"category": "people",
"name": "neutral face",
"unicode": "1f610"
},
":new:": {
"category": "symbols",
"name": "squared new",
"unicode": "1f195"
},
":new_moon:": {
"category": "nature",
"name": "new moon symbol",
"unicode": "1f311"
},
":new_moon_with_face:": {
"category": "nature",
"name": "new moon with face",
"unicode": "1f31a"
},
":newspaper2:": {
"category": "objects",
"name": "rolled-up newspaper",
"unicode": "1f5de",
"unicode_alt": "1f5de-fe0f"
},
":newspaper:": {
"category": "objects",
"name": "newspaper",
"unicode": "1f4f0"
},
":ng:": {
"category": "symbols",
"name": "squared ng",
"unicode": "1f196"
},
":night_with_stars:": {
"category": "travel",
"name": "night with stars",
"unicode": "1f303"
},
":nine:": {
"category": "symbols",
"name": "keycap digit nine",
"unicode": "0039-20e3",
"unicode_alt": "0039-fe0f-20e3"
},
":no_bell:": {
"category": "symbols",
"name": "bell with cancellation stroke",
"unicode": "1f515"
},
":no_bicycles:": {
"category": "symbols",
"name": "no bicycles",
"unicode": "1f6b3"
},
":no_entry:": {
"category": "symbols",
"name": "no entry",
"unicode": "26d4",
"unicode_alt": "26d4-fe0f"
},
":no_entry_sign:": {
"category": "symbols",
"name": "no entry sign",
"unicode": "1f6ab"
},
":no_good:": {
"category": "people",
"name": "face with no good gesture",
"unicode": "1f645"
},
":no_good_tone1:": {
"category": "people",
"name": "face with no good gesture tone 1",
"unicode": "1f645-1f3fb"
},
":no_good_tone2:": {
"category": "people",
"name": "face with no good gesture tone 2",
"unicode": "1f645-1f3fc"
},
":no_good_tone3:": {
"category": "people",
"name": "face with no good gesture tone 3",
"unicode": "1f645-1f3fd"
},
":no_good_tone4:": {
"category": "people",
"name": "face with no good gesture tone 4",
"unicode": "1f645-1f3fe"
},
":no_good_tone5:": {
"category": "people",
"name": "face with no good gesture tone 5",
"unicode": "1f645-1f3ff"
},
":no_mobile_phones:": {
"category": "symbols",
"name": "no mobile phones",
"unicode": "1f4f5"
},
":no_mouth:": {
"category": "people",
"name": "face without mouth",
"unicode": "1f636"
},
":no_pedestrians:": {
"category": "symbols",
"name": "no pedestrians",
"unicode": "1f6b7"
},
":no_smoking:": {
"category": "symbols",
"name": "no smoking symbol",
"unicode": "1f6ad"
},
":non-potable_water:": {
"category": "symbols",
"name": "non-potable water symbol",
"unicode": "1f6b1"
},
":nose:": {
"category": "people",
"name": "nose",
"unicode": "1f443"
},
":nose_tone1:": {
"category": "people",
"name": "nose tone 1",
"unicode": "1f443-1f3fb"
},
":nose_tone2:": {
"category": "people",
"name": "nose tone 2",
"unicode": "1f443-1f3fc"
},
":nose_tone3:": {
"category": "people",
"name": "nose tone 3",
"unicode": "1f443-1f3fd"
},
":nose_tone4:": {
"category": "people",
"name": "nose tone 4",
"unicode": "1f443-1f3fe"
},
":nose_tone5:": {
"category": "people",
"name": "nose tone 5",
"unicode": "1f443-1f3ff"
},
":notebook:": {
"category": "objects",
"name": "notebook",
"unicode": "1f4d3"
},
":notebook_with_decorative_cover:": {
"category": "objects",
"name": "notebook with decorative cover",
"unicode": "1f4d4"
},
":notepad_spiral:": {
"category": "objects",
"name": "spiral note pad",
"unicode": "1f5d2",
"unicode_alt": "1f5d2-fe0f"
},
":notes:": {
"category": "symbols",
"name": "multiple musical notes",
"unicode": "1f3b6"
},
":nut_and_bolt:": {
"category": "objects",
"name": "nut and bolt",
"unicode": "1f529"
},
":o2:": {
"category": "symbols",
"name": "negative squared latin capital letter o",
"unicode": "1f17e"
},
":o:": {
"category": "symbols",
"name": "heavy large circle",
"unicode": "2b55",
"unicode_alt": "2b55-fe0f"
},
":ocean:": {
"category": "nature",
"name": "water wave",
"unicode": "1f30a"
},
":octagonal_sign:": {
"category": "symbols",
"name": "octagonal sign",
"unicode": "1f6d1"
},
":octopus:": {
"category": "nature",
"name": "octopus",
"unicode": "1f419"
},
":oden:": {
"category": "food",
"name": "oden",
"unicode": "1f362"
},
":office:": {
"category": "travel",
"name": "office building",
"unicode": "1f3e2"
},
":oil:": {
"category": "objects",
"name": "oil drum",
"unicode": "1f6e2",
"unicode_alt": "1f6e2-fe0f"
},
":ok:": {
"category": "symbols",
"name": "squared ok",
"unicode": "1f197"
},
":ok_hand:": {
"category": "people",
"name": "ok hand sign",
"unicode": "1f44c"
},
":ok_hand_tone1:": {
"category": "people",
"name": "ok hand sign tone 1",
"unicode": "1f44c-1f3fb"
},
":ok_hand_tone2:": {
"category": "people",
"name": "ok hand sign tone 2",
"unicode": "1f44c-1f3fc"
},
":ok_hand_tone3:": {
"category": "people",
"name": "ok hand sign tone 3",
"unicode": "1f44c-1f3fd"
},
":ok_hand_tone4:": {
"category": "people",
"name": "ok hand sign tone 4",
"unicode": "1f44c-1f3fe"
},
":ok_hand_tone5:": {
"category": "people",
"name": "ok hand sign tone 5",
"unicode": "1f44c-1f3ff"
},
":ok_woman:": {
"category": "people",
"name": "face with ok gesture",
"unicode": "1f646"
},
":ok_woman_tone1:": {
"category": "people",
        "name": "face with ok gesture tone 1",
"unicode": "1f646-1f3fb"
},
":ok_woman_tone2:": {
"category": "people",
        "name": "face with ok gesture tone 2",
"unicode": "1f646-1f3fc"
},
":ok_woman_tone3:": {
"category": "people",
        "name": "face with ok gesture tone 3",
"unicode": "1f646-1f3fd"
},
":ok_woman_tone4:": {
"category": "people",
        "name": "face with ok gesture tone 4",
"unicode": "1f646-1f3fe"
},
":ok_woman_tone5:": {
"category": "people",
        "name": "face with ok gesture tone 5",
"unicode": "1f646-1f3ff"
},
":older_man:": {
"category": "people",
"name": "older man",
"unicode": "1f474"
},
":older_man_tone1:": {
"category": "people",
"name": "older man tone 1",
"unicode": "1f474-1f3fb"
},
":older_man_tone2:": {
"category": "people",
"name": "older man tone 2",
"unicode": "1f474-1f3fc"
},
":older_man_tone3:": {
"category": "people",
"name": "older man tone 3",
"unicode": "1f474-1f3fd"
},
":older_man_tone4:": {
"category": "people",
"name": "older man tone 4",
"unicode": "1f474-1f3fe"
},
":older_man_tone5:": {
"category": "people",
"name": "older man tone 5",
"unicode": "1f474-1f3ff"
},
":older_woman:": {
"category": "people",
"name": "older woman",
"unicode": "1f475"
},
":older_woman_tone1:": {
"category": "people",
"name": "older woman tone 1",
"unicode": "1f475-1f3fb"
},
":older_woman_tone2:": {
"category": "people",
"name": "older woman tone 2",
"unicode": "1f475-1f3fc"
},
":older_woman_tone3:": {
"category": "people",
"name": "older woman tone 3",
"unicode": "1f475-1f3fd"
},
":older_woman_tone4:": {
"category": "people",
"name": "older woman tone 4",
"unicode": "1f475-1f3fe"
},
":older_woman_tone5:": {
"category": "people",
"name": "older woman tone 5",
"unicode": "1f475-1f3ff"
},
":om_symbol:": {
"category": "symbols",
"name": "om symbol",
"unicode": "1f549",
"unicode_alt": "1f549-fe0f"
},
":on:": {
"category": "symbols",
        "name": "on with exclamation mark with left right arrow above",
"unicode": "1f51b"
},
":oncoming_automobile:": {
"category": "travel",
"name": "oncoming automobile",
"unicode": "1f698"
},
":oncoming_bus:": {
"category": "travel",
"name": "oncoming bus",
"unicode": "1f68d"
},
":oncoming_police_car:": {
"category": "travel",
"name": "oncoming police car",
"unicode": "1f694"
},
":oncoming_taxi:": {
"category": "travel",
"name": "oncoming taxi",
"unicode": "1f696"
},
":one:": {
"category": "symbols",
"name": "keycap digit one",
"unicode": "0031-20e3",
"unicode_alt": "0031-fe0f-20e3"
},
":open_file_folder:": {
"category": "objects",
"name": "open file folder",
"unicode": "1f4c2"
},
":open_hands:": {
"category": "people",
"name": "open hands sign",
"unicode": "1f450"
},
":open_hands_tone1:": {
"category": "people",
"name": "open hands sign tone 1",
"unicode": "1f450-1f3fb"
},
":open_hands_tone2:": {
"category": "people",
"name": "open hands sign tone 2",
"unicode": "1f450-1f3fc"
},
":open_hands_tone3:": {
"category": "people",
"name": "open hands sign tone 3",
"unicode": "1f450-1f3fd"
},
":open_hands_tone4:": {
"category": "people",
"name": "open hands sign tone 4",
"unicode": "1f450-1f3fe"
},
":open_hands_tone5:": {
"category": "people",
"name": "open hands sign tone 5",
"unicode": "1f450-1f3ff"
},
":open_mouth:": {
"category": "people",
"name": "face with open mouth",
"unicode": "1f62e"
},
":ophiuchus:": {
"category": "symbols",
"name": "ophiuchus",
"unicode": "26ce"
},
":orange_book:": {
"category": "objects",
"name": "orange book",
"unicode": "1f4d9"
},
":orthodox_cross:": {
"category": "symbols",
"name": "orthodox cross",
"unicode": "2626",
"unicode_alt": "2626-fe0f"
},
":outbox_tray:": {
"category": "objects",
"name": "outbox tray",
"unicode": "1f4e4"
},
":owl:": {
"category": "nature",
"name": "owl",
"unicode": "1f989"
},
":ox:": {
"category": "nature",
"name": "ox",
"unicode": "1f402"
},
":package:": {
"category": "objects",
"name": "package",
"unicode": "1f4e6"
},
":page_facing_up:": {
"category": "objects",
"name": "page facing up",
"unicode": "1f4c4"
},
":page_with_curl:": {
"category": "objects",
"name": "page with curl",
"unicode": "1f4c3"
},
":pager:": {
"category": "objects",
"name": "pager",
"unicode": "1f4df"
},
":paintbrush:": {
"category": "objects",
"name": "lower left paintbrush",
"unicode": "1f58c",
"unicode_alt": "1f58c-fe0f"
},
":palm_tree:": {
"category": "nature",
"name": "palm tree",
"unicode": "1f334"
},
":pancakes:": {
"category": "food",
"name": "pancakes",
"unicode": "1f95e"
},
":panda_face:": {
"category": "nature",
"name": "panda face",
"unicode": "1f43c"
},
":paperclip:": {
"category": "objects",
"name": "paperclip",
"unicode": "1f4ce"
},
":paperclips:": {
"category": "objects",
"name": "linked paperclips",
"unicode": "1f587",
"unicode_alt": "1f587-fe0f"
},
":park:": {
"category": "travel",
"name": "national park",
"unicode": "1f3de",
"unicode_alt": "1f3de-fe0f"
},
":parking:": {
"category": "symbols",
"name": "negative squared latin capital letter p",
"unicode": "1f17f",
"unicode_alt": "1f17f-fe0f"
},
":part_alternation_mark:": {
"category": "symbols",
"name": "part alternation mark",
"unicode": "303d",
"unicode_alt": "303d-fe0f"
},
":partly_sunny:": {
"category": "nature",
"name": "sun behind cloud",
"unicode": "26c5",
"unicode_alt": "26c5-fe0f"
},
":passport_control:": {
"category": "symbols",
"name": "passport control",
"unicode": "1f6c2"
},
":pause_button:": {
"category": "symbols",
"name": "double vertical bar",
"unicode": "23f8",
"unicode_alt": "23f8-fe0f"
},
":peace:": {
"category": "symbols",
"name": "peace symbol",
"unicode": "262e",
"unicode_alt": "262e-fe0f"
},
":peach:": {
"category": "food",
"name": "peach",
"unicode": "1f351"
},
":peanuts:": {
"category": "food",
"name": "peanuts",
"unicode": "1f95c"
},
":pear:": {
"category": "food",
"name": "pear",
"unicode": "1f350"
},
":pen_ballpoint:": {
"category": "objects",
"name": "lower left ballpoint pen",
"unicode": "1f58a",
"unicode_alt": "1f58a-fe0f"
},
":pen_fountain:": {
"category": "objects",
"name": "lower left fountain pen",
"unicode": "1f58b",
"unicode_alt": "1f58b-fe0f"
},
":pencil2:": {
"category": "objects",
"name": "pencil",
"unicode": "270f",
"unicode_alt": "270f-fe0f"
},
":pencil:": {
"category": "objects",
"name": "memo",
"unicode": "1f4dd"
},
":penguin:": {
"category": "nature",
"name": "penguin",
"unicode": "1f427"
},
":pensive:": {
"category": "people",
"name": "pensive face",
"unicode": "1f614"
},
":performing_arts:": {
"category": "activity",
"name": "performing arts",
"unicode": "1f3ad"
},
":persevere:": {
"category": "people",
"name": "persevering face",
"unicode": "1f623"
},
":person_frowning:": {
"category": "people",
"name": "person frowning",
"unicode": "1f64d"
},
":person_frowning_tone1:": {
"category": "people",
"name": "person frowning tone 1",
"unicode": "1f64d-1f3fb"
},
":person_frowning_tone2:": {
"category": "people",
"name": "person frowning tone 2",
"unicode": "1f64d-1f3fc"
},
":person_frowning_tone3:": {
"category": "people",
"name": "person frowning tone 3",
"unicode": "1f64d-1f3fd"
},
":person_frowning_tone4:": {
"category": "people",
"name": "person frowning tone 4",
"unicode": "1f64d-1f3fe"
},
":person_frowning_tone5:": {
"category": "people",
"name": "person frowning tone 5",
"unicode": "1f64d-1f3ff"
},
":person_with_blond_hair:": {
"category": "people",
"name": "person with blond hair",
"unicode": "1f471"
},
":person_with_blond_hair_tone1:": {
"category": "people",
"name": "person with blond hair tone 1",
"unicode": "1f471-1f3fb"
},
":person_with_blond_hair_tone2:": {
"category": "people",
"name": "person with blond hair tone 2",
"unicode": "1f471-1f3fc"
},
":person_with_blond_hair_tone3:": {
"category": "people",
"name": "person with blond hair tone 3",
"unicode": "1f471-1f3fd"
},
":person_with_blond_hair_tone4:": {
"category": "people",
"name": "person with blond hair tone 4",
"unicode": "1f471-1f3fe"
},
":person_with_blond_hair_tone5:": {
"category": "people",
"name": "person with blond hair tone 5",
"unicode": "1f471-1f3ff"
},
":person_with_pouting_face:": {
"category": "people",
"name": "person with pouting face",
"unicode": "1f64e"
},
":person_with_pouting_face_tone1:": {
"category": "people",
        "name": "person with pouting face tone 1",
"unicode": "1f64e-1f3fb"
},
":person_with_pouting_face_tone2:": {
"category": "people",
        "name": "person with pouting face tone 2",
"unicode": "1f64e-1f3fc"
},
":person_with_pouting_face_tone3:": {
"category": "people",
        "name": "person with pouting face tone 3",
"unicode": "1f64e-1f3fd"
},
":person_with_pouting_face_tone4:": {
"category": "people",
        "name": "person with pouting face tone 4",
"unicode": "1f64e-1f3fe"
},
":person_with_pouting_face_tone5:": {
"category": "people",
        "name": "person with pouting face tone 5",
"unicode": "1f64e-1f3ff"
},
":pick:": {
"category": "objects",
"name": "pick",
"unicode": "26cf",
"unicode_alt": "26cf-fe0f"
},
":pig2:": {
"category": "nature",
"name": "pig",
"unicode": "1f416"
},
":pig:": {
"category": "nature",
"name": "pig face",
"unicode": "1f437"
},
":pig_nose:": {
"category": "nature",
"name": "pig nose",
"unicode": "1f43d"
},
":pill:": {
"category": "objects",
"name": "pill",
"unicode": "1f48a"
},
":pineapple:": {
"category": "food",
"name": "pineapple",
"unicode": "1f34d"
},
":ping_pong:": {
"category": "activity",
"name": "table tennis paddle and ball",
"unicode": "1f3d3"
},
":pisces:": {
"category": "symbols",
"name": "pisces",
"unicode": "2653",
"unicode_alt": "2653-fe0f"
},
":pizza:": {
"category": "food",
"name": "slice of pizza",
"unicode": "1f355"
},
":place_of_worship:": {
"category": "symbols",
"name": "place of worship",
"unicode": "1f6d0"
},
":play_pause:": {
"category": "symbols",
"name": "black right-pointing double triangle with double vertical bar",
"unicode": "23ef",
"unicode_alt": "23ef-fe0f"
},
":point_down:": {
"category": "people",
"name": "white down pointing backhand index",
"unicode": "1f447"
},
":point_down_tone1:": {
"category": "people",
"name": "white down pointing backhand index tone 1",
"unicode": "1f447-1f3fb"
},
":point_down_tone2:": {
"category": "people",
"name": "white down pointing backhand index tone 2",
"unicode": "1f447-1f3fc"
},
":point_down_tone3:": {
"category": "people",
"name": "white down pointing backhand index tone 3",
"unicode": "1f447-1f3fd"
},
":point_down_tone4:": {
"category": "people",
"name": "white down pointing backhand index tone 4",
"unicode": "1f447-1f3fe"
},
":point_down_tone5:": {
"category": "people",
"name": "white down pointing backhand index tone 5",
"unicode": "1f447-1f3ff"
},
":point_left:": {
"category": "people",
"name": "white left pointing backhand index",
"unicode": "1f448"
},
":point_left_tone1:": {
"category": "people",
"name": "white left pointing backhand index tone 1",
"unicode": "1f448-1f3fb"
},
":point_left_tone2:": {
"category": "people",
"name": "white left pointing backhand index tone 2",
"unicode": "1f448-1f3fc"
},
":point_left_tone3:": {
"category": "people",
"name": "white left pointing backhand index tone 3",
"unicode": "1f448-1f3fd"
},
":point_left_tone4:": {
"category": "people",
"name": "white left pointing backhand index tone 4",
"unicode": "1f448-1f3fe"
},
":point_left_tone5:": {
"category": "people",
"name": "white left pointing backhand index tone 5",
"unicode": "1f448-1f3ff"
},
":point_right:": {
"category": "people",
"name": "white right pointing backhand index",
"unicode": "1f449"
},
":point_right_tone1:": {
"category": "people",
"name": "white right pointing backhand index tone 1",
"unicode": "1f449-1f3fb"
},
":point_right_tone2:": {
"category": "people",
"name": "white right pointing backhand index tone 2",
"unicode": "1f449-1f3fc"
},
":point_right_tone3:": {
"category": "people",
"name": "white right pointing backhand index tone 3",
"unicode": "1f449-1f3fd"
},
":point_right_tone4:": {
"category": "people",
"name": "white right pointing backhand index tone 4",
"unicode": "1f449-1f3fe"
},
":point_right_tone5:": {
"category": "people",
"name": "white right pointing backhand index tone 5",
"unicode": "1f449-1f3ff"
},
":point_up:": {
"category": "people",
"name": "white up pointing index",
"unicode": "261d",
"unicode_alt": "261d-fe0f"
},
":point_up_2:": {
"category": "people",
"name": "white up pointing backhand index",
"unicode": "1f446"
},
":point_up_2_tone1:": {
"category": "people",
"name": "white up pointing backhand index tone 1",
"unicode": "1f446-1f3fb"
},
":point_up_2_tone2:": {
"category": "people",
"name": "white up pointing backhand index tone 2",
"unicode": "1f446-1f3fc"
},
":point_up_2_tone3:": {
"category": "people",
"name": "white up pointing backhand index tone 3",
"unicode": "1f446-1f3fd"
},
":point_up_2_tone4:": {
"category": "people",
"name": "white up pointing backhand index tone 4",
"unicode": "1f446-1f3fe"
},
":point_up_2_tone5:": {
"category": "people",
"name": "white up pointing backhand index tone 5",
"unicode": "1f446-1f3ff"
},
":point_up_tone1:": {
"category": "people",
"name": "white up pointing index tone 1",
"unicode": "261d-1f3fb"
},
":point_up_tone2:": {
"category": "people",
"name": "white up pointing index tone 2",
"unicode": "261d-1f3fc"
},
":point_up_tone3:": {
"category": "people",
"name": "white up pointing index tone 3",
"unicode": "261d-1f3fd"
},
":point_up_tone4:": {
"category": "people",
"name": "white up pointing index tone 4",
"unicode": "261d-1f3fe"
},
":point_up_tone5:": {
"category": "people",
"name": "white up pointing index tone 5",
"unicode": "261d-1f3ff"
},
":police_car:": {
"category": "travel",
"name": "police car",
"unicode": "1f693"
},
":poodle:": {
"category": "nature",
"name": "poodle",
"unicode": "1f429"
},
":poop:": {
"category": "people",
"name": "pile of poo",
"unicode": "1f4a9"
},
":popcorn:": {
"category": "food",
"name": "popcorn",
"unicode": "1f37f"
},
":post_office:": {
"category": "travel",
"name": "japanese post office",
"unicode": "1f3e3"
},
":postal_horn:": {
"category": "objects",
"name": "postal horn",
"unicode": "1f4ef"
},
":postbox:": {
"category": "objects",
"name": "postbox",
"unicode": "1f4ee"
},
":potable_water:": {
"category": "symbols",
"name": "potable water symbol",
"unicode": "1f6b0"
},
":potato:": {
"category": "food",
"name": "potato",
"unicode": "1f954"
},
":pouch:": {
"category": "people",
"name": "pouch",
"unicode": "1f45d"
},
":poultry_leg:": {
"category": "food",
"name": "poultry leg",
"unicode": "1f357"
},
":pound:": {
"category": "objects",
"name": "banknote with pound sign",
"unicode": "1f4b7"
},
":pouting_cat:": {
"category": "people",
"name": "pouting cat face",
"unicode": "1f63e"
},
":pray:": {
"category": "people",
"name": "person with folded hands",
"unicode": "1f64f"
},
":pray_tone1:": {
"category": "people",
"name": "person with folded hands tone 1",
"unicode": "1f64f-1f3fb"
},
":pray_tone2:": {
"category": "people",
"name": "person with folded hands tone 2",
"unicode": "1f64f-1f3fc"
},
":pray_tone3:": {
"category": "people",
"name": "person with folded hands tone 3",
"unicode": "1f64f-1f3fd"
},
":pray_tone4:": {
"category": "people",
"name": "person with folded hands tone 4",
"unicode": "1f64f-1f3fe"
},
":pray_tone5:": {
"category": "people",
"name": "person with folded hands tone 5",
"unicode": "1f64f-1f3ff"
},
":prayer_beads:": {
"category": "objects",
"name": "prayer beads",
"unicode": "1f4ff"
},
":pregnant_woman:": {
"category": "people",
"name": "pregnant woman",
"unicode": "1f930"
},
":pregnant_woman_tone1:": {
"category": "people",
"name": "pregnant woman tone 1",
"unicode": "1f930-1f3fb"
},
":pregnant_woman_tone2:": {
"category": "people",
"name": "pregnant woman tone 2",
"unicode": "1f930-1f3fc"
},
":pregnant_woman_tone3:": {
"category": "people",
"name": "pregnant woman tone 3",
"unicode": "1f930-1f3fd"
},
":pregnant_woman_tone4:": {
"category": "people",
"name": "pregnant woman tone 4",
"unicode": "1f930-1f3fe"
},
":pregnant_woman_tone5:": {
"category": "people",
"name": "pregnant woman tone 5",
"unicode": "1f930-1f3ff"
},
":prince:": {
"category": "people",
"name": "prince",
"unicode": "1f934"
},
":prince_tone1:": {
"category": "people",
"name": "prince tone 1",
"unicode": "1f934-1f3fb"
},
":prince_tone2:": {
"category": "people",
"name": "prince tone 2",
"unicode": "1f934-1f3fc"
},
":prince_tone3:": {
"category": "people",
"name": "prince tone 3",
"unicode": "1f934-1f3fd"
},
":prince_tone4:": {
"category": "people",
"name": "prince tone 4",
"unicode": "1f934-1f3fe"
},
":prince_tone5:": {
"category": "people",
"name": "prince tone 5",
"unicode": "1f934-1f3ff"
},
":princess:": {
"category": "people",
"name": "princess",
"unicode": "1f478"
},
":princess_tone1:": {
"category": "people",
"name": "princess tone 1",
"unicode": "1f478-1f3fb"
},
":princess_tone2:": {
"category": "people",
"name": "princess tone 2",
"unicode": "1f478-1f3fc"
},
":princess_tone3:": {
"category": "people",
"name": "princess tone 3",
"unicode": "1f478-1f3fd"
},
":princess_tone4:": {
"category": "people",
"name": "princess tone 4",
"unicode": "1f478-1f3fe"
},
":princess_tone5:": {
"category": "people",
"name": "princess tone 5",
"unicode": "1f478-1f3ff"
},
":printer:": {
"category": "objects",
"name": "printer",
"unicode": "1f5a8",
"unicode_alt": "1f5a8-fe0f"
},
":projector:": {
"category": "objects",
"name": "film projector",
"unicode": "1f4fd",
"unicode_alt": "1f4fd-fe0f"
},
":punch:": {
"category": "people",
"name": "fisted hand sign",
"unicode": "1f44a"
},
":punch_tone1:": {
"category": "people",
"name": "fisted hand sign tone 1",
"unicode": "1f44a-1f3fb"
},
":punch_tone2:": {
"category": "people",
"name": "fisted hand sign tone 2",
"unicode": "1f44a-1f3fc"
},
":punch_tone3:": {
"category": "people",
"name": "fisted hand sign tone 3",
"unicode": "1f44a-1f3fd"
},
":punch_tone4:": {
"category": "people",
"name": "fisted hand sign tone 4",
"unicode": "1f44a-1f3fe"
},
":punch_tone5:": {
"category": "people",
"name": "fisted hand sign tone 5",
"unicode": "1f44a-1f3ff"
},
":purple_heart:": {
"category": "symbols",
"name": "purple heart",
"unicode": "1f49c"
},
":purse:": {
"category": "people",
"name": "purse",
"unicode": "1f45b"
},
":pushpin:": {
"category": "objects",
"name": "pushpin",
"unicode": "1f4cc"
},
":put_litter_in_its_place:": {
"category": "symbols",
"name": "put litter in its place symbol",
"unicode": "1f6ae"
},
":question:": {
"category": "symbols",
"name": "black question mark ornament",
"unicode": "2753"
},
":rabbit2:": {
"category": "nature",
"name": "rabbit",
"unicode": "1f407"
},
":rabbit:": {
"category": "nature",
"name": "rabbit face",
"unicode": "1f430"
},
":race_car:": {
"category": "travel",
"name": "racing car",
"unicode": "1f3ce",
"unicode_alt": "1f3ce-fe0f"
},
":racehorse:": {
"category": "nature",
"name": "horse",
"unicode": "1f40e"
},
":radio:": {
"category": "objects",
"name": "radio",
"unicode": "1f4fb"
},
":radio_button:": {
"category": "symbols",
"name": "radio button",
"unicode": "1f518"
},
":radioactive:": {
"category": "symbols",
"name": "radioactive sign",
"unicode": "2622",
"unicode_alt": "2622-fe0f"
},
":rage:": {
"category": "people",
"name": "pouting face",
"unicode": "1f621"
},
":railway_car:": {
"category": "travel",
"name": "railway car",
"unicode": "1f683"
},
":railway_track:": {
"category": "travel",
"name": "railway track",
"unicode": "1f6e4",
"unicode_alt": "1f6e4-fe0f"
},
":rainbow:": {
"category": "travel",
"name": "rainbow",
"unicode": "1f308"
},
":rainbow_flag:": {
"category": "objects",
"name": "rainbow_flag",
"unicode": "1f3f3-1f308"
},
":raised_back_of_hand:": {
"category": "people",
"name": "raised back of hand",
"unicode": "1f91a"
},
":raised_back_of_hand_tone1:": {
"category": "people",
"name": "raised back of hand tone 1",
"unicode": "1f91a-1f3fb"
},
":raised_back_of_hand_tone2:": {
"category": "people",
"name": "raised back of hand tone 2",
"unicode": "1f91a-1f3fc"
},
":raised_back_of_hand_tone3:": {
"category": "people",
"name": "raised back of hand tone 3",
"unicode": "1f91a-1f3fd"
},
":raised_back_of_hand_tone4:": {
"category": "people",
"name": "raised back of hand tone 4",
"unicode": "1f91a-1f3fe"
},
":raised_back_of_hand_tone5:": {
"category": "people",
"name": "raised back of hand tone 5",
"unicode": "1f91a-1f3ff"
},
":raised_hand:": {
"category": "people",
"name": "raised hand",
"unicode": "270b"
},
":raised_hand_tone1:": {
"category": "people",
"name": "raised hand tone 1",
"unicode": "270b-1f3fb"
},
":raised_hand_tone2:": {
"category": "people",
"name": "raised hand tone 2",
"unicode": "270b-1f3fc"
},
":raised_hand_tone3:": {
"category": "people",
"name": "raised hand tone 3",
"unicode": "270b-1f3fd"
},
":raised_hand_tone4:": {
"category": "people",
"name": "raised hand tone 4",
"unicode": "270b-1f3fe"
},
":raised_hand_tone5:": {
"category": "people",
"name": "raised hand tone 5",
"unicode": "270b-1f3ff"
},
":raised_hands:": {
"category": "people",
"name": "person raising both hands in celebration",
"unicode": "1f64c"
},
":raised_hands_tone1:": {
"category": "people",
"name": "person raising both hands in celebration tone 1",
"unicode": "1f64c-1f3fb"
},
":raised_hands_tone2:": {
"category": "people",
"name": "person raising both hands in celebration tone 2",
"unicode": "1f64c-1f3fc"
},
":raised_hands_tone3:": {
"category": "people",
"name": "person raising both hands in celebration tone 3",
"unicode": "1f64c-1f3fd"
},
":raised_hands_tone4:": {
"category": "people",
"name": "person raising both hands in celebration tone 4",
"unicode": "1f64c-1f3fe"
},
":raised_hands_tone5:": {
"category": "people",
"name": "person raising both hands in celebration tone 5",
"unicode": "1f64c-1f3ff"
},
":raising_hand:": {
"category": "people",
"name": "happy person raising one hand",
"unicode": "1f64b"
},
":raising_hand_tone1:": {
"category": "people",
"name": "happy person raising one hand tone1",
"unicode": "1f64b-1f3fb"
},
":raising_hand_tone2:": {
"category": "people",
"name": "happy person raising one hand tone2",
"unicode": "1f64b-1f3fc"
},
":raising_hand_tone3:": {
"category": "people",
"name": "happy person raising one hand tone3",
"unicode": "1f64b-1f3fd"
},
":raising_hand_tone4:": {
"category": "people",
"name": "happy person raising one hand tone4",
"unicode": "1f64b-1f3fe"
},
":raising_hand_tone5:": {
"category": "people",
"name": "happy person raising one hand tone5",
"unicode": "1f64b-1f3ff"
},
":ram:": {
"category": "nature",
"name": "ram",
"unicode": "1f40f"
},
":ramen:": {
"category": "food",
"name": "steaming bowl",
"unicode": "1f35c"
},
":rat:": {
"category": "nature",
"name": "rat",
"unicode": "1f400"
},
":record_button:": {
"category": "symbols",
"name": "black circle for record",
"unicode": "23fa",
"unicode_alt": "23fa-fe0f"
},
":recycle:": {
"category": "symbols",
"name": "black universal recycling symbol",
"unicode": "267b",
"unicode_alt": "267b-fe0f"
},
":red_car:": {
"category": "travel",
"name": "automobile",
"unicode": "1f697"
},
":red_circle:": {
"category": "symbols",
"name": "red circle",
"unicode": "1f534"
},
":regional_indicator_a:": {
"category": "regional",
"name": "regional indicator symbol letter a",
"unicode": "1f1e6"
},
":regional_indicator_b:": {
"category": "regional",
"name": "regional indicator symbol letter b",
"unicode": "1f1e7"
},
":regional_indicator_c:": {
"category": "regional",
"name": "regional indicator symbol letter c",
"unicode": "1f1e8"
},
":regional_indicator_d:": {
"category": "regional",
"name": "regional indicator symbol letter d",
"unicode": "1f1e9"
},
":regional_indicator_e:": {
"category": "regional",
"name": "regional indicator symbol letter e",
"unicode": "1f1ea"
},
":regional_indicator_f:": {
"category": "regional",
"name": "regional indicator symbol letter f",
"unicode": "1f1eb"
},
":regional_indicator_g:": {
"category": "regional",
"name": "regional indicator symbol letter g",
"unicode": "1f1ec"
},
":regional_indicator_h:": {
"category": "regional",
"name": "regional indicator symbol letter h",
"unicode": "1f1ed"
},
":regional_indicator_i:": {
"category": "regional",
"name": "regional indicator symbol letter i",
"unicode": "1f1ee"
},
":regional_indicator_j:": {
"category": "regional",
"name": "regional indicator symbol letter j",
"unicode": "1f1ef"
},
":regional_indicator_k:": {
"category": "regional",
"name": "regional indicator symbol letter k",
"unicode": "1f1f0"
},
":regional_indicator_l:": {
"category": "regional",
"name": "regional indicator symbol letter l",
"unicode": "1f1f1"
},
":regional_indicator_m:": {
"category": "regional",
"name": "regional indicator symbol letter m",
"unicode": "1f1f2"
},
":regional_indicator_n:": {
"category": "regional",
"name": "regional indicator symbol letter n",
"unicode": "1f1f3"
},
":regional_indicator_o:": {
"category": "regional",
"name": "regional indicator symbol letter o",
"unicode": "1f1f4"
},
":regional_indicator_p:": {
"category": "regional",
"name": "regional indicator symbol letter p",
"unicode": "1f1f5"
},
":regional_indicator_q:": {
"category": "regional",
"name": "regional indicator symbol letter q",
"unicode": "1f1f6"
},
":regional_indicator_r:": {
"category": "regional",
"name": "regional indicator symbol letter r",
"unicode": "1f1f7"
},
":regional_indicator_s:": {
"category": "regional",
"name": "regional indicator symbol letter s",
"unicode": "1f1f8"
},
":regional_indicator_t:": {
"category": "regional",
"name": "regional indicator symbol letter t",
"unicode": "1f1f9"
},
":regional_indicator_u:": {
"category": "regional",
"name": "regional indicator symbol letter u",
"unicode": "1f1fa"
},
":regional_indicator_v:": {
"category": "regional",
"name": "regional indicator symbol letter v",
"unicode": "1f1fb"
},
":regional_indicator_w:": {
"category": "regional",
"name": "regional indicator symbol letter w",
"unicode": "1f1fc"
},
":regional_indicator_x:": {
"category": "regional",
"name": "regional indicator symbol letter x",
"unicode": "1f1fd"
},
":regional_indicator_y:": {
"category": "regional",
"name": "regional indicator symbol letter y",
"unicode": "1f1fe"
},
":regional_indicator_z:": {
"category": "regional",
"name": "regional indicator symbol letter z",
"unicode": "1f1ff"
},
":registered:": {
"category": "symbols",
"name": "registered sign",
"unicode": "00ae",
"unicode_alt": "00ae-fe0f"
},
":relaxed:": {
"category": "people",
"name": "white smiling face",
"unicode": "263a",
"unicode_alt": "263a-fe0f"
},
":relieved:": {
"category": "people",
"name": "relieved face",
"unicode": "1f60c"
},
":reminder_ribbon:": {
"category": "activity",
"name": "reminder ribbon",
"unicode": "1f397",
"unicode_alt": "1f397-fe0f"
},
":repeat:": {
"category": "symbols",
"name": "clockwise rightwards and leftwards open circle arrows",
"unicode": "1f501"
},
":repeat_one:": {
"category": "symbols",
"name": "clockwise rightwards and leftwards open circle arrows with circled one overlay",
"unicode": "1f502"
},
":restroom:": {
"category": "symbols",
"name": "restroom",
"unicode": "1f6bb"
},
":revolving_hearts:": {
"category": "symbols",
"name": "revolving hearts",
"unicode": "1f49e"
},
":rewind:": {
"category": "symbols",
"name": "black left-pointing double triangle",
"unicode": "23ea"
},
":rhino:": {
"category": "nature",
"name": "rhinoceros",
"unicode": "1f98f"
},
":ribbon:": {
"category": "objects",
"name": "ribbon",
"unicode": "1f380"
},
":rice:": {
"category": "food",
"name": "cooked rice",
"unicode": "1f35a"
},
":rice_ball:": {
"category": "food",
"name": "rice ball",
"unicode": "1f359"
},
":rice_cracker:": {
"category": "food",
"name": "rice cracker",
"unicode": "1f358"
},
":rice_scene:": {
"category": "travel",
"name": "moon viewing ceremony",
"unicode": "1f391"
},
":right_facing_fist:": {
"category": "people",
"name": "right-facing fist",
"unicode": "1f91c"
},
":right_facing_fist_tone1:": {
"category": "people",
"name": "right facing fist tone 1",
"unicode": "1f91c-1f3fb"
},
":right_facing_fist_tone2:": {
"category": "people",
"name": "right facing fist tone 2",
"unicode": "1f91c-1f3fc"
},
":right_facing_fist_tone3:": {
"category": "people",
"name": "right facing fist tone 3",
"unicode": "1f91c-1f3fd"
},
":right_facing_fist_tone4:": {
"category": "people",
"name": "right facing fist tone 4",
"unicode": "1f91c-1f3fe"
},
":right_facing_fist_tone5:": {
"category": "people",
"name": "right facing fist tone 5",
"unicode": "1f91c-1f3ff"
},
":ring:": {
"category": "people",
"name": "ring",
"unicode": "1f48d"
},
":robot:": {
"category": "people",
"name": "robot face",
"unicode": "1f916"
},
":rocket:": {
"category": "travel",
"name": "rocket",
"unicode": "1f680"
},
":rofl:": {
"category": "people",
"name": "rolling on the floor laughing",
"unicode": "1f923"
},
":roller_coaster:": {
"category": "travel",
"name": "roller coaster",
"unicode": "1f3a2"
},
":rolling_eyes:": {
"category": "people",
"name": "face with rolling eyes",
"unicode": "1f644"
},
":rooster:": {
"category": "nature",
"name": "rooster",
"unicode": "1f413"
},
":rose:": {
"category": "nature",
"name": "rose",
"unicode": "1f339"
},
":rosette:": {
"category": "nature",
"name": "rosette",
"unicode": "1f3f5",
"unicode_alt": "1f3f5-fe0f"
},
":rotating_light:": {
"category": "travel",
"name": "police cars revolving light",
"unicode": "1f6a8"
},
":round_pushpin:": {
"category": "objects",
"name": "round pushpin",
"unicode": "1f4cd"
},
":rowboat:": {
"category": "activity",
"name": "rowboat",
"unicode": "1f6a3"
},
":rowboat_tone1:": {
"category": "activity",
"name": "rowboat tone 1",
"unicode": "1f6a3-1f3fb"
},
":rowboat_tone2:": {
"category": "activity",
"name": "rowboat tone 2",
"unicode": "1f6a3-1f3fc"
},
":rowboat_tone3:": {
"category": "activity",
"name": "rowboat tone 3",
"unicode": "1f6a3-1f3fd"
},
":rowboat_tone4:": {
"category": "activity",
"name": "rowboat tone 4",
"unicode": "1f6a3-1f3fe"
},
":rowboat_tone5:": {
"category": "activity",
"name": "rowboat tone 5",
"unicode": "1f6a3-1f3ff"
},
":rugby_football:": {
"category": "activity",
"name": "rugby football",
"unicode": "1f3c9"
},
":runner:": {
"category": "people",
"name": "runner",
"unicode": "1f3c3"
},
":runner_tone1:": {
"category": "people",
"name": "runner tone 1",
"unicode": "1f3c3-1f3fb"
},
":runner_tone2:": {
"category": "people",
"name": "runner tone 2",
"unicode": "1f3c3-1f3fc"
},
":runner_tone3:": {
"category": "people",
"name": "runner tone 3",
"unicode": "1f3c3-1f3fd"
},
":runner_tone4:": {
"category": "people",
"name": "runner tone 4",
"unicode": "1f3c3-1f3fe"
},
":runner_tone5:": {
"category": "people",
"name": "runner tone 5",
"unicode": "1f3c3-1f3ff"
},
":running_shirt_with_sash:": {
"category": "activity",
"name": "running shirt with sash",
"unicode": "1f3bd"
},
":sa:": {
"category": "symbols",
"name": "squared katakana sa",
"unicode": "1f202",
"unicode_alt": "1f202-fe0f"
},
":sagittarius:": {
"category": "symbols",
"name": "sagittarius",
"unicode": "2650",
"unicode_alt": "2650-fe0f"
},
":sailboat:": {
"category": "travel",
"name": "sailboat",
"unicode": "26f5",
"unicode_alt": "26f5-fe0f"
},
":sake:": {
"category": "food",
"name": "sake bottle and cup",
"unicode": "1f376"
},
":salad:": {
"category": "food",
"name": "green salad",
"unicode": "1f957"
},
":sandal:": {
"category": "people",
"name": "womans sandal",
"unicode": "1f461"
},
":santa:": {
"category": "people",
"name": "father christmas",
"unicode": "1f385"
},
":santa_tone1:": {
"category": "people",
"name": "father christmas tone 1",
"unicode": "1f385-1f3fb"
},
":santa_tone2:": {
"category": "people",
"name": "father christmas tone 2",
"unicode": "1f385-1f3fc"
},
":santa_tone3:": {
"category": "people",
"name": "father christmas tone 3",
"unicode": "1f385-1f3fd"
},
":santa_tone4:": {
"category": "people",
"name": "father christmas tone 4",
"unicode": "1f385-1f3fe"
},
":santa_tone5:": {
"category": "people",
"name": "father christmas tone 5",
"unicode": "1f385-1f3ff"
},
":satellite:": {
"category": "objects",
"name": "satellite antenna",
"unicode": "1f4e1"
},
":satellite_orbital:": {
"category": "travel",
"name": "satellite",
"unicode": "1f6f0",
"unicode_alt": "1f6f0-fe0f"
},
":saxophone:": {
"category": "activity",
"name": "saxophone",
"unicode": "1f3b7"
},
":scales:": {
"category": "objects",
"name": "scales",
"unicode": "2696",
"unicode_alt": "2696-fe0f"
},
":school:": {
"category": "travel",
"name": "school",
"unicode": "1f3eb"
},
":school_satchel:": {
"category": "people",
"name": "school satchel",
"unicode": "1f392"
},
":scissors:": {
"category": "objects",
"name": "black scissors",
"unicode": "2702",
"unicode_alt": "2702-fe0f"
},
":scooter:": {
"category": "travel",
"name": "scooter",
"unicode": "1f6f4"
},
":scorpion:": {
"category": "nature",
"name": "scorpion",
"unicode": "1f982"
},
":scorpius:": {
"category": "symbols",
"name": "scorpius",
"unicode": "264f",
"unicode_alt": "264f-fe0f"
},
":scream:": {
"category": "people",
"name": "face screaming in fear",
"unicode": "1f631"
},
":scream_cat:": {
"category": "people",
"name": "weary cat face",
"unicode": "1f640"
},
":scroll:": {
"category": "objects",
"name": "scroll",
"unicode": "1f4dc"
},
":seat:": {
"category": "travel",
"name": "seat",
"unicode": "1f4ba"
},
":second_place:": {
"category": "activity",
"name": "second place medal",
"unicode": "1f948"
},
":secret:": {
"category": "symbols",
"name": "circled ideograph secret",
"unicode": "3299",
"unicode_alt": "3299-fe0f"
},
":see_no_evil:": {
"category": "nature",
"name": "see-no-evil monkey",
"unicode": "1f648"
},
":seedling:": {
"category": "nature",
"name": "seedling",
"unicode": "1f331"
},
":selfie:": {
"category": "people",
"name": "selfie",
"unicode": "1f933"
},
":selfie_tone1:": {
"category": "people",
"name": "selfie tone 1",
"unicode": "1f933-1f3fb"
},
":selfie_tone2:": {
"category": "people",
"name": "selfie tone 2",
"unicode": "1f933-1f3fc"
},
":selfie_tone3:": {
"category": "people",
"name": "selfie tone 3",
"unicode": "1f933-1f3fd"
},
":selfie_tone4:": {
"category": "people",
"name": "selfie tone 4",
"unicode": "1f933-1f3fe"
},
":selfie_tone5:": {
"category": "people",
"name": "selfie tone 5",
"unicode": "1f933-1f3ff"
},
":seven:": {
"category": "symbols",
"name": "keycap digit seven",
"unicode": "0037-20e3",
"unicode_alt": "0037-fe0f-20e3"
},
":shallow_pan_of_food:": {
"category": "food",
"name": "shallow pan of food",
"unicode": "1f958"
},
":shamrock:": {
"category": "nature",
"name": "shamrock",
"unicode": "2618",
"unicode_alt": "2618-fe0f"
},
":shark:": {
"category": "nature",
"name": "shark",
"unicode": "1f988"
},
":shaved_ice:": {
"category": "food",
"name": "shaved ice",
"unicode": "1f367"
},
":sheep:": {
"category": "nature",
"name": "sheep",
"unicode": "1f411"
},
":shell:": {
"category": "nature",
"name": "spiral shell",
"unicode": "1f41a"
},
":shield:": {
"category": "objects",
"name": "shield",
"unicode": "1f6e1",
"unicode_alt": "1f6e1-fe0f"
},
":shinto_shrine:": {
"category": "travel",
"name": "shinto shrine",
"unicode": "26e9",
"unicode_alt": "26e9-fe0f"
},
":ship:": {
"category": "travel",
"name": "ship",
"unicode": "1f6a2"
},
":shirt:": {
"category": "people",
"name": "t-shirt",
"unicode": "1f455"
},
":shopping_bags:": {
"category": "objects",
"name": "shopping bags",
"unicode": "1f6cd",
"unicode_alt": "1f6cd-fe0f"
},
":shopping_cart:": {
"category": "objects",
"name": "shopping trolley",
"unicode": "1f6d2"
},
":shower:": {
"category": "objects",
"name": "shower",
"unicode": "1f6bf"
},
":shrimp:": {
"category": "nature",
"name": "shrimp",
"unicode": "1f990"
},
":shrug:": {
"category": "people",
"name": "shrug",
"unicode": "1f937"
},
":shrug_tone1:": {
"category": "people",
"name": "shrug tone 1",
"unicode": "1f937-1f3fb"
},
":shrug_tone2:": {
"category": "people",
"name": "shrug tone 2",
"unicode": "1f937-1f3fc"
},
":shrug_tone3:": {
"category": "people",
"name": "shrug tone 3",
"unicode": "1f937-1f3fd"
},
":shrug_tone4:": {
"category": "people",
"name": "shrug tone 4",
"unicode": "1f937-1f3fe"
},
":shrug_tone5:": {
"category": "people",
"name": "shrug tone 5",
"unicode": "1f937-1f3ff"
},
":signal_strength:": {
"category": "symbols",
"name": "antenna with bars",
"unicode": "1f4f6"
},
":six:": {
"category": "symbols",
"name": "keycap digit six",
"unicode": "0036-20e3",
"unicode_alt": "0036-fe0f-20e3"
},
":six_pointed_star:": {
"category": "symbols",
"name": "six pointed star with middle dot",
"unicode": "1f52f"
},
":ski:": {
"category": "activity",
"name": "ski and ski boot",
"unicode": "1f3bf"
},
":skier:": {
"category": "activity",
"name": "skier",
"unicode": "26f7",
"unicode_alt": "26f7-fe0f"
},
":skull:": {
"category": "people",
"name": "skull",
"unicode": "1f480"
},
":skull_crossbones:": {
"category": "objects",
"name": "skull and crossbones",
"unicode": "2620",
"unicode_alt": "2620-fe0f"
},
":sleeping:": {
"category": "people",
"name": "sleeping face",
"unicode": "1f634"
},
":sleeping_accommodation:": {
"category": "objects",
"name": "sleeping accommodation",
"unicode": "1f6cc"
},
":sleepy:": {
"category": "people",
"name": "sleepy face",
"unicode": "1f62a"
},
":slight_frown:": {
"category": "people",
"name": "slightly frowning face",
"unicode": "1f641"
},
":slight_smile:": {
"category": "people",
"name": "slightly smiling face",
"unicode": "1f642"
},
":slot_machine:": {
"category": "activity",
"name": "slot machine",
"unicode": "1f3b0"
},
":small_blue_diamond:": {
"category": "symbols",
"name": "small blue diamond",
"unicode": "1f539"
},
":small_orange_diamond:": {
"category": "symbols",
"name": "small orange diamond",
"unicode": "1f538"
},
":small_red_triangle:": {
"category": "symbols",
"name": "up-pointing red triangle",
"unicode": "1f53a"
},
":small_red_triangle_down:": {
"category": "symbols",
"name": "down-pointing red triangle",
"unicode": "1f53b"
},
":smile:": {
"category": "people",
"name": "smiling face with open mouth and smiling eyes",
"unicode": "1f604"
},
":smile_cat:": {
"category": "people",
"name": "grinning cat face with smiling eyes",
"unicode": "1f638"
},
":smiley:": {
"category": "people",
"name": "smiling face with open mouth",
"unicode": "1f603"
},
":smiley_cat:": {
"category": "people",
"name": "smiling cat face with open mouth",
"unicode": "1f63a"
},
":smiling_imp:": {
"category": "people",
"name": "smiling face with horns",
"unicode": "1f608"
},
":smirk:": {
"category": "people",
"name": "smirking face",
"unicode": "1f60f"
},
":smirk_cat:": {
"category": "people",
"name": "cat face with wry smile",
"unicode": "1f63c"
},
":smoking:": {
"category": "objects",
"name": "smoking symbol",
"unicode": "1f6ac"
},
":snail:": {
"category": "nature",
"name": "snail",
"unicode": "1f40c"
},
":snake:": {
"category": "nature",
"name": "snake",
"unicode": "1f40d"
},
":sneezing_face:": {
"category": "people",
"name": "sneezing face",
"unicode": "1f927"
},
":snowboarder:": {
"category": "activity",
"name": "snowboarder",
"unicode": "1f3c2"
},
":snowflake:": {
"category": "nature",
"name": "snowflake",
"unicode": "2744",
"unicode_alt": "2744-fe0f"
},
":snowman2:": {
"category": "nature",
"name": "snowman",
"unicode": "2603",
"unicode_alt": "2603-fe0f"
},
":snowman:": {
"category": "nature",
"name": "snowman without snow",
"unicode": "26c4",
"unicode_alt": "26c4-fe0f"
},
":sob:": {
"category": "people",
"name": "loudly crying face",
"unicode": "1f62d"
},
":soccer:": {
"category": "activity",
"name": "soccer ball",
"unicode": "26bd",
"unicode_alt": "26bd-fe0f"
},
":soon:": {
"category": "symbols",
"name": "soon with rightwards arrow above",
"unicode": "1f51c"
},
":sos:": {
"category": "symbols",
"name": "squared sos",
"unicode": "1f198"
},
":sound:": {
"category": "symbols",
"name": "speaker with one sound wave",
"unicode": "1f509"
},
":space_invader:": {
"category": "activity",
"name": "alien monster",
"unicode": "1f47e"
},
":spades:": {
"category": "symbols",
"name": "black spade suit",
"unicode": "2660",
"unicode_alt": "2660-fe0f"
},
":spaghetti:": {
"category": "food",
"name": "spaghetti",
"unicode": "1f35d"
},
":sparkle:": {
"category": "symbols",
"name": "sparkle",
"unicode": "2747",
"unicode_alt": "2747-fe0f"
},
":sparkler:": {
"category": "travel",
"name": "firework sparkler",
"unicode": "1f387"
},
":sparkles:": {
"category": "nature",
"name": "sparkles",
"unicode": "2728"
},
":sparkling_heart:": {
"category": "symbols",
"name": "sparkling heart",
"unicode": "1f496"
},
":speak_no_evil:": {
"category": "nature",
"name": "speak-no-evil monkey",
"unicode": "1f64a"
},
":speaker:": {
"category": "symbols",
"name": "speaker",
"unicode": "1f508"
},
":speaking_head:": {
"category": "people",
"name": "speaking head in silhouette",
"unicode": "1f5e3",
"unicode_alt": "1f5e3-fe0f"
},
":speech_balloon:": {
"category": "symbols",
"name": "speech balloon",
"unicode": "1f4ac"
},
":speech_left:": {
"category": "symbols",
"name": "left speech bubble",
"unicode": "1f5e8",
"unicode_alt": "1f5e8-fe0f"
},
":speedboat:": {
"category": "travel",
"name": "speedboat",
"unicode": "1f6a4"
},
":spider:": {
"category": "nature",
"name": "spider",
"unicode": "1f577",
"unicode_alt": "1f577-fe0f"
},
":spider_web:": {
"category": "nature",
"name": "spider web",
"unicode": "1f578",
"unicode_alt": "1f578-fe0f"
},
":spoon:": {
"category": "food",
"name": "spoon",
"unicode": "1f944"
},
":spy:": {
"category": "people",
"name": "sleuth or spy",
"unicode": "1f575",
"unicode_alt": "1f575-fe0f"
},
":spy_tone1:": {
"category": "people",
"name": "sleuth or spy tone 1",
"unicode": "1f575-1f3fb"
},
":spy_tone2:": {
"category": "people",
"name": "sleuth or spy tone 2",
"unicode": "1f575-1f3fc"
},
":spy_tone3:": {
"category": "people",
"name": "sleuth or spy tone 3",
"unicode": "1f575-1f3fd"
},
":spy_tone4:": {
"category": "people",
"name": "sleuth or spy tone 4",
"unicode": "1f575-1f3fe"
},
":spy_tone5:": {
"category": "people",
"name": "sleuth or spy tone 5",
"unicode": "1f575-1f3ff"
},
":squid:": {
"category": "nature",
"name": "squid",
"unicode": "1f991"
},
":stadium:": {
"category": "travel",
"name": "stadium",
"unicode": "1f3df",
"unicode_alt": "1f3df-fe0f"
},
":star2:": {
"category": "nature",
"name": "glowing star",
"unicode": "1f31f"
},
":star:": {
"category": "nature",
"name": "white medium star",
"unicode": "2b50",
"unicode_alt": "2b50-fe0f"
},
":star_and_crescent:": {
"category": "symbols",
"name": "star and crescent",
"unicode": "262a",
"unicode_alt": "262a-fe0f"
},
":star_of_david:": {
"category": "symbols",
"name": "star of david",
"unicode": "2721",
"unicode_alt": "2721-fe0f"
},
":stars:": {
"category": "travel",
"name": "shooting star",
"unicode": "1f320"
},
":station:": {
"category": "travel",
"name": "station",
"unicode": "1f689"
},
":statue_of_liberty:": {
"category": "travel",
"name": "statue of liberty",
"unicode": "1f5fd"
},
":steam_locomotive:": {
"category": "travel",
"name": "steam locomotive",
"unicode": "1f682"
},
":stew:": {
"category": "food",
"name": "pot of food",
"unicode": "1f372"
},
":stop_button:": {
"category": "symbols",
"name": "black square for stop",
"unicode": "23f9",
"unicode_alt": "23f9-fe0f"
},
":stopwatch:": {
"category": "objects",
"name": "stopwatch",
"unicode": "23f1",
"unicode_alt": "23f1-fe0f"
},
":straight_ruler:": {
"category": "objects",
"name": "straight ruler",
"unicode": "1f4cf"
},
":strawberry:": {
"category": "food",
"name": "strawberry",
"unicode": "1f353"
},
":stuck_out_tongue:": {
"category": "people",
"name": "face with stuck-out tongue",
"unicode": "1f61b"
},
":stuck_out_tongue_closed_eyes:": {
"category": "people",
"name": "face with stuck-out tongue and tightly-closed eyes",
"unicode": "1f61d"
},
":stuck_out_tongue_winking_eye:": {
"category": "people",
"name": "face with stuck-out tongue and winking eye",
"unicode": "1f61c"
},
":stuffed_flatbread:": {
"category": "food",
"name": "stuffed flatbread",
"unicode": "1f959"
},
":sun_with_face:": {
"category": "nature",
"name": "sun with face",
"unicode": "1f31e"
},
":sunflower:": {
"category": "nature",
"name": "sunflower",
"unicode": "1f33b"
},
":sunglasses:": {
"category": "people",
"name": "smiling face with sunglasses",
"unicode": "1f60e"
},
":sunny:": {
"category": "nature",
"name": "black sun with rays",
"unicode": "2600",
"unicode_alt": "2600-fe0f"
},
":sunrise:": {
"category": "travel",
"name": "sunrise",
"unicode": "1f305"
},
":sunrise_over_mountains:": {
"category": "travel",
"name": "sunrise over mountains",
"unicode": "1f304"
},
":surfer:": {
"category": "activity",
"name": "surfer",
"unicode": "1f3c4"
},
":surfer_tone1:": {
"category": "activity",
"name": "surfer tone 1",
"unicode": "1f3c4-1f3fb"
},
":surfer_tone2:": {
"category": "activity",
"name": "surfer tone 2",
"unicode": "1f3c4-1f3fc"
},
":surfer_tone3:": {
"category": "activity",
"name": "surfer tone 3",
"unicode": "1f3c4-1f3fd"
},
":surfer_tone4:": {
"category": "activity",
"name": "surfer tone 4",
"unicode": "1f3c4-1f3fe"
},
":surfer_tone5:": {
"category": "activity",
"name": "surfer tone 5",
"unicode": "1f3c4-1f3ff"
},
":sushi:": {
"category": "food",
"name": "sushi",
"unicode": "1f363"
},
":suspension_railway:": {
"category": "travel",
"name": "suspension railway",
"unicode": "1f69f"
},
":sweat:": {
"category": "people",
"name": "face with cold sweat",
"unicode": "1f613"
},
":sweat_drops:": {
"category": "nature",
"name": "splashing sweat symbol",
"unicode": "1f4a6"
},
":sweat_smile:": {
"category": "people",
"name": "smiling face with open mouth and cold sweat",
"unicode": "1f605"
},
":sweet_potato:": {
"category": "food",
"name": "roasted sweet potato",
"unicode": "1f360"
},
":swimmer:": {
"category": "activity",
"name": "swimmer",
"unicode": "1f3ca"
},
":swimmer_tone1:": {
"category": "activity",
"name": "swimmer tone 1",
"unicode": "1f3ca-1f3fb"
},
":swimmer_tone2:": {
"category": "activity",
"name": "swimmer tone 2",
"unicode": "1f3ca-1f3fc"
},
":swimmer_tone3:": {
"category": "activity",
"name": "swimmer tone 3",
"unicode": "1f3ca-1f3fd"
},
":swimmer_tone4:": {
"category": "activity",
"name": "swimmer tone 4",
"unicode": "1f3ca-1f3fe"
},
":swimmer_tone5:": {
"category": "activity",
"name": "swimmer tone 5",
"unicode": "1f3ca-1f3ff"
},
":symbols:": {
"category": "symbols",
"name": "input symbol for symbols",
"unicode": "1f523"
},
":synagogue:": {
"category": "travel",
"name": "synagogue",
"unicode": "1f54d"
},
":syringe:": {
"category": "objects",
"name": "syringe",
"unicode": "1f489"
},
":taco:": {
"category": "food",
"name": "taco",
"unicode": "1f32e"
},
":tada:": {
"category": "objects",
"name": "party popper",
"unicode": "1f389"
},
":tanabata_tree:": {
"category": "nature",
"name": "tanabata tree",
"unicode": "1f38b"
},
":tangerine:": {
"category": "food",
"name": "tangerine",
"unicode": "1f34a"
},
":taurus:": {
"category": "symbols",
"name": "taurus",
"unicode": "2649",
"unicode_alt": "2649-fe0f"
},
":taxi:": {
"category": "travel",
"name": "taxi",
"unicode": "1f695"
},
":tea:": {
"category": "food",
"name": "teacup without handle",
"unicode": "1f375"
},
":telephone:": {
"category": "objects",
"name": "black telephone",
"unicode": "260e",
"unicode_alt": "260e-fe0f"
},
":telephone_receiver:": {
"category": "objects",
"name": "telephone receiver",
"unicode": "1f4de"
},
":telescope:": {
"category": "objects",
"name": "telescope",
"unicode": "1f52d"
},
":tennis:": {
"category": "activity",
"name": "tennis racquet and ball",
"unicode": "1f3be"
},
":tent:": {
"category": "travel",
"name": "tent",
"unicode": "26fa",
"unicode_alt": "26fa-fe0f"
},
":thermometer:": {
"category": "objects",
"name": "thermometer",
"unicode": "1f321",
"unicode_alt": "1f321-fe0f"
},
":thermometer_face:": {
"category": "people",
"name": "face with thermometer",
"unicode": "1f912"
},
":thinking:": {
"category": "people",
"name": "thinking face",
"unicode": "1f914"
},
":third_place:": {
"category": "activity",
"name": "third place medal",
"unicode": "1f949"
},
":thought_balloon:": {
"category": "symbols",
"name": "thought balloon",
"unicode": "1f4ad"
},
":three:": {
"category": "symbols",
"name": "keycap digit three",
"unicode": "0033-20e3",
"unicode_alt": "0033-fe0f-20e3"
},
":thumbsdown:": {
"category": "people",
"name": "thumbs down sign",
"unicode": "1f44e"
},
":thumbsdown_tone1:": {
"category": "people",
"name": "thumbs down sign tone 1",
"unicode": "1f44e-1f3fb"
},
":thumbsdown_tone2:": {
"category": "people",
"name": "thumbs down sign tone 2",
"unicode": "1f44e-1f3fc"
},
":thumbsdown_tone3:": {
"category": "people",
"name": "thumbs down sign tone 3",
"unicode": "1f44e-1f3fd"
},
":thumbsdown_tone4:": {
"category": "people",
"name": "thumbs down sign tone 4",
"unicode": "1f44e-1f3fe"
},
":thumbsdown_tone5:": {
"category": "people",
"name": "thumbs down sign tone 5",
"unicode": "1f44e-1f3ff"
},
":thumbsup:": {
"category": "people",
"name": "thumbs up sign",
"unicode": "1f44d"
},
":thumbsup_tone1:": {
"category": "people",
"name": "thumbs up sign tone 1",
"unicode": "1f44d-1f3fb"
},
":thumbsup_tone2:": {
"category": "people",
"name": "thumbs up sign tone 2",
"unicode": "1f44d-1f3fc"
},
":thumbsup_tone3:": {
"category": "people",
"name": "thumbs up sign tone 3",
"unicode": "1f44d-1f3fd"
},
":thumbsup_tone4:": {
"category": "people",
"name": "thumbs up sign tone 4",
"unicode": "1f44d-1f3fe"
},
":thumbsup_tone5:": {
"category": "people",
"name": "thumbs up sign tone 5",
"unicode": "1f44d-1f3ff"
},
":thunder_cloud_rain:": {
"category": "nature",
"name": "thunder cloud and rain",
"unicode": "26c8",
"unicode_alt": "26c8-fe0f"
},
":ticket:": {
"category": "activity",
"name": "ticket",
"unicode": "1f3ab"
},
":tickets:": {
"category": "activity",
"name": "admission tickets",
"unicode": "1f39f",
"unicode_alt": "1f39f-fe0f"
},
":tiger2:": {
"category": "nature",
"name": "tiger",
"unicode": "1f405"
},
":tiger:": {
"category": "nature",
"name": "tiger face",
"unicode": "1f42f"
},
":timer:": {
"category": "objects",
"name": "timer clock",
"unicode": "23f2",
"unicode_alt": "23f2-fe0f"
},
":tired_face:": {
"category": "people",
"name": "tired face",
"unicode": "1f62b"
},
":tm:": {
"category": "symbols",
"name": "trade mark sign",
"unicode": "2122",
"unicode_alt": "2122-fe0f"
},
":toilet:": {
"category": "objects",
"name": "toilet",
"unicode": "1f6bd"
},
":tokyo_tower:": {
"category": "travel",
"name": "tokyo tower",
"unicode": "1f5fc"
},
":tomato:": {
"category": "food",
"name": "tomato",
"unicode": "1f345"
},
":tone1:": {
"category": "modifier",
"name": "emoji modifier Fitzpatrick type-1-2",
"unicode": "1f3fb"
},
":tone2:": {
"category": "modifier",
"name": "emoji modifier Fitzpatrick type-3",
"unicode": "1f3fc"
},
":tone3:": {
"category": "modifier",
"name": "emoji modifier Fitzpatrick type-4",
"unicode": "1f3fd"
},
":tone4:": {
"category": "modifier",
"name": "emoji modifier Fitzpatrick type-5",
"unicode": "1f3fe"
},
":tone5:": {
"category": "modifier",
"name": "emoji modifier Fitzpatrick type-6",
"unicode": "1f3ff"
},
":tongue:": {
"category": "people",
"name": "tongue",
"unicode": "1f445"
},
":tools:": {
"category": "objects",
"name": "hammer and wrench",
"unicode": "1f6e0",
"unicode_alt": "1f6e0-fe0f"
},
":top:": {
"category": "symbols",
"name": "top with upwards arrow above",
"unicode": "1f51d"
},
":tophat:": {
"category": "people",
"name": "top hat",
"unicode": "1f3a9"
},
":track_next:": {
"category": "symbols",
"name": "black right-pointing double triangle with vertical bar",
"unicode": "23ed",
"unicode_alt": "23ed-fe0f"
},
":track_previous:": {
"category": "symbols",
"name": "black left-pointing double triangle with vertical bar",
"unicode": "23ee",
"unicode_alt": "23ee-fe0f"
},
":trackball:": {
"category": "objects",
"name": "trackball",
"unicode": "1f5b2",
"unicode_alt": "1f5b2-fe0f"
},
":tractor:": {
"category": "travel",
"name": "tractor",
"unicode": "1f69c"
},
":traffic_light:": {
"category": "travel",
"name": "horizontal traffic light",
"unicode": "1f6a5"
},
":train2:": {
"category": "travel",
"name": "train",
"unicode": "1f686"
},
":train:": {
"category": "travel",
"name": "tram car",
"unicode": "1f68b"
},
":tram:": {
"category": "travel",
"name": "tram",
"unicode": "1f68a"
},
":triangular_flag_on_post:": {
"category": "objects",
"name": "triangular flag on post",
"unicode": "1f6a9"
},
":triangular_ruler:": {
"category": "objects",
"name": "triangular ruler",
"unicode": "1f4d0"
},
":trident:": {
"category": "symbols",
"name": "trident emblem",
"unicode": "1f531"
},
":triumph:": {
"category": "people",
"name": "face with look of triumph",
"unicode": "1f624"
},
":trolleybus:": {
"category": "travel",
"name": "trolleybus",
"unicode": "1f68e"
},
":trophy:": {
"category": "activity",
"name": "trophy",
"unicode": "1f3c6"
},
":tropical_drink:": {
"category": "food",
"name": "tropical drink",
"unicode": "1f379"
},
":tropical_fish:": {
"category": "nature",
"name": "tropical fish",
"unicode": "1f420"
},
":truck:": {
"category": "travel",
"name": "delivery truck",
"unicode": "1f69a"
},
":trumpet:": {
"category": "activity",
"name": "trumpet",
"unicode": "1f3ba"
},
":tulip:": {
"category": "nature",
"name": "tulip",
"unicode": "1f337"
},
":tumbler_glass:": {
"category": "food",
"name": "tumbler glass",
"unicode": "1f943"
},
":turkey:": {
"category": "nature",
"name": "turkey",
"unicode": "1f983"
},
":turtle:": {
"category": "nature",
"name": "turtle",
"unicode": "1f422"
},
":tv:": {
"category": "objects",
"name": "television",
"unicode": "1f4fa"
},
":twisted_rightwards_arrows:": {
"category": "symbols",
"name": "twisted rightwards arrows",
"unicode": "1f500"
},
":two:": {
"category": "symbols",
"name": "keycap digit two",
"unicode": "0032-20e3",
"unicode_alt": "0032-fe0f-20e3"
},
":two_hearts:": {
"category": "symbols",
"name": "two hearts",
"unicode": "1f495"
},
":two_men_holding_hands:": {
"category": "people",
"name": "two men holding hands",
"unicode": "1f46c"
},
":two_women_holding_hands:": {
"category": "people",
"name": "two women holding hands",
"unicode": "1f46d"
},
":u5272:": {
"category": "symbols",
"name": "squared cjk unified ideograph-5272",
"unicode": "1f239"
},
":u5408:": {
"category": "symbols",
"name": "squared cjk unified ideograph-5408",
"unicode": "1f234"
},
":u55b6:": {
"category": "symbols",
"name": "squared cjk unified ideograph-55b6",
"unicode": "1f23a"
},
":u6307:": {
"category": "symbols",
"name": "squared cjk unified ideograph-6307",
"unicode": "1f22f",
"unicode_alt": "1f22f-fe0f"
},
":u6708:": {
"category": "symbols",
"name": "squared cjk unified ideograph-6708",
"unicode": "1f237",
"unicode_alt": "1f237-fe0f"
},
":u6709:": {
"category": "symbols",
"name": "squared cjk unified ideograph-6709",
"unicode": "1f236"
},
":u6e80:": {
"category": "symbols",
"name": "squared cjk unified ideograph-6e80",
"unicode": "1f235"
},
":u7121:": {
"category": "symbols",
"name": "squared cjk unified ideograph-7121",
"unicode": "1f21a",
"unicode_alt": "1f21a-fe0f"
},
":u7533:": {
"category": "symbols",
"name": "squared cjk unified ideograph-7533",
"unicode": "1f238"
},
":u7981:": {
"category": "symbols",
"name": "squared cjk unified ideograph-7981",
"unicode": "1f232"
},
":u7a7a:": {
"category": "symbols",
"name": "squared cjk unified ideograph-7a7a",
"unicode": "1f233"
},
":umbrella2:": {
"category": "nature",
"name": "umbrella",
"unicode": "2602",
"unicode_alt": "2602-fe0f"
},
":umbrella:": {
"category": "nature",
"name": "umbrella with rain drops",
"unicode": "2614",
"unicode_alt": "2614-fe0f"
},
":unamused:": {
"category": "people",
"name": "unamused face",
"unicode": "1f612"
},
":underage:": {
"category": "symbols",
"name": "no one under eighteen symbol",
"unicode": "1f51e"
},
":unicorn:": {
"category": "nature",
"name": "unicorn face",
"unicode": "1f984"
},
":unlock:": {
"category": "objects",
"name": "open lock",
"unicode": "1f513"
},
":up:": {
"category": "symbols",
"name": "squared up with exclamation mark",
"unicode": "1f199"
},
":upside_down:": {
"category": "people",
"name": "upside-down face",
"unicode": "1f643"
},
":urn:": {
"category": "objects",
"name": "funeral urn",
"unicode": "26b1",
"unicode_alt": "26b1-fe0f"
},
":v:": {
"category": "people",
"name": "victory hand",
"unicode": "270c",
"unicode_alt": "270c-fe0f"
},
":v_tone1:": {
"category": "people",
"name": "victory hand tone 1",
"unicode": "270c-1f3fb"
},
":v_tone2:": {
"category": "people",
"name": "victory hand tone 2",
"unicode": "270c-1f3fc"
},
":v_tone3:": {
"category": "people",
"name": "victory hand tone 3",
"unicode": "270c-1f3fd"
},
":v_tone4:": {
"category": "people",
"name": "victory hand tone 4",
"unicode": "270c-1f3fe"
},
":v_tone5:": {
"category": "people",
"name": "victory hand tone 5",
"unicode": "270c-1f3ff"
},
":vertical_traffic_light:": {
"category": "travel",
"name": "vertical traffic light",
"unicode": "1f6a6"
},
":vhs:": {
"category": "objects",
"name": "videocassette",
"unicode": "1f4fc"
},
":vibration_mode:": {
"category": "symbols",
"name": "vibration mode",
"unicode": "1f4f3"
},
":video_camera:": {
"category": "objects",
"name": "video camera",
"unicode": "1f4f9"
},
":video_game:": {
"category": "activity",
"name": "video game",
"unicode": "1f3ae"
},
":violin:": {
"category": "activity",
"name": "violin",
"unicode": "1f3bb"
},
":virgo:": {
"category": "symbols",
"name": "virgo",
"unicode": "264d",
"unicode_alt": "264d-fe0f"
},
":volcano:": {
"category": "travel",
"name": "volcano",
"unicode": "1f30b"
},
":volleyball:": {
"category": "activity",
"name": "volleyball",
"unicode": "1f3d0"
},
":vs:": {
"category": "symbols",
"name": "squared vs",
"unicode": "1f19a"
},
":vulcan:": {
"category": "people",
"name": "raised hand with part between middle and ring fingers",
"unicode": "1f596"
},
":vulcan_tone1:": {
"category": "people",
"name": "raised hand with part between middle and ring fingers tone 1",
"unicode": "1f596-1f3fb"
},
":vulcan_tone2:": {
"category": "people",
"name": "raised hand with part between middle and ring fingers tone 2",
"unicode": "1f596-1f3fc"
},
":vulcan_tone3:": {
"category": "people",
"name": "raised hand with part between middle and ring fingers tone 3",
"unicode": "1f596-1f3fd"
},
":vulcan_tone4:": {
"category": "people",
"name": "raised hand with part between middle and ring fingers tone 4",
"unicode": "1f596-1f3fe"
},
":vulcan_tone5:": {
"category": "people",
"name": "raised hand with part between middle and ring fingers tone 5",
"unicode": "1f596-1f3ff"
},
":walking:": {
"category": "people",
"name": "pedestrian",
"unicode": "1f6b6"
},
":walking_tone1:": {
"category": "people",
"name": "pedestrian tone 1",
"unicode": "1f6b6-1f3fb"
},
":walking_tone2:": {
"category": "people",
"name": "pedestrian tone 2",
"unicode": "1f6b6-1f3fc"
},
":walking_tone3:": {
"category": "people",
"name": "pedestrian tone 3",
"unicode": "1f6b6-1f3fd"
},
":walking_tone4:": {
"category": "people",
"name": "pedestrian tone 4",
"unicode": "1f6b6-1f3fe"
},
":walking_tone5:": {
"category": "people",
"name": "pedestrian tone 5",
"unicode": "1f6b6-1f3ff"
},
":waning_crescent_moon:": {
"category": "nature",
"name": "waning crescent moon symbol",
"unicode": "1f318"
},
":waning_gibbous_moon:": {
"category": "nature",
"name": "waning gibbous moon symbol",
"unicode": "1f316"
},
":warning:": {
"category": "symbols",
"name": "warning sign",
"unicode": "26a0",
"unicode_alt": "26a0-fe0f"
},
":wastebasket:": {
"category": "objects",
"name": "wastebasket",
"unicode": "1f5d1",
"unicode_alt": "1f5d1-fe0f"
},
":watch:": {
"category": "objects",
"name": "watch",
"unicode": "231a",
"unicode_alt": "231a-fe0f"
},
":water_buffalo:": {
"category": "nature",
"name": "water buffalo",
"unicode": "1f403"
},
":water_polo:": {
"category": "activity",
"name": "water polo",
"unicode": "1f93d"
},
":water_polo_tone1:": {
"category": "activity",
"name": "water polo tone 1",
"unicode": "1f93d-1f3fb"
},
":water_polo_tone2:": {
"category": "activity",
"name": "water polo tone 2",
"unicode": "1f93d-1f3fc"
},
":water_polo_tone3:": {
"category": "activity",
"name": "water polo tone 3",
"unicode": "1f93d-1f3fd"
},
":water_polo_tone4:": {
"category": "activity",
"name": "water polo tone 4",
"unicode": "1f93d-1f3fe"
},
":water_polo_tone5:": {
"category": "activity",
"name": "water polo tone 5",
"unicode": "1f93d-1f3ff"
},
":watermelon:": {
"category": "food",
"name": "watermelon",
"unicode": "1f349"
},
":wave:": {
"category": "people",
"name": "waving hand sign",
"unicode": "1f44b"
},
":wave_tone1:": {
"category": "people",
"name": "waving hand sign tone 1",
"unicode": "1f44b-1f3fb"
},
":wave_tone2:": {
"category": "people",
"name": "waving hand sign tone 2",
"unicode": "1f44b-1f3fc"
},
":wave_tone3:": {
"category": "people",
"name": "waving hand sign tone 3",
"unicode": "1f44b-1f3fd"
},
":wave_tone4:": {
"category": "people",
"name": "waving hand sign tone 4",
"unicode": "1f44b-1f3fe"
},
":wave_tone5:": {
"category": "people",
"name": "waving hand sign tone 5",
"unicode": "1f44b-1f3ff"
},
":wavy_dash:": {
"category": "symbols",
"name": "wavy dash",
"unicode": "3030",
"unicode_alt": "3030-fe0f"
},
":waxing_crescent_moon:": {
"category": "nature",
"name": "waxing crescent moon symbol",
"unicode": "1f312"
},
":waxing_gibbous_moon:": {
"category": "nature",
"name": "waxing gibbous moon symbol",
"unicode": "1f314"
},
":wc:": {
"category": "symbols",
"name": "water closet",
"unicode": "1f6be"
},
":weary:": {
"category": "people",
"name": "weary face",
"unicode": "1f629"
},
":wedding:": {
"category": "travel",
"name": "wedding",
"unicode": "1f492"
},
":whale2:": {
"category": "nature",
"name": "whale",
"unicode": "1f40b"
},
":whale:": {
"category": "nature",
"name": "spouting whale",
"unicode": "1f433"
},
":wheel_of_dharma:": {
"category": "symbols",
"name": "wheel of dharma",
"unicode": "2638",
"unicode_alt": "2638-fe0f"
},
":wheelchair:": {
"category": "symbols",
"name": "wheelchair symbol",
"unicode": "267f",
"unicode_alt": "267f-fe0f"
},
":white_check_mark:": {
"category": "symbols",
"name": "white heavy check mark",
"unicode": "2705"
},
":white_circle:": {
"category": "symbols",
"name": "white circle",
"unicode": "26aa",
"unicode_alt": "26aa-fe0f"
},
":white_flower:": {
"category": "symbols",
"name": "white flower",
"unicode": "1f4ae"
},
":white_large_square:": {
"category": "symbols",
"name": "white large square",
"unicode": "2b1c",
"unicode_alt": "2b1c-fe0f"
},
":white_medium_small_square:": {
"category": "symbols",
"name": "white medium small square",
"unicode": "25fd",
"unicode_alt": "25fd-fe0f"
},
":white_medium_square:": {
"category": "symbols",
"name": "white medium square",
"unicode": "25fb",
"unicode_alt": "25fb-fe0f"
},
":white_small_square:": {
"category": "symbols",
"name": "white small square",
"unicode": "25ab",
"unicode_alt": "25ab-fe0f"
},
":white_square_button:": {
"category": "symbols",
"name": "white square button",
"unicode": "1f533"
},
":white_sun_cloud:": {
"category": "nature",
"name": "white sun behind cloud",
"unicode": "1f325",
"unicode_alt": "1f325-fe0f"
},
":white_sun_rain_cloud:": {
"category": "nature",
"name": "white sun behind cloud with rain",
"unicode": "1f326",
"unicode_alt": "1f326-fe0f"
},
":white_sun_small_cloud:": {
"category": "nature",
"name": "white sun with small cloud",
"unicode": "1f324",
"unicode_alt": "1f324-fe0f"
},
":wilted_rose:": {
"category": "nature",
"name": "wilted flower",
"unicode": "1f940"
},
":wind_blowing_face:": {
"category": "nature",
"name": "wind blowing face",
"unicode": "1f32c",
"unicode_alt": "1f32c-fe0f"
},
":wind_chime:": {
"category": "objects",
"name": "wind chime",
"unicode": "1f390"
},
":wine_glass:": {
"category": "food",
"name": "wine glass",
"unicode": "1f377"
},
":wink:": {
"category": "people",
"name": "winking face",
"unicode": "1f609"
},
":wolf:": {
"category": "nature",
"name": "wolf face",
"unicode": "1f43a"
},
":woman:": {
"category": "people",
"name": "woman",
"unicode": "1f469"
},
":woman_tone1:": {
"category": "people",
"name": "woman tone 1",
"unicode": "1f469-1f3fb"
},
":woman_tone2:": {
"category": "people",
"name": "woman tone 2",
"unicode": "1f469-1f3fc"
},
":woman_tone3:": {
"category": "people",
"name": "woman tone 3",
"unicode": "1f469-1f3fd"
},
":woman_tone4:": {
"category": "people",
"name": "woman tone 4",
"unicode": "1f469-1f3fe"
},
":woman_tone5:": {
"category": "people",
"name": "woman tone 5",
"unicode": "1f469-1f3ff"
},
":womans_clothes:": {
"category": "people",
"name": "womans clothes",
"unicode": "1f45a"
},
":womans_hat:": {
"category": "people",
"name": "womans hat",
"unicode": "1f452"
},
":womens:": {
"category": "symbols",
"name": "womens symbol",
"unicode": "1f6ba"
},
":worried:": {
"category": "people",
"name": "worried face",
"unicode": "1f61f"
},
":wrench:": {
"category": "objects",
"name": "wrench",
"unicode": "1f527"
},
":wrestlers:": {
"category": "activity",
"name": "wrestlers",
"unicode": "1f93c"
},
":wrestlers_tone1:": {
"category": "activity",
"name": "wrestlers tone 1",
"unicode": "1f93c-1f3fb"
},
":wrestlers_tone2:": {
"category": "activity",
"name": "wrestlers tone 2",
"unicode": "1f93c-1f3fc"
},
":wrestlers_tone3:": {
"category": "activity",
"name": "wrestlers tone 3",
"unicode": "1f93c-1f3fd"
},
":wrestlers_tone4:": {
"category": "activity",
"name": "wrestlers tone 4",
"unicode": "1f93c-1f3fe"
},
":wrestlers_tone5:": {
"category": "activity",
"name": "wrestlers tone 5",
"unicode": "1f93c-1f3ff"
},
":writing_hand:": {
"category": "people",
"name": "writing hand",
"unicode": "270d",
"unicode_alt": "270d-fe0f"
},
":writing_hand_tone1:": {
"category": "people",
"name": "writing hand tone 1",
"unicode": "270d-1f3fb"
},
":writing_hand_tone2:": {
"category": "people",
"name": "writing hand tone 2",
"unicode": "270d-1f3fc"
},
":writing_hand_tone3:": {
"category": "people",
"name": "writing hand tone 3",
"unicode": "270d-1f3fd"
},
":writing_hand_tone4:": {
"category": "people",
"name": "writing hand tone 4",
"unicode": "270d-1f3fe"
},
":writing_hand_tone5:": {
"category": "people",
"name": "writing hand tone 5",
"unicode": "270d-1f3ff"
},
":x:": {
"category": "symbols",
"name": "cross mark",
"unicode": "274c"
},
":yellow_heart:": {
"category": "symbols",
"name": "yellow heart",
"unicode": "1f49b"
},
":yen:": {
"category": "objects",
"name": "banknote with yen sign",
"unicode": "1f4b4"
},
":yin_yang:": {
"category": "symbols",
"name": "yin yang",
"unicode": "262f",
"unicode_alt": "262f-fe0f"
},
":yum:": {
"category": "people",
"name": "face savouring delicious food",
"unicode": "1f60b"
},
":zap:": {
"category": "nature",
"name": "high voltage sign",
"unicode": "26a1",
"unicode_alt": "26a1-fe0f"
},
":zero:": {
"category": "symbols",
"name": "keycap digit zero",
"unicode": "0030-20e3",
"unicode_alt": "0030-fe0f-20e3"
},
":zipper_mouth:": {
"category": "people",
"name": "zipper-mouth face",
"unicode": "1f910"
},
":zzz:": {
"category": "people",
"name": "sleeping symbol",
"unicode": "1f4a4"
}
}
aliases = {
":+1:": ":thumbsup:",
":+1_tone1:": ":thumbsup_tone1:",
":+1_tone2:": ":thumbsup_tone2:",
":+1_tone3:": ":thumbsup_tone3:",
":+1_tone4:": ":thumbsup_tone4:",
":+1_tone5:": ":thumbsup_tone5:",
":-1:": ":thumbsdown:",
":-1_tone1:": ":thumbsdown_tone1:",
":-1_tone2:": ":thumbsdown_tone2:",
":-1_tone3:": ":thumbsdown_tone3:",
":-1_tone4:": ":thumbsdown_tone4:",
":-1_tone5:": ":thumbsdown_tone5:",
":ac:": ":flag_ac:",
":ad:": ":flag_ad:",
":admission_tickets:": ":tickets:",
":ae:": ":flag_ae:",
":af:": ":flag_af:",
":ag:": ":flag_ag:",
":ai:": ":flag_ai:",
":al:": ":flag_al:",
":am:": ":flag_am:",
":ao:": ":flag_ao:",
":aq:": ":flag_aq:",
":ar:": ":flag_ar:",
":archery:": ":bow_and_arrow:",
":as:": ":flag_as:",
":at:": ":flag_at:",
":atom_symbol:": ":atom:",
":au:": ":flag_au:",
":aw:": ":flag_aw:",
":ax:": ":flag_ax:",
":az:": ":flag_az:",
":ba:": ":flag_ba:",
":back_of_hand:": ":raised_back_of_hand:",
":back_of_hand_tone1:": ":raised_back_of_hand_tone1:",
":back_of_hand_tone2:": ":raised_back_of_hand_tone2:",
":back_of_hand_tone3:": ":raised_back_of_hand_tone3:",
":back_of_hand_tone4:": ":raised_back_of_hand_tone4:",
":back_of_hand_tone5:": ":raised_back_of_hand_tone5:",
":baguette_bread:": ":french_bread:",
":ballot_box_with_ballot:": ":ballot_box:",
":bb:": ":flag_bb:",
":bd:": ":flag_bd:",
":be:": ":flag_be:",
":beach_with_umbrella:": ":beach:",
":bellhop_bell:": ":bellhop:",
":bf:": ":flag_bf:",
":bg:": ":flag_bg:",
":bh:": ":flag_bh:",
":bi:": ":flag_bi:",
":biohazard_sign:": ":biohazard:",
":bj:": ":flag_bj:",
":bl:": ":flag_bl:",
":bm:": ":flag_bm:",
":bn:": ":flag_bn:",
":bo:": ":flag_bo:",
":bottle_with_popping_cork:": ":champagne:",
":boxing_gloves:": ":boxing_glove:",
":bq:": ":flag_bq:",
":br:": ":flag_br:",
":bs:": ":flag_bs:",
":bt:": ":flag_bt:",
":building_construction:": ":construction_site:",
":bv:": ":flag_bv:",
":bw:": ":flag_bw:",
":by:": ":flag_by:",
":bz:": ":flag_bz:",
":ca:": ":flag_ca:",
":call_me_hand:": ":call_me:",
":call_me_hand_tone1:": ":call_me_tone1:",
":call_me_hand_tone2:": ":call_me_tone2:",
":call_me_hand_tone3:": ":call_me_tone3:",
":call_me_hand_tone4:": ":call_me_tone4:",
":call_me_hand_tone5:": ":call_me_tone5:",
":card_file_box:": ":card_box:",
":card_index_dividers:": ":dividers:",
":cc:": ":flag_cc:",
":cf:": ":flag_cf:",
":cg:": ":flag_cg:",
":ch:": ":flag_ch:",
":cheese_wedge:": ":cheese:",
":chile:": ":flag_cl:",
":ci:": ":flag_ci:",
":city_sunrise:": ":city_sunset:",
":ck:": ":flag_ck:",
":clinking_glass:": ":champagne_glass:",
":cloud_with_lightning:": ":cloud_lightning:",
":cloud_with_rain:": ":cloud_rain:",
":cloud_with_snow:": ":cloud_snow:",
":cloud_with_tornado:": ":cloud_tornado:",
":clown_face:": ":clown:",
":cm:": ":flag_cm:",
":cn:": ":flag_cn:",
":co:": ":flag_co:",
":congo:": ":flag_cd:",
":couch_and_lamp:": ":couch:",
":couple_with_heart_mm:": ":couple_mm:",
":couple_with_heart_ww:": ":couple_ww:",
":couplekiss_mm:": ":kiss_mm:",
":couplekiss_ww:": ":kiss_ww:",
":cp:": ":flag_cp:",
":cr:": ":flag_cr:",
":cricket_bat_ball:": ":cricket:",
":cu:": ":flag_cu:",
":cv:": ":flag_cv:",
":cw:": ":flag_cw:",
":cx:": ":flag_cx:",
":cy:": ":flag_cy:",
":cz:": ":flag_cz:",
":dagger_knife:": ":dagger:",
":de:": ":flag_de:",
":derelict_house_building:": ":house_abandoned:",
":desert_island:": ":island:",
":desktop_computer:": ":desktop:",
":dg:": ":flag_dg:",
":dj:": ":flag_dj:",
":dk:": ":flag_dk:",
":dm:": ":flag_dm:",
":do:": ":flag_do:",
":double_vertical_bar:": ":pause_button:",
":dove_of_peace:": ":dove:",
":drool:": ":drooling_face:",
":drum_with_drumsticks:": ":drum:",
":dz:": ":flag_dz:",
":ea:": ":flag_ea:",
":ec:": ":flag_ec:",
":ee:": ":flag_ee:",
":eg:": ":flag_eg:",
":eh:": ":flag_eh:",
":eject_symbol:": ":eject:",
":email:": ":e-mail:",
":er:": ":flag_er:",
":es:": ":flag_es:",
":et:": ":flag_et:",
":eu:": ":flag_eu:",
":expecting_woman:": ":pregnant_woman:",
":expecting_woman_tone1:": ":pregnant_woman_tone1:",
":expecting_woman_tone2:": ":pregnant_woman_tone2:",
":expecting_woman_tone3:": ":pregnant_woman_tone3:",
":expecting_woman_tone4:": ":pregnant_woman_tone4:",
":expecting_woman_tone5:": ":pregnant_woman_tone5:",
":face_with_cowboy_hat:": ":cowboy:",
":face_with_head_bandage:": ":head_bandage:",
":face_with_rolling_eyes:": ":rolling_eyes:",
":face_with_thermometer:": ":thermometer_face:",
":facepalm:": ":face_palm:",
":facepalm_tone1:": ":face_palm_tone1:",
":facepalm_tone2:": ":face_palm_tone2:",
":facepalm_tone3:": ":face_palm_tone3:",
":facepalm_tone4:": ":face_palm_tone4:",
":facepalm_tone5:": ":face_palm_tone5:",
":fencing:": ":fencer:",
":fi:": ":flag_fi:",
":film_projector:": ":projector:",
":first_place_medal:": ":first_place:",
":fj:": ":flag_fj:",
":fk:": ":flag_fk:",
":flame:": ":fire:",
":flan:": ":custard:",
":fm:": ":flag_fm:",
":fo:": ":flag_fo:",
":fork_and_knife_with_plate:": ":fork_knife_plate:",
":fox_face:": ":fox:",
":fr:": ":flag_fr:",
":frame_with_picture:": ":frame_photo:",
":funeral_urn:": ":urn:",
":ga:": ":flag_ga:",
":gay_pride_flag:": ":rainbow_flag:",
":gb:": ":flag_gb:",
":gd:": ":flag_gd:",
":ge:": ":flag_ge:",
":gf:": ":flag_gf:",
":gg:": ":flag_gg:",
":gh:": ":flag_gh:",
":gi:": ":flag_gi:",
":gl:": ":flag_gl:",
":glass_of_milk:": ":milk:",
":gm:": ":flag_gm:",
":gn:": ":flag_gn:",
":goal_net:": ":goal:",
":gp:": ":flag_gp:",
":gq:": ":flag_gq:",
":gr:": ":flag_gr:",
":grandma:": ":older_woman:",
":grandma_tone1:": ":older_woman_tone1:",
":grandma_tone2:": ":older_woman_tone2:",
":grandma_tone3:": ":older_woman_tone3:",
":grandma_tone4:": ":older_woman_tone4:",
":grandma_tone5:": ":older_woman_tone5:",
":green_salad:": ":salad:",
":gs:": ":flag_gs:",
":gt:": ":flag_gt:",
":gu:": ":flag_gu:",
":gw:": ":flag_gw:",
":gy:": ":flag_gy:",
":hammer_and_pick:": ":hammer_pick:",
":hammer_and_wrench:": ":tools:",
":hand_with_index_and_middle_finger_crossed:": ":fingers_crossed:",
":hand_with_index_and_middle_fingers_crossed_tone1:": ":fingers_crossed_tone1:",
":hand_with_index_and_middle_fingers_crossed_tone2:": ":fingers_crossed_tone2:",
":hand_with_index_and_middle_fingers_crossed_tone3:": ":fingers_crossed_tone3:",
":hand_with_index_and_middle_fingers_crossed_tone4:": ":fingers_crossed_tone4:",
":hand_with_index_and_middle_fingers_crossed_tone5:": ":fingers_crossed_tone5:",
":hankey:": ":poop:",
":heavy_heart_exclamation_mark_ornament:": ":heart_exclamation:",
":helmet_with_white_cross:": ":helmet_with_cross:",
":hk:": ":flag_hk:",
":hm:": ":flag_hm:",
":hn:": ":flag_hn:",
":hot_dog:": ":hotdog:",
":house_buildings:": ":homes:",
":hr:": ":flag_hr:",
":ht:": ":flag_ht:",
":hu:": ":flag_hu:",
":hugging_face:": ":hugging:",
":ic:": ":flag_ic:",
":ie:": ":flag_ie:",
":il:": ":flag_il:",
":im:": ":flag_im:",
":in:": ":flag_in:",
":indonesia:": ":flag_id:",
":io:": ":flag_io:",
":iq:": ":flag_iq:",
":ir:": ":flag_ir:",
":is:": ":flag_is:",
":it:": ":flag_it:",
":je:": ":flag_je:",
":jm:": ":flag_jm:",
":jo:": ":flag_jo:",
":jp:": ":flag_jp:",
":juggler:": ":juggling:",
":juggler_tone1:": ":juggling_tone1:",
":juggler_tone2:": ":juggling_tone2:",
":juggler_tone3:": ":juggling_tone3:",
":juggler_tone4:": ":juggling_tone4:",
":juggler_tone5:": ":juggling_tone5:",
":karate_uniform:": ":martial_arts_uniform:",
":kayak:": ":canoe:",
":ke:": ":flag_ke:",
":keycap_asterisk:": ":asterisk:",
":kg:": ":flag_kg:",
":kh:": ":flag_kh:",
":ki:": ":flag_ki:",
":kiwifruit:": ":kiwi:",
":km:": ":flag_km:",
":kn:": ":flag_kn:",
":kp:": ":flag_kp:",
":kr:": ":flag_kr:",
":kw:": ":flag_kw:",
":ky:": ":flag_ky:",
":kz:": ":flag_kz:",
":la:": ":flag_la:",
":latin_cross:": ":cross:",
":lb:": ":flag_lb:",
":lc:": ":flag_lc:",
":left_fist:": ":left_facing_fist:",
":left_fist_tone1:": ":left_facing_fist_tone1:",
":left_fist_tone2:": ":left_facing_fist_tone2:",
":left_fist_tone3:": ":left_facing_fist_tone3:",
":left_fist_tone4:": ":left_facing_fist_tone4:",
":left_fist_tone5:": ":left_facing_fist_tone5:",
":left_speech_bubble:": ":speech_left:",
":li:": ":flag_li:",
":liar:": ":lying_face:",
":linked_paperclips:": ":paperclips:",
":lion:": ":lion_face:",
":lk:": ":flag_lk:",
":lower_left_ballpoint_pen:": ":pen_ballpoint:",
":lower_left_crayon:": ":crayon:",
":lower_left_fountain_pen:": ":pen_fountain:",
":lower_left_paintbrush:": ":paintbrush:",
":lr:": ":flag_lr:",
":ls:": ":flag_ls:",
":lt:": ":flag_lt:",
":lu:": ":flag_lu:",
":lv:": ":flag_lv:",
":ly:": ":flag_ly:",
":ma:": ":flag_ma:",
":male_dancer:": ":man_dancing:",
":male_dancer_tone1:": ":man_dancing_tone1:",
":male_dancer_tone2:": ":man_dancing_tone2:",
":male_dancer_tone3:": ":man_dancing_tone3:",
":male_dancer_tone4:": ":man_dancing_tone4:",
":male_dancer_tone5:": ":man_dancing_tone5:",
":man_in_business_suit_levitating:": ":levitate:",
":mantlepiece_clock:": ":clock:",
":mc:": ":flag_mc:",
":md:": ":flag_md:",
":me:": ":flag_me:",
":mf:": ":flag_mf:",
":mg:": ":flag_mg:",
":mh:": ":flag_mh:",
":mk:": ":flag_mk:",
":ml:": ":flag_ml:",
":mm:": ":flag_mm:",
":mn:": ":flag_mn:",
":mo:": ":flag_mo:",
":money_mouth_face:": ":money_mouth:",
":mother_christmas:": ":mrs_claus:",
":mother_christmas_tone1:": ":mrs_claus_tone1:",
":mother_christmas_tone2:": ":mrs_claus_tone2:",
":mother_christmas_tone3:": ":mrs_claus_tone3:",
":mother_christmas_tone4:": ":mrs_claus_tone4:",
":mother_christmas_tone5:": ":mrs_claus_tone5:",
":motorbike:": ":motor_scooter:",
":mp:": ":flag_mp:",
":mq:": ":flag_mq:",
":mr:": ":flag_mr:",
":ms:": ":flag_ms:",
":mt:": ":flag_mt:",
":mu:": ":flag_mu:",
":mv:": ":flag_mv:",
":mw:": ":flag_mw:",
":mx:": ":flag_mx:",
":my:": ":flag_my:",
":mz:": ":flag_mz:",
":na:": ":flag_na:",
":national_park:": ":park:",
":nc:": ":flag_nc:",
":ne:": ":flag_ne:",
":nerd_face:": ":nerd:",
":next_track:": ":track_next:",
":nf:": ":flag_nf:",
":ni:": ":flag_ni:",
":nigeria:": ":flag_ng:",
":nl:": ":flag_nl:",
":no:": ":flag_no:",
":np:": ":flag_np:",
":nr:": ":flag_nr:",
":nu:": ":flag_nu:",
":nz:": ":flag_nz:",
":oil_drum:": ":oil:",
":old_key:": ":key2:",
":om:": ":flag_om:",
":pa:": ":flag_pa:",
":paella:": ":shallow_pan_of_food:",
":passenger_ship:": ":cruise_ship:",
":paw_prints:": ":feet:",
":pe:": ":flag_pe:",
":peace_symbol:": ":peace:",
":person_doing_cartwheel:": ":cartwheel:",
":person_doing_cartwheel_tone1:": ":cartwheel_tone1:",
":person_doing_cartwheel_tone2:": ":cartwheel_tone2:",
":person_doing_cartwheel_tone3:": ":cartwheel_tone3:",
":person_doing_cartwheel_tone4:": ":cartwheel_tone4:",
":person_doing_cartwheel_tone5:": ":cartwheel_tone5:",
":person_with_ball:": ":basketball_player:",
":person_with_ball_tone1:": ":basketball_player_tone1:",
":person_with_ball_tone2:": ":basketball_player_tone2:",
":person_with_ball_tone3:": ":basketball_player_tone3:",
":person_with_ball_tone4:": ":basketball_player_tone4:",
":person_with_ball_tone5:": ":basketball_player_tone5:",
":pf:": ":flag_pf:",
":pg:": ":flag_pg:",
":ph:": ":flag_ph:",
":pk:": ":flag_pk:",
":pl:": ":flag_pl:",
":pm:": ":flag_pm:",
":pn:": ":flag_pn:",
":poo:": ":poop:",
":pr:": ":flag_pr:",
":previous_track:": ":track_previous:",
":ps:": ":flag_ps:",
":pt:": ":flag_pt:",
":pudding:": ":custard:",
":pw:": ":flag_pw:",
":py:": ":flag_py:",
":qa:": ":flag_qa:",
":racing_car:": ":race_car:",
":racing_motorcycle:": ":motorcycle:",
":radioactive_sign:": ":radioactive:",
":railroad_track:": ":railway_track:",
":raised_hand_with_fingers_splayed:": ":hand_splayed:",
":raised_hand_with_fingers_splayed_tone1:": ":hand_splayed_tone1:",
":raised_hand_with_fingers_splayed_tone2:": ":hand_splayed_tone2:",
":raised_hand_with_fingers_splayed_tone3:": ":hand_splayed_tone3:",
":raised_hand_with_fingers_splayed_tone4:": ":hand_splayed_tone4:",
":raised_hand_with_fingers_splayed_tone5:": ":hand_splayed_tone5:",
":raised_hand_with_part_between_middle_and_ring_fingers:": ":vulcan:",
":raised_hand_with_part_between_middle_and_ring_fingers_tone1:": ":vulcan_tone1:",
":raised_hand_with_part_between_middle_and_ring_fingers_tone2:": ":vulcan_tone2:",
":raised_hand_with_part_between_middle_and_ring_fingers_tone3:": ":vulcan_tone3:",
":raised_hand_with_part_between_middle_and_ring_fingers_tone4:": ":vulcan_tone4:",
":raised_hand_with_part_between_middle_and_ring_fingers_tone5:": ":vulcan_tone5:",
":re:": ":flag_re:",
":reversed_hand_with_middle_finger_extended:": ":middle_finger:",
":reversed_hand_with_middle_finger_extended_tone1:": ":middle_finger_tone1:",
":reversed_hand_with_middle_finger_extended_tone2:": ":middle_finger_tone2:",
":reversed_hand_with_middle_finger_extended_tone3:": ":middle_finger_tone3:",
":reversed_hand_with_middle_finger_extended_tone4:": ":middle_finger_tone4:",
":reversed_hand_with_middle_finger_extended_tone5:": ":middle_finger_tone5:",
":rhinoceros:": ":rhino:",
":right_anger_bubble:": ":anger_right:",
":right_fist:": ":right_facing_fist:",
":right_fist_tone1:": ":right_facing_fist_tone1:",
":right_fist_tone2:": ":right_facing_fist_tone2:",
":right_fist_tone3:": ":right_facing_fist_tone3:",
":right_fist_tone4:": ":right_facing_fist_tone4:",
":right_fist_tone5:": ":right_facing_fist_tone5:",
":ro:": ":flag_ro:",
":robot_face:": ":robot:",
":rolled_up_newspaper:": ":newspaper2:",
":rolling_on_the_floor_laughing:": ":rofl:",
":rs:": ":flag_rs:",
":ru:": ":flag_ru:",
":rw:": ":flag_rw:",
":satisfied:": ":laughing:",
":saudi:": ":flag_sa:",
":saudiarabia:": ":flag_sa:",
":sb:": ":flag_sb:",
":sc:": ":flag_sc:",
":sd:": ":flag_sd:",
":se:": ":flag_se:",
":second_place_medal:": ":second_place:",
":sg:": ":flag_sg:",
":sh:": ":flag_sh:",
":shaking_hands:": ":handshake:",
":shaking_hands_tone1:": ":handshake_tone1:",
":shaking_hands_tone2:": ":handshake_tone2:",
":shaking_hands_tone3:": ":handshake_tone3:",
":shaking_hands_tone4:": ":handshake_tone4:",
":shaking_hands_tone5:": ":handshake_tone5:",
":shelled_peanut:": ":peanuts:",
":shit:": ":poop:",
":shopping_trolley:": ":shopping_cart:",
":si:": ":flag_si:",
":sick:": ":nauseated_face:",
":sign_of_the_horns:": ":metal:",
":sign_of_the_horns_tone1:": ":metal_tone1:",
":sign_of_the_horns_tone2:": ":metal_tone2:",
":sign_of_the_horns_tone3:": ":metal_tone3:",
":sign_of_the_horns_tone4:": ":metal_tone4:",
":sign_of_the_horns_tone5:": ":metal_tone5:",
":sj:": ":flag_sj:",
":sk:": ":flag_sk:",
":skeleton:": ":skull:",
":skull_and_crossbones:": ":skull_crossbones:",
":sl:": ":flag_sl:",
":sleuth_or_spy:": ":spy:",
":sleuth_or_spy_tone1:": ":spy_tone1:",
":sleuth_or_spy_tone2:": ":spy_tone2:",
":sleuth_or_spy_tone3:": ":spy_tone3:",
":sleuth_or_spy_tone4:": ":spy_tone4:",
":sleuth_or_spy_tone5:": ":spy_tone5:",
":slightly_frowning_face:": ":slight_frown:",
":slightly_smiling_face:": ":slight_smile:",
":sm:": ":flag_sm:",
":small_airplane:": ":airplane_small:",
":sn:": ":flag_sn:",
":sneeze:": ":sneezing_face:",
":snow_capped_mountain:": ":mountain_snow:",
":so:": ":flag_so:",
":speaking_head_in_silhouette:": ":speaking_head:",
":spiral_calendar_pad:": ":calendar_spiral:",
":spiral_note_pad:": ":notepad_spiral:",
":sports_medal:": ":medal:",
":sr:": ":flag_sr:",
":ss:": ":flag_ss:",
":st:": ":flag_st:",
":stop_sign:": ":octagonal_sign:",
":studio_microphone:": ":microphone2:",
":stuffed_pita:": ":stuffed_flatbread:",
":sv:": ":flag_sv:",
":sx:": ":flag_sx:",
":sy:": ":flag_sy:",
":sz:": ":flag_sz:",
":ta:": ":flag_ta:",
":table_tennis:": ":ping_pong:",
":tc:": ":flag_tc:",
":td:": ":flag_td:",
":tf:": ":flag_tf:",
":tg:": ":flag_tg:",
":th:": ":flag_th:",
":thinking_face:": ":thinking:",
":third_place_medal:": ":third_place:",
":three_button_mouse:": ":mouse_three_button:",
":thumbdown:": ":thumbsdown:",
":thumbdown_tone1:": ":thumbsdown_tone1:",
":thumbdown_tone2:": ":thumbsdown_tone2:",
":thumbdown_tone3:": ":thumbsdown_tone3:",
":thumbdown_tone4:": ":thumbsdown_tone4:",
":thumbdown_tone5:": ":thumbsdown_tone5:",
":thumbup:": ":thumbsup:",
":thumbup_tone1:": ":thumbsup_tone1:",
":thumbup_tone2:": ":thumbsup_tone2:",
":thumbup_tone3:": ":thumbsup_tone3:",
":thumbup_tone4:": ":thumbsup_tone4:",
":thumbup_tone5:": ":thumbsup_tone5:",
":thunder_cloud_and_rain:": ":thunder_cloud_rain:",
":timer_clock:": ":timer:",
":tj:": ":flag_tj:",
":tk:": ":flag_tk:",
":tl:": ":flag_tl:",
":tn:": ":flag_tn:",
":to:": ":flag_to:",
":tr:": ":flag_tr:",
":tt:": ":flag_tt:",
":turkmenistan:": ":flag_tm:",
":tuvalu:": ":flag_tv:",
":tuxedo_tone1:": ":man_in_tuxedo_tone1:",
":tuxedo_tone2:": ":man_in_tuxedo_tone2:",
":tuxedo_tone3:": ":man_in_tuxedo_tone3:",
":tuxedo_tone4:": ":man_in_tuxedo_tone4:",
":tuxedo_tone5:": ":man_in_tuxedo_tone5:",
":tw:": ":flag_tw:",
":tz:": ":flag_tz:",
":ua:": ":flag_ua:",
":ug:": ":flag_ug:",
":um:": ":flag_um:",
":umbrella_on_ground:": ":beach_umbrella:",
":unicorn_face:": ":unicorn:",
":upside_down_face:": ":upside_down:",
":us:": ":flag_us:",
":uy:": ":flag_uy:",
":uz:": ":flag_uz:",
":va:": ":flag_va:",
":vc:": ":flag_vc:",
":ve:": ":flag_ve:",
":vg:": ":flag_vg:",
":vi:": ":flag_vi:",
":vn:": ":flag_vn:",
":vu:": ":flag_vu:",
":waving_black_flag:": ":flag_black:",
":waving_white_flag:": ":flag_white:",
":weight_lifter:": ":lifter:",
":weight_lifter_tone1:": ":lifter_tone1:",
":weight_lifter_tone2:": ":lifter_tone2:",
":weight_lifter_tone3:": ":lifter_tone3:",
":weight_lifter_tone4:": ":lifter_tone4:",
":weight_lifter_tone5:": ":lifter_tone5:",
":wf:": ":flag_wf:",
":whisky:": ":tumbler_glass:",
":white_frowning_face:": ":frowning2:",
":white_sun_behind_cloud:": ":white_sun_cloud:",
":white_sun_behind_cloud_with_rain:": ":white_sun_rain_cloud:",
":white_sun_with_small_cloud:": ":white_sun_small_cloud:",
":wilted_flower:": ":wilted_rose:",
":world_map:": ":map:",
":worship_symbol:": ":place_of_worship:",
":wrestling:": ":wrestlers:",
":wrestling_tone1:": ":wrestlers_tone1:",
":wrestling_tone2:": ":wrestlers_tone2:",
":wrestling_tone3:": ":wrestlers_tone3:",
":wrestling_tone4:": ":wrestlers_tone4:",
":wrestling_tone5:": ":wrestlers_tone5:",
":ws:": ":flag_ws:",
":xk:": ":flag_xk:",
":ye:": ":flag_ye:",
":yt:": ":flag_yt:",
":za:": ":flag_za:",
":zipper_mouth_face:": ":zipper_mouth:",
":zm:": ":flag_zm:",
":zw:": ":flag_zw:"
}
|
facelessuser/sublime-markdown-popups
|
st3/mdpopups/pymdownx/emoji1_db.py
|
Python
|
mit
| 259,076
|
[
"CRYSTAL",
"FLEUR",
"Octopus"
] |
287d39a543b6cdf6aa4ed82fb3ca79e49cb6eef8210a5fc02e47007e6ccd6744
|
'''Convert to and from Roman numerals
This program is part of 'Dive Into Python 3', a free Python book for
experienced programmers. Visit http://diveintopython3.org/ for the
latest version.
'''
import re
class OutOfRangeError(ValueError):
    """Raised when the integer is outside the supported 1..4999 range."""
    pass
class NotIntegerError(ValueError):
    """Raised when to_roman() receives a non-integer value."""
    pass
class InvalidRomanNumeralError(ValueError):
    """Raised when from_roman() receives a malformed Roman numeral."""
    pass
# Value table ordered largest-first so greedy conversion works; the
# subtractive pairs (CM, CD, XC, XL, IX, IV) are listed explicitly so
# neither function needs special-case logic for them.
roman_numeral_map = (('M', 1000),
                     ('CM', 900),
                     ('D', 500),
                     ('CD', 400),
                     ('C', 100),
                     ('XC', 90),
                     ('L', 50),
                     ('XL', 40),
                     ('X', 10),
                     ('IX', 9),
                     ('V', 5),
                     ('IV', 4),
                     ('I', 1))
# Validates well-formed numerals in the range 1..4999 (re.VERBOSE ignores
# the whitespace and # comments inside the pattern string).
roman_numeral_pattern = re.compile('''
    ^                   # beginning of string
    M{0,4}              # thousands - 0 to 4 M's
    (CM|CD|D?C{0,3})    # hundreds - 900 (CM), 400 (CD), 0-300 (0 to 3 C's),
                        #            or 500-800 (D, followed by 0 to 3 C's)
    (XC|XL|L?X{0,3})    # tens - 90 (XC), 40 (XL), 0-30 (0 to 3 X's),
                        #        or 50-80 (L, followed by 0 to 3 X's)
    (IX|IV|V?I{0,3})    # ones - 9 (IX), 4 (IV), 0-3 (0 to 3 I's),
                        #        or 5-8 (V, followed by 0 to 3 I's)
    $                   # end of string
    ''', re.VERBOSE)
def to_roman(n):
    '''Convert an integer (1..4999) to its Roman numeral string.

    Raises NotIntegerError for non-int input and OutOfRangeError for
    values outside 1..4999.
    '''
    if not isinstance(n, int):
        raise NotIntegerError('non-integers can not be converted')
    if not (0 < n < 5000):
        raise OutOfRangeError('number out of range (must be 1..4999)')
    # divmod replaces the repeated-subtraction loop: each table entry can
    # repeat at most a bounded number of times, and the subtractive pairs
    # in the table keep the greedy strategy exact.
    pieces = []
    for numeral, value in roman_numeral_map:
        repeats, n = divmod(n, value)
        pieces.append(numeral * repeats)
    return ''.join(pieces)
def from_roman(s):
    '''Convert a Roman numeral string to an integer.

    Raises InvalidRomanNumeralError for non-string, blank, or malformed
    input.
    '''
    if not isinstance(s, str):
        raise InvalidRomanNumeralError('Input must be a string')
    if not s:
        raise InvalidRomanNumeralError('Input can not be blank')
    if not roman_numeral_pattern.search(s):
        raise InvalidRomanNumeralError('Invalid Roman numeral: {0}'.format(s))
    # Greedy scan left-to-right: consume each numeral token as many times
    # as it appears at the current position. The regex check above
    # guarantees the whole string is consumed.
    total = 0
    pos = 0
    for numeral, value in roman_numeral_map:
        width = len(numeral)
        while s.startswith(numeral, pos):
            total += value
            pos += width
    return total
# Copyright (c) 2009, Mark Pilgrim, All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 'AS IS'
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
|
ctasims/Dive-Into-Python-3
|
examples/roman9.py
|
Python
|
mit
| 3,731
|
[
"VisIt"
] |
94aa6c196a2781a597f48d0fbf9e44bf42ee37c1b7467d59dcef5d709d9b23b0
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.