# (extraction artifact — dataset column header: "text string | size int64 | token_count int64 |" / "|---|---|---|")
import os
from sqlalchemy import Column, DateTime, ForeignKey, Integer, String
from sqlalchemy import create_engine
from sqlalchemy.orm import declarative_base, relationship
from sqlalchemy.sql import func
Base = declarative_base()
class User(Base):
    '''
    A discord user.
    '''
    __tablename__ = 'User'
    id = Column(Integer, primary_key=True)
    # Time when this user was first observed (set by the SQL server, not the client).
    addition_time = Column(DateTime(timezone=True), server_default=func.now())
    # User's username on discord on the message which prompted them to be added
    # to the DB. They might go by a different username now.
    discord_username = Column(String)
    # Discord ID of the user. Immutable on Discord's side.
    # NOTE(review): Discord snowflake IDs exceed 32 bits. SQLite stores INTEGER
    # as up to 64 bits so this works here, but use BigInteger if porting to
    # another backend — confirm before migrating.
    discord_id = Column(Integer, index=True, unique=True)
    # Prompts which were triggered for this user.
    triggered_prompts = relationship('TriggeredPrompt')

    def __repr__(self):
        return (f'User(id={self.id}, addition_time={self.addition_time}, '
                f'discord_username={self.discord_username}, '
                f'discord_id={self.discord_id})')

    @classmethod
    def new_user(cls, discord_user_name, discord_user_id):
        'Create a new (not yet persisted) user object.'
        return User(
            discord_username=discord_user_name,
            discord_id=discord_user_id)

    @classmethod
    def get_user_with_discord_id(cls, session, discord_user_id):
        'Get user by Discord ID, or None if no such user exists.'
        return (session
                .query(User)
                .filter_by(discord_id=discord_user_id)
                .scalar())

    @classmethod
    def get_triggered_prompt_for_user(cls, session, prompt_name, discord_user_id):
        '''Get a triggered prompt for a given user, or returns None if the user
        never triggered that prompt.'''
        # Bug fix: .all() returned a (possibly empty) list and could never be
        # None, contradicting the documented contract and the singular method
        # name. .first() yields a single TriggeredPrompt or None.
        return (session
                .query(TriggeredPrompt)
                .filter_by(prompt_name=prompt_name)
                .join(User)
                .filter(User.discord_id == discord_user_id)
                .first())
class TriggeredPrompt(Base):
    '''A record that a named prompt was triggered for (shown to) a user.'''
    __tablename__ = 'TriggeredPrompt'
    # Owning user; part of the composite primary key.
    user_id = Column(Integer, ForeignKey('User.id'), primary_key=True)
    # Name of triggered prompt. See the config.yml file for details. Represented
    # as the keys of the config['prompts'] dictionary.
    prompt_name = Column(String, primary_key=True)
    # Time when the message was recorded as shown *on the SQL server side.* This
    # time is only loosely tied to the actual send time of the message.
    trigger_time = Column(
        DateTime(timezone=True),
        server_default=func.now(),
        primary_key=True)
    # Full text of the message which triggered this showing.
    trigger_message = Column(String)
    # Trigger string that was matched against the message.
    trigger_string = Column(String)

    @classmethod
    def record_prompt_for_user(
            cls, user, prompt_name, msg_content, trigger_string):
        '''Build a (not yet persisted) trigger record for the given user.

        NOTE(review): reads user.id directly, so the user presumably must have
        been flushed/committed already (id populated) — confirm at call sites.
        '''
        return TriggeredPrompt(
            user_id=user.id,
            prompt_name=prompt_name,
            trigger_message=msg_content,
            trigger_string=trigger_string)
def get_engine(db_file):
    """Create a SQLAlchemy engine backed by the given SQLite database file.

    The path is made absolute so the engine does not depend on the process's
    current working directory.
    """
    absolute_path = os.path.abspath(db_file)
    return create_engine(f'sqlite:///{absolute_path}', echo=False)
# (extraction artifact — file boundary marker: "| 3,296 | 978 |")
class InvalidFullNameException(Exception):
    '''Raised when a provided full name fails validation.'''
    pass
class InvalidCPFException(Exception):
    '''Raised when a CPF (presumably the Brazilian taxpayer ID) fails validation.'''
    pass
class EmergencyMeetingException(Exception):
    '''Raised to signal an "emergency meeting" condition (domain-specific; see call sites).'''
    pass
# (extraction artifact — file boundary marker: "| 156 | 43 |")
"""
Core module.
Normally, do not add new construction methods here, do this in scene.py instead.
"""
from enum import Enum, auto, unique
import itertools
import re
import sympy as sp
from typing import List
from .figure import Figure
from .reason import Reason
from .util import LazyComment, Comment, divide
class CoreScene:
layers = ('user', 'auxiliary', 'invisible')
@staticmethod
def layers_by(max_layer):
return CoreScene.layers[0:CoreScene.layers.index(max_layer) + 1]
    class Object:
        """
        Common ancestor for all geometric objects like point, line, circle
        """
        def __init__(self, scene, **kwargs):
            # Register this object on the scene, assigning a unique label.
            assert isinstance(scene, CoreScene)
            label = kwargs.get('label')
            if label:
                assert scene.get(label) is None, 'Object with label `%s` already exists' % label
            else:
                # Auto-generate a unique label from the class prefix, e.g. Pt_0, Pt_1, ...
                pattern = self.__class__.prefix + '%d'
                for index in itertools.count():
                    label = pattern % index
                    if scene.get(label) is None:
                        self.label = label
                        self.auto_label = True
                        break
            self.layer = kwargs.get('layer', 'user')
            assert self.layer in CoreScene.layers
            self.extra_labels = set()
            self.scene = scene
            # All remaining keyword arguments become attributes (the explicit
            # label when given, comment, and construction parameters such as
            # origin/base/delta/coef).
            self.__dict__.update(kwargs)
            scene.add(self)

        def with_extra_args(self, **kwargs):
            """Merge extra construction kwargs into an existing object.

            Promotes the object to a more visible layer if requested, records
            alternative labels, and sets attributes not already present.
            Returns self for chaining. No-op on a frozen scene.
            """
            if self.scene.is_frozen:
                return self
            layer = kwargs.get('layer', 'user')
            # If the current layer is more hidden than the requested one, promote.
            if self.layer not in CoreScene.layers_by(layer):
                self.layer = layer
            for key in kwargs:
                if key == 'layer':
                    continue
                value = kwargs[key]
                if key == 'label' and value and value != self.label:
                    if hasattr(self, 'auto_label'):
                        # Replace an auto-generated label with the explicit one.
                        self.label = value
                        delattr(self, 'auto_label')
                    else:
                        # Keep the explicit label; remember the alias.
                        self.extra_labels.add(value)
                elif not hasattr(self, key):
                    self.__dict__[key] = value
            return self

        @property
        def name(self):
            # Subclasses may override to present a nicer human-readable name.
            return self.label

        def __str__(self):
            return self.name

        @property
        def description(self):
            """One-line debug description: class, label/name, and key attributes."""
            dct = {}
            for key in ('layer', 'extra_labels', 'all_points', 'comment'):
                value = self.__dict__.get(key)
                if value is None:
                    continue
                if isinstance(value, Enum):
                    dct[key] = value.name
                elif isinstance(value, CoreScene.Object):
                    dct[key] = value.label
                elif isinstance(value, (list, tuple, set)):
                    if value:
                        dct[key] = [elt.label if isinstance(elt, CoreScene.Object) else str(elt) for elt in value]
                else:
                    dct[key] = str(value)
            if self.name == self.label:
                return '%s %s %s' % (self.__class__.__name__, self, dct)
            else:
                return '%s %s %s %s' % (self.__class__.__name__, self.label, self.name, dct)
    class Point(Object, Figure):
        prefix = 'Pt_'

        class Origin(Enum):
            """How a point was constructed."""
            free = auto()
            translated = auto()
            perp = auto()
            line = auto()
            circle = auto()
            line_x_line = auto()
            circle_x_line = auto()
            circle_x_circle = auto()

        def __init__(self, scene, origin, **kwargs):
            assert isinstance(origin, CoreScene.Point.Origin), 'origin must be a Point.Origin, not %s' % type(origin)
            CoreScene.Object.__init__(self, scene, origin=origin, **kwargs)
            # Caches: end point => Vector, line => perpendicular Line.
            self.__vectors = {}
            self.__perpendiculars = {}

        def translated_point(self, vector, coef=1, **kwargs):
            """Construct (or reuse) the point `self + coef * vector`."""
            self.scene.assert_vector(vector)
            # Trivial cases that need no new point.
            if coef == 0:
                return self
            if coef == 1 and vector.start == self:
                return vector.end
            if coef == -1 and vector.end == self:
                return vector.start
            # Reuse an existing translation with identical parameters.
            for pt in self.scene.points():
                if pt.origin == CoreScene.Point.Origin.translated and pt.base == self and pt.delta == vector and pt.coef == coef:
                    return pt
            if 'comment' not in kwargs:
                kwargs = dict(kwargs)
                if coef == 1:
                    pattern = 'translation of $%{point:pt}$ by vector $%{vector:vector}$'
                else:
                    pattern = 'translation of $%{point:pt}$ by vector $%{multiplier:coef} %{vector:vector}$'
                kwargs['comment'] = Comment(
                    pattern,
                    {'pt': self, 'coef': coef, 'vector': vector}
                )
            new_point = CoreScene.Point(
                self.scene,
                CoreScene.Point.Origin.translated,
                base=self, delta=vector, coef=coef, **kwargs
            )
            # Register geometric facts implied by the construction.
            if self in {vector.start, vector.end}:
                new_point.collinear_constraint(vector.start, vector.end)
            if coef > 0:
                self.vector(new_point).parallel_constraint(vector, guaranteed=True)
            else:
                new_point.vector(self).parallel_constraint(vector, guaranteed=True)
            self.segment(new_point).ratio_constraint(vector.as_segment, sp.Abs(coef), guaranteed=True)
            return new_point

        def symmetric_point(self, centre, **kwargs):
            """Construct the reflection of self about centre.

            Also records that centre is the midpoint of the segment
            (self, symmetric).
            """
            symmetric = CoreScene.Point(
                self.scene, CoreScene.Point.Origin.translated,
                base=centre, delta=self.vector(centre), coef=1, **kwargs
            )
            symmetric.collinear_constraint(self, centre, guaranteed=True)
            from .property import MiddleOfSegmentProperty
            self.scene.add_property(MiddleOfSegmentProperty(centre, self.segment(symmetric)))
            return symmetric

        def perpendicular_line(self, line, **kwargs):
            """
            Constructs a line through the point, perpendicular to the given line.
            """
            self.scene.assert_line(line)
            # One perpendicular per line is cached per point.
            existing = self.__perpendiculars.get(line)
            if existing:
                return existing.with_extra_args(**kwargs)
            new_point = CoreScene.Point(
                self.scene,
                CoreScene.Point.Origin.perp,
                point=self, line=line,
                layer='invisible'
            )
            if 'comment' not in kwargs:
                kwargs = dict(kwargs)
                kwargs['comment'] = Comment(
                    'perpendicular from $%{point:pt}$ to $%{line:line}$',
                    {'pt': self, 'line': line}
                )
            new_line = self.line_through(new_point, **kwargs)
            if self not in line:
                # The return value is unused: creating the intersection point
                # registers the foot of the perpendicular as a scene object.
                crossing = new_line.intersection_point(line, layer='auxiliary', comment=Comment(
                    'foot of the perpendicular from $%{point:pt}$ to $%{line:line}$',
                    {'pt': self, 'line': line}
                ))
            line.perpendicular_constraint(new_line, guaranteed=True)
            self.__perpendiculars[line] = new_line
            return new_line

        def line_through(self, point, **kwargs):
            """Construct (or reuse) the line through self and point."""
            self.scene.assert_point(point)
            assert self != point, 'Cannot create a line by a single point'
            self.not_equal_constraint(point)
            # Reuse an existing line that already contains both points.
            for existing in self.scene.lines():
                if self in existing and point in existing:
                    return existing.with_extra_args(**kwargs)
            if 'comment' not in kwargs:
                kwargs = dict(kwargs)
                kwargs['comment'] = Comment(
                    'Line through $%{point:pt0}$ and $%{point:pt1}$',
                    {'pt0': self, 'pt1': point}
                )
            line = CoreScene.Line(self.scene, point0=self, point1=point, **kwargs)
            if not self.scene.is_frozen:
                # Propagate known collinearities: if two of a collinear triple lie
                # on the new line, the third does too.
                for cnstr in self.scene.constraints(Constraint.Kind.collinear):
                    if len([pt for pt in line.all_points if pt in cnstr.params]) == 2:
                        for pt in cnstr.params:
                            if pt not in line.all_points:
                                line.all_points.append(pt)
            return line

        def circle_through(self, point, **kwargs):
            """Construct the circle centred at self passing through point."""
            if 'comment' not in kwargs:
                kwargs = dict(kwargs)
                kwargs['comment'] = Comment(
                    'Circle with centre $%{point:centre}$ through $%{point:pt}$',
                    {'centre': self, 'pt': point}
                )
            return self.circle_with_radius(self.segment(point), **kwargs)

        def circle_with_radius(self, radius, **kwargs):
            """Construct the circle centred at self with the given segment as radius."""
            self.scene.assert_segment(radius)
            assert radius.points[0] != radius.points[1], 'Cannot create a circle of zero radius'
            if 'comment' not in kwargs:
                kwargs = dict(kwargs)
                kwargs['comment'] = Comment(
                    'Circle with centre $%{point:centre}$ with radius $%{segment:radius}$',
                    {'centre': self, 'radius': radius}
                )
            return CoreScene.Circle(
                self.scene, centre=self, radius=radius, **kwargs
            )

        def vector(self, point):
            """Return the (cached) directed vector from self to point."""
            assert self != point, 'Cannot create vector from a single point'
            vec = self.__vectors.get(point)
            if vec is None:
                vec = CoreScene.Vector(self, point)
                self.__vectors[point] = vec
            return vec

        def segment(self, point):
            """Return the (scene-cached, unordered) segment between self and point."""
            assert self != point, 'Cannot create segment from a single point'
            return self.scene._get_segment(self, point)

        def angle(self, point0, point1):
            """Return the angle at self between rays towards point0 and point1."""
            assert point0 != point1, 'Angle endpoints should be different'
            return self.scene._get_angle(self.vector(point0), self.vector(point1))

        def belongs_to(self, line_or_circle):
            """Record that this point lies on the given line or circle."""
            self.scene.assert_line_or_circle(line_or_circle)
            if not self.scene.is_frozen and self not in line_or_circle.all_points:
                line_or_circle.all_points.append(self)

        def not_equal_constraint(self, A, **kwargs):
            """
            The current point does not coincide with A.
            """
            if self.scene.is_frozen:
                return
            # Deduplicate: update the existing constraint instead of adding one.
            for cnstr in self.scene.constraints(Constraint.Kind.not_equal):
                if set(cnstr.params) == {self, A}:
                    cnstr.update(kwargs)
                    return
            self.scene.constraint(Constraint.Kind.not_equal, self, A, **kwargs)

        def not_collinear_constraint(self, A, B, **kwargs):
            """
            The current point is not collinear with A and B.
            """
            for cnstr in self.scene.constraints(Constraint.Kind.not_collinear):
                if set(cnstr.params) == {self, A, B}:
                    cnstr.update(kwargs)
                    return
            self.scene.constraint(Constraint.Kind.not_collinear, self, A, B, **kwargs)
            # Non-collinearity implies pairwise distinctness.
            self.not_equal_constraint(A, guaranteed=True, **kwargs)
            self.not_equal_constraint(B, guaranteed=True, **kwargs)
            A.not_equal_constraint(B, guaranteed=True, **kwargs)

        def collinear_constraint(self, A, B, **kwargs):
            """
            The current point is collinear with A and B.
            """
            cnstr = self.scene.constraint(Constraint.Kind.collinear, self, A, B, **kwargs)
            if not self.scene.is_frozen:
                # Extend any existing line containing two of the three points.
                for line in self.scene.lines():
                    if len([pt for pt in line.all_points if pt in cnstr.params]) == 2:
                        for pt in cnstr.params:
                            if pt not in line.all_points:
                                line.all_points.append(pt)
            return cnstr

        def distance_constraint(self, A, distance, **kwargs):
            """
            Distance to the point A equals to the given distance.
            The given distance must be a non-negative number
            """
            if isinstance(A, str):
                # Accept a label instead of a Point object.
                A = self.scene.get(A)
            return self.segment(A).length_constraint(distance, **kwargs)

        def opposite_side_constraint(self, point, line, **kwargs):
            """
            The current point lies on the opposite side to the line than the given point.
            """
            # Tolerate swapped arguments (line, point).
            if isinstance(point, CoreScene.Line) and isinstance(line, CoreScene.Point):
                point, line = line, point
            for cnstr in self.scene.constraints(Constraint.Kind.opposite_side):
                if line == cnstr.params[2] and set(cnstr.params[0:2]) == {self, point}:
                    cnstr.update(kwargs)
                    return
            #self.not_collinear_constraint(line.point0, line.point1, **kwargs)
            #point.not_collinear_constraint(line.point0, line.point1, **kwargs)
            self.scene.constraint(Constraint.Kind.opposite_side, self, point, line, **kwargs)

        def same_side_constraint(self, point, line, **kwargs):
            """
            The point lies on the same side to the line as the given point.
            """
            # Tolerate swapped arguments (line, point).
            if isinstance(point, CoreScene.Line) and isinstance(line, CoreScene.Point):
                point, line = line, point
            for cnstr in self.scene.constraints(Constraint.Kind.same_side):
                if line == cnstr.params[2] and set(cnstr.params[0:2]) == {self, point}:
                    cnstr.update(kwargs)
                    return
            # Neither point may lie on the line itself.
            self.not_collinear_constraint(line.point0, line.point1, **kwargs)
            point.not_collinear_constraint(line.point0, line.point1, **kwargs)
            self.scene.constraint(Constraint.Kind.same_side, self, point, line, **kwargs)

        def same_direction_constraint(self, A, B, **kwargs):
            """
            Vectors (self, A) and (self, B) have the same direction
            """
            for cnstr in self.scene.constraints(Constraint.Kind.same_direction):
                if self == cnstr.params[0] and set(cnstr.params[1:3]) == {A, B}:
                    cnstr.update(kwargs)
                    return
            self.not_equal_constraint(A)
            self.not_equal_constraint(B)
            # A and B lie on one ray from self, hence on one line.
            A.belongs_to(self.line_through(B, layer='auxiliary'))
            self.scene.constraint(Constraint.Kind.same_direction, self, A, B, **kwargs)

        def inside_constraint(self, obj, **kwargs):
            """
            The point is inside the object (angle or segment)
            """
            if isinstance(obj, CoreScene.Segment):
                self.collinear_constraint(*obj.points, **kwargs)
                self.scene.constraint(Constraint.Kind.inside_segment, self, obj, **kwargs)
            elif isinstance(obj, CoreScene.Angle) and obj.vertex:
                self.scene.constraint(Constraint.Kind.inside_angle, self, obj, **kwargs)
            else:
                assert False, 'Cannot declare point lying inside %s' % obj

        def inside_triangle_constraint(self, triangle, **kwargs):
            """
            The point is inside the triangle
            """
            triangle.points[0].not_collinear_constraint(*triangle.points[1:])
            if 'comment' not in kwargs:
                kwargs = dict(kwargs)
                kwargs['comment'] = Comment(
                    'point $%{point:pt}$ is inside $%{triangle:triangle}$',
                    {'pt': self, 'triangle': triangle}
                )
            # Inside the triangle == inside each of its three angles.
            for angle in triangle.angles:
                self.inside_constraint(angle, **kwargs)
            from .property import SameOrOppositeSideProperty
            # For each side, the point is on the same side as the opposite vertex.
            for vertex, side in zip(triangle.points, triangle.sides):
                self.scene.add_property(SameOrOppositeSideProperty(side, vertex, self, True))
    class Line(Object):
        prefix = 'Ln_'

        def __init__(self, scene, **kwargs):
            # point0 / point1 arrive via kwargs and become attributes
            # in Object.__init__ before being read here.
            CoreScene.Object.__init__(self, scene, **kwargs)
            # All points known to lie on this line.
            self.all_points = [self.point0, self.point1]

        @property
        def name(self):
            # For auto-labelled lines, prefer a "(A B)" name built from the
            # first pair of user-layer points on the line.
            if hasattr(self, 'auto_label') and self.auto_label:
                for points in itertools.combinations(self.all_points, 2):
                    if points[0].layer == 'user' and points[1].layer == 'user':
                        return '(%s %s)' % (points[0].name, points[1].name)
            return super().name

        def free_point(self, **kwargs):
            """Construct a new point constrained to lie on this line."""
            if 'comment' not in kwargs:
                kwargs = dict(kwargs)
                kwargs['comment'] = Comment('point on line $%{line:line}$', {'line': self})
            point = CoreScene.Point(self.scene, CoreScene.Point.Origin.line, line=self, **kwargs)
            point.belongs_to(self)
            return point

        def intersection_point(self, obj, **kwargs):
            """
            Creates an intersection point of the line and given object (line or circle).
            Requires a constraint for determinate placement if the object a circle
            """
            self.scene.assert_line_or_circle(obj)
            assert self != obj, 'The line does not cross itself'
            if 'comment' not in kwargs:
                kwargs = dict(kwargs)
                kwargs['comment'] = LazyComment('crossing point of %s and %s', self.label, obj.label)
            if isinstance(obj, CoreScene.Circle):
                # NOTE(review): no reuse check in the circle case (unlike the
                # line-line branch below) — a new point is always created.
                crossing = CoreScene.Point(
                    self.scene,
                    CoreScene.Point.Origin.circle_x_line,
                    circle=obj, line=self, **kwargs
                )
            else:
                # Reuse a point already known to lie on both lines.
                existing = next((pt for pt in self.all_points if pt in obj), None)
                if existing:
                    return existing.with_extra_args(**kwargs)
                crossing = CoreScene.Point(
                    self.scene,
                    CoreScene.Point.Origin.line_x_line,
                    line0=self, line1=obj, **kwargs
                )
            crossing.belongs_to(self)
            crossing.belongs_to(obj)
            return crossing

        def perpendicular_constraint(self, other, **kwargs):
            """
            self ⟂ other
            """
            # Delegates to the segment-level perpendicularity constraint.
            self.point0.segment(self.point1).perpendicular_constraint(other.point0.segment(other.point1), **kwargs)

        def __contains__(self, obj):
            # Membership: a point (or both endpoints of a vector) known to lie
            # on this line. None is never contained.
            if obj is None:
                return False
            if isinstance(obj, CoreScene.Point):
                return obj in self.all_points
            if isinstance(obj, CoreScene.Vector):
                return obj.start in self.all_points and obj.end in self.all_points
            assert False, 'Operator not defined for %s and Line' % type(obj)
    class Circle(Object):
        prefix = 'Circ_'

        def __init__(self, scene, **kwargs):
            # centre / radius arrive via kwargs and become attributes
            # in Object.__init__ before being read here.
            CoreScene.Object.__init__(self, scene, **kwargs)
            # All points known to lie on this circle.
            self.all_points = []
            if not scene.is_frozen:
                # If the radius segment is anchored at the centre, its other
                # endpoint is known to lie on the circle.
                if self.centre == self.radius.points[0]:
                    self.all_points.append(self.radius.points[1])
                elif self.centre == self.radius.points[1]:
                    self.all_points.append(self.radius.points[0])

        def centre_point(self, **kwargs):
            """Return the centre, merging in any extra construction kwargs."""
            return self.centre.with_extra_args(**kwargs)

        def free_point(self, **kwargs):
            """Construct a new point constrained to lie on this circle."""
            if 'comment' not in kwargs:
                kwargs = dict(kwargs)
                kwargs['comment'] = LazyComment('point on circle %s', self.label)
            point = CoreScene.Point(self.scene, CoreScene.Point.Origin.circle, circle=self, **kwargs)
            point.belongs_to(self)
            return point

        def intersection_point(self, obj, **kwargs):
            """
            Creates an intersection point of the circle and given object (line or circle).
            Requires a constraint for determinate placement
            """
            self.scene.assert_line_or_circle(obj)
            assert self != obj, 'The circle does not cross itself'
            if 'comment' not in kwargs:
                kwargs = dict(kwargs)
                kwargs['comment'] = LazyComment('crossing point of %s and %s', self.label, obj.label)
            if isinstance(obj, CoreScene.Circle):
                crossing = CoreScene.Point(
                    self.scene,
                    CoreScene.Point.Origin.circle_x_circle,
                    circle0=self, circle1=obj, **kwargs
                )
            else:
                crossing = CoreScene.Point(
                    self.scene,
                    CoreScene.Point.Origin.circle_x_line,
                    circle=self, line=obj, **kwargs
                )
            crossing.belongs_to(self)
            crossing.belongs_to(obj)
            return crossing

        def __contains__(self, obj):
            # Membership: a point known to lie on this circle. None never is.
            if obj is None:
                return False
            if isinstance(obj, CoreScene.Point):
                return obj in self.all_points
            assert False, 'Operator not defined for %s and Circle' % type(obj)
    class Vector(Figure):
        """A directed segment from start to end. Created via Point.vector()."""
        def __init__(self, start, end):
            assert isinstance(start, CoreScene.Point)
            assert isinstance(end, CoreScene.Point)
            assert start.scene == end.scene
            self.start = start
            self.end = end
            self.points = (start, end)
            # Lazily created undirected counterpart.
            self.__segment = None

        @property
        def as_segment(self):
            """The undirected segment with the same endpoints (cached)."""
            if self.__segment is None:
                self.__segment = self.start.segment(self.end)
            return self.__segment

        def angle(self, other):
            """Return the angle between this vector and other.

            As a side effect (on an unfrozen scene), ensures each side vector
            carries a non-zero-length constraint unless a not_equal constraint
            for its endpoints already exists.
            """
            angle = self.scene._get_angle(self, other)
            if not self.scene.is_frozen:
                for vec in (self, other):
                    for cnstr in vec.scene.constraints(Constraint.Kind.not_equal):
                        if set(cnstr.params) == set(vec.points):
                            break
                    else:
                        # No existing not_equal constraint for this vector's endpoints.
                        vec.as_segment.non_zero_length_constraint(comment=Comment(
                            '$%{vector:side}$ is side of $%{angle:angle}$',
                            {'side': vec, 'angle': angle}
                        ))
            return angle

        @property
        def scene(self):
            return self.start.scene

        @property
        def reversed(self):
            """The vector pointing the opposite way (end -> start)."""
            return self.end.vector(self.start)

        def parallel_constraint(self, vector, **kwargs):
            """
            Self and vector have the same direction.
            This constraint also fulfilled if at least one of the vectors has zero length.
            """
            assert isinstance(vector, CoreScene.Vector)
            assert self.scene == vector.scene
            return self.scene.constraint(Constraint.Kind.parallel_vectors, self, vector, **kwargs)

        def __str__(self):
            return '%s %s' % (self.start, self.end)
def _get_segment(self, point0, point1):
assert isinstance(point0, CoreScene.Point)
assert isinstance(point1, CoreScene.Point)
assert point0.scene == self
assert point1.scene == self
key = frozenset([point0, point1])
#key = (point0, point1)
segment = self.__segments.get(key)
if segment is None:
segment = CoreScene.Segment(point0, point1)
self.__segments[key] = segment
return segment
    class Segment(Figure):
        """An undirected segment between two points. Created via Point.segment()."""
        def __init__(self, pt0, pt1):
            self.points = (pt0, pt1)
            # Order-independent identity of the endpoints.
            self.point_set = frozenset(self.points)
            self.__middle_point = None

        @property
        def scene(self):
            return self.points[0].scene

        def middle_point(self, **kwargs):
            """
            Constructs middle point of the segment
            """
            if self.__middle_point:
                return self.__middle_point.with_extra_args(**kwargs)
            delta = self.points[0].vector(self.points[1])
            coef = divide(1, 2)
            # Reuse an existing half-way translation from either endpoint;
            # otherwise (for/else) create a new translated point.
            for pt in self.scene.points():
                if pt.origin == CoreScene.Point.Origin.translated:
                    if pt.base == self.points[0] and pt.delta == delta and pt.coef == coef:
                        middle = pt
                        break
                    if pt.base == self.points[1] and pt.delta == delta.reversed and pt.coef == coef:
                        middle = pt
                        break
            else:
                middle = CoreScene.Point(
                    self.scene, CoreScene.Point.Origin.translated,
                    base=self.points[0], delta=delta, coef=coef, **kwargs
                )
            # NOTE: constraint and property are (re)registered even when an
            # existing point was reused above.
            middle.collinear_constraint(*self.points, guaranteed=True)
            from .property import MiddleOfSegmentProperty
            self.scene.add_property(MiddleOfSegmentProperty(middle, self))
            self.__middle_point = middle
            return middle

        def free_point(self, **kwargs):
            """Construct a new point constrained to lie strictly inside the segment."""
            if 'comment' not in kwargs:
                kwargs = dict(kwargs)
                kwargs['comment'] = Comment('point on segment $%{segment:seg}$', {'seg': self})
            point = self.line_through(layer='auxiliary').free_point(**kwargs)
            point.inside_constraint(self)
            return point

        def line_through(self, **kwargs):
            """The line containing this segment."""
            return self.points[0].line_through(self.points[1], **kwargs)

        def perpendicular_bisector_line(self, **kwargs):
            """
            Perpendicular bisector
            """
            middle = self.middle_point(layer='auxiliary')
            line = self.line_through(layer='auxiliary')
            if kwargs.get('comment') is None:
                kwargs = dict(kwargs)
                kwargs['comment'] = Comment(
                    'perpendicular bisector of $%{segment:seg}$',
                    {'seg': self}
                )
            bisector = middle.perpendicular_line(line, **kwargs)
            comment=Comment(
                '$%{line:bisector}$ is the perpendicular bisector of $%{segment:seg}$',
                {'bisector': bisector, 'seg': self}
            )
            bisector.perpendicular_constraint(line, comment=comment)
            return bisector

        def perpendicular_constraint(self, other, **kwargs):
            """
            self ⟂ other
            """
            # Deduplicate: update an existing constraint for the same pair.
            for cnstr in self.scene.constraints(Constraint.Kind.perpendicular):
                if set(cnstr.params) == {self, other}:
                    cnstr.update(kwargs)
                    return
            self.scene.constraint(Constraint.Kind.perpendicular, self, other, **kwargs)

        def ratio_constraint(self, segment, coef, **kwargs):
            """
            |self| == |segment| * coef
            coef is a non-zero number
            """
            assert isinstance(segment, CoreScene.Segment)
            assert self.scene == segment.scene
            assert coef != 0
            for cnstr in self.scene.constraints(Constraint.Kind.length_ratio):
                if set(cnstr.params) == {self, segment, coef}:
                    cnstr.update(kwargs)
                    return
            comment = kwargs.get('comment')
            if not comment:
                kwargs = dict(kwargs)
                if coef == 1:
                    pattern = '$|%{segment:seg0}| = |%{segment:seg1}|$'
                else:
                    pattern = '$|%{segment:seg0}| = %{multiplier:coef} |%{segment:seg1}|$'
                kwargs['comment'] = Comment(
                    pattern, {'seg0': self, 'seg1': segment, 'coef': coef}
                )
            return self.scene.constraint(Constraint.Kind.length_ratio, self, segment, coef, **kwargs)

        def congruent_constraint(self, segment, **kwargs):
            """
            |self| == |vector|
            """
            self.ratio_constraint(segment, 1, **kwargs)

        def non_zero_length_constraint(self, **kwargs):
            """
            |self| > 0
            """
            # Equivalent to: the endpoints do not coincide.
            self.points[0].not_equal_constraint(self.points[1], **kwargs)

        def length_constraint(self, length, **kwargs):
            """
            |self| == length
            """
            if length > 0:
                self.non_zero_length_constraint(**kwargs)
            #TODO: equal_constraint otherwise?
            self.scene.constraint(Constraint.Kind.distance, self, length, **kwargs)

        def __str__(self):
            return '%s %s' % self.points
    def _get_angle(self, vector0, vector1):
        """Return the unique Angle for this unordered pair of vectors (cached).

        On first creation, if the two vectors share an endpoint without
        sharing their start points, register the supplementary-angle relation
        (the angle plus its adjacent angle sums to 180).
        """
        assert isinstance(vector0, CoreScene.Vector)
        assert isinstance(vector1, CoreScene.Vector)
        assert vector0.scene == self
        assert vector1.scene == self
        key = frozenset([vector0, vector1])
        angle = self.__angles.get(key)
        if angle is None:
            angle = CoreScene.Angle(vector0, vector1)
            if angle.vertex is None and angle.pseudo_vertex:
                if angle.vectors[0].end == angle.vectors[1].start:
                    from .property import SumOfTwoAnglesProperty
                    #TODO add comment
                    self.add_property(SumOfTwoAnglesProperty(
                        angle, angle.vectors[0].reversed.angle(angle.vectors[1]), 180
                    ))
                elif angle.vectors[0].start == angle.vectors[1].end:
                    from .property import SumOfTwoAnglesProperty
                    #TODO add comment
                    self.add_property(SumOfTwoAnglesProperty(
                        angle, angle.vectors[0].angle(angle.vectors[1].reversed), 180
                    ))
                elif angle.vectors[0].end == angle.vectors[1].end:
                    #TODO vertical angles
                    pass
            self.__angles[key] = angle
        return angle
    class Angle(Figure):
        """An angle between two vectors. Created via scene._get_angle()."""
        def __init__(self, vector0, vector1):
            assert vector0 != vector1 and vector0 != vector1.reversed
            self.vectors = (vector0, vector1)
            # vertex is set only when both vectors emanate from the same point;
            # pseudo_vertex is any point the two vectors share (or None).
            self.vertex = vector0.start if vector0.start == vector1.start else None
            if self.vertex:
                self.pseudo_vertex = self.vertex
            else:
                self.pseudo_vertex = next((p for p in vector0.points if p in vector1.points), None)
            self.point_set = frozenset([*vector0.points, *vector1.points])
            self.__bisector = None

        @property
        def scene(self):
            return self.vectors[0].scene

        @property
        def endpoints(self):
            """The two non-vertex endpoints (only defined for an angle with a vertex)."""
            assert self.vertex, 'Cannot locate endpoints of angle with no vertex'
            return (self.vectors[0].end, self.vectors[1].end)

        def bisector_line(self, **kwargs):
            """Construct (and cache) the bisector line of this angle.

            Rhombus construction: from the (pseudo-)vertex v, take e0 and e1 on
            the two rays, place X on ray v->e1 at distance |v e0| (circle/line
            intersection), then Y = X translated by vector v->e0. The sides
            v->X and X->Y have equal length, so v->Y is a rhombus diagonal and
            bisects the angle.
            """
            assert self.pseudo_vertex, 'Cannot construct bisector of angle %s with no vertex' % self
            if self.__bisector:
                return self.__bisector.with_extra_args(**kwargs)
            v = self.pseudo_vertex
            vec0 = self.vectors[0]
            e0 = vec0.end if v == vec0.start else v.translated_point(vec0, layer='invisible')
            vec1 = self.vectors[1]
            e1 = vec1.end if v == vec1.start else v.translated_point(vec1, layer='invisible')
            circle = v.circle_through(e0, layer='invisible')
            line = v.line_through(e1, layer='invisible')
            X = circle.intersection_point(line, layer='invisible')
            # Pick the intersection on the e1 side of v.
            v.same_direction_constraint(X, e1)
            Y = X.translated_point(v.vector(e0), layer='invisible')
            self.point_on_bisector_constraint(Y, guaranteed=True)
            if kwargs.get('comment') is None:
                kwargs = dict(kwargs)
                kwargs['comment'] = Comment('bisector of $%{angle:angle}$', {'angle': self})
            self.__bisector = v.line_through(Y, **kwargs)
            return self.__bisector

        def point_on_bisector_constraint(self, point, **kwargs):
            """Declare that the ray (pseudo_vertex -> point) bisects this angle."""
            bisector = self.pseudo_vertex.vector(point)
            if kwargs.get('comment') is None:
                kwargs = dict(kwargs)
                kwargs['comment'] = Comment(
                    '$%{ray:bisector}$ is the bisector of $%{angle:angle}$',
                    {'bisector': bisector, 'angle': self}
                )
            angle0 = self.vectors[0].angle(bisector)
            angle1 = self.vectors[1].angle(bisector)
            if self.vertex:
                point.inside_constraint(self, **kwargs)
            # The whole angle is twice each half, and the halves are equal.
            self.ratio_constraint(angle0, 2, **kwargs)
            self.ratio_constraint(angle1, 2, **kwargs)
            angle0.ratio_constraint(angle1, 1, **kwargs)

        def ratio_constraint(self, angle, ratio, **kwargs):
            # self = angle * ratio
            self.scene.assert_angle(angle)
            self.scene.constraint(Constraint.Kind.angles_ratio, self, angle, ratio, **kwargs)

        def value_constraint(self, degree, **kwargs):
            """Declare the angle's measure (in degrees)."""
            if kwargs.get('comment') is None:
                kwargs = dict(kwargs)
                kwargs['comment'] = Comment(
                    '$%{anglemeasure:angle} = %{degree:degree}$',
                    {'angle': self, 'degree': degree}
                )
            self.scene.constraint(Constraint.Kind.angle_value, self, degree, **kwargs)

        def is_acute_constraint(self, **kwargs):
            self.scene.constraint(Constraint.Kind.acute_angle, self, **kwargs)

        def is_obtuse_constraint(self, **kwargs):
            self.scene.constraint(Constraint.Kind.obtuse_angle, self, **kwargs)

        def is_right_constraint(self, **kwargs):
            # Expressed as perpendicularity of the lines through the two sides.
            self.vectors[0].as_segment.line_through().perpendicular_constraint(
                self.vectors[1].as_segment.line_through(),
                **kwargs
            )

        def __str__(self):
            # LaTeX-style notation; three-point form when a vertex exists.
            if self.vertex:
                return '\\angle %s %s %s' % (self.vectors[0].end, self.vertex, self.vectors[1].end)
            return '\\angle(%s, %s)' % self.vectors
class Triangle(Figure):
def __init__(self, pt0, pt1, pt2):
self.points = (pt0, pt1, pt2)
self.__sides = None
self.__angles = None
self.__permutations = None
@property
def scene(self):
return self.points[0].scene
@property
def is_equilateral(self):
for cnstr in self.scene.constraints(Constraint.Kind.equilateral):
if set(cnstr.params[0].points) == set(self.points):
return True
# TODO: check implicit equilateral constraints, e.g. congruency of sides
return False
@property
def sides(self):
if self.__sides is None:
self.__sides = (
self.points[1].segment(self.points[2]),
self.points[0].segment(self.points[2]),
self.points[0].segment(self.points[1])
)
return self.__sides
@property
def angles(self):
if self.__angles is None:
self.__angles = (
self.points[0].angle(self.points[1], self.points[2]),
self.points[1].angle(self.points[0], self.points[2]),
self.points[2].angle(self.points[0], self.points[1])
)
return self.__angles
@property
def permutations(self):
if self.__permutations is None:
self.__permutations = (
(self.points[0], self.points[1], self.points[2]),
(self.points[0], self.points[2], self.points[1]),
(self.points[1], self.points[0], self.points[2]),
(self.points[1], self.points[2], self.points[0]),
(self.points[2], self.points[0], self.points[1]),
(self.points[2], self.points[1], self.points[0])
)
return self.__permutations
def __str__(self):
return '\\bigtriangleup %s %s %s' % self.points
class Polygon(Figure):
def __init__(self, *points):
self.points = tuple(points)
self.__sides = None
self.__angles = None
def __str__(self):
return ' '.join(['%s'] * len(self.points)) % self.points
@property
def scene(self):
return self.points[0].scene
@property
def sides(self):
if self.__sides is None:
pts = self.points
self.__sides = tuple(p0.segment(p1) for (p0, p1) in zip(pts, pts[1:] + (pts[0], )))
return self.__sides
@property
def angles(self):
if self.__angles is None:
pts = self.points + self.points[:2]
self.__angles = tuple(pts[i + 1].angle(pts[i], pts[i + 2]) for i in range(0, len(self.points)))
return self.__angles
    def __init__(self):
        # All objects (points, lines, circles) ever added, in creation order.
        self.__objects = []
        self.validation_constraints = []
        self.adjustment_constraints = []
        self.__properties = set()
        self.__frozen = False
        self.__angles = {} # frozenset{vector, vector} => angle
        self.__segments = {} # frozenset{point, point} => segment
    def add_property(self, prop):
        """Register a derived geometric property (deduplicated by equality)."""
        if prop not in self.__properties:
            self.__properties.add(prop)

    @property
    def properties(self):
        """A snapshot list of all registered properties."""
        return list(self.__properties)

    def constraint(self, kind, *args, **kwargs):
        """Create a Constraint and file it under its kind's stage.

        On a frozen scene, the constraint object is created and returned but
        NOT recorded.
        """
        cns = Constraint(kind, self, *args, **kwargs)
        if not self.__frozen:
            if kind.stage == Stage.validation:
                self.validation_constraints.append(cns)
            else:
                self.adjustment_constraints.append(cns)
        return cns
    def equilateral_constraint(self, triangle, **kwargs):
        """Declare that the given triangle is equilateral."""
        if 'comment' not in kwargs:
            kwargs = dict(kwargs)
            kwargs['comment'] = Comment(
                '$%{triangle:equilateral}$ is equilateral',
                {'equilateral': triangle}
            )
        self.constraint(Constraint.Kind.equilateral, triangle, **kwargs)
        from .property import EquilateralTriangleProperty
        self.add_property(EquilateralTriangleProperty(triangle))

    def quadrilateral_constraint(self, A, B, C, D, **kwargs):
        """
        ABCD is a quadrilateral.
        I.e., the polygonal chain ABCD does not cross itself and contains no 180º angles.
        """
        self.constraint(Constraint.Kind.quadrilateral, A, B, C, D, **kwargs)

    def convex_polygon_constraint(self, *points, **kwargs):
        """
        *points (in given order) is a convex polygon.
        """
        # Quadrilaterals and smaller are handled by other constraint kinds.
        assert len(points) > 3
        self.constraint(Constraint.Kind.convex_polygon, points, **kwargs)
def points(self, max_layer='invisible'):
return [p for p in self.__objects if isinstance(p, CoreScene.Point) and p.layer in CoreScene.layers_by(max_layer)]
def lines(self, max_layer='invisible'):
return [l for l in self.__objects if isinstance(l, CoreScene.Line) and l.layer in CoreScene.layers_by(max_layer)]
def circles(self, max_layer='invisible'):
return [c for c in self.__objects if isinstance(c, CoreScene.Circle) and c.layer in CoreScene.layers_by(max_layer)]
def constraints(self, kind):
if kind.stage == Stage.validation:
return [cnstr for cnstr in self.validation_constraints if cnstr.kind == kind]
else:
return [cnstr for cnstr in self.adjustment_constraints if cnstr.kind == kind]
def assert_type(self, obj, *args):
assert isinstance(obj, args), 'Unexpected type %s' % type(obj)
assert obj.scene == self
def assert_point(self, obj):
self.assert_type(obj, CoreScene.Point)
def assert_line(self, obj):
    """Fail unless *obj* is a Line of this scene."""
    expected = CoreScene.Line
    self.assert_type(obj, expected)
def assert_line_or_circle(self, obj):
    """Fail unless *obj* is a Line or Circle of this scene."""
    expected = (CoreScene.Line, CoreScene.Circle)
    self.assert_type(obj, *expected)
def assert_vector(self, obj):
    """Fail unless *obj* is a Vector of this scene."""
    expected = CoreScene.Vector
    self.assert_type(obj, expected)
def assert_segment(self, obj):
    """Fail unless *obj* is a Segment of this scene."""
    expected = CoreScene.Segment
    self.assert_type(obj, expected)
def assert_angle(self, obj):
    """Fail unless *obj* is an Angle of this scene."""
    expected = CoreScene.Angle
    self.assert_type(obj, expected)
def free_point(self, **kwargs):
    """Create and return a new free (unconstrained) point in this scene."""
    origin = CoreScene.Point.Origin.free
    return CoreScene.Point(self, origin=origin, **kwargs)
def existing_line(self, point0, point1):
    """Return a known line through both points, or None.

    Only searches when the two points are constrained to be distinct
    (a not_equal constraint over exactly this pair exists); otherwise
    the points might coincide and no unique line is determined.
    """
    distinct = any({point0, point1} == set(cnstr.params)
                   for cnstr in self.constraints(Constraint.Kind.not_equal))
    if not distinct:
        return None
    for line in self.lines():
        if point0 in line and point1 in line:
            return line
    return None
def add(self, obj: Object):
    """Register *obj* with the scene; ignored while the scene is frozen."""
    if self.__frozen:
        return
    self.__objects.append(obj)
def get(self, label: str):
    """Look up an object by its primary label or any extra label.

    Returns None when no object matches.
    """
    for candidate in self.__objects:
        if label == candidate.label or label in candidate.extra_labels:
            return candidate
    return None
def freeze(self):
    """Freeze the scene: subsequent add()/constraint() calls become no-ops."""
    self.__frozen = True
def unfreeze(self):
    """Unfreeze the scene so objects and constraints can be added again."""
    self.__frozen = False
@property
def is_frozen(self):
    """Whether the scene currently rejects new objects and constraints."""
    return self.__frozen
def dump(self, include_constraints=False, max_layer='auxiliary'):
    """Print a human-readable summary of the scene.

    Lists object descriptions up to *max_layer*, per-layer totals, and
    (optionally) the validation and adjustment constraints.
    """
    print('Objects:')
    print('\n'.join(['\t' + obj.description for obj in self.__objects if obj.layer in CoreScene.layers_by(max_layer)]))
    # Totals are always computed over all three layers, regardless of max_layer.
    counts = [len([o for o in self.__objects if o.layer == layer]) for layer in ('user', 'auxiliary', 'invisible')]
    print('Total: %s objects (+ %s auxiliary, %s invisible)' % tuple(counts))
    if include_constraints:
        if self.validation_constraints:
            print('\nValidation constraints:')
            print('\n'.join(['\t' + str(cnstr) for cnstr in self.validation_constraints]))
        if self.adjustment_constraints:
            print('\nAdjustment constraints:')
            print('\n'.join(['\t' + str(cnstr) for cnstr in self.adjustment_constraints]))
class Stage(Enum):
    """When a constraint is checked: while validating a candidate placement,
    or while numerically adjusting one."""
    validation = auto()
    adjustment = auto()
class Constraint:
    """A typed relation between scene objects (e.g. 'these points differ',
    'these segments are perpendicular'), tagged with the Stage at which it
    is enforced."""

    @unique
    class Kind(Enum):
        # Each member's value is (label, stage, *expected_parameter_types);
        # the enum __init__ below stores the stage and parameter types.
        # The first tuple element duplicates the member name and is unused.
        not_equal = ('not_equal', Stage.validation, CoreScene.Point, CoreScene.Point)
        not_collinear = ('not_collinear', Stage.validation, CoreScene.Point, CoreScene.Point, CoreScene.Point)
        collinear = ('collinear', Stage.adjustment, CoreScene.Point, CoreScene.Point, CoreScene.Point)
        opposite_side = ('opposite_side', Stage.validation, CoreScene.Point, CoreScene.Point, CoreScene.Line)
        same_side = ('same_side', Stage.validation, CoreScene.Point, CoreScene.Point, CoreScene.Line)
        same_direction = ('same_direction', Stage.validation, CoreScene.Point, CoreScene.Point, CoreScene.Point)
        inside_segment = ('inside_segment', Stage.validation, CoreScene.Point, CoreScene.Segment)
        inside_angle = ('inside_angle', Stage.validation, CoreScene.Point, CoreScene.Angle)
        quadrilateral = ('quadrilateral', Stage.validation, CoreScene.Point, CoreScene.Point, CoreScene.Point, CoreScene.Point)
        equilateral = ('equilateral', Stage.adjustment, CoreScene.Triangle)
        convex_polygon = ('convex_polygon', Stage.validation, List[CoreScene.Point])
        distance = ('distance', Stage.adjustment, CoreScene.Vector, int)
        length_ratio = ('length_ratio', Stage.adjustment, CoreScene.Segment, CoreScene.Segment, int)
        parallel_vectors = ('parallel_vectors', Stage.adjustment, CoreScene.Vector, CoreScene.Vector)
        angles_ratio = ('angles_ratio', Stage.adjustment, CoreScene.Angle, CoreScene.Angle, int)
        perpendicular = ('perpendicular', Stage.adjustment, CoreScene.Segment, CoreScene.Segment)
        acute_angle = ('acute_angle', Stage.validation, CoreScene.Angle)
        obtuse_angle = ('obtuse_angle', Stage.validation, CoreScene.Angle)
        angle_value = ('angle_value', Stage.adjustment, CoreScene.Angle, int)

        def __init__(self, name, stage, *params):
            # `name` (the redundant first tuple element) is intentionally unused.
            self.stage = stage
            self.params = params

    def __init__(self, kind, scene, *args, **kwargs):
        """Validate *args* against kind.params and store them.

        String arguments for object-typed parameters are resolved through
        scene.get(). Extra keyword arguments (e.g. comment=...) become
        attributes via update().
        """
        assert isinstance(kind, Constraint.Kind)
        assert len(args) == len(kind.params)
        self.params = []
        for (arg, knd) in zip(args, kind.params):
            if knd == List[CoreScene.Point]:
                # Collapse the typing generic to its runtime origin (list).
                knd = knd.__origin__
            if issubclass(knd, CoreScene.Object):
                if isinstance(arg, str):
                    # Allow object labels in place of object references.
                    arg = scene.get(arg)
                scene.assert_type(arg, knd)
            elif issubclass(knd, List):
                # NOTE(review): issubclass against typing.List relies on
                # legacy typing behavior — confirm on the target Python version.
                # TODO: check element types
                assert isinstance(arg, (list, tuple))
            # TODO: restore other parameters type check
            #else:
            #    assert isinstance(arg, knd)
            self.params.append(arg)
        self.kind = kind
        self.comment = None
        self.update(kwargs)

    def update(self, kwargs):
        # Attach arbitrary extra attributes (comment, priorities, ...).
        self.__dict__.update(kwargs)

    def __str__(self):
        # Render object params by label; everything else via str().
        params = [para.label if isinstance(para, CoreScene.Object) else str(para) for para in self.params]
        # Everything except kind/params/comment counts as an "extra".
        extras = dict(self.__dict__)
        del extras['kind']
        del extras['params']
        del extras['comment']
        if self.comment:
            return 'Constraint(%s) %s %s (%s)' % (self.kind.name, params, self.comment, extras)
        else:
            return 'Constraint(%s) %s (%s)' % (self.kind.name, params, extras)
| 46,103 | 12,558 |
# -*- coding: utf-8 -*-
"""
Created on Wed Oct 14 10:08:03 2020
@author: ucobiz
"""
# Keep prompting until the user supplies a strictly positive integer.
while True:
    num = int(input("Enter a positive number: "))
    if num > 0:
        break
print("Thank you. The number you chose is:", num)
| 345 | 143 |
import math
import tkinter as tk
from tkinter import ttk, messagebox
from tkinter.constants import X, Y
from ttkthemes import ThemedStyle
class PickLight():
    """Pop-up picker for a light template.

    The user chooses a shape, a size and (for some shapes) an angle;
    collect() converts the choice into a list of step offsets on a grid
    whose cells represent 5 map units.
    """

    def __init__(self, root, widg):
        self.root = root
        self.widg = widg
        # Pointer position at creation time; used to place the pop-up window.
        self.x = self.root.winfo_pointerx()
        self.y = self.root.winfo_pointery()

    def open_light_win(self):
        """Build and show the borderless Toplevel holding the picker widgets."""
        # Place the window left of and slightly below the recorded pointer spot.
        x_offset = -340
        y_offset = 20
        self.light_win = tk.Toplevel(self.widg)
        style = ThemedStyle(self.root)
        style.theme_use("equilux")
        self.light_win.configure(bg=style.lookup('TLabel', 'background'))
        # No window-manager decorations; the window supplies its own close button.
        self.light_win.wm_overrideredirect(1)
        self.light_win.wm_geometry(f"+{self.x + x_offset}+{self.y + y_offset}")
        self.pfont = ('Papyrus', '14')
        self.light_win.focus_set()
        self.light_win.focus_force()
        self.light_win.attributes('-topmost', True)
        border_frame = ttk.Frame(master=self.light_win, borderwidth=2, relief='sunken')
        border_frame.grid(row=0, column=0, padx=20, pady=20)#, ipadx=10, ipady=10)
        btn_close = tk.Button(master=border_frame, text="X", command=self.light_win.destroy, bg='gray18', fg='gray70', activebackground='red3', bd=0, relief='sunken', font=('Papyrus', '8'), width=2, height=1, anchor='center')
        btn_close.grid(row=0, column=1, sticky='e', padx=5)
        lbl_light_shape = ttk.Label(master=border_frame, text="Shape", font=self.pfont)
        lbl_light_shape.grid(row=1, column=0, sticky='w')
        shape_list = [
            'Square',
            'Circle',
            'Cone',
            'Line',
            'Ring'
        ]
        self.cbx_light_shape = ttk.Combobox(master=border_frame, values=shape_list, width=18, state='readonly')
        self.cbx_light_shape.grid(row=1, column=1, sticky='w', padx=5)
        # Repopulate the size/angle options whenever the shape changes.
        self.cbx_light_shape.bind("<<ComboboxSelected>>", self.shape_select)
        lbl_size = ttk.Label(master=border_frame, text="Size", font=self.pfont)
        lbl_size.grid(row=2, column=0, sticky='w')
        self.cbx_light_size = ttk.Combobox(master=border_frame, width=18, state='readonly')
        self.cbx_light_size.grid(row=2, column=1, sticky='w', padx=5)
        lbl_angle = ttk.Label(master=border_frame, text="Angle from North", font=self.pfont)
        lbl_angle.grid(row=3, column=0, sticky='w')
        self.cbx_light_angle = ttk.Combobox(master=border_frame, width=18, state='readonly')
        self.cbx_light_angle.grid(row=3, column=1, sticky='w', padx=5)
        # NOTE(review): no command is attached here — presumably the caller
        # wires btn_confirm to collect(); confirm.
        self.btn_confirm = ttk.Button(master=border_frame, text="Confirm")
        self.btn_confirm.grid(row=4, column=0, columnspan=2, pady=5)

    def shape_select(self, event):
        """Fill the size and angle combo boxes with options valid for the
        currently selected shape."""
        shape = self.cbx_light_shape.get()
        len_list = []
        angle_list = []
        if shape == 'Square':
            for i in range(5, 125, 5):
                len_list.append(i)
        elif shape == 'Circle' or shape == 'Ring':
            len_list = [
                10,
                15,
                20,
                30,
                40,
                50,
                60,
                90,
                100,
                120
            ]
        elif shape == 'Cone':
            for i in range(5, 105, 5):
                len_list.append(i)
            # Cones point in one of the eight compass directions.
            angle_list = [
                'N',
                'NE',
                'E',
                'SE',
                'S',
                'SW',
                'W',
                'NW'
            ]
        elif shape == 'Line':
            for i in range(5, 305, 5):
                len_list.append(i)
            # Lines may point at any 15-degree increment from north.
            for i in range(0, 361, 15):
                angle_list.append(i)
        self.cbx_light_size.config(values=len_list)
        self.cbx_light_size.set('')
        self.cbx_light_angle.config(values=angle_list)
        # NOTE(review): the trailing comma makes this statement a 1-tuple
        # expression; harmless at runtime, but almost certainly unintended.
        self.cbx_light_angle.set(''),

    def collect(self):
        """Read the chosen shape/size/angle and return (offset_array, shape).

        Returns None when the selection is incomplete or unrecognized.
        """
        shape = self.cbx_light_shape.get()
        size = self.cbx_light_size.get()
        angle = self.cbx_light_angle.get()
        offset_array = []
        if shape == '':
            return
        if size == '':
            return
        # Sizes are entered in 5-unit steps; convert to grid cells.
        size = int(int(size) / 5)
        if shape == 'Square':
            offset_array = self.fill_square(size)
        elif shape == 'Circle':
            points = self.fill_circle(size)
            offset_array = self.points_to_offsets(points)
        elif shape == 'Ring':
            #points = self.get_ring_8th(size)
            #points = self.brute_ring(size)
            points = self.no_fill_circle(size)
            offset_array = self.points_to_offsets(points)
        elif shape == 'Line':
            # NOTE(review): find_endpoint returns None when angle is unset,
            # which would make this unpack raise — confirm the UI guarantees
            # an angle is selected for lines.
            end_x, end_y = self.find_endpoint(size, angle)
            points = self.draw_line(0, 0, end_x, end_y)
            offset_array = self.points_to_offsets(points)
        elif shape == 'Cone':
            points = self.draw_cone(size, angle)
            offset_array = self.points_to_offsets(points)
        else:
            return
        return offset_array, shape

    def find_endpoint(self, size, angle):
        """Approximate the endpoint of a line of length *size* at *angle*
        degrees clockwise from north, by linear interpolation within each
        45-degree octant. Returns None when *angle* is empty."""
        angle_diff = 0
        octant = 1
        if angle == '':
            return
        angle = int(angle)
        # The four axis-aligned special cases of octant 0 skip interpolation.
        if angle == 0 or angle == 360:
            octant = 0
            end_x = size
            end_y = 0
        elif angle == 180:
            octant = 0
            end_x = -size
            end_y = 0
        if octant > 0:
            # Classify the angle into one of eight 45-degree octants and
            # remember the octant's starting angle.
            if angle > 45 and angle < 90:
                angle_diff = 45
                octant = 2
            elif angle >= 90 and angle < 135:
                angle_diff = 90
                octant = 3
            elif angle >= 135 and angle < 180:
                angle_diff = 135
                octant = 4
            elif angle >= 180 and angle < 225:
                angle_diff = 180
                octant = 5
            elif angle >= 225 and angle < 270:
                angle_diff = 225
                octant = 6
            elif angle >= 270 and angle < 315:
                angle_diff = 270
                octant = 7
            elif angle >= 315 and angle < 360:
                angle_diff = 315
                octant = 8
            angle -= angle_diff
            # Flip odd octant angles to simplify math
            if octant % 2 == 0:
                angle = abs(angle - 45)
            # The short leg grows linearly from 0 to size across the octant.
            short_leg = int((angle * size) / 45)
            if octant == 1:
                end_x = size
                end_y = short_leg
            elif octant == 2:
                end_x = short_leg
                end_y = size
            elif octant == 3:
                end_x = -short_leg
                end_y = size
            elif octant == 4:
                end_x = -size
                end_y = short_leg
            elif octant == 5:
                end_x = -size
                end_y = -short_leg
            elif octant == 6:
                end_x = -short_leg
                end_y = -size
            elif octant == 7:
                end_x = short_leg
                end_y = -size
            elif octant == 8:
                end_x = size
                end_y = -short_leg
        return end_x, end_y

    def points_to_offsets(self, points):
        """Convert absolute points into successive step offsets, starting
        from (0, 0)."""
        pos = [0,0]
        offsets = []
        for point in points:
            dist = [point[0]-pos[0], point[1]-pos[1]]
            offsets.append(dist)
            pos = point
        return offsets

    def fill_square(self, size):
        """Offsets that raster-scan a size x size square: step right across a
        row, then jump back to the left edge of the next row."""
        points = []
        col = 1
        area = int(size**2)
        for i in range(1, area):
            if col < size:
                points.append((1,0))
                col += 1
            elif col == size:
                # End of a row: jump back to column 0, one row up.
                points.append((-1 * (col-1), 1))
                col = 1
        return points

    def fill_circle(self, r, center=[0.5, 0.5]):
        """Grid points inside a circle of radius *r* around *center*.

        The 0.5 default centers the circle between grid cells.
        NOTE(review): the mutable default list is never mutated, so it is
        safe, but a tuple would be more conventional.
        """
        top = int(center[1] - r)
        bottom = int(center[1] + r)
        points = []
        for y in range(top, bottom+1):
            # Width of the circle's chord at this scanline.
            dy = y - center[1]
            dx = math.sqrt(r*r - dy*dy)
            left = math.ceil(center[0] - dx)
            right = math.floor(center[0] + dx)
            for x in range(left, right+1):
                points.append([x,y])
        return points

    def no_fill_circle(self, r):
        """Grid points on the outline of a circle of radius *r*, built from
        the left/right edge of each scanline and mirrored into the other
        quadrants via transform_no_fill()."""
        points = []
        y = 1
        x = r
        # NOTE(review): x is never updated, so this loop runs for y = 1..r-1;
        # confirm that is the intended scanline range.
        while x > y:
            dy = y - 0.5
            dx = math.sqrt(r*r - dy*dy)
            left = math.ceil(0.5 - dx)
            right = math.floor(0.5 + dx)
            points.extend(self.transform_no_fill(left, y))
            points.extend(self.transform_no_fill(right, y))
            y += 1
        return points

    def transform_no_fill(self, x, y):
        """Mirror one outline point into four symmetric positions (the grid
        is offset by one cell, hence the 1-x / 1-y terms)."""
        x = int(x)
        y = int(y)
        return [
            ( x, y),
            (1-y, x),
            (1-x, 1-y),
            ( y, 1-x)
        ]

    def draw_line(self, x1, y1, x2, y2):
        """Rasterize the segment (x1, y1)-(x2, y2) into grid points using an
        incremental-error line algorithm."""
        points = []
        # undef is for a vertical line
        undef = False
        small_slope = True
        m_error = 0
        # Normalize so that x1 <= x2 (swap the endpoints if needed).
        if x1 > x2:
            start_x = x2
            start_y = y2
            end_x = x1
            end_y = y1
            x1 = start_x
            x2 = end_x
            y1 = start_y
            y2 = end_y
        elif x1 == x2:
            undef = True
            # Vertical line: normalize so that y1 <= y2.
            if y1 > y2:
                start_x = x2
                start_y = y2
                end_x = x1
                end_y = y1
                x1 = start_x
                x2 = end_x
                y1 = start_y
                y2 = end_y
        if not undef:
            dx = x2 - x1
            dy = y2 - y1
            m = dy / dx
            if m > 1 or m < -1:
                # Steep line: iterate over y instead of x.
                small_slope = False
                if m < -1:
                    start_x = x2
                    start_y = y2
                    end_x = x1
                    end_y = y1
                    x1 = start_x
                    x2 = end_x
                    y1 = start_y
                    y2 = end_y
            if small_slope:
                y = y1
                if m >= 0:
                    for x in range(x1, x2+1):
                        points.append([x, y])
                        m_error += dy
                        if (m_error * 2) >= dx:
                            y += 1
                            m_error -= dx
                else:
                    for x in range(x1, x2+1):
                        points.append([x, y])
                        if (m_error + m) > -0.5:
                            m_error += m
                        else:
                            y -= 1
                            m_error = m_error + m + 1
            else:
                x = x1
                if m > 0:
                    for y in range(y1, y2+1):
                        points.append([x, y])
                        m_error += dx
                        if (m_error * 2) >= dy:
                            x += 1
                            m_error -= dy
                else:
                    # Invert the slope to step x per unit of y.
                    m = 1/m
                    for y in range(y1, y2+1):
                        points.append([x, y])
                        if (m_error + m) > -0.5:
                            m_error += m
                        else:
                            x -= 1
                            m_error = m_error + m + 1
        else:
            # Vertical line: constant x, all y between the endpoints.
            x = x1
            for y in range(y1, y2+1):
                points.append([x, y])
        return points

    def draw_cone(self, size, dir):
        """Grid points of a cone of length *size* opening toward *dir*.

        Single-letter directions (N/S/E/W) build an axis-aligned cone that
        widens every other row; two-letter diagonals build a shrinking
        triangular quadrant.
        NOTE(review): `dir` shadows the builtin; renaming it would change
        the keyword interface, so it is kept.
        """
        points = []
        if len(dir) == 1:
            xl1 = 0
            xl2 = 1
            for y in range(1, size+1):
                # Widen the cone by one cell on each side every other row.
                if y % 2 == 1 and y > 1:
                    xl1 -= 1
                    xl2 += 1
                for x in range(xl1, xl2):
                    if dir == 'N':
                        points.append([x, -y])
                    elif dir == 'S':
                        points.append([x, y])
                    elif dir == 'W':
                        points.append([-y, x])
                    elif dir == 'E':
                        points.append([y, x])
        else:
            height = size
            for x in range(1, size+1):
                for y in range(1, height+1):
                    if dir == 'NE':
                        points.append([x, -y])
                    elif dir == 'SE':
                        points.append([x, y])
                    elif dir == 'SW':
                        points.append([-x, y])
                    elif dir == 'NW':
                        points.append([-x, -y])
                # Each successive column is one cell shorter.
                height -= 1
        return points

    def escape(self):
        """Close the picker window."""
        self.light_win.destroy()
def GenLightWin(root, widg):
    """Factory helper: build and return a PickLight bound to *root* and *widg*."""
    return PickLight(root, widg)
from django.utils.translation import gettext_lazy as _
def get_translated_string():
    """Return a lazily-translated sample string (exercises Django i18n)."""
    message = _('Testing translations')
    return message
| 123 | 39 |
import datetime
import pytz
from django.test import override_settings
from rest_framework import status
from ozpcenter import model_access as generic_model_access
from ozpcenter.scripts import sample_data_generator as data_gen
from tests.ozp.cases import APITestCase
from tests.ozpcenter.helper import APITestHelper
@override_settings(ES_ENABLED=False)
class LibraryApiTest(APITestCase):
    """API tests for the /api/library/ and /api/self/library/ endpoints:
    listing, bookmarking, bulk updates, and bookmark-folder sharing."""

    @classmethod
    def setUpTestData(cls):
        """Generate the shared sample data once for the whole class."""
        data_gen.run()

    def setUp(self):
        pass

    def test_get_library(self):
        """GET /api/library/ returns data for an authenticated user."""
        url = '/api/library/'
        response = APITestHelper.request(self, url, 'GET', username='wsmith', status_code=200)
        self.assertIsNotNone(response.data)

    def test_create_library(self):
        """Bookmarking a disabled listing is rejected (400); an enabled
        listing can be bookmarked (201)."""
        # Listing is Enabled
        response = APITestHelper.create_bookmark(self, 'wsmith', 1, folder_name='', status_code=201)
        self.assertEqual(response.data['listing']['id'], 1)
        # Disable Listing
        APITestHelper.edit_listing(self, 1, {'is_enabled': False}, 'wsmith')
        # POST to /self/library after listing disabled
        response = APITestHelper.create_bookmark(self, 'wsmith', 1, folder_name='', status_code=400)
        # Enabled Listing
        APITestHelper.edit_listing(self, 1, {'is_enabled': True}, 'wsmith')
        # POST to /self/library after listing re-enabled
        response = APITestHelper.create_bookmark(self, 'wsmith', 1, folder_name='', status_code=201)
        self.assertEqual(response.data['listing']['id'], 1)

    def test_get_library_list(self):
        """GET /api/self/library/ returns the user's bookmarks with the
        expected record shape."""
        url = '/api/self/library/'
        response = APITestHelper.request(self, url, 'GET', username='wsmith', status_code=200)
        self.assertEqual(10, len(response.data))
        self.assertIn('listing', response.data[0])
        self.assertIn('id', response.data[0]['listing'])
        self.assertIn('title', response.data[0]['listing'])
        self.assertIn('unique_name', response.data[0]['listing'])
        self.assertIn('folder', response.data[0])

    def test_get_library_self_when_listing_disabled_enabled(self):
        """A bookmark disappears from the library while its listing is
        disabled and reappears when it is re-enabled."""
        url = '/api/self/library/'
        response = APITestHelper.request(self, url, 'GET', username='wsmith', status_code=200)
        listing_ids = [record['listing']['id'] for record in response.data]
        first_listing_id = listing_ids[0]  # Should be 2
        self.assertEqual([2, 23, 44, 63, 10, 77, 81, 101, 9, 147], listing_ids, 'Comparing Ids #1')
        # Disable Listing
        APITestHelper.edit_listing(self, first_listing_id, {'is_enabled': False})
        # Get Library for current user after listing was disabled
        url = '/api/self/library/'
        response = APITestHelper.request(self, url, 'GET', username='wsmith', status_code=200)
        listing_ids = [record['listing']['id'] for record in response.data]
        self.assertEqual([23, 44, 63, 10, 77, 81, 101, 9, 147], listing_ids, 'Comparing Ids #2')
        # Enable Listing
        APITestHelper.edit_listing(self, first_listing_id, {'is_enabled': True})
        # Get Library for current user after listing was Enable
        url = '/api/self/library/'
        response = APITestHelper.request(self, url, 'GET', username='wsmith', status_code=200)
        listing_ids = [record['listing']['id'] for record in response.data]
        self.assertEqual([2, 23, 44, 63, 10, 77, 81, 101, 9, 147], listing_ids, 'Comparings Ids #3')

    def test_get_library_list_listing_type(self):
        """Filtering the library by listing type returns only that type."""
        url = '/api/self/library/?type=Web Application'
        response = APITestHelper.request(self, url, 'GET', username='wsmith', status_code=200)
        self.assertEqual(4, len(response.data))
        self.assertIn('listing', response.data[0])
        self.assertIn('id', response.data[0]['listing'])
        self.assertIn('title', response.data[0]['listing'])
        self.assertIn('unique_name', response.data[0]['listing'])
        self.assertIn('folder', response.data[0])

    def test_get_library_list_listing_type_empty(self):
        """Filtering by a type with no bookmarks yields an empty list."""
        url = '/api/self/library/?type=widget'
        response = APITestHelper.request(self, url, 'GET', username='wsmith', status_code=200)
        self.assertEqual([], response.data)

    def test_get_library_pk(self):
        """GET /api/self/library/<pk>/ returns a single bookmark record."""
        url = '/api/self/library/2/'
        response = APITestHelper.request(self, url, 'GET', username='wsmith', status_code=200)
        self.assertIn('listing', response.data)
        self.assertIn('id', response.data['listing'])
        self.assertIn('title', response.data['listing'])
        self.assertIn('unique_name', response.data['listing'])
        self.assertIn('folder', response.data)

    def test_library_update_all(self):
        """PUT /api/self/library/update_all/ accepts a full re-foldering of
        the user's bookmarks."""
        url = '/api/self/library/'
        response = APITestHelper.request(self, url, 'GET', username='wsmith', status_code=200)
        put_data = []
        position_count = 0
        for i in response.data:
            position_count = position_count + 1
            data = {'id': i['id'],
                    'folder': 'test',
                    'listing': {'id': i['listing']['id']},
                    'position': position_count
                    }
            put_data.append(data)
        url = '/api/self/library/update_all/'
        response = APITestHelper.request(self, url, 'PUT', data=put_data, username='wsmith', status_code=200)
        self.assertIsNotNone(response)

    def _compare_library(self, usernames_list):
        """Assert that each user's library matches the expected
        'title-folder' entries, ignoring order."""
        usernames_list_actual = {}
        for username, ids_list in usernames_list.items():
            url = '/api/self/library/'
            response = APITestHelper.request(self, url, 'GET', username=username, status_code=200)
            before_notification_ids = ['{}-{}'.format(entry['listing']['title'], entry['folder']) for entry in response.data]
            usernames_list_actual[username] = before_notification_ids
        for username, ids_list in usernames_list.items():
            before_notification_ids = usernames_list_actual[username]
            self.assertEqual(sorted(ids_list), sorted(before_notification_ids), 'Checking for {}'.format(username))

    def test_import_bookmarks(self):
        """Sharing a folder via a peer notification lets the recipient import
        its bookmarks, including ones added to the folder later."""
        # Create notification to share Weather folder from Bigbrother to Julia
        now = datetime.datetime.now(pytz.utc) + datetime.timedelta(days=5)
        data = {'expires_date': str(now),
                'message': 'A Simple Peer to Peer Notification',
                'peer': {
                    'user': {
                        'username': 'julia',
                    },
                    'folder_name': 'Weather'
                }}
        url = '/api/notification/'
        user = generic_model_access.get_profile('bigbrother').user
        self.client.force_authenticate(user=user)
        response = self.client.post(url, data, format='json')
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        bookmark_notification1_id = response.data['id']
        # Import Bookmarks
        APITestHelper._import_bookmarks(self, 'julia', bookmark_notification1_id, status_code=201)
        # Compare Library for users
        user_library_data = {'julia': ['Tornado-Weather',
                                       'Lightning-Weather',
                                       'Snow-Weather']}
        self._compare_library(user_library_data)
        # Modify Bigbrother's library to add another listing to the weather folder
        url_lib = '/api/self/library/'
        response = APITestHelper.request(self, url_lib, 'GET', username='bigbrother', status_code=200)
        put_data = []
        # NOTE(review): position_count is never incremented here, so every
        # entry is written with position 0 — confirm this is intended.
        position_count = 0
        for i in response.data:
            # FIX: was `i['id'] is 12` — identity comparison against an int
            # literal is implementation-dependent (and a SyntaxWarning on
            # modern CPython); equality is what is meant.
            if i['id'] == 12:
                data = {'id': i['id'],
                        'folder': "Weather",
                        'listing': {'id': i['listing']['id']},
                        'position': position_count
                        }
                put_data.append(data)
            else:
                data = {'id': i['id'],
                        'folder': i['folder'],
                        'listing': {'id': i['listing']['id']},
                        'position': position_count
                        }
                put_data.append(data)
        url_update = '/api/self/library/update_all/'
        response = APITestHelper.request(self, url_update, 'PUT', data=put_data, username='bigbrother', status_code=200)
        self.assertIsNotNone(response)
        # Recreate the notification to send to Julia to share the folder
        now = datetime.datetime.now(pytz.utc) + datetime.timedelta(days=5)
        data = {'expires_date': str(now),
                'message': 'A Simple Peer to Peer Notification',
                'peer': {
                    'user': {
                        'username': 'julia',
                    },
                    'folder_name': 'Weather'
                }}
        url = '/api/notification/'
        user = generic_model_access.get_profile('bigbrother').user
        self.client.force_authenticate(user=user)
        response = self.client.post(url, data, format='json')
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        bookmark_notification1_id = response.data['id']
        # Import Bookmarks
        APITestHelper._import_bookmarks(self, 'julia', bookmark_notification1_id, status_code=201)
        # Compare Library for users
        user_library_data = {'bigbrother': ['Tornado-Weather',
                                            'Lightning-Weather',
                                            'Snow-Weather',
                                            'Wolf Finder-Animals',
                                            'Killer Whale-Animals',
                                            'Lion Finder-Animals',
                                            'Monkey Finder-Animals',
                                            'Parrotlet-Animals',
                                            'White Horse-Animals',
                                            'Electric Guitar-Instruments',
                                            'Acoustic Guitar-Instruments',
                                            'Sound Mixer-Instruments',
                                            'Electric Piano-Instruments',
                                            'Piano-Instruments',
                                            'Violin-Instruments',
                                            'Bread Basket-Weather',
                                            'Informational Book-None',
                                            'Stop sign-None',
                                            'Chain boat navigation-None',
                                            'Gallery of Maps-None',
                                            'Chart Course-None'],
                             'julia': ['Tornado-Weather',
                                       'Lightning-Weather',
                                       'Snow-Weather',
                                       'Bread Basket-Weather']}
        self._compare_library(user_library_data)
| 11,151 | 3,255 |
#Yiğit Yüre 150401012
import socket
import os
import sys
try:
    # Validate the target host up front; raises socket.error on a bad address.
    socket.gethostbyname(sys.argv[1])
except socket.error:
    print("Geçersiz IP adresi")
    sys.exit()
host = sys.argv[1]
port = int(sys.argv[2])
try:
    s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    print("İstemci soketi başlatıldı")
except socket.error:
    print("Soket oluşturulamadı")
    sys.exit()
# Interactive command loop: the raw command line is sent to the server first,
# then the client reacts according to the command verb.
while True:
    komut = input("Bir komut giriniz: \n1. get [file_name]\n2. put [file_name]\n3. list\n ")
    IstemciKomutu = komut.encode('utf-8')
    try:
        s.sendto(IstemciKomutu, (host, port))
    except ConnectionResetError:
        print("Bağlantı port numaraları eşleşmiyor")
        sys.exit()
    KL = komut.split()
    if not KL:
        # Robustness fix: an empty command previously crashed with an
        # IndexError on KL[0]; just prompt again.
        continue
    if KL[0] == "get":
        try:
            ClientData, clientAddr = s.recvfrom(4096)
        except ConnectionResetError:
            print("Port numaraları eşleşmiyor")
            sys.exit()
        text = ClientData.decode('utf8')
        print(text)
        # NOTE(review): a short reply (< 30 chars) is taken to mean "OK, the
        # file data follows"; longer replies are treated as error text.
        # Confirm this matches the server's protocol.
        if len(text) < 30:
            Data, Recv = s.recvfrom(8192)
            # Context manager guarantees the file is closed even on error.
            with open(KL[1], "wb") as dosya:
                dosya.write(Data)
            print("Dosya alındı")
    elif KL[0] == "put":
        try:
            ClientData, clientAddr = s.recvfrom(4096)
        except ConnectionResetError:
            print("Port numaraları eşleşmiyor")
            sys.exit()
        text = ClientData.decode('utf8')
        print(text)
        if text == "Geçerli put komutu":
            if os.path.isfile(KL[1]):
                with open(KL[1], "rb") as dosya:
                    Data = dosya.read()
                # NOTE(review): the whole file is sent as a single datagram,
                # which limits the usable file size — confirm acceptable.
                s.sendto(Data, clientAddr)
                print("Dosya yükleniyor")
            else:
                print("Dosya bulunamadı")
        else:
            print("Geçersiz komut")
    elif KL[0] == "list":
        try:
            ClientData, clientAddr = s.recvfrom(51200)
        except ConnectionResetError:
            print("Port numaraları eşleşmiyor")
            sys.exit()
        text = ClientData.decode('utf8')
        print(text)
        if text == "Geçerli list komutu":
            ClientDataL, clientAddrL = s.recvfrom(4096)
            text2 = ClientDataL.decode('utf8')
            print(text2)
        else:
            print("Geçersiz komut")
# THIS FILE WAS AUTOGENERATED BY make_vendor_list.py!
# pylint: disable=line-too-long,missing-docstring,too-many-lines
# yapf: disable
import datetime
from bios_pnp import pnp
VENDORS = {
"BUT": pnp.Vendor("21ST CENTURY ENTERTAINMENT", "BUT", datetime.date(2002, 4, 25)),
"TTL": pnp.Vendor("2-Tel B.V", "TTL", datetime.date(1999, 3, 20)),
"TCM": pnp.Vendor("3Com Corporation", "TCM", datetime.date(1996, 11, 29)),
"TDP": pnp.Vendor("3D Perception", "TDP", datetime.date(2002, 5, 16)),
"VSD": pnp.Vendor("3M", "VSD", datetime.date(1998, 10, 16)),
"NOD": pnp.Vendor("3NOD Digital Technology Co. Ltd.", "NOD", datetime.date(2014, 12, 11)),
"NGS": pnp.Vendor("A D S Exports", "NGS", datetime.date(1998, 7, 16)),
"API": pnp.Vendor("A Plus Info Corporation", "API", datetime.date(1996, 11, 29)),
"ACG": pnp.Vendor("A&R Cambridge Ltd.", "ACG", datetime.date(2007, 6, 13)),
"AVX": pnp.Vendor("A/Vaux Electronics", "AVX", datetime.date(2012, 8, 29)),
"APV": pnp.Vendor("A+V Link", "APV", datetime.date(2010, 1, 27)),
"TRU": pnp.Vendor("Aashima Technology B.V.", "TRU", datetime.date(1998, 5, 8)),
"AAM": pnp.Vendor("Aava Mobile Oy", "AAM", datetime.date(2013, 8, 13)),
"ABA": pnp.Vendor("ABBAHOME INC.", "ABA", datetime.date(1999, 11, 8)),
"MEG": pnp.Vendor("Abeam Tech Ltd.", "MEG", datetime.date(1996, 11, 29)),
"ATC": pnp.Vendor("Ably-Tech Corporation", "ATC", datetime.date(1996, 11, 29)),
"ABC": pnp.Vendor("AboCom System Inc.", "ABC", datetime.date(1997, 3, 28)),
"WTC": pnp.Vendor("ACC Microelectronics", "WTC", datetime.date(1996, 11, 29)),
"AWC": pnp.Vendor("Access Works Comm Inc", "AWC", datetime.date(1996, 11, 29)),
"PKA": pnp.Vendor("Acco UK Ltd.", "PKA", datetime.date(2003, 5, 12)),
"ACC": pnp.Vendor("Accton Technology Corporation", "ACC", datetime.date(1996, 11, 29)),
"ACU": pnp.Vendor("Acculogic", "ACU", datetime.date(1996, 11, 29)),
"ASL": pnp.Vendor("AccuScene Corporation Ltd", "ASL", datetime.date(2007, 6, 13)),
"ANT": pnp.Vendor("Ace CAD Enterprise Company Ltd", "ANT", datetime.date(1996, 11, 29)),
"CHE": pnp.Vendor("Acer Inc", "CHE", datetime.date(1996, 11, 29)),
"ALI": pnp.Vendor("Acer Labs", "ALI", datetime.date(1996, 11, 29)),
"ANX": pnp.Vendor("Acer Netxus Inc", "ANX", datetime.date(1996, 11, 29)),
"ACR": pnp.Vendor("Acer Technologies", "ACR", datetime.date(1996, 11, 29)),
"ACK": pnp.Vendor("Acksys", "ACK", datetime.date(1996, 11, 29)),
"ADC": pnp.Vendor("Acnhor Datacomm", "ADC", datetime.date(1996, 11, 29)),
"CAL": pnp.Vendor("Acon", "CAL", datetime.date(1996, 11, 29)),
"ALK": pnp.Vendor("Acrolink Inc", "ALK", datetime.date(1997, 3, 12)),
"ACM": pnp.Vendor("Acroloop Motion Control Systems Inc", "ACM", datetime.date(1998, 3, 26)),
"LAB": pnp.Vendor("ACT Labs Ltd", "LAB", datetime.date(1997, 9, 2)),
"ACE": pnp.Vendor("Actek Engineering Pty Ltd", "ACE", datetime.date(1996, 11, 29)),
"AEI": pnp.Vendor("Actiontec Electric Inc", "AEI", datetime.date(1996, 11, 29)),
"ACV": pnp.Vendor("ActivCard S.A", "ACV", datetime.date(1998, 5, 8)),
"ACB": pnp.Vendor("Aculab Ltd", "ACB", datetime.date(1996, 11, 29)),
"ALM": pnp.Vendor("Acutec Ltd.", "ALM", datetime.date(1999, 11, 8)),
"GLE": pnp.Vendor("AD electronics", "GLE", datetime.date(2000, 4, 19)),
"ADM": pnp.Vendor("Ad Lib MultiMedia Inc", "ADM", datetime.date(1998, 4, 23)),
"ADP": pnp.Vendor("Adaptec Inc", "ADP", datetime.date(1996, 11, 29)),
"ADX": pnp.Vendor("Adax Inc", "ADX", datetime.date(1996, 11, 29)),
"RSH": pnp.Vendor("ADC-Centre", "RSH", datetime.date(1999, 11, 8)),
"AVE": pnp.Vendor("Add Value Enterpises (Asia) Pte Ltd", "AVE", datetime.date(1999, 1, 10)),
"ADA": pnp.Vendor("Addi-Data GmbH", "ADA", datetime.date(1996, 11, 29)),
"ADI": pnp.Vendor("ADI Systems Inc", "ADI", datetime.date(1996, 11, 29)),
"DPM": pnp.Vendor("ADPM Synthesis sas", "DPM", datetime.date(2000, 8, 10)),
"AXB": pnp.Vendor("Adrienne Electronics Corporation", "AXB", datetime.date(1997, 10, 7)),
"ADT": pnp.Vendor("Adtek", "ADT", datetime.date(1996, 11, 29)),
"ADK": pnp.Vendor("Adtek System Science Company Ltd", "ADK", datetime.date(1996, 11, 29)),
"FLE": pnp.Vendor("ADTI Media, Inc", "FLE", datetime.date(2009, 9, 15)),
"AND": pnp.Vendor("Adtran Inc", "AND", datetime.date(1996, 11, 29)),
"AGM": pnp.Vendor("Advan Int'l Corporation", "AGM", datetime.date(1998, 5, 26)),
"AVN": pnp.Vendor("Advance Computer Corporation", "AVN", datetime.date(2010, 6, 10)),
"MSM": pnp.Vendor("Advanced Digital Systems", "MSM", datetime.date(1996, 11, 29)),
"AED": pnp.Vendor("Advanced Electronic Designs, Inc.", "AED", datetime.date(2004, 7, 12)),
"RJS": pnp.Vendor("Advanced Engineering", "RJS", datetime.date(1998, 6, 25)),
"GRV": pnp.Vendor("Advanced Gravis", "GRV", datetime.date(1996, 11, 29)),
"AIR": pnp.Vendor("Advanced Integ. Research Inc", "AIR", datetime.date(1996, 11, 29)),
"ALR": pnp.Vendor("Advanced Logic", "ALR", datetime.date(1996, 11, 29)),
"ADV": pnp.Vendor("Advanced Micro Devices Inc", "ADV", datetime.date(1996, 11, 29)),
"EVE": pnp.Vendor("Advanced Micro Peripherals Ltd", "EVE", datetime.date(2011, 11, 18)),
"AOE": pnp.Vendor("Advanced Optics Electronics, Inc.", "AOE", datetime.date(2004, 4, 20)),
"ADD": pnp.Vendor("Advanced Peripheral Devices Inc", "ADD", datetime.date(1996, 11, 29)),
"ABV": pnp.Vendor("Advanced Research Technology", "ABV", datetime.date(1997, 1, 16)),
"PSA": pnp.Vendor("Advanced Signal Processing Technologies", "PSA", datetime.date(1999, 9, 13)),
"AHC": pnp.Vendor("Advantech Co., Ltd.", "AHC", datetime.date(2007, 6, 13)),
"ADH": pnp.Vendor("Aerodata Holdings Ltd", "ADH", datetime.date(1997, 11, 11)),
"AEP": pnp.Vendor("Aetas Peripheral International", "AEP", datetime.date(1999, 11, 8)),
"AET": pnp.Vendor("Aethra Telecomunicazioni S.r.l.", "AET", datetime.date(1996, 12, 13)),
"CHS": pnp.Vendor("Agentur Chairos", "CHS", datetime.date(2001, 3, 15)),
"AGT": pnp.Vendor("Agilent Technologies", "AGT", datetime.date(2001, 10, 8)),
"ASI": pnp.Vendor("Ahead Systems", "ASI", datetime.date(1996, 11, 29)),
"AIM": pnp.Vendor("AIMS Lab Inc", "AIM", datetime.date(1998, 3, 13)),
"AYR": pnp.Vendor("Airlib, Inc", "AYR", datetime.date(2000, 2, 21)),
"AWL": pnp.Vendor("Aironet Wireless Communications, Inc", "AWL", datetime.date(1998, 8, 11)),
"AIW": pnp.Vendor("Aiwa Company Ltd", "AIW", datetime.date(1996, 11, 29)),
"AJA": pnp.Vendor("AJA Video Systems, Inc.", "AJA", datetime.date(2007, 10, 11)),
"AKE": pnp.Vendor("AKAMI Electric Co.,Ltd", "AKE", datetime.date(2010, 9, 3)),
"AKB": pnp.Vendor("Akebia Ltd", "AKB", datetime.date(1996, 11, 29)),
"AKI": pnp.Vendor("AKIA Corporation", "AKI", datetime.date(1998, 12, 23)),
"ALH": pnp.Vendor("AL Systems", "ALH", datetime.date(1999, 1, 20)),
"ALA": pnp.Vendor("Alacron Inc", "ALA", datetime.date(1996, 11, 29)),
"ALN": pnp.Vendor("Alana Technologies", "ALN", datetime.date(2000, 1, 13)),
"AOT": pnp.Vendor("Alcatel", "AOT", datetime.date(2001, 11, 6)),
"ABE": pnp.Vendor("Alcatel Bell", "ABE", datetime.date(1996, 11, 29)),
"ADB": pnp.Vendor("Aldebbaron", "ADB", datetime.date(2001, 3, 15)),
"ALE": pnp.Vendor("Alenco BV", "ALE", datetime.date(2014, 5, 20)),
"ALX": pnp.Vendor("ALEXON Co.,Ltd.", "ALX", datetime.date(1999, 9, 13)),
"AFA": pnp.Vendor("Alfa Inc", "AFA", datetime.date(1996, 11, 29)),
"ALO": pnp.Vendor("Algolith Inc.", "ALO", datetime.date(2005, 5, 2)),
"AGO": pnp.Vendor("AlgolTek, Inc.", "AGO", datetime.date(2013, 10, 23)),
"AIS": pnp.Vendor("Alien Internet Services", "AIS", datetime.date(2001, 6, 21)),
"ABD": pnp.Vendor("Allen Bradley Company", "ABD", datetime.date(1996, 11, 29)),
"ALL": pnp.Vendor("Alliance Semiconductor Corporation", "ALL", datetime.date(1996, 11, 29)),
"ATI": pnp.Vendor("Allied Telesis KK", "ATI", datetime.date(1996, 11, 29)),
"ATA": pnp.Vendor("Allied Telesyn International (Asia) Pte Ltd", "ATA", datetime.date(1997, 11, 10)),
"ATK": pnp.Vendor("Allied Telesyn Int'l", "ATK", datetime.date(1996, 11, 29)),
"ACO": pnp.Vendor("Allion Computer Inc.", "ACO", datetime.date(2000, 10, 23)),
"XAD": pnp.Vendor("Alpha Data", "XAD", datetime.date(2009, 10, 8)),
"ATD": pnp.Vendor("Alpha Telecom Inc", "ATD", datetime.date(1997, 9, 26)),
"ATP": pnp.Vendor("Alpha-Top Corporation", "ATP", datetime.date(1996, 12, 4)),
"ALV": pnp.Vendor("AlphaView LCD", "ALV", datetime.date(2008, 11, 1)),
"APE": pnp.Vendor("Alpine Electronics, Inc.", "APE", datetime.date(2013, 1, 22)),
"ALP": pnp.Vendor("Alps Electric Company Ltd", "ALP", datetime.date(1996, 11, 29)),
"AUI": pnp.Vendor("Alps Electric Inc", "AUI", datetime.date(1996, 11, 29)),
"ARC": pnp.Vendor("Alta Research Corporation", "ARC", datetime.date(1996, 11, 29)),
"ALC": pnp.Vendor("Altec Corporation", "ALC", datetime.date(1998, 8, 4)),
"ALJ": pnp.Vendor("Altec Lansing", "ALJ", datetime.date(2000, 1, 13)),
"AIX": pnp.Vendor("ALTINEX, INC.", "AIX", datetime.date(2001, 4, 24)),
"AIE": pnp.Vendor("Altmann Industrieelektronik", "AIE", datetime.date(1996, 11, 29)),
"ACS": pnp.Vendor("Altos Computer Systems", "ACS", datetime.date(1996, 11, 29)),
"AIL": pnp.Vendor("Altos India Ltd", "AIL", datetime.date(1996, 11, 29)),
"CNC": pnp.Vendor("Alvedon Computers Ltd", "CNC", datetime.date(1998, 11, 6)),
"AMB": pnp.Vendor("Ambient Technologies, Inc.", "AMB", datetime.date(1999, 5, 16)),
"ALT": pnp.Vendor("Altra", "ALT", datetime.date(1996, 11, 29)),
"AMD": pnp.Vendor("Amdek Corporation", "AMD", datetime.date(1996, 11, 29)),
"AOL": pnp.Vendor("America OnLine", "AOL", datetime.date(1996, 11, 29)),
"YOW": pnp.Vendor("American Biometric Company", "YOW", datetime.date(1999, 5, 16)),
"AXP": pnp.Vendor("American Express", "AXP", datetime.date(1999, 7, 16)),
"AXI": pnp.Vendor("American Magnetics", "AXI", datetime.date(2001, 3, 15)),
"AMI": pnp.Vendor("American Megatrends Inc", "AMI", datetime.date(1996, 11, 29)),
"MCA": pnp.Vendor("American Nuclear Systems Inc", "MCA", datetime.date(1997, 2, 12)),
"CNB": pnp.Vendor("American Power Conversion", "CNB", datetime.date(2001, 3, 15)),
"APC": pnp.Vendor("American Power Conversion", "APC", datetime.date(1996, 11, 29)),
"AMN": pnp.Vendor("Amimon LTD.", "AMN", datetime.date(2007, 6, 13)),
"AMO": pnp.Vendor("Amino Technologies PLC and Amino Communications Limited", "AMO", datetime.date(2011, 12, 9)),
"AKL": pnp.Vendor("AMiT Ltd", "AKL", datetime.date(1997, 12, 2)),
"AMP": pnp.Vendor("AMP Inc", "AMP", datetime.date(1996, 11, 29)),
"AII": pnp.Vendor("Amptron International Inc.", "AII", datetime.date(2000, 5, 24)),
"AMT": pnp.Vendor("AMT International Industry", "AMT", datetime.date(1996, 11, 29)),
"AMR": pnp.Vendor("AmTRAN Technology Co., Ltd.", "AMR", datetime.date(2013, 6, 10)),
"AMX": pnp.Vendor("AMX LLC", "AMX", datetime.date(2008, 7, 6)),
"ANA": pnp.Vendor("Anakron", "ANA", datetime.date(1999, 11, 8)),
"ADN": pnp.Vendor("Analog & Digital Devices Tel. Inc", "ADN", datetime.date(1997, 3, 14)),
"ADS": pnp.Vendor("Analog Devices Inc", "ADS", datetime.date(1996, 11, 29)),
"ANW": pnp.Vendor("Analog Way SAS", "ANW", datetime.date(2014, 1, 22)),
"ANL": pnp.Vendor("Analogix Semiconductor, Inc", "ANL", datetime.date(2005, 10, 10)),
"ABT": pnp.Vendor("Anchor Bay Technologies, Inc.", "ABT", datetime.date(2006, 2, 14)),
"AAE": pnp.Vendor("Anatek Electronics Inc.", "AAE", datetime.date(2004, 5, 25)),
"ACI": pnp.Vendor("Ancor Communications Inc", "ACI", datetime.date(1996, 11, 29)),
"ANC": pnp.Vendor("Ancot", "ANC", datetime.date(1996, 11, 29)),
"AML": pnp.Vendor("Anderson Multimedia Communications (HK) Limited", "AML", datetime.date(2003, 1, 3)),
"ANP": pnp.Vendor("Andrew Network Production", "ANP", datetime.date(1996, 11, 29)),
"ANI": pnp.Vendor("Anigma Inc", "ANI", datetime.date(1996, 11, 29)),
"ANK": pnp.Vendor("Anko Electronic Company Ltd", "ANK", datetime.date(1998, 3, 24)),
"AAT": pnp.Vendor("Ann Arbor Technologies", "AAT", datetime.date(2001, 4, 24)),
"BBB": pnp.Vendor("an-najah university", "BBB", datetime.date(2001, 3, 15)),
"ANO": pnp.Vendor("Anorad Corporation", "ANO", datetime.date(2000, 1, 13)),
"ANR": pnp.Vendor("ANR Ltd", "ANR", datetime.date(1996, 11, 29)),
"ANS": pnp.Vendor("Ansel Communication Company", "ANS", datetime.date(1996, 11, 29)),
"AEC": pnp.Vendor("Antex Electronics Corporation", "AEC", datetime.date(1996, 11, 29)),
"AOA": pnp.Vendor("AOpen Inc.", "AOA", datetime.date(2001, 11, 6)),
"APX": pnp.Vendor("AP Designs Ltd", "APX", datetime.date(1997, 12, 8)),
"DNG": pnp.Vendor("Apache Micro Peripherals Inc", "DNG", datetime.date(1997, 11, 11)),
"APL": pnp.Vendor("Aplicom Oy", "APL", datetime.date(2005, 5, 2)),
"APN": pnp.Vendor("Appian Tech Inc", "APN", datetime.date(1996, 11, 29)),
"APP": pnp.Vendor("Apple Computer Inc", "APP", datetime.date(1996, 11, 29)),
"APD": pnp.Vendor("AppliAdata", "APD", datetime.date(1996, 11, 29)),
"ACT": pnp.Vendor("Applied Creative Technology", "ACT", datetime.date(1996, 11, 29)),
"APM": pnp.Vendor("Applied Memory Tech", "APM", datetime.date(1996, 11, 29)),
"ACL": pnp.Vendor("Apricot Computers", "ACL", datetime.date(1996, 11, 29)),
"APR": pnp.Vendor("Aprilia s.p.a.", "APR", datetime.date(1999, 2, 22)),
"ATJ": pnp.Vendor("ArchiTek Corporation", "ATJ", datetime.date(2014, 1, 22)),
"ACH": pnp.Vendor("Archtek Telecom Corporation", "ACH", datetime.date(1997, 1, 15)),
"ATL": pnp.Vendor("Arcus Technology Ltd", "ATL", datetime.date(1996, 11, 29)),
"ARD": pnp.Vendor("AREC Inc.", "ARD", datetime.date(2013, 7, 8)),
"ARS": pnp.Vendor("Arescom Inc", "ARS", datetime.date(1996, 11, 29)),
"AGL": pnp.Vendor("Argolis", "AGL", datetime.date(2001, 3, 15)),
"ARI": pnp.Vendor("Argosy Research Inc", "ARI", datetime.date(1997, 2, 24)),
"ARG": pnp.Vendor("Argus Electronics Co., LTD", "ARG", datetime.date(2004, 6, 4)),
"ACA": pnp.Vendor("Ariel Corporation", "ACA", datetime.date(1996, 12, 13)),
"ARM": pnp.Vendor("Arima", "ARM", datetime.date(2004, 4, 7)),
"ADE": pnp.Vendor("Arithmos, Inc.", "ADE", datetime.date(1999, 7, 16)),
"ARK": pnp.Vendor("Ark Logic Inc", "ARK", datetime.date(1996, 11, 29)),
"ARL": pnp.Vendor("Arlotto Comnet Inc", "ARL", datetime.date(1997, 4, 29)),
"AMS": pnp.Vendor("ARMSTEL, Inc.", "AMS", datetime.date(2011, 2, 25)),
"AIC": pnp.Vendor("Arnos Insturments & Computer Systems", "AIC", datetime.date(1996, 11, 29)),
"ARR": pnp.Vendor("ARRIS Group, Inc.", "ARR", datetime.date(2015, 1, 27)),
"IMB": pnp.Vendor("ART s.r.l.", "IMB", datetime.date(2012, 1, 27)),
"AGI": pnp.Vendor("Artish Graphics Inc", "AGI", datetime.date(1996, 11, 29)),
"NPA": pnp.Vendor("Arvanics", "NPA", datetime.date(2015, 3, 5)),
"AKM": pnp.Vendor("Asahi Kasei Microsystems Company Ltd", "AKM", datetime.date(1996, 11, 29)),
"ASN": pnp.Vendor("Asante Tech Inc", "ASN", datetime.date(1996, 11, 29)),
"HER": pnp.Vendor("Ascom Business Systems", "HER", datetime.date(1999, 1, 20)),
"ASC": pnp.Vendor("Ascom Strategic Technology Unit", "ASC", datetime.date(1996, 11, 29)),
"ASM": pnp.Vendor("ASEM S.p.A.", "ASM", datetime.date(2001, 3, 15)),
"AEM": pnp.Vendor("ASEM S.p.A.", "AEM", datetime.date(1996, 11, 29)),
"ASE": pnp.Vendor("AseV Display Labs", "ASE", datetime.date(1998, 10, 16)),
"ASH": pnp.Vendor("Ashton Bentley Concepts", "ASH", datetime.date(2013, 9, 20)),
"AMA": pnp.Vendor("Asia Microelectronic Development Inc", "AMA", datetime.date(1997, 9, 24)),
"ASK": pnp.Vendor("Ask A/S", "ASK", datetime.date(1996, 11, 29)),
"DYN": pnp.Vendor("Askey Computer Corporation", "DYN", datetime.date(1997, 7, 22)),
"ASP": pnp.Vendor("ASP Microelectronics Ltd", "ASP", datetime.date(1996, 11, 29)),
"AKY": pnp.Vendor("Askey Computer Corporation", "AKY", datetime.date(1997, 4, 2)),
"ACP": pnp.Vendor("Aspen Tech Inc", "ACP", datetime.date(1996, 11, 29)),
"AST": pnp.Vendor("AST Research Inc", "AST", datetime.date(1996, 11, 29)),
"JAC": pnp.Vendor("Astec Inc", "JAC", datetime.date(1996, 11, 29)),
"ADL": pnp.Vendor("ASTRA Security Products Ltd", "ADL", datetime.date(1997, 7, 30)),
"ATO": pnp.Vendor("ASTRO DESIGN, INC.", "ATO", datetime.date(2003, 6, 6)),
"ASU": pnp.Vendor("Asuscom Network Inc", "ASU", datetime.date(1996, 11, 29)),
"ATT": pnp.Vendor("AT&T", "ATT", datetime.date(1996, 11, 29)),
"GIS": pnp.Vendor("AT&T Global Info Solutions", "GIS", datetime.date(1996, 11, 29)),
"HSM": pnp.Vendor("AT&T Microelectronics", "HSM", datetime.date(1996, 11, 29)),
"TME": pnp.Vendor("AT&T Microelectronics", "TME", datetime.date(1996, 11, 29)),
"PDN": pnp.Vendor("AT&T Paradyne", "PDN", datetime.date(1996, 11, 29)),
"AVJ": pnp.Vendor("Atelier Vision Corporation", "AVJ", datetime.date(2015, 2, 24)),
"ATH": pnp.Vendor("Athena Informatica S.R.L.", "ATH", datetime.date(1997, 1, 29)),
"ATN": pnp.Vendor("Athena Smartcard Solutions Ltd.", "ATN", datetime.date(1999, 9, 13)),
"ATX": pnp.Vendor("Athenix Corporation", "ATX", datetime.date(1996, 11, 29)),
"BUJ": pnp.Vendor("ATI Tech Inc", "BUJ", datetime.date(1996, 11, 29)),
"CFG": pnp.Vendor("Atlantis", "CFG", datetime.date(1996, 11, 29)),
"ATM": pnp.Vendor("ATM Ltd", "ATM", datetime.date(1996, 11, 29)),
"AKP": pnp.Vendor("Atom Komplex Prylad", "AKP", datetime.date(2000, 10, 23)),
"AMC": pnp.Vendor("Attachmate Corporation", "AMC", datetime.date(1996, 11, 29)),
"FWA": pnp.Vendor("Attero Tech, LLC", "FWA", datetime.date(2010, 4, 20)),
"APT": pnp.Vendor("Audio Processing Technology Ltd", "APT", datetime.date(1997, 3, 18)),
"ASX": pnp.Vendor("AudioScience", "ASX", datetime.date(1996, 11, 29)),
"AUG": pnp.Vendor("August Home, Inc.", "AUG", datetime.date(2014, 6, 11)),
"AVC": pnp.Vendor("Auravision Corporation", "AVC", datetime.date(1996, 11, 29)),
"AUR": pnp.Vendor("Aureal Semiconductor", "AUR", datetime.date(1996, 11, 29)),
"APS": pnp.Vendor("Autologic Inc", "APS", datetime.date(1996, 11, 29)),
"CLT": pnp.Vendor("automated computer control systems", "CLT", datetime.date(1999, 9, 13)),
"AUT": pnp.Vendor("Autotime Corporation", "AUT", datetime.date(2001, 10, 8)),
"AUV": pnp.Vendor("Auvidea GmbH", "AUV", datetime.date(2014, 4, 21)),
"AVL": pnp.Vendor("Avalue Technology Inc.", "AVL", datetime.date(2011, 11, 18)),
"ALS": pnp.Vendor("Avance Logic Inc", "ALS", datetime.date(1996, 11, 29)),
"AVA": pnp.Vendor("Avaya Communication", "AVA", datetime.date(2001, 3, 15)),
"AEN": pnp.Vendor("Avencall", "AEN", datetime.date(2012, 1, 27)),
"AVR": pnp.Vendor("AVer Information Inc.", "AVR", datetime.date(2010, 5, 7)),
"AVD": pnp.Vendor("Avid Electronics Corporation", "AVD", datetime.date(1996, 11, 29)),
"AVM": pnp.Vendor("AVM GmbH", "AVM", datetime.date(1996, 11, 29)),
"AAA": pnp.Vendor("Avolites Ltd", "AAA", datetime.date(2012, 2, 17)),
"AVO": pnp.Vendor("Avocent Corporation", "AVO", datetime.date(2000, 10, 23)),
"AVT": pnp.Vendor("Avtek (Electronics) Pty Ltd", "AVT", datetime.date(1996, 11, 29)),
"ACD": pnp.Vendor("AWETA BV", "ACD", datetime.date(1998, 1, 20)),
"AXL": pnp.Vendor("Axel", "AXL", datetime.date(1996, 11, 29)),
"AXC": pnp.Vendor("AXIOMTEK CO., LTD.", "AXC", datetime.date(2005, 5, 2)),
"AXO": pnp.Vendor("Axonic Labs LLC", "AXO", datetime.date(2012, 6, 21)),
"AXT": pnp.Vendor("Axtend Technologies Inc", "AXT", datetime.date(1997, 12, 1)),
"AXX": pnp.Vendor("Axxon Computer Corporation", "AXX", datetime.date(1996, 11, 29)),
"AXY": pnp.Vendor("AXYZ Automation Services, Inc", "AXY", datetime.date(1998, 8, 11)),
"AYD": pnp.Vendor("Aydin Displays", "AYD", datetime.date(2007, 6, 13)),
"AZM": pnp.Vendor("AZ Middelheim - Radiotherapy", "AZM", datetime.date(2003, 11, 14)),
"AZT": pnp.Vendor("Aztech Systems Ltd", "AZT", datetime.date(1996, 11, 29)),
"BBH": pnp.Vendor("B&Bh", "BBH", datetime.date(2003, 1, 17)),
"SMR": pnp.Vendor("B.& V. s.r.l.", "SMR", datetime.date(1997, 3, 21)),
"BFE": pnp.Vendor("B.F. Engineering Corporation", "BFE", datetime.date(1996, 11, 29)),
"BUG": pnp.Vendor("B.U.G., Inc.", "BUG", datetime.date(2011, 8, 30)),
"BNO": pnp.Vendor("Bang & Olufsen", "BNO", datetime.date(2003, 5, 16)),
"BNK": pnp.Vendor("Banksia Tech Pty Ltd", "BNK", datetime.date(1996, 11, 29)),
"BAN": pnp.Vendor("Banyan", "BAN", datetime.date(1996, 11, 29)),
"BRC": pnp.Vendor("BARC", "BRC", datetime.date(2000, 8, 10)),
"BDS": pnp.Vendor("Barco Display Systems", "BDS", datetime.date(1999, 9, 13)),
"BCD": pnp.Vendor("Barco GmbH", "BCD", datetime.date(2011, 3, 7)),
"BGB": pnp.Vendor("Barco Graphics N.V", "BGB", datetime.date(1996, 11, 29)),
"BPS": pnp.Vendor("Barco, N.V.", "BPS", datetime.date(2000, 9, 12)),
"DDS": pnp.Vendor("Barco, N.V.", "DDS", datetime.date(2000, 10, 23)),
"BEO": pnp.Vendor("Baug & Olufsen", "BEO", datetime.date(1996, 11, 29)),
"BCC": pnp.Vendor("Beaver Computer Corporaton", "BCC", datetime.date(1996, 11, 29)),
"BEC": pnp.Vendor("Beckhoff Automation", "BEC", datetime.date(2002, 4, 25)),
"BEI": pnp.Vendor("Beckworth Enterprises Inc", "BEI", datetime.date(1997, 7, 16)),
"AGC": pnp.Vendor("Beijing Aerospace Golden Card Electronic Engineering Co.,Ltd.", "AGC", datetime.date(2001, 6, 21)),
"AHS": pnp.Vendor("Beijing AnHeng SecoTech Information Technology Co., Ltd.", "AHS", datetime.date(2015, 3, 24)),
"ANV": pnp.Vendor("Beijing ANTVR Technology Co., Ltd.", "ANV", datetime.date(2015, 8, 24)),
"NRT": pnp.Vendor("Beijing Northern Radiantelecom Co.", "NRT", datetime.date(1999, 3, 20)),
"BEK": pnp.Vendor("Beko Elektronik A.S.", "BEK", datetime.date(2005, 6, 15)),
"BEL": pnp.Vendor("Beltronic Industrieelektronik GmbH", "BEL", datetime.date(2006, 9, 5)),
"BMI": pnp.Vendor("Benson Medical Instruments Company", "BMI", datetime.date(1996, 12, 4)),
"BUR": pnp.Vendor("Bernecker & Rainer Ind-Eletronik GmbH", "BUR", datetime.date(1996, 11, 29)),
"INZ": pnp.Vendor("Best Buy", "INZ", datetime.date(2004, 6, 4)),
"VPR": pnp.Vendor("Best Buy", "VPR", datetime.date(2002, 5, 16)),
"BPU": pnp.Vendor("Best Power", "BPU", datetime.date(1996, 11, 29)),
"BIA": pnp.Vendor("Biamp Systems Corporation", "BIA", datetime.date(2015, 5, 14)),
"ICC": pnp.Vendor("BICC Data Networks Ltd", "ICC", datetime.date(1996, 11, 29)),
"BIC": pnp.Vendor("Big Island Communications", "BIC", datetime.date(1997, 5, 13)),
"BIL": pnp.Vendor("Billion Electric Company Ltd", "BIL", datetime.date(1996, 12, 11)),
"BLN": pnp.Vendor("BioLink Technologies", "BLN", datetime.date(2000, 8, 10)),
"BIO": pnp.Vendor("BioLink Technologies International, Inc.", "BIO", datetime.date(2000, 5, 24)),
"BML": pnp.Vendor("BIOMED Lab", "BML", datetime.date(1997, 5, 22)),
"BSL": pnp.Vendor("Biomedical Systems Laboratory", "BSL", datetime.date(1997, 10, 16)),
"BMS": pnp.Vendor("BIOMEDISYS", "BMS", datetime.date(2000, 5, 24)),
"BAC": pnp.Vendor("Biometric Access Corporation", "BAC", datetime.date(1998, 5, 19)),
"BTO": pnp.Vendor("BioTao Ltd", "BTO", datetime.date(2012, 3, 21)),
"BIT": pnp.Vendor("Bit 3 Computer", "BIT", datetime.date(1996, 11, 29)),
"BTC": pnp.Vendor("Bit 3 Computer", "BTC", datetime.date(1996, 11, 29)),
"BTF": pnp.Vendor("Bitfield Oy", "BTF", datetime.date(1996, 11, 29)),
"BHZ": pnp.Vendor("BitHeadz, Inc.", "BHZ", datetime.date(2003, 9, 29)),
"BWK": pnp.Vendor("Bitworks Inc.", "BWK", datetime.date(2003, 7, 10)),
"BMD": pnp.Vendor("Blackmagic Design", "BMD", datetime.date(2012, 9, 13)),
"BDR": pnp.Vendor("Blonder Tongue Labs, Inc.", "BDR", datetime.date(2008, 9, 16)),
"BLP": pnp.Vendor("Bloomberg L.P.", "BLP", datetime.date(2008, 9, 16)),
"ZZZ": pnp.Vendor("Boca Research Inc", "ZZZ", datetime.date(1997, 2, 13)),
"BRI": pnp.Vendor("Boca Research Inc", "BRI", datetime.date(1996, 11, 29)),
"BST": pnp.Vendor("BodySound Technologies, Inc.", "BST", datetime.date(2008, 3, 12)),
"BII": pnp.Vendor("Boeckeler Instruments Inc", "BII", datetime.date(1996, 10, 17)),
"BCS": pnp.Vendor("Booria CAD/CAM systems", "BCS", datetime.date(2005, 5, 11)),
"BOS": pnp.Vendor("BOS", "BOS", datetime.date(1997, 7, 3)),
"BSE": pnp.Vendor("Bose Corporation", "BSE", datetime.date(2006, 9, 5)),
"BNS": pnp.Vendor("Boulder Nonlinear Systems", "BNS", datetime.date(2008, 3, 12)),
"BRA": pnp.Vendor("Braemac Pty Ltd", "BRA", datetime.date(2010, 11, 18)),
"BRM": pnp.Vendor("Braemar Inc", "BRM", datetime.date(1997, 10, 7)),
"BDO": pnp.Vendor("Brahler ICS", "BDO", datetime.date(1998, 6, 4)),
"BBL": pnp.Vendor("Brain Boxes Limited", "BBL", datetime.date(2001, 10, 2)),
"BRG": pnp.Vendor("Bridge Information Co., Ltd", "BRG", datetime.date(1998, 8, 11)),
"BSN": pnp.Vendor("BRIGHTSIGN, LLC", "BSN", datetime.date(2012, 2, 28)),
"BTE": pnp.Vendor("Brilliant Technology", "BTE", datetime.date(1996, 11, 29)),
"BCI": pnp.Vendor("Broadata Communications Inc.", "BCI", datetime.date(2013, 11, 19)),
"BCM": pnp.Vendor("Broadcom", "BCM", datetime.date(2004, 4, 1)),
"BRO": pnp.Vendor("BROTHER INDUSTRIES,LTD.", "BRO", datetime.date(2000, 2, 21)),
"NFC": pnp.Vendor("BTC Korea Co., Ltd", "NFC", datetime.date(2002, 2, 25)),
"BGT": pnp.Vendor("Budzetron Inc", "BGT", datetime.date(1996, 11, 29)),
"BUL": pnp.Vendor("Bull", "BUL", datetime.date(1998, 2, 3)),
"BNE": pnp.Vendor("Bull AB", "BNE", datetime.date(1998, 10, 6)),
"BLI": pnp.Vendor("Busicom", "BLI", datetime.date(1998, 8, 11)),
"BTI": pnp.Vendor("BusTech Inc", "BTI", datetime.date(1996, 11, 29)),
"BUS": pnp.Vendor("BusTek", "BUS", datetime.date(1996, 11, 29)),
"FLY": pnp.Vendor("Butterfly Communications", "FLY", datetime.date(1997, 5, 5)),
"BXE": pnp.Vendor("Buxco Electronics", "BXE", datetime.date(1996, 11, 29)),
"BYD": pnp.Vendor("byd:sign corporation", "BYD", datetime.date(2008, 4, 10)),
"XMM": pnp.Vendor("C3PO S.L.", "XMM", datetime.date(1998, 3, 3)),
"CAC": pnp.Vendor("CA & F Elettronica", "CAC", datetime.date(1999, 5, 16)),
"CBT": pnp.Vendor("Cabletime Ltd", "CBT", datetime.date(2010, 5, 4)),
"CSI": pnp.Vendor("Cabletron System Inc", "CSI", datetime.date(1996, 11, 29)),
"CCI": pnp.Vendor("Cache", "CCI", datetime.date(1996, 11, 29)),
"CAG": pnp.Vendor("CalComp", "CAG", datetime.date(1996, 11, 29)),
"CDP": pnp.Vendor("CalComp", "CDP", datetime.date(1996, 11, 29)),
"CUK": pnp.Vendor("Calibre UK Ltd", "CUK", datetime.date(2005, 9, 15)),
"CSO": pnp.Vendor("California Institute of Technology", "CSO", datetime.date(1999, 3, 20)),
"CAM": pnp.Vendor("Cambridge Audio", "CAM", datetime.date(2008, 8, 9)),
"CED": pnp.Vendor("Cambridge Electronic Design Ltd", "CED", datetime.date(1996, 11, 29)),
"CMR": pnp.Vendor("Cambridge Research Systems Ltd", "CMR", datetime.date(2002, 4, 25)),
"CNN": pnp.Vendor("Canon Inc", "CNN", datetime.date(1996, 11, 29)),
"CAI": pnp.Vendor("Canon Inc.", "CAI", datetime.date(2001, 11, 6)),
"UBU": pnp.Vendor("Canonical Ltd.", "UBU", datetime.date(2013, 5, 24)),
"CAN": pnp.Vendor("Canopus Company Ltd", "CAN", datetime.date(1996, 11, 29)),
"CPM": pnp.Vendor("Capella Microsystems Inc.", "CPM", datetime.date(2012, 5, 9)),
"CCP": pnp.Vendor("Capetronic USA Inc", "CCP", datetime.date(1996, 11, 29)),
"DJE": pnp.Vendor("Capstone Visua lProduct Development", "DJE", datetime.date(2008, 10, 9)),
"CAR": pnp.Vendor("Cardinal Company Ltd", "CAR", datetime.date(1996, 11, 29)),
"CRD": pnp.Vendor("Cardinal Technical Inc", "CRD", datetime.date(1996, 11, 29)),
"CLX": pnp.Vendor("CardLogix", "CLX", datetime.date(2001, 3, 15)),
"CKJ": pnp.Vendor("Carina System Co., Ltd.", "CKJ", datetime.date(2010, 9, 3)),
"CZE": pnp.Vendor("Carl Zeiss AG", "CZE", datetime.date(2009, 6, 3)),
"CAS": pnp.Vendor("CASIO COMPUTER CO.,LTD", "CAS", datetime.date(1998, 10, 6)),
"CAA": pnp.Vendor("Castles Automation Co., Ltd", "CAA", datetime.date(2000, 1, 13)),
"CAV": pnp.Vendor("Cavium Networks, Inc", "CAV", datetime.date(2011, 2, 2)),
"FVX": pnp.Vendor("C-C-C Group Plc", "FVX", datetime.date(1998, 5, 4)),
"CCL": pnp.Vendor("CCL/ITRI", "CCL", datetime.date(1997, 3, 31)),
"CCC": pnp.Vendor("C-Cube Microsystems", "CCC", datetime.date(1996, 11, 29)),
"CEP": pnp.Vendor("C-DAC", "CEP", datetime.date(1996, 11, 29)),
"CBR": pnp.Vendor("Cebra Tech A/S", "CBR", datetime.date(1996, 11, 29)),
"CEF": pnp.Vendor("Cefar Digital Vision", "CEF", datetime.date(1997, 2, 19)),
"CEN": pnp.Vendor("Centurion Technologies P/L", "CEN", datetime.date(2000, 10, 23)),
"TCE": pnp.Vendor("Century Corporation", "TCE", datetime.date(1996, 11, 29)),
"CRV": pnp.Vendor("Cerevo Inc.", "CRV", datetime.date(2010, 7, 13)),
"CER": pnp.Vendor("Ceronix", "CER", datetime.date(2008, 9, 2)),
"TOM": pnp.Vendor("Ceton Corporation", "TOM", datetime.date(2014, 5, 8)),
"CHP": pnp.Vendor("CH Products", "CHP", datetime.date(1997, 4, 24)),
"CHD": pnp.Vendor("ChangHong Electric Co.,Ltd", "CHD", datetime.date(2001, 11, 30)),
"CHA": pnp.Vendor("Chase Research PLC", "CHA", datetime.date(1996, 11, 29)),
"CHY": pnp.Vendor("Cherry GmbH", "CHY", datetime.date(1999, 5, 16)),
"CMO": pnp.Vendor("Chi Mei Optoelectronics corp.", "CMO", datetime.date(2001, 3, 15)),
"CHM": pnp.Vendor("CHIC TECHNOLOGY CORP.", "CHM", datetime.date(1999, 7, 16)),
"CEC": pnp.Vendor("Chicony Electronics Company Ltd", "CEC", datetime.date(1996, 11, 29)),
"CMN": pnp.Vendor("Chimei Innolux Corporation", "CMN", datetime.date(2010, 9, 2)),
"HLG": pnp.Vendor("China Hualu Group Co., Ltd.", "HLG", datetime.date(2013, 5, 13)),
"CHL": pnp.Vendor("Chloride-R&D", "CHL", datetime.date(1996, 11, 29)),
"CDG": pnp.Vendor("Christie Digital Systems Inc", "CDG", datetime.date(2001, 4, 24)),
"CVP": pnp.Vendor("Chromatec Video Products Ltd", "CVP", datetime.date(2013, 8, 9)),
"CHI": pnp.Vendor("Chrontel Inc", "CHI", datetime.date(1996, 11, 29)),
"CHT": pnp.Vendor("Chunghwa Picture Tubes,LTD.", "CHT", datetime.date(2001, 3, 15)),
"CTE": pnp.Vendor("Chunghwa Telecom Co., Ltd.", "CTE", datetime.date(2002, 5, 16)),
"KCD": pnp.Vendor("Chunichi Denshi Co.,LTD.", "KCD", datetime.date(2010, 12, 23)),
"QQQ": pnp.Vendor("Chuomusen Co., Ltd.", "QQQ", datetime.date(2002, 8, 7)),
"CGS": pnp.Vendor("Chyron Corp", "CGS", datetime.date(2008, 11, 13)),
"CNE": pnp.Vendor("Cine-tal", "CNE", datetime.date(2007, 6, 13)),
"PTG": pnp.Vendor("Cipher Systems Inc", "PTG", datetime.date(1996, 11, 29)),
"CIP": pnp.Vendor("Ciprico Inc", "CIP", datetime.date(1996, 11, 29)),
"CPC": pnp.Vendor("Ciprico Inc", "CPC", datetime.date(1996, 11, 29)),
"FPX": pnp.Vendor("Cirel Systemes", "FPX", datetime.date(1996, 11, 29)),
"CRQ": pnp.Vendor("Cirque Corporation", "CRQ", datetime.date(1996, 11, 29)),
"CIR": pnp.Vendor("Cirrus Logic Inc", "CIR", datetime.date(1996, 11, 29)),
"CLI": pnp.Vendor("Cirrus Logic Inc", "CLI", datetime.date(1996, 11, 29)),
"SNS": pnp.Vendor("Cirtech (UK) Ltd", "SNS", datetime.date(1997, 8, 20)),
"WSC": pnp.Vendor("CIS Technology Inc", "WSC", datetime.date(1996, 11, 29)),
"CIS": pnp.Vendor("Cisco Systems Inc", "CIS", datetime.date(1996, 11, 29)),
"CIL": pnp.Vendor("Citicom Infotech Private Limited", "CIL", datetime.date(2000, 8, 10)),
"CIT": pnp.Vendor("Citifax Limited", "CIT", datetime.date(1997, 7, 16)),
"CIN": pnp.Vendor("Citron GmbH", "CIN", datetime.date(2005, 7, 28)),
"CLA": pnp.Vendor("Clarion Company Ltd", "CLA", datetime.date(1996, 11, 29)),
"CVS": pnp.Vendor("Clarity Visual Systems", "CVS", datetime.date(2000, 1, 13)),
"CLE": pnp.Vendor("Classe Audio", "CLE", datetime.date(2006, 2, 16)),
"CLV": pnp.Vendor("Clevo Company", "CLV", datetime.date(1998, 1, 30)),
"PPM": pnp.Vendor("Clinton Electronics Corp.", "PPM", datetime.date(2003, 10, 1)),
"CLO": pnp.Vendor("Clone Computers", "CLO", datetime.date(1996, 11, 29)),
"CSL": pnp.Vendor("Cloudium Systems Ltd.", "CSL", datetime.date(2013, 2, 14)),
"CMC": pnp.Vendor("CMC Ltd", "CMC", datetime.date(1996, 11, 29)),
"CMI": pnp.Vendor("C-Media Electronics", "CMI", datetime.date(1996, 11, 29)),
"JQE": pnp.Vendor("CNet Technical Inc", "JQE", datetime.date(1996, 11, 29)),
"COB": pnp.Vendor("COBY Electronics Co., Ltd", "COB", datetime.date(2007, 6, 13)),
"COD": pnp.Vendor("CODAN Pty. Ltd.", "COD", datetime.date(2000, 10, 23)),
"COI": pnp.Vendor("Codec Inc.", "COI", datetime.date(2001, 11, 30)),
"CDN": pnp.Vendor("Codenoll Technical Corporation", "CDN", datetime.date(1996, 11, 29)),
"CNT": pnp.Vendor("COINT Multimedia Systems", "CNT", datetime.date(1999, 3, 20)),
"CDE": pnp.Vendor("Colin.de", "CDE", datetime.date(2005, 1, 18)),
"CMD": pnp.Vendor("Colorado MicroDisplay, Inc.", "CMD", datetime.date(1999, 3, 20)),
"CVI": pnp.Vendor("Colorado Video, Inc.", "CVI", datetime.date(2012, 8, 15)),
"MVX": pnp.Vendor("COM 1", "MVX", datetime.date(1996, 11, 29)),
"CMX": pnp.Vendor("Comex Electronics AB", "CMX", datetime.date(2004, 5, 28)),
"CIC": pnp.Vendor("Comm. Intelligence Corporation", "CIC", datetime.date(1996, 11, 29)),
"CLD": pnp.Vendor("COMMAT L.t.d.", "CLD", datetime.date(2000, 8, 10)),
"SDH": pnp.Vendor("Communications Specialies, Inc.", "SDH", datetime.date(2005, 9, 6)),
"INX": pnp.Vendor("Communications Supply Corporation (A division of WESCO)", "INX", datetime.date(2012, 11, 7)),
"CPL": pnp.Vendor("Compal Electronics Inc", "CPL", datetime.date(1996, 11, 29)),
"CPQ": pnp.Vendor("Compaq Computer Company", "CPQ", datetime.date(1996, 11, 29)),
"CPP": pnp.Vendor("Compound Photonics", "CPP", datetime.date(2013, 10, 1)),
"CPD": pnp.Vendor("CompuAdd", "CPD", datetime.date(1996, 11, 29)),
"CMS": pnp.Vendor("CompuMaster Srl", "CMS", datetime.date(1999, 2, 22)),
"CDS": pnp.Vendor("Computer Diagnostic Systems", "CDS", datetime.date(2001, 3, 15)),
"CPI": pnp.Vendor("Computer Peripherals Inc", "CPI", datetime.date(1996, 11, 29)),
"CTP": pnp.Vendor("Computer Technology Corporation", "CTP", datetime.date(1998, 3, 26)),
"CBI": pnp.Vendor("ComputerBoards Inc", "CBI", datetime.date(1998, 2, 3)),
"CTM": pnp.Vendor("Computerm Corporation", "CTM", datetime.date(1996, 11, 29)),
"CTN": pnp.Vendor("Computone Products", "CTN", datetime.date(1996, 11, 29)),
"COX": pnp.Vendor("Comrex", "COX", datetime.date(2011, 10, 18)),
"CTS": pnp.Vendor("Comtec Systems Co., Ltd.", "CTS", datetime.date(2002, 4, 25)),
"CMM": pnp.Vendor("Comtime GmbH", "CMM", datetime.date(2002, 9, 23)),
"COM": pnp.Vendor("Comtrol Corporation", "COM", datetime.date(1996, 11, 29)),
"CDI": pnp.Vendor("Concept Development Inc", "CDI", datetime.date(1996, 11, 29)),
"CSE": pnp.Vendor("Concept Solutions & Engineering", "CSE", datetime.date(1996, 12, 11)),
"DCI": pnp.Vendor("Concepts Inc", "DCI", datetime.date(1996, 11, 29)),
"CXT": pnp.Vendor("Conexant Systems", "CXT", datetime.date(1999, 1, 20)),
"CGT": pnp.Vendor("congatec AG", "CGT", datetime.date(2011, 6, 16)),
"CNI": pnp.Vendor("Connect Int'l A/S", "CNI", datetime.date(1996, 11, 29)),
"CWR": pnp.Vendor("Connectware Inc", "CWR", datetime.date(1996, 11, 29)),
"CRC": pnp.Vendor("CONRAC GmbH", "CRC", datetime.date(2004, 4, 20)),
"CAT": pnp.Vendor("Consultancy in Advanced Technology", "CAT", datetime.date(1997, 9, 19)),
"CEA": pnp.Vendor("Consumer Electronics Association", "CEA", datetime.date(2006, 9, 5)),
"CCJ": pnp.Vendor("CONTEC CO.,LTD.", "CCJ", datetime.date(2000, 8, 10)),
"CON": pnp.Vendor("Contec Company Ltd", "CON", datetime.date(1996, 11, 29)),
"CRH": pnp.Vendor("Contemporary Research Corp.", "CRH", datetime.date(2015, 2, 24)),
"CTR": pnp.Vendor("Control4 Corporation", "CTR", datetime.date(2014, 5, 28)),
"CDD": pnp.Vendor("Convergent Data Devices", "CDD", datetime.date(2004, 2, 27)),
"CDV": pnp.Vendor("Convergent Design Inc.", "CDV", datetime.date(2006, 9, 5)),
"CDC": pnp.Vendor("Core Dynamics Corporation", "CDC", datetime.date(1996, 11, 29)),
"ART": pnp.Vendor("Corion Industrial Corporation", "ART", datetime.date(1996, 11, 29)),
"COT": pnp.Vendor("Core Technology Inc", "COT", datetime.date(2000, 4, 19)),
"CLG": pnp.Vendor("CoreLogic", "CLG", datetime.date(1998, 11, 27)),
"CRN": pnp.Vendor("Cornerstone Imaging", "CRN", datetime.date(1996, 11, 29)),
"COR": pnp.Vendor("Corollary Inc", "COR", datetime.date(1996, 12, 13)),
"CSM": pnp.Vendor("Cosmic Engineering Inc.", "CSM", datetime.date(2012, 4, 18)),
"COS": pnp.Vendor("CoStar Corporation", "COS", datetime.date(1996, 11, 29)),
"CTA": pnp.Vendor("CoSystems Inc", "CTA", datetime.date(1998, 10, 24)),
"CVA": pnp.Vendor("Covia Inc.", "CVA", datetime.date(2010, 5, 11)),
"CPT": pnp.Vendor("cPATH", "CPT", datetime.date(1998, 3, 9)),
"CRA": pnp.Vendor("CRALTECH ELECTRONICA, S.L.", "CRA", datetime.date(2015, 3, 24)),
"CDK": pnp.Vendor("Cray Communications", "CDK", datetime.date(1996, 11, 29)),
"IOA": pnp.Vendor("CRE Technology Corporation", "IOA", datetime.date(1997, 6, 30)),
"CRE": pnp.Vendor("Creative Labs Inc", "CRE", datetime.date(1996, 11, 29)),
"CRL": pnp.Vendor("Creative Logic", "CRL", datetime.date(1997, 10, 16)),
"CTL": pnp.Vendor("Creative Technology Ltd", "CTL", datetime.date(1996, 11, 29)),
"CTX": pnp.Vendor("Creatix Polymedia GmbH", "CTX", datetime.date(1996, 11, 29)),
"CRS": pnp.Vendor("Crescendo Communication Inc", "CRS", datetime.date(1996, 11, 29)),
"CSD": pnp.Vendor("Cresta Systems Inc", "CSD", datetime.date(1997, 8, 1)),
"CEI": pnp.Vendor("Crestron Electronics, Inc.", "CEI", datetime.date(2006, 5, 8)),
"CRI": pnp.Vendor("Crio Inc.", "CRI", datetime.date(1999, 9, 13)),
"CII": pnp.Vendor("Cromack Industries Inc", "CII", datetime.date(1997, 1, 22)),
"XTL": pnp.Vendor("Crystal Computer", "XTL", datetime.date(1996, 11, 29)),
"CSC": pnp.Vendor("Crystal Semiconductor", "CSC", datetime.date(1996, 11, 29)),
"CLM": pnp.Vendor("CrystaLake Multimedia", "CLM", datetime.date(1996, 11, 29)),
"CSS": pnp.Vendor("CSS Laboratories", "CSS", datetime.date(1997, 1, 2)),
"CST": pnp.Vendor("CSTI Inc", "CST", datetime.date(1996, 11, 29)),
"CTC": pnp.Vendor("CTC Communication Development Company Ltd", "CTC", datetime.date(1997, 10, 21)),
"CUB": pnp.Vendor("Cubix Corporation", "CUB", datetime.date(1996, 11, 29)),
"CWC": pnp.Vendor("Curtiss-Wright Controls, Inc.", "CWC", datetime.date(2013, 4, 5)),
"CYL": pnp.Vendor("Cyberlabs", "CYL", datetime.date(1998, 4, 14)),
"CYB": pnp.Vendor("CyberVision", "CYB", datetime.date(1997, 5, 13)),
"CYW": pnp.Vendor("Cyberware", "CYW", datetime.date(2000, 2, 21)),
"CBX": pnp.Vendor("Cybex Computer Products Corporation", "CBX", datetime.date(1999, 11, 8)),
"CYD": pnp.Vendor("Cyclades Corporation", "CYD", datetime.date(2001, 5, 7)),
"CYC": pnp.Vendor("Cylink Corporation", "CYC", datetime.date(1996, 11, 29)),
"CYX": pnp.Vendor("Cyrix Corporation", "CYX", datetime.date(1997, 10, 21)),
"CRX": pnp.Vendor("Cyrix Corporation", "CRX", datetime.date(1997, 3, 21)),
"CYT": pnp.Vendor("Cytechinfo Inc", "CYT", datetime.date(1998, 3, 13)),
"CYV": pnp.Vendor("Cyviz AS", "CYV", datetime.date(2002, 4, 25)),
"DMP": pnp.Vendor("D&M Holdings Inc, Professional Business Company", "DMP", datetime.date(2006, 9, 5)),
"OPI": pnp.Vendor("D.N.S. Corporation", "OPI", datetime.date(1996, 11, 29)),
"DDA": pnp.Vendor("DA2 Technologies Corporation", "DDA", datetime.date(2006, 3, 13)),
"DAW": pnp.Vendor("DA2 Technologies Inc", "DAW", datetime.date(2005, 9, 6)),
"DWE": pnp.Vendor("Daewoo Electronics Company Ltd", "DWE", datetime.date(1996, 11, 29)),
"TLT": pnp.Vendor("Dai Telecom S.p.A.", "TLT", datetime.date(2003, 6, 4)),
"DIN": pnp.Vendor("Daintelecom Co., Ltd", "DIN", datetime.date(1999, 11, 8)),
"DAI": pnp.Vendor("DAIS SET Ltd.", "DAI", datetime.date(2000, 2, 21)),
"DAK": pnp.Vendor("Daktronics", "DAK", datetime.date(2004, 6, 23)),
"DCC": pnp.Vendor("Dale Computer Corporation", "DCC", datetime.date(1996, 11, 29)),
"DCT": pnp.Vendor("Dancall Telecom A/S", "DCT", datetime.date(1997, 8, 12)),
"DAN": pnp.Vendor("Danelec Marine A/S", "DAN", datetime.date(2009, 12, 24)),
"DDD": pnp.Vendor("Danka Data Devices", "DDD", datetime.date(1996, 11, 29)),
"DAU": pnp.Vendor("Daou Tech Inc", "DAU", datetime.date(1996, 11, 29)),
"HCA": pnp.Vendor("DAT", "HCA", datetime.date(2001, 3, 15)),
"DAX": pnp.Vendor("Data Apex Ltd", "DAX", datetime.date(1996, 11, 29)),
"DDI": pnp.Vendor("Data Display AG", "DDI", datetime.date(2002, 7, 17)),
"DXP": pnp.Vendor("Data Expert Corporation", "DXP", datetime.date(1996, 11, 29)),
"EXP": pnp.Vendor("Data Export Corporation", "EXP", datetime.date(1996, 11, 29)),
"DMO": pnp.Vendor("Data Modul AG", "DMO", datetime.date(2013, 12, 3)),
"EBH": pnp.Vendor("Data Price Informatica", "EBH", datetime.date(2001, 5, 24)),
"DRI": pnp.Vendor("Data Race Inc", "DRI", datetime.date(1997, 7, 30)),
"DRC": pnp.Vendor("Data Ray Corp.", "DRC", datetime.date(2001, 11, 30)),
"DTX": pnp.Vendor("Data Translation", "DTX", datetime.date(1996, 11, 29)),
"DVT": pnp.Vendor("Data Video", "DVT", datetime.date(2007, 2, 13)),
"DBK": pnp.Vendor("Databook Inc", "DBK", datetime.date(1996, 11, 29)),
"DCD": pnp.Vendor("Datacast LLC", "DCD", datetime.date(1997, 12, 2)),
"TRN": pnp.Vendor("Datacommunicatie Tron B.V.", "TRN", datetime.date(1996, 11, 29)),
"DQB": pnp.Vendor("Datacube Inc", "DQB", datetime.date(1996, 11, 29)),
"DDT": pnp.Vendor("Datadesk Technologies Inc", "DDT", datetime.date(1998, 11, 27)),
"DKY": pnp.Vendor("Datakey Inc", "DKY", datetime.date(1998, 4, 6)),
"LJX": pnp.Vendor("Datalogic Corporation", "LJX", datetime.date(1996, 11, 29)),
"DTN": pnp.Vendor("Datang Telephone Co", "DTN", datetime.date(1998, 9, 23)),
"DII": pnp.Vendor("Dataq Instruments Inc", "DII", datetime.date(1996, 11, 29)),
"DDE": pnp.Vendor("Datasat Digital Entertainment", "DDE", datetime.date(2011, 11, 18)),
"DCV": pnp.Vendor("Datatronics Technology Inc", "DCV", datetime.date(1997, 1, 2)),
"DAT": pnp.Vendor("Datel Inc", "DAT", datetime.date(1996, 11, 29)),
"MSD": pnp.Vendor("Datenerfassungs- und Informationssysteme", "MSD", datetime.date(1998, 3, 16)),
"DAV": pnp.Vendor("Davicom Semiconductor Inc", "DAV", datetime.date(1997, 1, 15)),
"DAS": pnp.Vendor("DAVIS AS", "DAS", datetime.date(1998, 2, 3)),
"DBN": pnp.Vendor("DB Networks Inc", "DBN", datetime.date(1997, 12, 1)),
"HWC": pnp.Vendor("DBA Hans Wedemeyer", "HWC", datetime.date(1999, 3, 20)),
"DCM": pnp.Vendor("DCM Data Products", "DCM", datetime.date(1996, 11, 29)),
"DGT": pnp.Vendor("Dearborn Group Technology", "DGT", datetime.date(1997, 11, 11)),
"DXD": pnp.Vendor("DECIMATOR DESIGN PTY LTD", "DXD", datetime.date(2012, 3, 6)),
"DCR": pnp.Vendor("Decros Ltd", "DCR", datetime.date(1996, 11, 29)),
"MLD": pnp.Vendor("Deep Video Imaging Ltd", "MLD", datetime.date(2003, 8, 14)),
"DFT": pnp.Vendor("DEI Holdings dba Definitive Technology", "DFT", datetime.date(2011, 12, 9)),
"DEI": pnp.Vendor("Deico Electronics", "DEI", datetime.date(1996, 11, 29)),
"DLL": pnp.Vendor("Dell Inc", "DLL", datetime.date(2009, 3, 27)),
"DEL": pnp.Vendor("Dell Inc.", "DEL", datetime.date(2009, 12, 9)),
"DPH": pnp.Vendor("Delphi Automotive LLP", "DPH", datetime.date(2013, 10, 15)),
"DPC": pnp.Vendor("Delta Electronics Inc", "DPC", datetime.date(1996, 11, 29)),
"DDV": pnp.Vendor("Delta Information Systems, Inc", "DDV", datetime.date(2012, 1, 3)),
"DTA": pnp.Vendor("DELTATEC", "DTA", datetime.date(2009, 3, 13)),
"FPS": pnp.Vendor("Deltec Corporation", "FPS", datetime.date(1996, 11, 29)),
"DON": pnp.Vendor("DENON, Ltd.", "DON", datetime.date(2004, 4, 1)),
"DHD": pnp.Vendor("Dension Audio Systems", "DHD", datetime.date(2013, 3, 4)),
"DEN": pnp.Vendor("Densitron Computers Ltd", "DEN", datetime.date(1999, 9, 13)),
"DTT": pnp.Vendor("Design & Test Technology, Inc.", "DTT", datetime.date(2010, 9, 30)),
"LPI": pnp.Vendor("Design Technology", "LPI", datetime.date(1996, 11, 29)),
"DNI": pnp.Vendor("Deterministic Networks Inc.", "DNI", datetime.date(2000, 4, 19)),
"BCQ": pnp.Vendor("Deutsche Telekom Berkom GmbH", "BCQ", datetime.date(1997, 8, 12)),
"DTO": pnp.Vendor("Deutsche Thomson OHG", "DTO", datetime.date(2007, 6, 14)),
"DVL": pnp.Vendor("Devolo AG", "DVL", datetime.date(2002, 5, 30)),
"DXL": pnp.Vendor("Dextera Labs Inc", "DXL", datetime.date(2009, 12, 9)),
"DFI": pnp.Vendor("DFI", "DFI", datetime.date(1996, 11, 29)),
"DHP": pnp.Vendor("DH Print", "DHP", datetime.date(1996, 11, 29)),
"DIA": pnp.Vendor("Diadem", "DIA", datetime.date(1996, 11, 29)),
"DGS": pnp.Vendor("Diagsoft Inc", "DGS", datetime.date(1996, 11, 29)),
"DCO": pnp.Vendor("Dialogue Technology Corporation", "DCO", datetime.date(2004, 6, 16)),
"DCS": pnp.Vendor("Diamond Computer Systems Inc", "DCS", datetime.date(1996, 11, 29)),
"DLC": pnp.Vendor("Diamond Lane Comm. Corporation", "DLC", datetime.date(1996, 11, 29)),
"DNV": pnp.Vendor("DiCon", "DNV", datetime.date(2004, 12, 15)),
"DVD": pnp.Vendor("Dictaphone Corporation", "DVD", datetime.date(1998, 4, 3)),
"DBD": pnp.Vendor("Diebold Inc.", "DBD", datetime.date(2006, 9, 5)),
"DAE": pnp.Vendor("Digatron Industrie Elektronik GmbH", "DAE", datetime.date(1997, 2, 24)),
"DGI": pnp.Vendor("DIGI International", "DGI", datetime.date(1996, 11, 29)),
"DBI": pnp.Vendor("DigiBoard Inc", "DBI", datetime.date(1996, 11, 29)),
"DIG": pnp.Vendor("Digicom S.p.A.", "DIG", datetime.date(1996, 11, 29)),
"DMB": pnp.Vendor("Digicom Systems Inc", "DMB", datetime.date(1998, 3, 13)),
"DGP": pnp.Vendor("Digicorp European sales S.A.", "DGP", datetime.date(1997, 5, 22)),
"DGA": pnp.Vendor("Digiital Arts Inc", "DGA", datetime.date(2007, 6, 14)),
"DXC": pnp.Vendor("Digipronix Control Systems", "DXC", datetime.date(1999, 7, 16)),
"DAC": pnp.Vendor("Digital Acoustics Corporation", "DAC", datetime.date(2000, 5, 24)),
"DAL": pnp.Vendor("Digital Audio Labs Inc", "DAL", datetime.date(1996, 11, 29)),
"DCA": pnp.Vendor("Digital Communications Association", "DCA", datetime.date(1996, 11, 29)),
"SHR": pnp.Vendor("Digital Discovery", "SHR", datetime.date(1997, 9, 24)),
"PRF": pnp.Vendor("Schneider Electric Japan Holdings, Ltd.", "PRF", datetime.date(2003, 1, 2)),
"DEC": pnp.Vendor("Digital Equipment Corporation", "DEC", datetime.date(1996, 11, 29)),
"DPS": pnp.Vendor("Digital Processing Systems", "DPS", datetime.date(1996, 11, 29)),
"DPL": pnp.Vendor("Digital Projection Limited", "DPL", datetime.date(2002, 7, 9)),
"DRD": pnp.Vendor("DIGITAL REFLECTION INC.", "DRD", datetime.date(2000, 2, 21)),
"DVS": pnp.Vendor("Digital Video System", "DVS", datetime.date(1996, 11, 29)),
"DPA": pnp.Vendor("DigiTalk Pro AV", "DPA", datetime.date(2000, 10, 23)),
"DLG": pnp.Vendor("Digital-Logic GmbH", "DLG", datetime.date(2003, 9, 2)),
"DSI": pnp.Vendor("Digitan Systems Inc", "DSI", datetime.date(1996, 11, 29)),
"DLT": pnp.Vendor("Digitelec Informatique Park Cadera", "DLT", datetime.date(1996, 11, 29)),
"DTE": pnp.Vendor("Dimension Technologies, Inc.", "DTE", datetime.date(2010, 5, 3)),
"DMM": pnp.Vendor("Dimond Multimedia Systems Inc", "DMM", datetime.date(1996, 11, 29)),
"DIS": pnp.Vendor("Diseda S.A.", "DIS", datetime.date(1996, 11, 29)),
"DMT": pnp.Vendor("Distributed Management Task Force, Inc. (DMTF)", "DMT", datetime.date(2009, 3, 31)),
"DTI": pnp.Vendor("Diversified Technology, Inc.", "DTI", datetime.date(1996, 11, 29)),
"ABO": pnp.Vendor("D-Link Systems Inc", "ABO", datetime.date(1996, 11, 29)),
"DLK": pnp.Vendor("D-Link Systems Inc", "DLK", datetime.date(1996, 11, 29)),
"DNA": pnp.Vendor("DNA Enterprises, Inc.", "DNA", datetime.date(1998, 9, 1)),
"AUO": pnp.Vendor("DO NOT USE - AUO", "AUO", datetime.date(2008, 9, 16)),
"LPL": pnp.Vendor("DO NOT USE - LPL", "LPL", datetime.date(2008, 9, 16)),
"PHI": pnp.Vendor("DO NOT USE - PHI", "PHI", datetime.date(1996, 11, 29)),
"PTW": pnp.Vendor("DO NOT USE - PTW", "PTW", datetime.date(2009, 9, 9)),
"PVC": pnp.Vendor("DO NOT USE - PVC", "PVC", datetime.date(2009, 9, 9)),
"RTK": pnp.Vendor("DO NOT USE - RTK", "RTK", datetime.date(2009, 9, 9)),
"SEG": pnp.Vendor("DO NOT USE - SEG", "SEG", datetime.date(2009, 9, 9)),
"TNJ": pnp.Vendor("DO NOT USE - TNJ", "TNJ", datetime.date(2009, 9, 9)),
"UND": pnp.Vendor("DO NOT USE - UND", "UND", datetime.date(1996, 11, 29)),
"UNE": pnp.Vendor("DO NOT USE - UNE", "UNE", datetime.date(1996, 11, 29)),
"UNF": pnp.Vendor("DO NOT USE - UNF", "UNF", datetime.date(1996, 11, 29)),
"WAN": pnp.Vendor("DO NOT USE - WAN", "WAN", datetime.date(2009, 9, 9)),
"XER": pnp.Vendor("DO NOT USE - XER", "XER", datetime.date(2009, 9, 9)),
"XOC": pnp.Vendor("DO NOT USE - XOC", "XOC", datetime.date(2009, 9, 9)),
"DBL": pnp.Vendor("Doble Engineering Company", "DBL", datetime.date(1996, 11, 29)),
"DPI": pnp.Vendor("DocuPoint", "DPI", datetime.date(1996, 11, 29)),
"DLB": pnp.Vendor("Dolby Laboratories Inc.", "DLB", datetime.date(2010, 1, 27)),
"DOL": pnp.Vendor("Dolman Technologies Group Inc", "DOL", datetime.date(1997, 11, 11)),
"DSP": pnp.Vendor("Domain Technology Inc", "DSP", datetime.date(1996, 11, 29)),
"DMS": pnp.Vendor("DOME imaging systems", "DMS", datetime.date(2000, 10, 23)),
"DOM": pnp.Vendor("Dome Imaging Systems", "DOM", datetime.date(1996, 11, 29)),
"AIK": pnp.Vendor("Dongguan Alllike Electronics Co., Ltd.", "AIK", datetime.date(2015, 4, 11)),
"DUA": pnp.Vendor("Dosch & Amand GmbH & Company KG", "DUA", datetime.date(1997, 12, 2)),
"DOT": pnp.Vendor("Dotronic Mikroelektronik GmbH", "DOT", datetime.date(2002, 6, 28)),
"DIM": pnp.Vendor("dPict Imaging, Inc.", "DIM", datetime.date(2008, 2, 12)),
"DPX": pnp.Vendor("DpiX, Inc.", "DPX", datetime.date(1998, 9, 23)),
"DPT": pnp.Vendor("DPT", "DPT", datetime.date(1996, 11, 29)),
"DRB": pnp.Vendor("Dr. Bott KG", "DRB", datetime.date(2002, 4, 25)),
"DNT": pnp.Vendor("Dr. Neuhous Telekommunikation GmbH", "DNT", datetime.date(1996, 11, 29)),
"DIT": pnp.Vendor("Dragon Information Technology", "DIT", datetime.date(1996, 11, 29)),
"DRS": pnp.Vendor("DRS Defense Solutions, LLC", "DRS", datetime.date(2011, 10, 18)),
"DSD": pnp.Vendor("DS Multimedia Pte Ltd", "DSD", datetime.date(2006, 2, 14)),
"DSM": pnp.Vendor("DSM Digital Services GmbH", "DSM", datetime.date(1996, 11, 29)),
"DCE": pnp.Vendor("dSPACE GmbH", "DCE", datetime.date(1996, 12, 16)),
"DTC": pnp.Vendor("DTC Tech Corporation", "DTC", datetime.date(1996, 11, 29)),
"DGK": pnp.Vendor("DugoTech Co., LTD", "DGK", datetime.date(2007, 6, 14)),
"DMC": pnp.Vendor("Dune Microsystems Corporation", "DMC", datetime.date(1996, 11, 29)),
"DYC": pnp.Vendor("Dycam Inc", "DYC", datetime.date(1998, 1, 8)),
"DYM": pnp.Vendor("Dymo-CoStar Corporation", "DYM", datetime.date(1998, 12, 28)),
"DCL": pnp.Vendor("Dynamic Controls Ltd", "DCL", datetime.date(2000, 5, 24)),
"DTK": pnp.Vendor("Dynax Electronics (HK) Ltd", "DTK", datetime.date(1996, 11, 29)),
"DYX": pnp.Vendor("Dynax Electronics (HK) Ltd", "DYX", datetime.date(1996, 11, 29)),
"EDC": pnp.Vendor("e.Digital Corporation", "EDC", datetime.date(2000, 10, 23)),
"EEP": pnp.Vendor("E.E.P.D. GmbH", "EEP", datetime.date(2007, 6, 14)),
"EGL": pnp.Vendor("Eagle Technology", "EGL", datetime.date(1996, 11, 29)),
"KOD": pnp.Vendor("Eastman Kodak Company", "KOD", datetime.date(2000, 5, 24)),
"EKC": pnp.Vendor("Eastman Kodak Company", "EKC", datetime.date(1996, 11, 29)),
"TWI": pnp.Vendor("Easytel oy", "TWI", datetime.date(1999, 7, 16)),
"EBS": pnp.Vendor("EBS Euchner Büro- und Schulsysteme GmbH", "EBS", datetime.date(2013, 2, 5)),
"ECO": pnp.Vendor("Echo Speech Corporation", "ECO", datetime.date(1996, 11, 29)),
"ETI": pnp.Vendor("Eclipse Tech Inc", "ETI", datetime.date(1996, 11, 29)),
"ECM": pnp.Vendor("E-Cmos Tech Corporation", "ECM", datetime.date(1996, 11, 29)),
"ESC": pnp.Vendor("Eden Sistemas de Computacao S/A", "ESC", datetime.date(1996, 11, 29)),
"EDI": pnp.Vendor("Edimax Tech. Company Ltd", "EDI", datetime.date(1996, 11, 29)),
"EDM": pnp.Vendor("EDMI", "EDM", datetime.date(1998, 7, 16)),
"ELI": pnp.Vendor("Edsun Laboratories", "ELI", datetime.date(1996, 11, 29)),
"EES": pnp.Vendor("EE Solutions, Inc.", "EES", datetime.date(2003, 4, 16)),
"EEH": pnp.Vendor("EEH Datalink GmbH", "EEH", datetime.date(1997, 7, 3)),
"ENI": pnp.Vendor("Efficient Networks", "ENI", datetime.date(1996, 11, 29)),
"EGN": pnp.Vendor("Egenera, Inc.", "EGN", datetime.date(2002, 10, 8)),
"EIC": pnp.Vendor("Eicon Technology Corporation", "EIC", datetime.date(1996, 11, 29)),
"EGD": pnp.Vendor("EIZO GmbH Display Technologies", "EGD", datetime.date(2009, 2, 13)),
"ENC": pnp.Vendor("Eizo Nanao Corporation", "ENC", datetime.date(1998, 12, 28)),
"EKS": pnp.Vendor("EKSEN YAZILIM", "EKS", datetime.date(2002, 4, 25)),
"ELA": pnp.Vendor("ELAD srl", "ELA", datetime.date(2002, 4, 25)),
"ETD": pnp.Vendor("ELAN MICROELECTRONICS CORPORATION", "ETD", datetime.date(2009, 11, 3)),
"TSH": pnp.Vendor("ELAN MICROELECTRONICS CORPORATION", "TSH", datetime.date(2014, 11, 14)),
"ESA": pnp.Vendor("Elbit Systems of America", "ESA", datetime.date(2009, 6, 15)),
"ESG": pnp.Vendor("ELCON Systemtechnik GmbH", "ESG", datetime.date(1999, 7, 16)),
"LXS": pnp.Vendor("ELEA CardWare", "LXS", datetime.date(1998, 6, 25)),
"ECP": pnp.Vendor("Elecom Company Ltd", "ECP", datetime.date(1996, 11, 29)),
"ELE": pnp.Vendor("Elecom Company Ltd", "ELE", datetime.date(1996, 11, 29)),
"ECA": pnp.Vendor("Electro Cam Corp.", "ECA", datetime.date(2000, 8, 10)),
"ELC": pnp.Vendor("Electro Scientific Ind", "ELC", datetime.date(1996, 11, 29)),
"MMM": pnp.Vendor("Electronic Measurements", "MMM", datetime.date(1996, 11, 29)),
"ETS": pnp.Vendor("Electronic Trade Solutions Ltd", "ETS", datetime.date(2002, 8, 20)),
"EDG": pnp.Vendor("Electronic-Design GmbH", "EDG", datetime.date(1997, 8, 12)),
"ELL": pnp.Vendor("Electrosonic Ltd", "ELL", datetime.date(1999, 9, 13)),
"ELT": pnp.Vendor("Element Labs, Inc.", "ELT", datetime.date(2007, 10, 11)),
"EGA": pnp.Vendor("Elgato Systems LLC", "EGA", datetime.date(2011, 2, 8)),
"ECS": pnp.Vendor("Elitegroup Computer Systems Company Ltd", "ECS", datetime.date(1996, 11, 29)),
"UEG": pnp.Vendor("Elitegroup Computer Systems Company Ltd", "UEG", datetime.date(1996, 11, 29)),
"ELG": pnp.Vendor("Elmeg GmbH Kommunikationstechnik", "ELG", datetime.date(1996, 11, 29)),
"ELM": pnp.Vendor("Elmic Systems Inc", "ELM", datetime.date(1996, 11, 29)),
"EMO": pnp.Vendor("ELMO COMPANY, LIMITED", "EMO", datetime.date(2012, 6, 26)),
"ELO": pnp.Vendor("Elo TouchSystems Inc", "ELO", datetime.date(1996, 11, 29)),
"ELX": pnp.Vendor("Elonex PLC", "ELX", datetime.date(1996, 11, 29)),
"LPE": pnp.Vendor("El-PUSK Co., Ltd.", "LPE", datetime.date(2001, 8, 14)),
"ELS": pnp.Vendor("ELSA GmbH", "ELS", datetime.date(1996, 11, 29)),
"EAG": pnp.Vendor("ELTEC Elektronik AG", "EAG", datetime.date(2014, 11, 25)),
"EMB": pnp.Vendor("Embedded computing inc ltd", "EMB", datetime.date(2002, 2, 25)),
"EST": pnp.Vendor("Embedded Solution Technology", "EST", datetime.date(2000, 5, 24)),
"EMD": pnp.Vendor("Embrionix Design Inc.", "EMD", datetime.date(2013, 7, 24)),
"EMK": pnp.Vendor("Emcore Corporation", "EMK", datetime.date(2012, 5, 31)),
"EDT": pnp.Vendor("Emerging Display Technologies Corp", "EDT", datetime.date(2009, 8, 18)),
"EMG": pnp.Vendor("EMG Consultants Inc", "EMG", datetime.date(1996, 11, 29)),
"EME": pnp.Vendor("EMiNE TECHNOLOGY COMPANY, LTD.", "EME", datetime.date(2005, 6, 16)),
"EPC": pnp.Vendor("Empac", "EPC", datetime.date(1996, 12, 4)),
"EMU": pnp.Vendor("Emulex Corporation", "EMU", datetime.date(1996, 11, 29)),
"ECI": pnp.Vendor("Enciris Technologies", "ECI", datetime.date(2008, 11, 1)),
"ECT": pnp.Vendor("Enciris Technologies", "ECT", datetime.date(2008, 11, 1)),
"ENE": pnp.Vendor("ENE Technology Inc.", "ENE", datetime.date(2001, 3, 15)),
"DTL": pnp.Vendor("e-Net Inc", "DTL", datetime.date(1997, 10, 16)),
"EHN": pnp.Vendor("Enhansoft", "EHN", datetime.date(2010, 11, 16)),
"END": pnp.Vendor("ENIDAN Technologies Ltd", "END", datetime.date(2000, 4, 19)),
"ESD": pnp.Vendor("Ensemble Designs, Inc", "ESD", datetime.date(2009, 12, 9)),
"ENS": pnp.Vendor("Ensoniq Corporation", "ENS", datetime.date(1996, 11, 29)),
"ENT": pnp.Vendor("Enterprise Comm. & Computing Inc", "ENT", datetime.date(1996, 11, 29)),
"EPI": pnp.Vendor("Envision Peripherals, Inc", "EPI", datetime.date(1999, 2, 22)),
"EON": pnp.Vendor("Eon Instrumentation, Inc.", "EON", datetime.date(2015, 1, 15)),
"EPN": pnp.Vendor("EPiCON Inc.", "EPN", datetime.date(1998, 9, 23)),
"EPH": pnp.Vendor("Epiphan Systems Inc.", "EPH", datetime.date(2011, 3, 14)),
"EHJ": pnp.Vendor("Epson Research", "EHJ", datetime.date(1996, 11, 29)),
"EQX": pnp.Vendor("Equinox Systems Inc", "EQX", datetime.date(1996, 11, 29)),
"EQP": pnp.Vendor("Equipe Electronics Ltd.", "EQP", datetime.date(2005, 7, 14)),
"EGO": pnp.Vendor("Ergo Electronics", "EGO", datetime.date(1996, 11, 29)),
"ERG": pnp.Vendor("Ergo System", "ERG", datetime.date(1996, 11, 29)),
"ERI": pnp.Vendor("Ericsson Mobile Communications AB", "ERI", datetime.date(1997, 10, 22)),
"EUT": pnp.Vendor("Ericsson Mobile Networks B.V.", "EUT", datetime.date(1998, 4, 14)),
"ERN": pnp.Vendor("Ericsson, Inc.", "ERN", datetime.date(1998, 9, 23)),
"ESK": pnp.Vendor("ES&S", "ESK", datetime.date(1999, 11, 8)),
"ESN": pnp.Vendor("eSATURNUS", "ESN", datetime.date(2012, 2, 21)),
"ERT": pnp.Vendor("Escort Insturments Corporation", "ERT", datetime.date(1997, 5, 2)),
"ESS": pnp.Vendor("ESS Technology Inc", "ESS", datetime.date(1996, 11, 29)),
"ECC": pnp.Vendor("ESSential Comm. Corporation", "ECC", datetime.date(1996, 11, 29)),
"ESL": pnp.Vendor("Esterline Technologies", "ESL", datetime.date(2012, 1, 6)),
"ESB": pnp.Vendor("Esterline Belgium BVBA", "ESB", datetime.date(2015, 1, 15)),
"ESY": pnp.Vendor("E-Systems Inc", "ESY", datetime.date(1996, 11, 29)),
"EEE": pnp.Vendor("ET&T Technology Company Ltd", "EEE", datetime.date(1998, 5, 4)),
"ETT": pnp.Vendor("E-Tech Inc", "ETT", datetime.date(1996, 11, 29)),
"ETK": pnp.Vendor("eTEK Labs Inc.", "ETK", datetime.date(1998, 7, 16)),
"ETH": pnp.Vendor("Etherboot Project", "ETH", datetime.date(2010, 7, 9)),
"ECK": pnp.Vendor("Eugene Chukhlomin Sole Proprietorship, d.b.a.", "ECK", datetime.date(2008, 5, 3)),
"ERP": pnp.Vendor("Euraplan GmbH", "ERP", datetime.date(1996, 11, 29)),
"EAS": pnp.Vendor("Evans and Sutherland Computer", "EAS", datetime.date(2003, 1, 28)),
"EVX": pnp.Vendor("Everex", "EVX", datetime.date(1996, 11, 29)),
"ETC": pnp.Vendor("Everton Technology Company Ltd", "ETC", datetime.date(1997, 4, 10)),
"ETL": pnp.Vendor("Evertz Microsystems Ltd.", "ETL", datetime.date(2007, 6, 14)),
"EVI": pnp.Vendor("eviateg GmbH", "EVI", datetime.date(2000, 2, 21)),
"EMI": pnp.Vendor("Ex Machina Inc", "EMI", datetime.date(1996, 11, 29)),
"YHW": pnp.Vendor("Exacom SA", "YHW", datetime.date(1996, 11, 29)),
"EXT": pnp.Vendor("Exatech Computadores & Servicos Ltda", "EXT", datetime.date(1998, 9, 23)),
"ECL": pnp.Vendor("Excel Company Ltd", "ECL", datetime.date(1997, 5, 27)),
"EXC": pnp.Vendor("Excession Audio", "EXC", datetime.date(1998, 11, 6)),
"XFO": pnp.Vendor("EXFO Electro Optical Engineering", "XFO", datetime.date(1998, 4, 29)),
"EXI": pnp.Vendor("Exide Electronics", "EXI", datetime.date(1996, 11, 29)),
"ESI": pnp.Vendor("Extended Systems, Inc.", "ESI", datetime.date(1999, 7, 16)),
"EXY": pnp.Vendor("Exterity Ltd", "EXY", datetime.date(2009, 2, 12)),
"CRO": pnp.Vendor("Extraordinary Technologies PTY Limited", "CRO", datetime.date(2005, 4, 11)),
"EXX": pnp.Vendor("Exxact GmbH", "EXX", datetime.date(1996, 11, 29)),
"EYF": pnp.Vendor("eyefactive Gmbh", "EYF", datetime.date(2015, 7, 7)),
"EYE": pnp.Vendor("eyevis GmbH", "EYE", datetime.date(2011, 11, 18)),
"EZE": pnp.Vendor("EzE Technologies", "EZE", datetime.date(2005, 2, 21)),
"FJT": pnp.Vendor("F.J. Tieman BV", "FJT", datetime.date(1998, 6, 25)),
"FFI": pnp.Vendor("Fairfield Industries", "FFI", datetime.date(1996, 11, 29)),
"FAN": pnp.Vendor("Fantalooks Co., Ltd.", "FAN", datetime.date(2014, 3, 12)),
"FNC": pnp.Vendor("Fanuc LTD", "FNC", datetime.date(1997, 1, 29)),
"FAR": pnp.Vendor("Farallon Computing", "FAR", datetime.date(1996, 11, 29)),
"FRO": pnp.Vendor("FARO Technologies", "FRO", datetime.date(2012, 9, 21)),
"FLI": pnp.Vendor("Faroudja Laboratories", "FLI", datetime.date(2004, 6, 2)),
"FMA": pnp.Vendor("Fast Multimedia AG", "FMA", datetime.date(1996, 11, 29)),
"FTI": pnp.Vendor("FastPoint Technologies, Inc.", "FTI", datetime.date(2001, 6, 21)),
"FIT": pnp.Vendor("Feature Integration Technology Inc.", "FIT", datetime.date(2009, 8, 11)),
"FEL": pnp.Vendor("Fellowes & Questec", "FEL", datetime.date(1996, 11, 29)),
"FMI": pnp.Vendor("Fellowes, Inc.", "FMI", datetime.date(2001, 7, 5)),
"FEN": pnp.Vendor("Fen Systems Ltd.", "FEN", datetime.date(2010, 5, 4)),
"FER": pnp.Vendor("Ferranti Int'L", "FER", datetime.date(1996, 11, 29)),
"TLA": pnp.Vendor("Ferrari Electronic GmbH", "TLA", datetime.date(1996, 12, 4)),
"FHL": pnp.Vendor("FHLP", "FHL", datetime.date(1996, 11, 29)),
"FRI": pnp.Vendor("Fibernet Research Inc", "FRI", datetime.date(1996, 11, 29)),
"FIN": pnp.Vendor("Finecom Co., Ltd.", "FIN", datetime.date(1998, 11, 27)),
"FPC": pnp.Vendor("Fingerprint Cards AB", "FPC", datetime.date(2013, 6, 14)),
"PCG": pnp.Vendor("First Industrial Computer Inc", "PCG", datetime.date(1996, 11, 29)),
"LEO": pnp.Vendor("First International Computer Inc", "LEO", datetime.date(1997, 9, 19)),
"FCG": pnp.Vendor("First International Computer Ltd", "FCG", datetime.date(1997, 4, 10)),
"FVC": pnp.Vendor("First Virtual Corporation", "FVC", datetime.date(1996, 11, 29)),
"FWR": pnp.Vendor("Flat Connections Inc", "FWR", datetime.date(1996, 11, 29)),
"SSD": pnp.Vendor("FlightSafety International", "SSD", datetime.date(2000, 8, 10)),
"FIS": pnp.Vendor("FLY-IT Simulators", "FIS", datetime.date(1997, 9, 8)),
"FTS": pnp.Vendor("FocalTech Systems Co., Ltd.", "FTS", datetime.date(2013, 7, 23)),
"FCS": pnp.Vendor("Focus Enhancements, Inc.", "FCS", datetime.date(2002, 12, 12)),
"FOK": pnp.Vendor("Fokus Technologies GmbH", "FOK", datetime.date(2013, 10, 22)),
"FOA": pnp.Vendor("FOR-A Company Limited", "FOA", datetime.date(2008, 12, 6)),
"FRC": pnp.Vendor("Force Computers", "FRC", datetime.date(1996, 11, 29)),
"FMC": pnp.Vendor("Ford Microelectronics Inc", "FMC", datetime.date(1997, 3, 11)),
"FSI": pnp.Vendor("Fore Systems Inc", "FSI", datetime.date(1996, 11, 29)),
"FIL": pnp.Vendor("Forefront Int'l Ltd", "FIL", datetime.date(1996, 11, 29)),
"FIC": pnp.Vendor("Formosa Industrial Computing Inc", "FIC", datetime.date(1996, 11, 29)),
"FMZ": pnp.Vendor("Formoza-Altair", "FMZ", datetime.date(2003, 4, 25)),
"FDD": pnp.Vendor("Forth Dimension Displays Ltd", "FDD", datetime.date(2015, 7, 7)),
"FRE": pnp.Vendor("Forvus Research Inc", "FRE", datetime.date(1997, 4, 24)),
"FOS": pnp.Vendor("Foss Tecator", "FOS", datetime.date(1997, 10, 22)),
"FZC": pnp.Vendor("Founder Group Shenzhen Co.", "FZC", datetime.date(1999, 11, 8)),
"FTN": pnp.Vendor("Fountain Technologies Inc", "FTN", datetime.date(1996, 11, 29)),
"HHI": pnp.Vendor("Fraunhofer Heinrich-Hertz-Institute", "HHI", datetime.date(2012, 7, 27)),
"FRD": pnp.Vendor("Freedom Scientific BLV", "FRD", datetime.date(2007, 6, 15)),
"TCX": pnp.Vendor("FREEMARS Heavy Industries", "TCX", datetime.date(2001, 3, 15)),
"FTE": pnp.Vendor("Frontline Test Equipment Inc.", "FTE", datetime.date(1999, 1, 20)),
"FTG": pnp.Vendor("FTG Data Systems", "FTG", datetime.date(1996, 11, 29)),
"FXX": pnp.Vendor("Fuji Xerox", "FXX", datetime.date(1996, 11, 29)),
"FFC": pnp.Vendor("FUJIFILM Corporation", "FFC", datetime.date(2011, 8, 22)),
"FDT": pnp.Vendor("Fujitsu Display Technologies Corp.", "FDT", datetime.date(2002, 10, 23)),
"FGL": pnp.Vendor("Fujitsu General Limited.", "FGL", datetime.date(2000, 2, 21)),
"FUJ": pnp.Vendor("Fujitsu Ltd", "FUJ", datetime.date(1996, 11, 29)),
"FML": pnp.Vendor("Fujitsu Microelect Ltd", "FML", datetime.date(1996, 11, 29)),
"FPE": pnp.Vendor("Fujitsu Peripherals Ltd", "FPE", datetime.date(1997, 8, 19)),
"FUS": pnp.Vendor("Fujitsu Siemens Computers GmbH", "FUS", datetime.date(2000, 1, 13)),
"FJS": pnp.Vendor("Fujitsu Spain", "FJS", datetime.date(1996, 11, 29)),
"FJC": pnp.Vendor("Fujitsu Takamisawa Component Limited", "FJC", datetime.date(1999, 5, 16)),
"FTL": pnp.Vendor("FUJITSU TEN LIMITED", "FTL", datetime.date(2011, 12, 20)),
"FNI": pnp.Vendor("Funai Electric Co., Ltd.", "FNI", datetime.date(2005, 1, 18)),
"FCB": pnp.Vendor("Furukawa Electric Company Ltd", "FCB", datetime.date(1996, 11, 29)),
"FEC": pnp.Vendor("FURUNO ELECTRIC CO., LTD.", "FEC", datetime.date(1996, 11, 29)),
"FDI": pnp.Vendor("Future Designs, Inc.", "FDI", datetime.date(2014, 9, 29)),
"FDC": pnp.Vendor("Future Domain", "FDC", datetime.date(1996, 11, 29)),
"FSC": pnp.Vendor("Future Systems Consulting KK", "FSC", datetime.date(1996, 11, 29)),
"FTC": pnp.Vendor("Futuretouch Corporation", "FTC", datetime.date(1996, 11, 29)),
"FZI": pnp.Vendor("FZI Forschungszentrum Informatik", "FZI", datetime.date(1997, 8, 12)),
"SPH": pnp.Vendor("G&W Instruments GmbH", "SPH", datetime.date(2002, 2, 25)),
"GDI": pnp.Vendor("G. Diehl ISDN GmbH", "GDI", datetime.date(1996, 11, 29)),
"GLS": pnp.Vendor("Gadget Labs LLC", "GLS", datetime.date(1996, 11, 29)),
"GAG": pnp.Vendor("Gage Applied Sciences Inc", "GAG", datetime.date(1996, 11, 29)),
"HUB": pnp.Vendor("GAI-Tronics, A Hubbell Company", "HUB", datetime.date(2009, 3, 26)),
"GAL": pnp.Vendor("Galil Motion Control", "GAL", datetime.date(1996, 11, 29)),
"GRM": pnp.Vendor("Garmin International", "GRM", datetime.date(2011, 12, 9)),
"GTM": pnp.Vendor("Garnet System Company Ltd", "GTM", datetime.date(1996, 11, 29)),
"GWY": pnp.Vendor("Gateway 2000", "GWY", datetime.date(1996, 11, 29)),
"GCI": pnp.Vendor("Gateway Comm. Inc", "GCI", datetime.date(1996, 11, 29)),
"GWK": pnp.Vendor("Gateworks Corporation", "GWK", datetime.date(2013, 7, 31)),
"GAU": pnp.Vendor("Gaudi Co., Ltd.", "GAU", datetime.date(2003, 3, 31)),
"GCC": pnp.Vendor("GCC Technologies Inc", "GCC", datetime.date(1997, 6, 5)),
"GDS": pnp.Vendor("GDS", "GDS", datetime.date(2004, 6, 23)),
"GEF": pnp.Vendor("GE Fanuc Embedded Systems", "GEF", datetime.date(2007, 6, 14)),
"GEH": pnp.Vendor("Abaco Systems, Inc.", "GEH", datetime.date(2010, 9, 3)),
"GFN": pnp.Vendor("Gefen Inc.", "GFN", datetime.date(2007, 10, 11)),
"GEM": pnp.Vendor("Gem Plus", "GEM", datetime.date(1998, 2, 27)),
"GMN": pnp.Vendor("GEMINI 2000 Ltd", "GMN", datetime.date(2000, 10, 23)),
"GDC": pnp.Vendor("General Datacom", "GDC", datetime.date(1996, 11, 29)),
"GED": pnp.Vendor("General Dynamics C4 Systems", "GED", datetime.date(2013, 1, 9)),
"GML": pnp.Vendor("General Information Systems", "GML", datetime.date(2000, 1, 13)),
"GIC": pnp.Vendor("General Inst. Corporation", "GIC", datetime.date(1996, 11, 29)),
"GSC": pnp.Vendor("General Standards Corporation", "GSC", datetime.date(1998, 7, 16)),
"GTT": pnp.Vendor("General Touch Technology Co., Ltd.", "GTT", datetime.date(2002, 11, 21)),
"GEN": pnp.Vendor("Genesys ATE Inc", "GEN", datetime.date(1996, 11, 29)),
"GLM": pnp.Vendor("Genesys Logic", "GLM", datetime.date(1999, 11, 8)),
"GND": pnp.Vendor("Gennum Corporation", "GND", datetime.date(2006, 9, 5)),
"GEO": pnp.Vendor("GEO Sense", "GEO", datetime.date(1996, 11, 29)),
"GTS": pnp.Vendor("Geotest Marvin Test Systems Inc", "GTS", datetime.date(1998, 2, 24)),
"GER": pnp.Vendor("GERMANEERS GmbH", "GER", datetime.date(2011, 12, 20)),
"GES": pnp.Vendor("GES Singapore Pte Ltd", "GES", datetime.date(2001, 3, 15)),
"GET": pnp.Vendor("Getac Technology Corporation", "GET", datetime.date(2010, 5, 11)),
"GFM": pnp.Vendor("GFMesstechnik GmbH", "GFM", datetime.date(2001, 3, 15)),
"GIP": pnp.Vendor("GI Provision Ltd", "GIP", datetime.date(2012, 2, 8)),
"PST": pnp.Vendor("Global Data SA", "PST", datetime.date(1996, 11, 29)),
"GVL": pnp.Vendor("Global Village Communication", "GVL", datetime.date(1996, 11, 29)),
"GMK": pnp.Vendor("GMK Electronic Design GmbH", "GMK", datetime.date(2008, 1, 18)),
"GMM": pnp.Vendor("GMM Research Inc", "GMM", datetime.date(1996, 11, 29)),
"GMX": pnp.Vendor("GMX Inc", "GMX", datetime.date(1996, 11, 29)),
"GNN": pnp.Vendor("GN Nettest Inc", "GNN", datetime.date(1997, 7, 30)),
"GOE": pnp.Vendor("GOEPEL electronic GmbH", "GOE", datetime.date(2013, 6, 24)),
"GLD": pnp.Vendor("Goldmund - Digital Audio SA", "GLD", datetime.date(2012, 2, 6)),
"GRE": pnp.Vendor("GOLD RAIN ENTERPRISES CORP.", "GRE", datetime.date(2003, 6, 4)),
"GSM": pnp.Vendor("Goldstar Company Ltd", "GSM", datetime.date(1996, 11, 29)),
"GTI": pnp.Vendor("Goldtouch", "GTI", datetime.date(1997, 8, 6)),
"GGL": pnp.Vendor("Google Inc.", "GGL", datetime.date(2010, 5, 26)),
"GPR": pnp.Vendor("GoPro, Inc.", "GPR", datetime.date(2015, 1, 15)),
"GRH": pnp.Vendor("Granch Ltd", "GRH", datetime.date(2002, 9, 23)),
"GJN": pnp.Vendor("Grand Junction Networks", "GJN", datetime.date(1996, 11, 29)),
"GSN": pnp.Vendor("Grandstream Networks, Inc.", "GSN", datetime.date(2014, 3, 3)),
"GST": pnp.Vendor("Graphic SystemTechnology", "GST", datetime.date(1996, 11, 29)),
"GRA": pnp.Vendor("Graphica Computer", "GRA", datetime.date(1996, 11, 29)),
"GTC": pnp.Vendor("Graphtec Corporation", "GTC", datetime.date(1996, 11, 29)),
"TGV": pnp.Vendor("Grass Valley Germany GmbH", "TGV", datetime.date(2007, 6, 14)),
"GCS": pnp.Vendor("Grey Cell Systems Ltd", "GCS", datetime.date(1997, 4, 29)),
"GSY": pnp.Vendor("Grossenbacher Systeme AG", "GSY", datetime.date(2000, 4, 19)),
"GTK": pnp.Vendor("G-Tech Corporation", "GTK", datetime.date(1996, 11, 29)),
"GIM": pnp.Vendor("Guillemont International", "GIM", datetime.date(1997, 10, 29)),
"GZE": pnp.Vendor("GUNZE Limited", "GZE", datetime.date(2005, 5, 2)),
"GNZ": pnp.Vendor("Gunze Ltd", "GNZ", datetime.date(1996, 11, 29)),
"GUD": pnp.Vendor("Guntermann & Drunck GmbH", "GUD", datetime.date(2003, 3, 10)),
"GUZ": pnp.Vendor("Guzik Technical Enterprises", "GUZ", datetime.date(1996, 11, 29)),
"GVC": pnp.Vendor("GVC Corporation", "GVC", datetime.date(1996, 11, 29)),
"HPR": pnp.Vendor("H.P.R. Electronics GmbH", "HPR", datetime.date(2007, 8, 29)),
"HSC": pnp.Vendor("Hagiwara Sys-Com Company Ltd", "HSC", datetime.date(1996, 11, 29)),
"GWI": pnp.Vendor("GW Instruments", "GWI", datetime.date(1996, 11, 29)),
"HAE": pnp.Vendor("Haider electronics", "HAE", datetime.date(2001, 7, 5)),
"HAI": pnp.Vendor("Haivision Systems Inc.", "HAI", datetime.date(2007, 11, 15)),
"HAL": pnp.Vendor("Halberthal", "HAL", datetime.date(1998, 2, 10)),
"HRI": pnp.Vendor("Hall Research", "HRI", datetime.date(2012, 5, 10)),
"HPK": pnp.Vendor("HAMAMATSU PHOTONICS K.K.", "HPK", datetime.date(2006, 12, 20)),
"HTI": pnp.Vendor("Hampshire Company, Inc.", "HTI", datetime.date(1999, 1, 20)),
"HAN": pnp.Vendor("Hanchang System Corporation", "HAN", datetime.date(2003, 6, 21)),
"HSD": pnp.Vendor("HannStar Display Corp", "HSD", datetime.date(2009, 8, 11)),
"HSP": pnp.Vendor("HannStar Display Corp", "HSP", datetime.date(2009, 8, 11)),
"HDC": pnp.Vendor("HardCom Elektronik & Datateknik", "HDC", datetime.date(1998, 4, 14)),
"HII": pnp.Vendor("Harman International Industries, Inc", "HII", datetime.date(2015, 1, 9)),
"HJI": pnp.Vendor("Harris & Jeffries Inc", "HJI", datetime.date(1996, 11, 29)),
"HWA": pnp.Vendor("Harris Canada Inc", "HWA", datetime.date(1998, 3, 13)),
"HAR": pnp.Vendor("Harris Corporation", "HAR", datetime.date(2011, 12, 20)),
"HRS": pnp.Vendor("Harris Semiconductor", "HRS", datetime.date(1997, 1, 2)),
"HCW": pnp.Vendor("Hauppauge Computer Works Inc", "HCW", datetime.date(1996, 11, 29)),
"HAY": pnp.Vendor("Hayes Microcomputer Products Inc", "HAY", datetime.date(1996, 11, 29)),
"HCL": pnp.Vendor("HCL America Inc", "HCL", datetime.date(1996, 11, 29)),
"HCM": pnp.Vendor("HCL Peripherals", "HCM", datetime.date(2001, 10, 2)),
"HDI": pnp.Vendor("HD-INFO d.o.o.", "HDI", datetime.date(2001, 10, 8)),
"HPI": pnp.Vendor("Headplay, Inc.", "HPI", datetime.date(2007, 4, 30)),
"HYT": pnp.Vendor("Heng Yu Technology (HK) Limited", "HYT", datetime.date(2000, 10, 23)),
"HRC": pnp.Vendor("Hercules", "HRC", datetime.date(2001, 3, 15)),
"HRT": pnp.Vendor("HERCULES", "HRT", datetime.date(2001, 3, 15)),
"HET": pnp.Vendor("HETEC Datensysteme GmbH", "HET", datetime.date(2004, 2, 3)),
"HWP": pnp.Vendor("Hewlett Packard", "HWP", datetime.date(2001, 3, 15)),
"HPD": pnp.Vendor("Hewlett Packard", "HPD", datetime.date(1997, 5, 2)),
"HPC": pnp.Vendor("Hewlett-Packard Co.", "HPC", datetime.date(2000, 8, 10)),
"HPQ": pnp.Vendor("Hewlett-Packard Co.", "HPQ", datetime.date(2004, 7, 12)),
"HXM": pnp.Vendor("Hexium Ltd.", "HXM", datetime.date(2008, 4, 15)),
"HIB": pnp.Vendor("Hibino Corporation", "HIB", datetime.date(2003, 7, 9)),
"HWD": pnp.Vendor("Highwater Designs Ltd", "HWD", datetime.date(1996, 11, 29)),
"HIK": pnp.Vendor("Hikom Co., Ltd.", "HIK", datetime.date(2003, 10, 13)),
"HIL": pnp.Vendor("Hilevel Technology", "HIL", datetime.date(1996, 11, 29)),
"HHC": pnp.Vendor("HIRAKAWA HEWTECH CORP.", "HHC", datetime.date(2008, 5, 20)),
"HIT": pnp.Vendor("Hitachi America Ltd", "HIT", datetime.date(1996, 11, 29)),
"HCE": pnp.Vendor("Hitachi Consumer Electronics Co., Ltd", "HCE", datetime.date(2009, 5, 15)),
"HIC": pnp.Vendor("Hitachi Information Technology Co., Ltd.", "HIC", datetime.date(2000, 4, 19)),
"HTC": pnp.Vendor("Hitachi Ltd", "HTC", datetime.date(1996, 11, 29)),
"MXL": pnp.Vendor("Hitachi Maxell, Ltd.", "MXL", datetime.date(2000, 1, 13)),
"HEL": pnp.Vendor("Hitachi Micro Systems Europe Ltd", "HEL", datetime.date(1997, 7, 9)),
"HTX": pnp.Vendor("Hitex Systementwicklung GmbH", "HTX", datetime.date(1998, 1, 30)),
"HMK": pnp.Vendor("hmk Daten-System-Technik BmbH", "HMK", datetime.date(1997, 9, 30)),
"HOB": pnp.Vendor("HOB Electronic GmbH", "HOB", datetime.date(1996, 11, 29)),
"HOL": pnp.Vendor("Holoeye Photonics AG", "HOL", datetime.date(2005, 2, 2)),
"HDV": pnp.Vendor("Holografika kft.", "HDV", datetime.date(2005, 3, 31)),
"HTK": pnp.Vendor("Holtek Microelectronics Inc", "HTK", datetime.date(1996, 11, 29)),
"INC": pnp.Vendor("Home Row Inc", "INC", datetime.date(1996, 11, 29)),
"FOX": pnp.Vendor("HON HAI PRECISON IND.CO.,LTD.", "FOX", datetime.date(2010, 8, 2)),
"HKA": pnp.Vendor("HONKO MFG. CO., LTD.", "HKA", datetime.date(2004, 12, 1)),
"HIS": pnp.Vendor("Hope Industrial Systems, Inc.", "HIS", datetime.date(2014, 1, 13)),
"APG": pnp.Vendor("Horner Electric Inc", "APG", datetime.date(1996, 11, 29)),
"HST": pnp.Vendor("Horsent Technology Co., Ltd.", "HST", datetime.date(2015, 4, 11)),
"HOE": pnp.Vendor("Hosiden Corporation", "HOE", datetime.date(1997, 8, 5)),
"HTL": pnp.Vendor("HTBLuVA Mödling", "HTL", datetime.date(2014, 2, 17)),
"HMC": pnp.Vendor("Hualon Microelectric Corporation", "HMC", datetime.date(1996, 11, 29)),
"EBT": pnp.Vendor("HUALONG TECHNOLOGY CO., LTD", "EBT", datetime.date(2007, 6, 15)),
"HNS": pnp.Vendor("Hughes Network Systems", "HNS", datetime.date(1996, 11, 29)),
"HMX": pnp.Vendor("HUMAX Co., Ltd.", "HMX", datetime.date(2006, 2, 14)),
"HYO": pnp.Vendor("HYC CO., LTD.", "HYO", datetime.date(2006, 4, 12)),
"HYD": pnp.Vendor("Hydis Technologies.Co.,LTD", "HYD", datetime.date(2010, 11, 22)),
"HYV": pnp.Vendor("Hynix Semiconductor", "HYV", datetime.date(2008, 11, 29)),
"HYC": pnp.Vendor("Hypercope Gmbh Aachen", "HYC", datetime.date(1997, 12, 1)),
"HYR": pnp.Vendor("Hypertec Pty Ltd", "HYR", datetime.date(1996, 11, 29)),
"HYP": pnp.Vendor("Hyphen Ltd", "HYP", datetime.date(1996, 11, 29)),
"ITT": pnp.Vendor("I&T Telecom.", "ITT", datetime.date(1999, 11, 8)),
"IOT": pnp.Vendor("I/OTech Inc", "IOT", datetime.date(1996, 11, 29)),
"IAT": pnp.Vendor("IAT Germany GmbH", "IAT", datetime.date(1996, 11, 29)),
"IBM": pnp.Vendor("IBM Brasil", "IBM", datetime.date(1996, 11, 29)),
"CDT": pnp.Vendor("IBM Corporation", "CDT", datetime.date(1996, 11, 29)),
"IBP": pnp.Vendor("IBP Instruments GmbH", "IBP", datetime.date(1998, 9, 23)),
"IBR": pnp.Vendor("IBR GmbH", "IBR", datetime.date(1998, 1, 16)),
"ICE": pnp.Vendor("IC Ensemble", "ICE", datetime.date(1997, 9, 19)),
"ICA": pnp.Vendor("ICA Inc", "ICA", datetime.date(2002, 5, 20)),
"ICX": pnp.Vendor("ICCC A/S", "ICX", datetime.date(1996, 11, 29)),
"ICD": pnp.Vendor("ICD Inc", "ICD", datetime.date(1997, 6, 9)),
"ARE": pnp.Vendor("ICET S.p.A.", "ARE", datetime.date(1999, 5, 16)),
"ICP": pnp.Vendor("ICP Electronics, Inc./iEi Technology Corp.", "ICP", datetime.date(2012, 9, 7)),
"IUC": pnp.Vendor("ICSL", "IUC", datetime.date(1997, 8, 14)),
"XTD": pnp.Vendor("Icuiti Corporation", "XTD", datetime.date(2007, 6, 14)),
"IWR": pnp.Vendor("Icuiti Corporation", "IWR", datetime.date(2007, 3, 6)),
"ISC": pnp.Vendor("Id3 Semiconductors", "ISC", datetime.date(2001, 3, 15)),
"IDE": pnp.Vendor("IDE Associates", "IDE", datetime.date(1996, 11, 29)),
"IDO": pnp.Vendor("IDEO Product Development", "IDO", datetime.date(1997, 9, 30)),
"DEX": pnp.Vendor("idex displays", "DEX", datetime.date(2002, 4, 25)),
"IDX": pnp.Vendor("IDEXX Labs", "IDX", datetime.date(1996, 11, 29)),
"IDK": pnp.Vendor("IDK Corporation", "IDK", datetime.date(2003, 4, 16)),
"IDN": pnp.Vendor("Idneo Technologies", "IDN", datetime.date(2012, 7, 5)),
"ITS": pnp.Vendor("IDTECH", "ITS", datetime.date(2002, 6, 17)),
"IEE": pnp.Vendor("IEE", "IEE", datetime.date(2001, 6, 21)),
"IGM": pnp.Vendor("IGM Communi", "IGM", datetime.date(1996, 11, 29)),
"IIN": pnp.Vendor("IINFRA Co., Ltd", "IIN", datetime.date(2003, 5, 9)),
"IVM": pnp.Vendor("Iiyama North America", "IVM", datetime.date(1996, 11, 29)),
"IKE": pnp.Vendor("Ikegami Tsushinki Co. Ltd.", "IKE", datetime.date(2014, 11, 14)),
"IKS": pnp.Vendor("Ikos Systems Inc", "IKS", datetime.date(1996, 11, 29)),
"IND": pnp.Vendor("ILC", "IND", datetime.date(2004, 6, 16)),
"ILC": pnp.Vendor("Image Logic Corporation", "ILC", datetime.date(1996, 11, 29)),
"ISM": pnp.Vendor("Image Stream Medical", "ISM", datetime.date(2010, 5, 27)),
"IMG": pnp.Vendor("IMAGENICS Co., Ltd.", "IMG", datetime.date(2006, 9, 5)),
"IQT": pnp.Vendor("IMAGEQUEST Co., Ltd", "IQT", datetime.date(2002, 10, 8)),
"IME": pnp.Vendor("Imagraph", "IME", datetime.date(1996, 12, 4)),
"IMA": pnp.Vendor("Imagraph", "IMA", datetime.date(1996, 11, 29)),
"IMD": pnp.Vendor("ImasDe Canarias S.A.", "IMD", datetime.date(1997, 7, 3)),
"IMC": pnp.Vendor("IMC Networks", "IMC", datetime.date(1996, 11, 29)),
"IMM": pnp.Vendor("Immersion Corporation", "IMM", datetime.date(1997, 7, 16)),
"HUM": pnp.Vendor("IMP Electronics Ltd.", "HUM", datetime.date(2004, 6, 16)),
"IMP": pnp.Vendor("Impinj", "IMP", datetime.date(2012, 8, 14)),
"IMN": pnp.Vendor("Impossible Production", "IMN", datetime.date(2000, 8, 10)),
"IFS": pnp.Vendor("In Focus Systems Inc", "IFS", datetime.date(1996, 11, 29)),
"ALD": pnp.Vendor("In4S Inc", "ALD", datetime.date(1997, 12, 5)),
"IBI": pnp.Vendor("INBINE.CO.LTD", "IBI", datetime.date(2001, 11, 6)),
"INK": pnp.Vendor("Indtek Co., Ltd.", "INK", datetime.date(2007, 3, 26)),
"IQI": pnp.Vendor("IneoQuest Technologies, Inc", "IQI", datetime.date(2011, 2, 18)),
"IPD": pnp.Vendor("Industrial Products Design, Inc.", "IPD", datetime.date(1999, 7, 16)),
"INS": pnp.Vendor("Ines GmbH", "INS", datetime.date(1996, 11, 29)),
"IFX": pnp.Vendor("Infineon Technologies AG", "IFX", datetime.date(2000, 4, 19)),
"IFZ": pnp.Vendor("Infinite Z", "IFZ", datetime.date(2012, 1, 4)),
"IIT": pnp.Vendor("Informatik Information Technologies", "IIT", datetime.date(2013, 8, 14)),
"IFT": pnp.Vendor("Informtech", "IFT", datetime.date(1996, 11, 29)),
"ICI": pnp.Vendor("Infotek Communication Inc", "ICI", datetime.date(1996, 11, 29)),
"ITR": pnp.Vendor("Infotronic America, Inc.", "ITR", datetime.date(2001, 6, 21)),
"INF": pnp.Vendor("Inframetrics Inc", "INF", datetime.date(1996, 11, 29)),
"VSN": pnp.Vendor("Ingram Macrotron", "VSN", datetime.date(2000, 8, 10)),
"VID": pnp.Vendor("Ingram Macrotron Germany", "VID", datetime.date(2000, 5, 24)),
"IHE": pnp.Vendor("InHand Electronics", "IHE", datetime.date(2010, 4, 20)),
"INI": pnp.Vendor("Initio Corporation", "INI", datetime.date(1996, 11, 29)),
"IMT": pnp.Vendor("Inmax Technology Corporation", "IMT", datetime.date(2003, 2, 12)),
"INO": pnp.Vendor("Innolab Pte Ltd", "INO", datetime.date(1999, 1, 20)),
"INL": pnp.Vendor("InnoLux Display Corporation", "INL", datetime.date(2004, 12, 15)),
"INM": pnp.Vendor("InnoMedia Inc", "INM", datetime.date(1996, 11, 29)),
"ILS": pnp.Vendor("Innotech Corporation", "ILS", datetime.date(2000, 10, 23)),
"ATE": pnp.Vendor("Innovate Ltd", "ATE", datetime.date(1996, 11, 29)),
"INN": pnp.Vendor("Innovent Systems, Inc.", "INN", datetime.date(2000, 4, 19)),
"WII": pnp.Vendor("Innoware Inc", "WII", datetime.date(1998, 1, 30)),
"inu": pnp.Vendor("Inovatec S.p.A.", "inu", datetime.date(2001, 3, 15)),
"ICV": pnp.Vendor("Inside Contactless", "ICV", datetime.date(2010, 11, 4)),
"ION": pnp.Vendor("Inside Out Networks", "ION", datetime.date(1998, 12, 28)),
"ISG": pnp.Vendor("Insignia Solutions Inc", "ISG", datetime.date(1996, 11, 29)),
"ISR": pnp.Vendor("INSIS Co., LTD.", "ISR", datetime.date(2003, 2, 12)),
"IAF": pnp.Vendor("Institut f r angewandte Funksystemtechnik GmbH", "IAF", datetime.date(1999, 3, 20)),
"ING": pnp.Vendor("Integraph Corporation", "ING", datetime.date(1996, 11, 29)),
"IBC": pnp.Vendor("Integrated Business Systems", "IBC", datetime.date(1996, 11, 29)),
"IDP": pnp.Vendor("Integrated Device Technology, Inc.", "IDP", datetime.date(2010, 1, 27)),
"ITE": pnp.Vendor("Integrated Tech Express Inc", "ITE", datetime.date(1996, 11, 29)),
"SRC": pnp.Vendor("Integrated Tech Express Inc", "SRC", datetime.date(1996, 11, 29)),
"ITX": pnp.Vendor("integrated Technology Express Inc", "ITX", datetime.date(1997, 6, 25)),
"IAI": pnp.Vendor("Integration Associates, Inc.", "IAI", datetime.date(2004, 3, 17)),
"ICO": pnp.Vendor("Intel Corp", "ICO", datetime.date(2000, 8, 10)),
"III": pnp.Vendor("Intelligent Instrumentation", "III", datetime.date(1996, 11, 29)),
"IPI": pnp.Vendor("Intelligent Platform Management Interface (IPMI) forum (Intel, HP, NEC, Dell)", "IPI", datetime.date(2000, 5, 24)),
"IWX": pnp.Vendor("Intelliworxx, Inc.", "IWX", datetime.date(1999, 5, 16)),
"SVC": pnp.Vendor("Intellix Corp.", "SVC", datetime.date(2008, 1, 18)),
"TCH": pnp.Vendor("Interaction Systems, Inc", "TCH", datetime.date(1999, 3, 20)),
"PEN": pnp.Vendor("Interactive Computer Products Inc", "PEN", datetime.date(1997, 1, 15)),
"ITC": pnp.Vendor("Intercom Inc", "ITC", datetime.date(1996, 11, 29)),
"IDS": pnp.Vendor("Interdigital Sistemas de Informacao", "IDS", datetime.date(1997, 10, 28)),
"FBI": pnp.Vendor("Interface Corporation", "FBI", datetime.date(1996, 11, 29)),
"ISI": pnp.Vendor("Interface Solutions", "ISI", datetime.date(1996, 11, 29)),
"IGC": pnp.Vendor("Intergate Pty Ltd", "IGC", datetime.date(1996, 11, 29)),
"IEC": pnp.Vendor("Interlace Engineering Corporation", "IEC", datetime.date(1996, 11, 29)),
"IEI": pnp.Vendor("Interlink Electronics", "IEI", datetime.date(1998, 10, 16)),
"IDC": pnp.Vendor("International Datacasting Corporation", "IDC", datetime.date(1997, 2, 25)),
"IDT": pnp.Vendor("International Display Technology", "IDT", datetime.date(2002, 5, 16)),
"ISY": pnp.Vendor("International Integrated Systems,Inc.(IISI)", "ISY", datetime.date(2000, 8, 10)),
"IMI": pnp.Vendor("International Microsystems Inc", "IMI", datetime.date(1996, 11, 29)),
"IPT": pnp.Vendor("International Power Technologies", "IPT", datetime.date(1997, 4, 11)),
"ITD": pnp.Vendor("Internet Technology Corporation", "ITD", datetime.date(1997, 12, 5)),
"INP": pnp.Vendor("Interphase Corporation", "INP", datetime.date(1996, 11, 29)),
"INT": pnp.Vendor("Interphase Corporation", "INT", datetime.date(1996, 11, 29)),
"LSD": pnp.Vendor("Intersil Corporation", "LSD", datetime.date(2012, 3, 14)),
"IST": pnp.Vendor("Intersolve Technologies", "IST", datetime.date(1999, 3, 20)),
"ITL": pnp.Vendor("Inter-Tel", "ITL", datetime.date(1997, 3, 21)),
"IXD": pnp.Vendor("Intertex Data AB", "IXD", datetime.date(1996, 11, 29)),
"IVI": pnp.Vendor("Intervoice Inc", "IVI", datetime.date(1996, 11, 29)),
"IVS": pnp.Vendor("Intevac Photonics Inc.", "IVS", datetime.date(2011, 2, 16)),
"ICM": pnp.Vendor("Intracom SA", "ICM", datetime.date(1998, 8, 3)),
"SDD": pnp.Vendor("Intrada-SDD Ltd", "SDD", datetime.date(2007, 11, 21)),
"ISP": pnp.Vendor("IntreSource Systems Pte Ltd", "ISP", datetime.date(1997, 8, 27)),
"SRG": pnp.Vendor("Intuitive Surgical, Inc.", "SRG", datetime.date(2006, 2, 16)),
"INA": pnp.Vendor("Inventec Corporation", "INA", datetime.date(2013, 9, 13)),
"INE": pnp.Vendor("Inventec Electronics (M) Sdn. Bhd.", "INE", datetime.date(1998, 7, 21)),
"INV": pnp.Vendor("Inviso, Inc.", "INV", datetime.date(2000, 10, 23)),
"IOD": pnp.Vendor("I-O Data Device Inc", "IOD", datetime.date(1996, 11, 29)),
"IOS": pnp.Vendor("i-O Display System", "IOS", datetime.date(2001, 3, 15)),
"IOM": pnp.Vendor("Iomega", "IOM", datetime.date(1996, 11, 29)),
"IPP": pnp.Vendor("IP Power Technologies GmbH", "IPP", datetime.date(2010, 12, 6)),
"IPQ": pnp.Vendor("IP3 Technology Ltd.", "IPQ", datetime.date(2013, 11, 11)),
"IPC": pnp.Vendor("IPC Corporation", "IPC", datetime.date(1996, 11, 29)),
"IPM": pnp.Vendor("IPM Industria Politecnica Meridionale SpA", "IPM", datetime.date(1998, 9, 23)),
"IPS": pnp.Vendor("IPS, Inc. (Intellectual Property Solutions, Inc.)", "IPS", datetime.date(2001, 9, 5)),
"IPW": pnp.Vendor("IPWireless, Inc", "IPW", datetime.date(2001, 3, 15)),
"IIC": pnp.Vendor("ISIC Innoscan Industrial Computers A/S", "IIC", datetime.date(2003, 7, 23)),
"ISL": pnp.Vendor("Isolation Systems", "ISL", datetime.date(1996, 11, 29)),
"ISS": pnp.Vendor("ISS Inc", "ISS", datetime.date(1996, 11, 29)),
"ITA": pnp.Vendor("Itausa Export North America", "ITA", datetime.date(1996, 11, 29)),
"IPR": pnp.Vendor("Ithaca Peripherals", "IPR", datetime.date(1997, 7, 1)),
"ITK": pnp.Vendor("ITK Telekommunikation AG", "ITK", datetime.date(1996, 11, 29)),
"ITM": pnp.Vendor("ITM inc.", "ITM", datetime.date(2001, 4, 24)),
"ITP": pnp.Vendor("IT-PRO Consulting und Systemhaus GmbH", "ITP", datetime.date(2000, 10, 23)),
"JCE": pnp.Vendor("Jace Tech Inc", "JCE", datetime.date(1996, 11, 29)),
"JIC": pnp.Vendor("Jaeik Information & Communication Co., Ltd.", "JIC", datetime.date(2000, 10, 23)),
"XFG": pnp.Vendor("Jan Strapko - FOTO", "XFG", datetime.date(2001, 5, 7)),
"JUK": pnp.Vendor("Janich & Klass Computertechnik GmbH", "JUK", datetime.date(2002, 10, 8)),
"JAS": pnp.Vendor("Janz Automationssysteme AG", "JAS", datetime.date(2009, 11, 3)),
"JAE": pnp.Vendor("Japan Aviation Electronics Industry, Limited", "JAE", datetime.date(2001, 3, 15)),
"JDL": pnp.Vendor("Japan Digital Laboratory Co.,Ltd.", "JDL", datetime.date(2000, 4, 19)),
"JDI": pnp.Vendor("Japan Display Inc.", "JDI", datetime.date(2013, 4, 18)),
"JAT": pnp.Vendor("Jaton Corporation", "JAT", datetime.date(1997, 9, 24)),
"JET": pnp.Vendor("JET POWER TECHNOLOGY CO., LTD.", "JET", datetime.date(2001, 3, 15)),
"JWY": pnp.Vendor("Jetway Information Co., Ltd", "JWY", datetime.date(2003, 9, 22)),
"JTY": pnp.Vendor("jetway security micro,inc", "JTY", datetime.date(2009, 11, 11)),
"SHI": pnp.Vendor("Jiangsu Shinco Electronic Group Co., Ltd", "SHI", datetime.date(2004, 8, 10)),
"JFX": pnp.Vendor("Jones Futurex Inc", "JFX", datetime.date(1996, 11, 29)),
"LTI": pnp.Vendor("Jongshine Tech Inc", "LTI", datetime.date(1996, 11, 29)),
"HKG": pnp.Vendor("Josef Heim KG", "HKG", datetime.date(1996, 11, 29)),
"JPC": pnp.Vendor("JPC Technology Limited", "JPC", datetime.date(2000, 10, 23)),
"JSD": pnp.Vendor("JS DigiTech, Inc", "JSD", datetime.date(2000, 10, 23)),
"JTS": pnp.Vendor("JS Motorsports", "JTS", datetime.date(1997, 12, 5)),
"TPJ": pnp.Vendor("Junnila", "TPJ", datetime.date(2001, 3, 15)),
"JUP": pnp.Vendor("Jupiter Systems", "JUP", datetime.date(2006, 9, 5)),
"JSI": pnp.Vendor("Jupiter Systems, Inc.", "JSI", datetime.date(2007, 6, 14)),
"JVC": pnp.Vendor("JVC", "JVC", datetime.date(2000, 10, 23)),
"JKC": pnp.Vendor("JVC KENWOOD Corporation", "JKC", datetime.date(2012, 3, 8)),
"JWS": pnp.Vendor("JWSpencer & Co.", "JWS", datetime.date(1999, 7, 16)),
"SGE": pnp.Vendor("Kansai Electric Company Ltd", "SGE", datetime.date(1996, 12, 4)),
"HIQ": pnp.Vendor("Kaohsiung Opto Electronics Americas, Inc.", "HIQ", datetime.date(2012, 3, 14)),
"KSL": pnp.Vendor("Karn Solutions Ltd.", "KSL", datetime.date(2006, 5, 8)),
"KAR": pnp.Vendor("Karna", "KAR", datetime.date(2000, 2, 21)),
"KTN": pnp.Vendor("Katron Tech Inc", "KTN", datetime.date(1996, 11, 29)),
"KTG": pnp.Vendor("Kayser-Threde GmbH", "KTG", datetime.date(1996, 11, 29)),
"KDT": pnp.Vendor("KDDI Technology Corporation", "KDT", datetime.date(2012, 5, 22)),
"KDE": pnp.Vendor("KDE", "KDE", datetime.date(2001, 8, 14)),
"KDS": pnp.Vendor("KDS USA", "KDS", datetime.date(1996, 11, 29)),
"KGL": pnp.Vendor("KEISOKU GIKEN Co.,Ltd.", "KGL", datetime.date(2012, 4, 17)),
"KML": pnp.Vendor("Kensington Microware Ltd", "KML", datetime.date(1996, 11, 29)),
"KWD": pnp.Vendor("Kenwood Corporation", "KWD", datetime.date(2008, 2, 22)),
"EPS": pnp.Vendor("KEPS", "EPS", datetime.date(1996, 11, 29)),
"KES": pnp.Vendor("Kesa Corporation", "KES", datetime.date(1996, 11, 29)),
"KEY": pnp.Vendor("Key Tech Inc", "KEY", datetime.date(1996, 11, 29)),
"KTK": pnp.Vendor("Key Tronic Corporation", "KTK", datetime.date(1996, 11, 29)),
"KCL": pnp.Vendor("Keycorp Ltd", "KCL", datetime.date(1997, 5, 20)),
"KVX": pnp.Vendor("KeyView", "KVX", datetime.date(2012, 8, 13)),
"KBI": pnp.Vendor("Kidboard Inc", "KBI", datetime.date(1997, 4, 24)),
"KME": pnp.Vendor("KIMIN Electronics Co., Ltd.", "KME", datetime.date(2011, 2, 15)),
"KSC": pnp.Vendor("Kinetic Systems Corporation", "KSC", datetime.date(1996, 11, 29)),
"KPC": pnp.Vendor("King Phoenix Company", "KPC", datetime.date(1996, 11, 29)),
"KSX": pnp.Vendor("King Tester Corporation", "KSX", datetime.date(1998, 7, 16)),
"KTC": pnp.Vendor("Kingston Tech Corporation", "KTC", datetime.date(1996, 11, 29)),
"KIO": pnp.Vendor("Kionix, Inc.", "KIO", datetime.date(2013, 12, 23)),
"KIS": pnp.Vendor("KiSS Technology A/S", "KIS", datetime.date(2005, 6, 16)),
"PVP": pnp.Vendor("Klos Technologies, Inc.", "PVP", datetime.date(2000, 8, 10)),
"KBL": pnp.Vendor("Kobil Systems GmbH", "KBL", datetime.date(2001, 3, 15)),
"KOB": pnp.Vendor("Kobil Systems GmbH", "KOB", datetime.date(2001, 3, 15)),
"KDK": pnp.Vendor("Kodiak Tech", "KDK", datetime.date(1996, 11, 29)),
"KFX": pnp.Vendor("Kofax Image Products", "KFX", datetime.date(1996, 11, 29)),
"KOL": pnp.Vendor("Kollmorgen Motion Technologies Group", "KOL", datetime.date(1996, 11, 29)),
"KOE": pnp.Vendor("KOLTER ELECTRONIC", "KOE", datetime.date(2001, 3, 15)),
"KFE": pnp.Vendor("Komatsu Forest", "KFE", datetime.date(2010, 4, 20)),
"KNC": pnp.Vendor("Konica corporation", "KNC", datetime.date(1997, 8, 5)),
"KTI": pnp.Vendor("Konica Technical Inc", "KTI", datetime.date(1996, 11, 29)),
"TWE": pnp.Vendor("Kontron Electronik", "TWE", datetime.date(1996, 11, 29)),
"KEM": pnp.Vendor("Kontron Embedded Modules GmbH", "KEM", datetime.date(2007, 8, 29)),
"KEU": pnp.Vendor("Kontron Europe GmbH", "KEU", datetime.date(2014, 2, 20)),
"KDM": pnp.Vendor("Korea Data Systems Co., Ltd.", "KDM", datetime.date(2003, 12, 18)),
"KOU": pnp.Vendor("KOUZIRO Co.,Ltd.", "KOU", datetime.date(2012, 7, 27)),
"KOW": pnp.Vendor("KOWA Company,LTD.", "KOW", datetime.date(2008, 3, 12)),
"KMR": pnp.Vendor("Kramer Electronics Ltd. International", "KMR", datetime.date(2013, 7, 10)),
"KRL": pnp.Vendor("Krell Industries Inc.", "KRL", datetime.date(2004, 8, 3)),
"KRM": pnp.Vendor("Kroma Telecom", "KRM", datetime.date(2010, 5, 5)),
"KRY": pnp.Vendor("Kroy LLC", "KRY", datetime.date(1998, 7, 16)),
"KTE": pnp.Vendor("K-Tech", "KTE", datetime.date(2003, 3, 31)),
"KSG": pnp.Vendor("KUPA China Shenzhen Micro Technology Co., Ltd. Gold Institute", "KSG", datetime.date(2014, 4, 22)),
"KUR": pnp.Vendor("Kurta Corporation", "KUR", datetime.date(1996, 11, 29)),
"KVA": pnp.Vendor("Kvaser AB", "KVA", datetime.date(1997, 1, 24)),
"KYE": pnp.Vendor("KYE Syst Corporation", "KYE", datetime.date(1996, 11, 29)),
"KYC": pnp.Vendor("Kyocera Corporation", "KYC", datetime.date(1996, 11, 29)),
"KEC": pnp.Vendor("Kyushu Electronics Systems Inc", "KEC", datetime.date(1998, 1, 12)),
"KZN": pnp.Vendor("K-Zone International", "KZN", datetime.date(2001, 6, 21)),
"KZI": pnp.Vendor("K-Zone International co. Ltd.", "KZI", datetime.date(2000, 8, 10)),
"LLL": pnp.Vendor("L-3 Communications", "LLL", datetime.date(2010, 5, 11)),
"LCE": pnp.Vendor("La Commande Electronique", "LCE", datetime.date(1996, 11, 29)),
"LCT": pnp.Vendor("Labcal Technologies", "LCT", datetime.date(1999, 11, 8)),
"LTC": pnp.Vendor("Labtec Inc", "LTC", datetime.date(1997, 12, 8)),
"LWC": pnp.Vendor("Labway Corporation", "LWC", datetime.date(1996, 12, 4)),
"LAC": pnp.Vendor("LaCie", "LAC", datetime.date(1998, 12, 28)),
"LAG": pnp.Vendor("Laguna Systems", "LAG", datetime.date(1996, 11, 29)),
"LND": pnp.Vendor("Land Computer Company Ltd", "LND", datetime.date(1996, 11, 29)),
"LNT": pnp.Vendor("LANETCO International", "LNT", datetime.date(2003, 5, 2)),
"LWW": pnp.Vendor("Lanier Worldwide", "LWW", datetime.date(1996, 11, 29)),
"LHA": pnp.Vendor("Lars Haagh ApS", "LHA", datetime.date(1997, 1, 9)),
"LAS": pnp.Vendor("LASAT Comm. A/S", "LAS", datetime.date(1996, 11, 29)),
"LMT": pnp.Vendor("Laser Master", "LMT", datetime.date(1996, 11, 29)),
"LDN": pnp.Vendor("Laserdyne Technologies", "LDN", datetime.date(2013, 10, 16)),
"LGX": pnp.Vendor("Lasergraphics, Inc.", "LGX", datetime.date(2000, 2, 21)),
"LCM": pnp.Vendor("Latitude Comm.", "LCM", datetime.date(1996, 11, 29)),
"LAV": pnp.Vendor("Lava Computer MFG Inc", "LAV", datetime.date(1997, 4, 14)),
"LCC": pnp.Vendor("LCI", "LCC", datetime.date(2000, 8, 10)),
"LEC": pnp.Vendor("Lectron Company Ltd", "LEC", datetime.date(1997, 3, 27)),
"LMP": pnp.Vendor("Leda Media Products", "LMP", datetime.date(1998, 5, 11)),
"LEG": pnp.Vendor("Legerity, Inc", "LEG", datetime.date(2005, 1, 18)),
"LTV": pnp.Vendor("Leitch Technology International Inc.", "LTV", datetime.date(2003, 12, 9)),
"LNV": pnp.Vendor("Lenovo", "LNV", datetime.date(2005, 7, 14)),
"LIN": pnp.Vendor("Lenovo Beijing Co. Ltd.", "LIN", datetime.date(2012, 5, 22)),
"LEN": pnp.Vendor("Lenovo Group Limited", "LEN", datetime.date(2005, 6, 3)),
"LEX": pnp.Vendor("Lexical Ltd", "LEX", datetime.date(1996, 11, 29)),
"LCN": pnp.Vendor("LEXICON", "LCN", datetime.date(2005, 3, 1)),
"PRS": pnp.Vendor("Leutron Vision", "PRS", datetime.date(1996, 11, 29)),
"LMI": pnp.Vendor("Lexmark Int'l Inc", "LMI", datetime.date(1996, 11, 29)),
"LGS": pnp.Vendor("LG Semicom Company Ltd", "LGS", datetime.date(1996, 11, 29)),
"MAN": pnp.Vendor("LGIC", "MAN", datetime.date(2000, 2, 21)),
"LSC": pnp.Vendor("LifeSize Communications", "LSC", datetime.date(2006, 2, 14)),
"LHT": pnp.Vendor("Lighthouse Technologies Limited", "LHT", datetime.date(2010, 5, 4)),
"LWR": pnp.Vendor("Lightware Visual Engineering", "LWR", datetime.date(2009, 2, 4)),
"LTW": pnp.Vendor("Lightware, Inc", "LTW", datetime.date(1998, 10, 16)),
"LZX": pnp.Vendor("Lightwell Company Ltd", "LZX", datetime.date(1997, 12, 2)),
"LKM": pnp.Vendor("Likom Technology Sdn. Bhd.", "LKM", datetime.date(1998, 4, 23)),
"LNR": pnp.Vendor("Linear Systems Ltd.", "LNR", datetime.date(2007, 10, 11)),
"LNK": pnp.Vendor("Link Tech Inc", "LNK", datetime.date(1996, 11, 29)),
"LIP": pnp.Vendor("Linked IP GmbH", "LIP", datetime.date(2010, 7, 19)),
"FGD": pnp.Vendor("Lisa Draexlmaier GmbH", "FGD", datetime.date(1999, 2, 22)),
"LOL": pnp.Vendor("Litelogic Operations Ltd", "LOL", datetime.date(2011, 12, 9)),
"LCI": pnp.Vendor("Lite-On Communication Inc", "LCI", datetime.date(1996, 11, 29)),
"LIT": pnp.Vendor("Lithics Silicon Technology", "LIT", datetime.date(2001, 3, 15)),
"LTN": pnp.Vendor("Litronic Inc", "LTN", datetime.date(1998, 2, 3)),
"LOC": pnp.Vendor("Locamation B.V.", "LOC", datetime.date(2004, 1, 9)),
"LOE": pnp.Vendor("Loewe Opta GmbH", "LOE", datetime.date(2005, 5, 2)),
"LGC": pnp.Vendor("Logic Ltd", "LGC", datetime.date(1994, 4, 2)),
"LSL": pnp.Vendor("Logical Solutions", "LSL", datetime.date(1996, 11, 29)),
"LOG": pnp.Vendor("Logicode Technology Inc", "LOG", datetime.date(1996, 11, 29)),
"LGI": pnp.Vendor("Logitech Inc", "LGI", datetime.date(1996, 11, 29)),
"LDT": pnp.Vendor("LogiDataTech Electronic GmbH", "LDT", datetime.date(2001, 3, 15)),
"SGO": pnp.Vendor("Logos Design A/S", "SGO", datetime.date(2001, 4, 24)),
"LED": pnp.Vendor("Long Engineering Design Inc", "LED", datetime.date(1996, 11, 29)),
"LCS": pnp.Vendor("Longshine Electronics Company", "LCS", datetime.date(1996, 11, 29)),
"LSI": pnp.Vendor("Loughborough Sound Images", "LSI", datetime.date(1996, 11, 29)),
"LSJ": pnp.Vendor("LSI Japan Company Ltd", "LSJ", datetime.date(1996, 11, 29)),
"LSY": pnp.Vendor("LSI Systems Inc", "LSY", datetime.date(1996, 11, 29)),
"LTS": pnp.Vendor("LTS Scale LLC", "LTS", datetime.date(2007, 11, 15)),
"LBO": pnp.Vendor("Lubosoft", "LBO", datetime.date(2001, 4, 24)),
"LUC": pnp.Vendor("Lucent Technologies", "LUC", datetime.date(2000, 4, 19)),
"LMG": pnp.Vendor("Lucent Technologies", "LMG", datetime.date(1997, 1, 13)),
"LTK": pnp.Vendor("Lucidity Technology Company Ltd", "LTK", datetime.date(1998, 5, 18)),
"LUM": pnp.Vendor("Lumagen, Inc.", "LUM", datetime.date(2004, 8, 12)),
"LHE": pnp.Vendor("Lung Hwa Electronics Company Ltd", "LHE", datetime.date(1998, 6, 12)),
"LXN": pnp.Vendor("Luxeon", "LXN", datetime.date(2001, 3, 15)),
"LUX": pnp.Vendor("Luxxell Research Inc", "LUX", datetime.date(1997, 6, 9)),
"LVI": pnp.Vendor("LVI Low Vision International AB", "LVI", datetime.date(2011, 1, 21)),
"LXC": pnp.Vendor("LXCO Technologies AG", "LXC", datetime.date(2012, 1, 11)),
"MAC": pnp.Vendor("MAC System Company Ltd", "MAC", datetime.date(1997, 9, 26)),
"MEJ": pnp.Vendor("Mac-Eight Co., LTD.", "MEJ", datetime.date(2011, 1, 19)),
"OCD": pnp.Vendor("Macraigor Systems Inc", "OCD", datetime.date(1998, 3, 23)),
"VHI": pnp.Vendor("Macrocad Development Inc.", "VHI", datetime.date(2000, 4, 19)),
"MXI": pnp.Vendor("Macronix Inc", "MXI", datetime.date(1996, 11, 29)),
"MDG": pnp.Vendor("Madge Networks", "MDG", datetime.date(1996, 11, 29)),
"MAE": pnp.Vendor("Maestro Pty Ltd", "MAE", datetime.date(1996, 12, 4)),
"MAG": pnp.Vendor("MAG InnoVision", "MAG", datetime.date(1996, 11, 29)),
"MLP": pnp.Vendor("Magic Leap", "MLP", datetime.date(2014, 11, 14)),
"MCP": pnp.Vendor("Magni Systems Inc", "MCP", datetime.date(1996, 11, 29)),
"EKA": pnp.Vendor("MagTek Inc.", "EKA", datetime.date(2006, 2, 14)),
"MDT": pnp.Vendor("Magus Data Tech", "MDT", datetime.date(1996, 11, 29)),
"MPN": pnp.Vendor("Mainpine Limited", "MPN", datetime.date(2007, 6, 30)),
"MUK": pnp.Vendor("Mainpine Limited", "MUK", datetime.date(1999, 9, 13)),
"PAK": pnp.Vendor("Many CNC System Co., Ltd.", "PAK", datetime.date(2004, 3, 12)),
"MPL": pnp.Vendor("Maple Research Inst. Company Ltd", "MPL", datetime.date(1996, 11, 29)),
"MJI": pnp.Vendor("MARANTZ JAPAN, INC.", "MJI", datetime.date(2000, 10, 23)),
"MIL": pnp.Vendor("Marconi Instruments Ltd", "MIL", datetime.date(1996, 11, 29)),
"MRC": pnp.Vendor("Marconi Simulation & Ty-Coch Way Training", "MRC", datetime.date(1996, 11, 29)),
"MCR": pnp.Vendor("Marina Communicaitons", "MCR", datetime.date(1996, 11, 29)),
"MLN": pnp.Vendor("Mark Levinson", "MLN", datetime.date(2005, 2, 28)),
"MTU": pnp.Vendor("Mark of the Unicorn Inc", "MTU", datetime.date(1997, 3, 21)),
"MNI": pnp.Vendor("Marseille, Inc.", "MNI", datetime.date(2013, 2, 27)),
"MBM": pnp.Vendor("Marshall Electronics", "MBM", datetime.date(2006, 3, 13)),
"MTC": pnp.Vendor("Mars-Tech Corporation", "MTC", datetime.date(1996, 11, 29)),
"MRK": pnp.Vendor("Maruko & Company Ltd", "MRK", datetime.date(1996, 11, 29)),
"MSR": pnp.Vendor("MASPRO DENKOH Corp.", "MSR", datetime.date(2012, 10, 25)),
"MAS": pnp.Vendor("Mass Inc.", "MAS", datetime.date(2002, 2, 25)),
"MEQ": pnp.Vendor("Matelect Ltd.", "MEQ", datetime.date(2002, 5, 30)),
"MTX": pnp.Vendor("Matrox", "MTX", datetime.date(1996, 11, 29)),
"MCQ": pnp.Vendor("Mat's Computers", "MCQ", datetime.date(2004, 7, 22)),
"WPA": pnp.Vendor("Matsushita Communication Industrial Co., Ltd.", "WPA", datetime.date(2001, 3, 15)),
"MAT": pnp.Vendor("Matsushita Electric Ind. Company Ltd", "MAT", datetime.date(1996, 11, 29)),
"MTI": pnp.Vendor("MaxCom Technical Inc", "MTI", datetime.date(1996, 11, 29)),
"VOB": pnp.Vendor("MaxData Computer AG", "VOB", datetime.date(2000, 2, 21)),
"MXD": pnp.Vendor("MaxData Computer GmbH & Co.KG", "MXD", datetime.date(2000, 4, 19)),
"MXP": pnp.Vendor("Maxpeed Corporation", "MXP", datetime.date(1997, 2, 19)),
"MXT": pnp.Vendor("Maxtech Corporation", "MXT", datetime.date(1996, 11, 29)),
"MXV": pnp.Vendor("MaxVision Corporation", "MXV", datetime.date(1999, 7, 16)),
"DJP": pnp.Vendor("Maygay Machines, Ltd", "DJP", datetime.date(2000, 8, 10)),
"MAY": pnp.Vendor("Maynard Electronics", "MAY", datetime.date(1996, 11, 29)),
"MAZ": pnp.Vendor("MAZeT GmbH", "MAZ", datetime.date(1998, 8, 11)),
"MBC": pnp.Vendor("MBC", "MBC", datetime.date(1996, 11, 29)),
"MCD": pnp.Vendor("McDATA Corporation", "MCD", datetime.date(1996, 11, 29)),
"MLI": pnp.Vendor("McIntosh Laboratory Inc.", "MLI", datetime.date(2008, 1, 18)),
"MIT": pnp.Vendor("MCM Industrial Technology GmbH", "MIT", datetime.date(2004, 10, 29)),
"CEM": pnp.Vendor("MEC Electronics GmbH", "CEM", datetime.date(2000, 4, 19)),
"MDR": pnp.Vendor("Medar Inc", "MDR", datetime.date(1996, 12, 11)),
"MTB": pnp.Vendor("Media Technologies Ltd.", "MTB", datetime.date(2009, 1, 5)),
"MKC": pnp.Vendor("Media Tek Inc.", "MKC", datetime.date(2007, 6, 14)),
"MVI": pnp.Vendor("Media Vision Inc", "MVI", datetime.date(1996, 11, 29)),
"MDA": pnp.Vendor("Media4 Inc", "MDA", datetime.date(1997, 3, 20)),
"OWL": pnp.Vendor("Mediacom Technologies Pte Ltd", "OWL", datetime.date(1996, 11, 29)),
"MEK": pnp.Vendor("Mediaedge Corporation", "MEK", datetime.date(2013, 11, 19)),
"MFR": pnp.Vendor("MediaFire Corp.", "MFR", datetime.date(1998, 12, 28)),
"FTR": pnp.Vendor("Mediasonic", "FTR", datetime.date(1996, 11, 29)),
"MTE": pnp.Vendor("MediaTec GmbH", "MTE", datetime.date(1996, 12, 13)),
"MDK": pnp.Vendor("Mediatek Corporation", "MDK", datetime.date(1997, 3, 13)),
"MPI": pnp.Vendor("Mediatrix Peripherals Inc", "MPI", datetime.date(1997, 4, 24)),
"MRO": pnp.Vendor("Medikro Oy", "MRO", datetime.date(1997, 9, 19)),
"MEC": pnp.Vendor("Mega System Technologies Inc", "MEC", datetime.date(1997, 12, 29)),
"MGA": pnp.Vendor("Mega System Technologies, Inc.", "MGA", datetime.date(1998, 12, 28)),
"MSK": pnp.Vendor("Megasoft Inc", "MSK", datetime.date(1996, 11, 29)),
"MGT": pnp.Vendor("Megatech R & D Company", "MGT", datetime.date(1996, 11, 29)),
"MEP": pnp.Vendor("Meld Technology", "MEP", datetime.date(2012, 8, 16)),
"MEN": pnp.Vendor("MEN Mikroelectronik Nueruberg GmbH", "MEN", datetime.date(1997, 5, 23)),
"MGC": pnp.Vendor("Mentor Graphics Corporation", "MGC", datetime.date(2009, 7, 30)),
"RLD": pnp.Vendor("MEPCO", "RLD", datetime.date(2001, 3, 15)),
"PPD": pnp.Vendor("MEPhI", "PPD", datetime.date(1998, 11, 27)),
"MRT": pnp.Vendor("Merging Technologies", "MRT", datetime.date(1996, 11, 29)),
"MAL": pnp.Vendor("Meridian Audio Ltd", "MAL", datetime.date(2009, 2, 4)),
"MED": pnp.Vendor("Messeltronik Dresden GmbH", "MED", datetime.date(1996, 11, 29)),
"MDV": pnp.Vendor("MET Development Inc", "MDV", datetime.date(1996, 11, 29)),
"MTA": pnp.Vendor("Meta Watch Ltd", "MTA", datetime.date(2013, 8, 29)),
"MET": pnp.Vendor("Metheus Corporation", "MET", datetime.date(1996, 11, 29)),
"MCM": pnp.Vendor("Metricom Inc", "MCM", datetime.date(1996, 11, 29)),
"QCH": pnp.Vendor("Metronics Inc", "QCH", datetime.date(1996, 11, 29)),
"NET": pnp.Vendor("Mettler Toledo", "NET", datetime.date(1996, 11, 29)),
"MCE": pnp.Vendor("Metz-Werke GmbH & Co KG", "MCE", datetime.date(2005, 6, 30)),
"MGL": pnp.Vendor("M-G Technology Ltd", "MGL", datetime.date(1997, 10, 29)),
"MIC": pnp.Vendor("Micom Communications Inc", "MIC", datetime.date(1997, 5, 5)),
"MSX": pnp.Vendor("Micomsoft Co., Ltd.", "MSX", datetime.date(2008, 4, 10)),
"MCS": pnp.Vendor("Micro Computer Systems", "MCS", datetime.date(1996, 11, 29)),
"MDI": pnp.Vendor("Micro Design Inc", "MDI", datetime.date(1998, 1, 20)),
"MDS": pnp.Vendor("Micro Display Systems Inc", "MDS", datetime.date(1996, 11, 29)),
"MFI": pnp.Vendor("Micro Firmware", "MFI", datetime.date(1997, 12, 30)),
"MCC": pnp.Vendor("Micro Industries", "MCC", datetime.date(2003, 4, 21)),
"BPD": pnp.Vendor("Micro Solutions, Inc.", "BPD", datetime.date(2000, 4, 19)),
"MSA": pnp.Vendor("Micro Systemation AB", "MSA", datetime.date(1999, 11, 8)),
"JMT": pnp.Vendor("Micro Technical Company Ltd", "JMT", datetime.date(1996, 11, 29)),
"MBD": pnp.Vendor("Microbus PLC", "MBD", datetime.date(2002, 8, 13)),
"MNP": pnp.Vendor("Microcom", "MNP", datetime.date(1996, 11, 29)),
"MDX": pnp.Vendor("MicroDatec GmbH", "MDX", datetime.date(1999, 9, 13)),
"MRD": pnp.Vendor("MicroDisplay Corporation", "MRD", datetime.date(2007, 6, 14)),
"MDY": pnp.Vendor("Microdyne Inc", "MDY", datetime.date(1996, 12, 18)),
"MFG": pnp.Vendor("MicroField Graphics Inc", "MFG", datetime.date(1996, 11, 29)),
"MPJ": pnp.Vendor("Microlab", "MPJ", datetime.date(1997, 5, 23)),
"LAF": pnp.Vendor("Microline", "LAF", datetime.date(1999, 9, 13)),
"MLG": pnp.Vendor("Micrologica AG", "MLG", datetime.date(1998, 10, 6)),
"MMD": pnp.Vendor("Micromed Biotecnologia Ltd", "MMD", datetime.date(1996, 12, 11)),
"MMA": pnp.Vendor("Micromedia AG", "MMA", datetime.date(1997, 4, 24)),
"MCN": pnp.Vendor("Micron Electronics Inc", "MCN", datetime.date(1997, 2, 20)),
"MCI": pnp.Vendor("Micronics Computers", "MCI", datetime.date(1996, 11, 29)),
"MIP": pnp.Vendor("micronpc.com", "MIP", datetime.date(2000, 8, 10)),
"MYX": pnp.Vendor("Micronyx Inc", "MYX", datetime.date(1996, 11, 29)),
"MPX": pnp.Vendor("Micropix Technologies, Ltd.", "MPX", datetime.date(2001, 10, 8)),
"MSL": pnp.Vendor("MicroSlate Inc.", "MSL", datetime.date(1999, 5, 16)),
"PNP": pnp.Vendor("Microsoft", "PNP", datetime.date(2004, 3, 5)),
"MSH": pnp.Vendor("Microsoft", "MSH", datetime.date(1996, 11, 29)),
"PNG": pnp.Vendor("Microsoft", "PNG", datetime.date(1996, 11, 29)),
"WBN": pnp.Vendor("MicroSoftWare", "WBN", datetime.date(1998, 1, 14)),
"MSI": pnp.Vendor("Microstep", "MSI", datetime.date(1996, 11, 29)),
"MCT": pnp.Vendor("Microtec", "MCT", datetime.date(1996, 11, 29)),
"MTH": pnp.Vendor("Micro-Tech Hearing Instruments", "MTH", datetime.date(1997, 12, 15)),
"MKT": pnp.Vendor("MICROTEK Inc.", "MKT", datetime.date(2005, 7, 14)),
"MTK": pnp.Vendor("Microtek International Inc.", "MTK", datetime.date(2002, 2, 25)),
"MSY": pnp.Vendor("MicroTouch Systems Inc", "MSY", datetime.date(2000, 8, 10)),
"MVS": pnp.Vendor("Microvision", "MVS", datetime.date(2009, 2, 13)),
"MVD": pnp.Vendor("Microvitec PLC", "MVD", datetime.date(1996, 11, 29)),
"MWY": pnp.Vendor("Microway Inc", "MWY", datetime.date(1996, 11, 29)),
"MDC": pnp.Vendor("Midori Electronics", "MDC", datetime.date(1996, 11, 29)),
"SFT": pnp.Vendor("Mikroforum Ring 3", "SFT", datetime.date(2004, 11, 2)),
"MLS": pnp.Vendor("Milestone EPE", "MLS", datetime.date(1998, 8, 11)),
"MLM": pnp.Vendor("Millennium Engineering Inc", "MLM", datetime.date(1996, 11, 29)),
"MLL": pnp.Vendor("Millogic Ltd.", "MLL", datetime.date(2014, 1, 9)),
"MCX": pnp.Vendor("Millson Custom Solutions Inc.", "MCX", datetime.date(2013, 10, 17)),
"VTM": pnp.Vendor("Miltope Corporation", "VTM", datetime.date(2009, 9, 23)),
"MIM": pnp.Vendor("Mimio – A Newell Rubbermaid Company", "MIM", datetime.date(2012, 7, 31)),
"MTD": pnp.Vendor("MindTech Display Co. Ltd", "MTD", datetime.date(2007, 6, 14)),
"FTW": pnp.Vendor("MindTribe Product Engineering, Inc.", "FTW", datetime.date(2011, 2, 14)),
"MNC": pnp.Vendor("Mini Micro Methods Ltd", "MNC", datetime.date(1996, 11, 29)),
"MIN": pnp.Vendor("Minicom Digital Signage", "MIN", datetime.date(2010, 8, 13)),
"MMN": pnp.Vendor("MiniMan Inc", "MMN", datetime.date(1996, 11, 29)),
"MMF": pnp.Vendor("Minnesota Mining and Manufacturing", "MMF", datetime.date(2001, 3, 15)),
"MRA": pnp.Vendor("Miranda Technologies Inc", "MRA", datetime.date(1996, 11, 29)),
"MRL": pnp.Vendor("Miratel", "MRL", datetime.date(1998, 10, 16)),
"MIR": pnp.Vendor("Miro Computer Prod.", "MIR", datetime.date(1996, 11, 29)),
"MID": pnp.Vendor("miro Displays", "MID", datetime.date(1999, 3, 20)),
"MSP": pnp.Vendor("Mistral Solutions [P] Ltd.", "MSP", datetime.date(1998, 9, 23)),
"MII": pnp.Vendor("Mitec Inc", "MII", datetime.date(1996, 11, 29)),
"MTL": pnp.Vendor("Mitel Corporation", "MTL", datetime.date(1997, 8, 1)),
"MTR": pnp.Vendor("Mitron computer Inc", "MTR", datetime.date(1996, 11, 29)),
"MEL": pnp.Vendor("Mitsubishi Electric Corporation", "MEL", datetime.date(1996, 11, 29)),
"MEE": pnp.Vendor("Mitsubishi Electric Engineering Co., Ltd.", "MEE", datetime.date(2005, 10, 3)),
"KMC": pnp.Vendor("Mitsumi Company Ltd", "KMC", datetime.date(1996, 11, 29)),
"MJS": pnp.Vendor("MJS Designs", "MJS", datetime.date(1996, 11, 29)),
"MKS": pnp.Vendor("MK Seiko Co., Ltd.", "MKS", datetime.date(2013, 6, 18)),
"OHW": pnp.Vendor("M-Labs Limited", "OHW", datetime.date(2013, 11, 27)),
"MMS": pnp.Vendor("MMS Electronics", "MMS", datetime.date(1998, 2, 24)),
"FST": pnp.Vendor("Modesto PC Inc", "FST", datetime.date(1997, 2, 27)),
"MDD": pnp.Vendor("MODIS", "MDD", datetime.date(1999, 11, 8)),
"MIS": pnp.Vendor("Modular Industrial Solutions Inc", "MIS", datetime.date(1996, 11, 29)),
"MOD": pnp.Vendor("Modular Technology", "MOD", datetime.date(1997, 6, 9)),
"MOM": pnp.Vendor("Momentum Data Systems", "MOM", datetime.date(2008, 1, 18)),
"MNL": pnp.Vendor("Monorail Inc", "MNL", datetime.date(1997, 2, 18)),
"MYA": pnp.Vendor("Monydata", "MYA", datetime.date(1996, 11, 29)),
"MBV": pnp.Vendor("Moreton Bay", "MBV", datetime.date(2000, 1, 13)),
"MOS": pnp.Vendor("Moses Corporation", "MOS", datetime.date(1996, 11, 29)),
"MSV": pnp.Vendor("Mosgi Corporation", "MSV", datetime.date(1996, 11, 29)),
"MCO": pnp.Vendor("Motion Computing Inc.", "MCO", datetime.date(2002, 5, 30)),
"MTM": pnp.Vendor("Motium", "MTM", datetime.date(2012, 6, 19)),
"MSU": pnp.Vendor("motorola", "MSU", datetime.date(2001, 3, 15)),
"MCL": pnp.Vendor("Motorola Communications Israel", "MCL", datetime.date(2002, 7, 2)),
"MCG": pnp.Vendor("Motorola Computer Group", "MCG", datetime.date(1997, 8, 14)),
"MOT": pnp.Vendor("Motorola UDS", "MOT", datetime.date(1996, 11, 29)),
"MSC": pnp.Vendor("Mouse Systems Corporation", "MSC", datetime.date(1996, 11, 29)),
"MPC": pnp.Vendor("M-Pact Inc", "MPC", datetime.date(1996, 11, 29)),
"MPS": pnp.Vendor("mps Software GmbH", "MPS", datetime.date(1996, 11, 29)),
"MST": pnp.Vendor("MS Telematica", "MST", datetime.date(1997, 4, 28)),
"MEX": pnp.Vendor("MSC Vertriebs GmbH", "MEX", datetime.date(2012, 6, 4)),
"MSG": pnp.Vendor("MSI GmbH", "MSG", datetime.date(1999, 9, 13)),
"MSF": pnp.Vendor("M-Systems Flash Disk Pioneers", "MSF", datetime.date(1997, 12, 17)),
"MTN": pnp.Vendor("Mtron Storage Technology Co., Ltd.", "MTN", datetime.date(2008, 6, 17)),
"MUD": pnp.Vendor("Multi-Dimension Institute", "MUD", datetime.date(2000, 10, 23)),
"MMI": pnp.Vendor("Multimax", "MMI", datetime.date(1996, 11, 29)),
"MTS": pnp.Vendor("Multi-Tech Systems", "MTS", datetime.date(1996, 11, 29)),
"MWI": pnp.Vendor("Multiwave Innovation Pte Ltd", "MWI", datetime.date(1996, 11, 29)),
"MAI": pnp.Vendor("Mutoh America Inc", "MAI", datetime.date(1999, 9, 13)),
"MWR": pnp.Vendor("mware", "MWR", datetime.date(2001, 4, 24)),
"MLX": pnp.Vendor("Mylex Corporation", "MLX", datetime.date(1996, 11, 29)),
"MYR": pnp.Vendor("Myriad Solutions Ltd", "MYR", datetime.date(1996, 11, 29)),
"WYS": pnp.Vendor("Myse Technology", "WYS", datetime.date(1996, 11, 29)),
"NBL": pnp.Vendor("N*Able Technologies Inc", "NBL", datetime.date(1998, 4, 28)),
"NAD": pnp.Vendor("NAD Electronics", "NAD", datetime.date(2007, 6, 14)),
"NDK": pnp.Vendor("Naitoh Densei CO., LTD.", "NDK", datetime.date(2006, 4, 12)),
"NCP": pnp.Vendor("Najing CEC Panda FPD Technology CO. ltd", "NCP", datetime.date(2015, 2, 24)),
"NAK": pnp.Vendor("Nakano Engineering Co.,Ltd.", "NAK", datetime.date(2009, 7, 22)),
"NYC": pnp.Vendor("Nakayo Relecommunications, Inc.", "NYC", datetime.date(2000, 8, 10)),
"SCS": pnp.Vendor("Nanomach Anstalt", "SCS", datetime.date(1996, 11, 29)),
"ADR": pnp.Vendor("Nasa Ames Research Center", "ADR", datetime.date(1996, 11, 29)),
"NDC": pnp.Vendor("National DataComm Corporaiton", "NDC", datetime.date(1996, 11, 29)),
"NDI": pnp.Vendor("National Display Systems", "NDI", datetime.date(2003, 8, 8)),
"NIC": pnp.Vendor("National Instruments Corporation", "NIC", datetime.date(1996, 11, 29)),
"NBS": pnp.Vendor("National Key Lab. on ISN", "NBS", datetime.date(1998, 7, 16)),
"NSC": pnp.Vendor("National Semiconductor Corporation", "NSC", datetime.date(1996, 11, 29)),
"TTB": pnp.Vendor("National Semiconductor Japan Ltd", "TTB", datetime.date(1997, 4, 14)),
"NTL": pnp.Vendor("National Transcomm. Ltd", "NTL", datetime.date(1996, 11, 29)),
"ZIC": pnp.Vendor("Nationz Technologies Inc.", "ZIC", datetime.date(2009, 3, 12)),
"NMS": pnp.Vendor("Natural Micro System", "NMS", datetime.date(1996, 11, 29)),
"NAT": pnp.Vendor("NaturalPoint Inc.", "NAT", datetime.date(2010, 9, 3)),
"NVT": pnp.Vendor("Navatek Engineering Corporation", "NVT", datetime.date(1998, 3, 2)),
"NME": pnp.Vendor("Navico, Inc.", "NME", datetime.date(2012, 11, 28)),
"NAV": pnp.Vendor("Navigation Corporation", "NAV", datetime.date(1999, 2, 22)),
"NAX": pnp.Vendor("Naxos Tecnologia", "NAX", datetime.date(1997, 12, 12)),
"DUN": pnp.Vendor("NCR Corporation", "DUN", datetime.date(2002, 4, 25)),
"NCC": pnp.Vendor("NCR Corporation", "NCC", datetime.date(1996, 11, 29)),
"NCR": pnp.Vendor("NCR Electronics", "NCR", datetime.date(1996, 11, 29)),
"NDF": pnp.Vendor("NDF Special Light Products B.V.", "NDF", datetime.date(2014, 9, 18)),
"DMV": pnp.Vendor("NDS Ltd", "DMV", datetime.date(1997, 6, 25)),
"NEC": pnp.Vendor("NEC Corporation", "NEC", datetime.date(2000, 5, 24)),
"NCT": pnp.Vendor("NEC CustomTechnica, Ltd.", "NCT", datetime.date(2002, 10, 23)),
"NMV": pnp.Vendor("NEC-Mitsubishi Electric Visual Systems Corporation", "NMV", datetime.date(2002, 2, 25)),
"NEO": pnp.Vendor("NEO TELECOM CO.,LTD.", "NEO", datetime.date(1999, 11, 8)),
"NMX": pnp.Vendor("Neomagic", "NMX", datetime.date(1996, 11, 29)),
"NTC": pnp.Vendor("NeoTech S.R.L", "NTC", datetime.date(1997, 11, 11)),
"NTX": pnp.Vendor("Netaccess Inc", "NTX", datetime.date(1997, 2, 7)),
"NCL": pnp.Vendor("NetComm Ltd", "NCL", datetime.date(1996, 11, 29)),
"NVC": pnp.Vendor("NetVision Corporation", "NVC", datetime.date(1996, 11, 29)),
"NAL": pnp.Vendor("Network Alchemy", "NAL", datetime.date(1997, 9, 30)),
"NDL": pnp.Vendor("Network Designers", "NDL", datetime.date(1996, 11, 29)),
"NGC": pnp.Vendor("Network General", "NGC", datetime.date(1997, 8, 26)),
"NIT": pnp.Vendor("Network Info Technology", "NIT", datetime.date(1996, 11, 29)),
"NPI": pnp.Vendor("Network Peripherals Inc", "NPI", datetime.date(1996, 11, 29)),
"NST": pnp.Vendor("Network Security Technology Co", "NST", datetime.date(1999, 2, 22)),
"NTW": pnp.Vendor("Networth Inc", "NTW", datetime.date(1996, 11, 29)),
"NSA": pnp.Vendor("NeuroSky, Inc.", "NSA", datetime.date(2013, 8, 28)),
"NEU": pnp.Vendor("NEUROTEC - EMPRESA DE PESQUISA E DESENVOLVIMENTO EM BIOMEDICINA", "NEU", datetime.date(2001, 3, 15)),
"NTI": pnp.Vendor("New Tech Int'l Company", "NTI", datetime.date(1996, 11, 29)),
"NCI": pnp.Vendor("NewCom Inc", "NCI", datetime.date(1997, 1, 9)),
"NWS": pnp.Vendor("Newisys, Inc.", "NWS", datetime.date(2002, 10, 8)),
"NSS": pnp.Vendor("Newport Systems Solutions", "NSS", datetime.date(1996, 11, 29)),
"NXG": pnp.Vendor("Nexgen", "NXG", datetime.date(1996, 11, 29)),
"NEX": pnp.Vendor("Nexgen Mediatech Inc.,", "NEX", datetime.date(2003, 11, 11)),
"NXQ": pnp.Vendor("Nexiq Technologies, Inc.", "NXQ", datetime.date(2001, 10, 8)),
"NLC": pnp.Vendor("Next Level Communications", "NLC", datetime.date(1996, 11, 29)),
"NXC": pnp.Vendor("NextCom K.K.", "NXC", datetime.date(1996, 11, 29)),
"NBT": pnp.Vendor("NingBo Bestwinning Technology CO., Ltd", "NBT", datetime.date(2006, 9, 5)),
"BOI": pnp.Vendor("NINGBO BOIGLE DIGITAL TECHNOLOGY CO.,LTD", "BOI", datetime.date(2009, 11, 25)),
"AVI": pnp.Vendor("Nippon Avionics Co.,Ltd", "AVI", datetime.date(2000, 10, 23)),
"GSB": pnp.Vendor("NIPPONDENCHI CO,.LTD", "GSB", datetime.date(2000, 5, 24)),
"NSI": pnp.Vendor("NISSEI ELECTRIC CO.,LTD", "NSI", datetime.date(2000, 1, 13)),
"NIS": pnp.Vendor("Nissei Electric Company", "NIS", datetime.date(1996, 11, 29)),
"NTS": pnp.Vendor("Nits Technology Inc.", "NTS", datetime.date(2006, 12, 19)),
"NCA": pnp.Vendor("Nixdorf Company", "NCA", datetime.date(1996, 11, 29)),
"NNC": pnp.Vendor("NNC", "NNC", datetime.date(1996, 11, 29)),
"NDS": pnp.Vendor("Nokia Data", "NDS", datetime.date(1996, 11, 29)),
"NOK": pnp.Vendor("Nokia Display Products", "NOK", datetime.date(1996, 11, 29)),
"NMP": pnp.Vendor("Nokia Mobile Phones", "NMP", datetime.date(1996, 11, 29)),
"NOR": pnp.Vendor("Norand Corporation", "NOR", datetime.date(1997, 3, 19)),
"NCE": pnp.Vendor("Norcent Technology, Inc.", "NCE", datetime.date(2007, 6, 20)),
"NOE": pnp.Vendor("NordicEye AB", "NOE", datetime.date(2009, 9, 23)),
"NOI": pnp.Vendor("North Invent A/S", "NOI", datetime.date(2010, 5, 4)),
"NCS": pnp.Vendor("Northgate Computer Systems", "NCS", datetime.date(1996, 11, 29)),
"NOT": pnp.Vendor("Not Limited Inc", "NOT", datetime.date(1998, 1, 30)),
"NWP": pnp.Vendor("NovaWeb Technologies Inc", "NWP", datetime.date(1998, 6, 12)),
"NVL": pnp.Vendor("Novell Inc", "NVL", datetime.date(1996, 11, 29)),
"NSP": pnp.Vendor("Nspire System Inc.", "NSP", datetime.date(2007, 2, 13)),
"NTR": pnp.Vendor("N-trig Innovative Technologies, Inc.", "NTR", datetime.date(2005, 10, 3)),
"NTT": pnp.Vendor("NTT Advanced Technology Corporation", "NTT", datetime.date(2004, 8, 19)),
"NUI": pnp.Vendor("NU Inc.", "NUI", datetime.date(2007, 8, 29)),
"NUG": pnp.Vendor("NU Technology, Inc.", "NUG", datetime.date(2004, 4, 16)),
"NFS": pnp.Vendor("Number Five Software", "NFS", datetime.date(1999, 2, 22)),
"KNX": pnp.Vendor("Nutech Marketing PTL", "KNX", datetime.date(1996, 11, 29)),
"NVI": pnp.Vendor("NuVision US, Inc.", "NVI", datetime.date(2006, 9, 5)),
"NTN": pnp.Vendor("Nuvoton Technology Corporation", "NTN", datetime.date(2008, 10, 9)),
"NVD": pnp.Vendor("Nvidia", "NVD", datetime.date(1996, 11, 29)),
"JEN": pnp.Vendor("N-Vision", "JEN", datetime.date(2000, 10, 23)),
"NXP": pnp.Vendor("NXP Semiconductors bv.", "NXP", datetime.date(2007, 6, 14)),
"NWC": pnp.Vendor("NW Computer Engineering", "NWC", datetime.date(1997, 2, 3)),
"OAK": pnp.Vendor("Oak Tech Inc", "OAK", datetime.date(1996, 11, 29)),
"OAS": pnp.Vendor("Oasys Technology Company", "OAS", datetime.date(1996, 11, 29)),
"OMC": pnp.Vendor("OBJIX Multimedia Corporation", "OMC", datetime.date(1996, 11, 29)),
"PCB": pnp.Vendor("OCTAL S.A.", "PCB", datetime.date(1998, 2, 24)),
"OVR": pnp.Vendor("Oculus VR, Inc.", "OVR", datetime.date(2012, 10, 19)),
"ODR": pnp.Vendor("Odrac", "ODR", datetime.date(2001, 6, 21)),
"ATV": pnp.Vendor("Office Depot, Inc.", "ATV", datetime.date(2007, 6, 13)),
"OKI": pnp.Vendor("OKI Electric Industrial Company Ltd", "OKI", datetime.date(1996, 11, 29)),
"OQI": pnp.Vendor("Oksori Company Ltd", "OQI", datetime.date(1996, 11, 29)),
"OSR": pnp.Vendor("Oksori Company Ltd", "OSR", datetime.date(1996, 11, 29)),
"OCN": pnp.Vendor("Olfan", "OCN", datetime.date(1996, 11, 29)),
"OLC": pnp.Vendor("Olicom A/S", "OLC", datetime.date(1996, 11, 29)),
"OLD": pnp.Vendor("Olidata S.p.A.", "OLD", datetime.date(2006, 3, 13)),
"OLT": pnp.Vendor("Olitec S.A.", "OLT", datetime.date(1996, 11, 29)),
"OLV": pnp.Vendor("Olitec S.A.", "OLV", datetime.date(1996, 11, 29)),
"OLI": pnp.Vendor("Olivetti", "OLI", datetime.date(1996, 11, 29)),
"OLY": pnp.Vendor("OLYMPUS CORPORATION", "OLY", datetime.date(2005, 5, 2)),
"OTK": pnp.Vendor("OmniTek", "OTK", datetime.date(2013, 9, 19)),
"OMN": pnp.Vendor("Omnitel", "OMN", datetime.date(1998, 4, 28)),
"OMR": pnp.Vendor("Omron Corporation", "OMR", datetime.date(1996, 11, 29)),
"ONS": pnp.Vendor("On Systems Inc", "ONS", datetime.date(1996, 11, 29)),
"ONE": pnp.Vendor("Oneac Corporation", "ONE", datetime.date(1998, 4, 14)),
"ONK": pnp.Vendor("ONKYO Corporation", "ONK", datetime.date(2005, 6, 16)),
"ONL": pnp.Vendor("OnLive, Inc", "ONL", datetime.date(2010, 9, 3)),
"TIV": pnp.Vendor("OOO Technoinvest", "TIV", datetime.date(1997, 8, 5)),
"OPC": pnp.Vendor("Opcode Inc", "OPC", datetime.date(1996, 11, 29)),
"OCS": pnp.Vendor("Open Connect Solutions", "OCS", datetime.date(1999, 9, 13)),
"ONW": pnp.Vendor("OPEN Networks Ltd", "ONW", datetime.date(2003, 4, 25)),
"OSI": pnp.Vendor("Open Stack, Inc.", "OSI", datetime.date(2013, 7, 22)),
"OPP": pnp.Vendor("OPPO Digital, Inc.", "OPP", datetime.date(2012, 6, 19)),
"OPT": pnp.Vendor("OPTi Inc", "OPT", datetime.date(1996, 11, 29)),
"OBS": pnp.Vendor("Optibase Technologies", "OBS", datetime.date(2010, 11, 1)),
"OSD": pnp.Vendor("Optical Systems Design Pty Ltd", "OSD", datetime.date(2013, 6, 3)),
"OIC": pnp.Vendor("Option Industrial Computers", "OIC", datetime.date(2001, 5, 7)),
"OIN": pnp.Vendor("Option International", "OIN", datetime.date(2000, 10, 23)),
"OIM": pnp.Vendor("Option International", "OIM", datetime.date(1997, 1, 30)),
"OSP": pnp.Vendor("OPTI-UPS Corporation", "OSP", datetime.date(1997, 7, 1)),
"OPV": pnp.Vendor("Optivision Inc", "OPV", datetime.date(1996, 11, 29)),
"OTT": pnp.Vendor("OPTO22, Inc.", "OTT", datetime.date(1998, 10, 6)),
"OTM": pnp.Vendor("Optoma Corporation", "OTM", datetime.date(2010, 4, 20)),
"OEI": pnp.Vendor("Optum Engineering Inc.", "OEI", datetime.date(2010, 8, 2)),
"OTI": pnp.Vendor("Orchid Technology", "OTI", datetime.date(1996, 11, 29)),
"ORG": pnp.Vendor("ORGA Kartensysteme GmbH", "ORG", datetime.date(1998, 10, 24)),
"TOP": pnp.Vendor("Orion Communications Co., Ltd.", "TOP", datetime.date(2007, 4, 30)),
"ORN": pnp.Vendor("ORION ELECTRIC CO., LTD.", "ORN", datetime.date(2005, 1, 19)),
"OEC": pnp.Vendor("ORION ELECTRIC CO.,LTD", "OEC", datetime.date(2000, 1, 13)),
"OSA": pnp.Vendor("OSAKA Micro Computer, Inc.", "OSA", datetime.date(2003, 9, 5)),
"ORI": pnp.Vendor("OSR Open Systems Resources, Inc.", "ORI", datetime.date(1999, 1, 20)),
"OOS": pnp.Vendor("OSRAM", "OOS", datetime.date(2002, 4, 25)),
"OUK": pnp.Vendor("OUK Company Ltd", "OUK", datetime.date(1996, 11, 29)),
"OTB": pnp.Vendor("outsidetheboxstuff.com", "OTB", datetime.date(2010, 9, 3)),
"OXU": pnp.Vendor("Oxus Research S.A.", "OXU", datetime.date(1996, 11, 29)),
"OZC": pnp.Vendor("OZ Corporation", "OZC", datetime.date(2012, 8, 7)),
"PAC": pnp.Vendor("Pacific Avionics Corporation", "PAC", datetime.date(1996, 11, 29)),
"PCW": pnp.Vendor("Pacific CommWare Inc", "PCW", datetime.date(1996, 11, 29)),
"PIE": pnp.Vendor("Pacific Image Electronics Company Ltd", "PIE", datetime.date(1997, 10, 21)),
"PBL": pnp.Vendor("Packard Bell Electronics", "PBL", datetime.date(1996, 11, 29)),
"PBN": pnp.Vendor("Packard Bell NEC", "PBN", datetime.date(1996, 11, 29)),
"PGI": pnp.Vendor("PACSGEAR, Inc.", "PGI", datetime.date(2012, 8, 13)),
"QFF": pnp.Vendor("Padix Co., Inc.", "QFF", datetime.date(1999, 9, 13)),
"PJT": pnp.Vendor("Pan Jit International Inc.", "PJT", datetime.date(2004, 8, 3)),
"MDO": pnp.Vendor("Panasonic", "MDO", datetime.date(1996, 11, 29)),
"PLF": pnp.Vendor("Panasonic Avionics Corporation", "PLF", datetime.date(2010, 8, 13)),
"MEI": pnp.Vendor("Panasonic Industry Company", "MEI", datetime.date(1996, 11, 29)),
"PNL": pnp.Vendor("Panelview, Inc.", "PNL", datetime.date(2003, 8, 4)),
"PTL": pnp.Vendor("Pantel Inc", "PTL", datetime.date(1996, 11, 29)),
"PTA": pnp.Vendor("PAR Tech Inc.", "PTA", datetime.date(2011, 1, 26)),
"PRT": pnp.Vendor("Parade Technologies, Ltd.", "PRT", datetime.date(2012, 4, 6)),
"PGM": pnp.Vendor("Paradigm Advanced Research Centre", "PGM", datetime.date(2005, 6, 16)),
"PAR": pnp.Vendor("Parallan Comp Inc", "PAR", datetime.date(1996, 11, 29)),
"PLX": pnp.Vendor("Parallax Graphics", "PLX", datetime.date(1996, 11, 29)),
"RCE": pnp.Vendor("Parc d'Activite des Bellevues", "RCE", datetime.date(1996, 11, 29)),
"POT": pnp.Vendor("Parrot", "POT", datetime.date(2014, 11, 25)),
"PTH": pnp.Vendor("Pathlight Technology Inc", "PTH", datetime.date(1996, 11, 29)),
"PCX": pnp.Vendor("PC Xperten", "PCX", datetime.date(1998, 2, 24)),
"PCK": pnp.Vendor("PCBANK21", "PCK", datetime.date(2007, 2, 13)),
"PCM": pnp.Vendor("PCM Systems Corporation", "PCM", datetime.date(1997, 3, 25)),
"PCT": pnp.Vendor("PC-Tel Inc", "PCT", datetime.date(1997, 5, 2)),
"PDS": pnp.Vendor("PD Systems International Ltd", "PDS", datetime.date(1999, 3, 20)),
"PDT": pnp.Vendor("PDTS - Prozessdatentechnik und Systeme", "PDT", datetime.date(1998, 2, 10)),
"PEG": pnp.Vendor("Pegatron Corporation", "PEG", datetime.date(2013, 8, 27)),
"PEI": pnp.Vendor("PEI Electronics Inc", "PEI", datetime.date(1998, 4, 6)),
"PVM": pnp.Vendor("Penta Studiotechnik GmbH", "PVM", datetime.date(2010, 5, 5)),
"PCL": pnp.Vendor("pentel.co.,ltd", "PCL", datetime.date(2002, 2, 25)),
"PEP": pnp.Vendor("Peppercon AG", "PEP", datetime.date(2006, 4, 12)),
"PPX": pnp.Vendor("Perceptive Pixel Inc.", "PPX", datetime.date(2010, 5, 4)),
"PER": pnp.Vendor("Perceptive Signal Technologies", "PER", datetime.date(1997, 5, 13)),
"PRC": pnp.Vendor("PerComm", "PRC", datetime.date(2001, 4, 24)),
"PCO": pnp.Vendor("Performance Concepts Inc.,", "PCO", datetime.date(2002, 9, 24)),
"IPN": pnp.Vendor("Performance Technologies", "IPN", datetime.date(2004, 2, 24)),
"PSL": pnp.Vendor("Perle Systems Limited", "PSL", datetime.date(1999, 2, 22)),
"PON": pnp.Vendor("Perpetual Technologies, LLC", "PON", datetime.date(2000, 1, 13)),
"PAM": pnp.Vendor("Peter Antesberger Messtechnik", "PAM", datetime.date(1998, 4, 28)),
"PSD": pnp.Vendor("Peus-Systems GmbH", "PSD", datetime.date(1996, 11, 29)),
"PCA": pnp.Vendor("Philips BU Add On Card", "PCA", datetime.date(1996, 11, 29)),
"PHS": pnp.Vendor("Philips Communication Systems", "PHS", datetime.date(1996, 11, 29)),
"PHL": pnp.Vendor("Philips Consumer Electronics Company", "PHL", datetime.date(1996, 11, 29)),
"PHE": pnp.Vendor("Philips Medical Systems Boeblingen GmbH", "PHE", datetime.date(2010, 4, 20)),
"PSC": pnp.Vendor("Philips Semiconductors", "PSC", datetime.date(1996, 11, 29)),
"PXC": pnp.Vendor("Phoenix Contact", "PXC", datetime.date(2008, 2, 27)),
"PNX": pnp.Vendor("Phoenix Technologies, Ltd.", "PNX", datetime.date(1999, 11, 8)),
"PPC": pnp.Vendor("Phoenixtec Power Company Ltd", "PPC", datetime.date(1999, 5, 16)),
"PHO": pnp.Vendor("Photonics Systems Inc.", "PHO", datetime.date(2002, 6, 3)),
"RSC": pnp.Vendor("PhotoTelesis", "RSC", datetime.date(1998, 3, 16)),
"PHY": pnp.Vendor("Phylon Communications", "PHY", datetime.date(1996, 11, 29)),
"PPR": pnp.Vendor("PicPro", "PPR", datetime.date(2004, 10, 18)),
"PHC": pnp.Vendor("Pijnenburg Beheer N.V.", "PHC", datetime.date(2001, 4, 24)),
"PCI": pnp.Vendor("Pioneer Computer Inc", "PCI", datetime.date(1996, 11, 29)),
"PIO": pnp.Vendor("Pioneer Electronic Corporation", "PIO", datetime.date(1997, 7, 16)),
"PBV": pnp.Vendor("Pitney Bowes", "PBV", datetime.date(1999, 9, 13)),
"PBI": pnp.Vendor("Pitney Bowes", "PBI", datetime.date(1996, 11, 29)),
"PQI": pnp.Vendor("Pixel Qi", "PQI", datetime.date(2009, 6, 24)),
"PVN": pnp.Vendor("Pixel Vision", "PVN", datetime.date(1996, 11, 29)),
"PXE": pnp.Vendor("PIXELA CORPORATION", "PXE", datetime.date(2007, 11, 21)),
"PIX": pnp.Vendor("Pixie Tech Inc", "PIX", datetime.date(1996, 11, 29)),
"PTS": pnp.Vendor("Plain Tree Systems Inc", "PTS", datetime.date(1996, 11, 29)),
"PNR": pnp.Vendor("Planar Systems, Inc.", "PNR", datetime.date(2003, 8, 11)),
"PLV": pnp.Vendor("PLUS Vision Corp.", "PLV", datetime.date(2001, 7, 5)),
"PMC": pnp.Vendor("PMC Consumer Electronics Ltd", "PMC", datetime.date(1996, 12, 11)),
"SPR": pnp.Vendor("pmns GmbH", "SPR", datetime.date(2002, 10, 8)),
"PMM": pnp.Vendor("Point Multimedia System", "PMM", datetime.date(1997, 6, 9)),
"PLY": pnp.Vendor("Polycom Inc.", "PLY", datetime.date(2002, 6, 19)),
"POL": pnp.Vendor("PolyComp (PTY) Ltd.", "POL", datetime.date(2006, 2, 14)),
"COW": pnp.Vendor("Polycow Productions", "COW", datetime.date(2001, 3, 15)),
"POR": pnp.Vendor("Portalis LC", "POR", datetime.date(2008, 11, 1)),
"ARO": pnp.Vendor("Poso International B.V.", "ARO", datetime.date(1997, 8, 1)),
"PEC": pnp.Vendor("POTRANS Electrical Corp.", "PEC", datetime.date(1999, 7, 16)),
"PCC": pnp.Vendor("PowerCom Technology Company Ltd", "PCC", datetime.date(1997, 9, 2)),
"CPX": pnp.Vendor("Powermatic Data Systems", "CPX", datetime.date(1996, 11, 29)),
"PET": pnp.Vendor("Practical Electronic Tools", "PET", datetime.date(1999, 2, 22)),
"PPI": pnp.Vendor("Practical Peripherals", "PPI", datetime.date(1996, 11, 29)),
"PSE": pnp.Vendor("Practical Solutions Pte., Ltd.", "PSE", datetime.date(1998, 10, 6)),
"PRD": pnp.Vendor("Praim S.R.L.", "PRD", datetime.date(1996, 11, 29)),
"PEL": pnp.Vendor("Primax Electric Ltd", "PEL", datetime.date(1996, 11, 29)),
"SYX": pnp.Vendor("Prime Systems, Inc.", "SYX", datetime.date(2003, 10, 21)),
"PVI": pnp.Vendor("Prime view international Co., Ltd", "PVI", datetime.date(2009, 7, 6)),
"PGS": pnp.Vendor("Princeton Graphic Systems", "PGS", datetime.date(1996, 11, 29)),
"PIM": pnp.Vendor("Prism, LLC", "PIM", datetime.date(2007, 7, 24)),
"PRI": pnp.Vendor("Priva Hortimation BV", "PRI", datetime.date(1997, 10, 22)),
"PRA": pnp.Vendor("PRO/AUTOMATION", "PRA", datetime.date(1999, 7, 16)),
"PCP": pnp.Vendor("Procomp USA Inc", "PCP", datetime.date(1996, 11, 29)),
"PSY": pnp.Vendor("Prodea Systems Inc.", "PSY", datetime.date(2013, 2, 4)),
"PDV": pnp.Vendor("Prodrive B.V.", "PDV", datetime.date(2005, 1, 18)),
"PJA": pnp.Vendor("Projecta", "PJA", datetime.date(1997, 1, 29)),
"DHT": pnp.Vendor("Projectavision Inc", "DHT", datetime.date(1998, 1, 14)),
"PJD": pnp.Vendor("Projectiondesign AS", "PJD", datetime.date(2002, 9, 23)),
"PLM": pnp.Vendor("PROLINK Microsystems Corp.", "PLM", datetime.date(2002, 2, 25)),
"PLC": pnp.Vendor("Pro-Log Corporation", "PLC", datetime.date(1996, 11, 29)),
"PMT": pnp.Vendor("Promate Electronic Co., Ltd.", "PMT", datetime.date(2003, 1, 13)),
"PRM": pnp.Vendor("Prometheus", "PRM", datetime.date(1996, 11, 29)),
"PTI": pnp.Vendor("Promise Technology Inc", "PTI", datetime.date(1997, 1, 2)),
"PAD": pnp.Vendor("Promotion and Display Technology Ltd.", "PAD", datetime.date(2001, 4, 24)),
"TEL": pnp.Vendor("Promotion and Display Technology Ltd.", "TEL", datetime.date(2001, 4, 24)),
"PGP": pnp.Vendor("propagamma kommunikation", "PGP", datetime.date(2000, 4, 19)),
"PSM": pnp.Vendor("Prosum", "PSM", datetime.date(1996, 11, 29)),
"PRO": pnp.Vendor("Proteon", "PRO", datetime.date(1996, 11, 29)),
"PVG": pnp.Vendor("Proview Global Co., Ltd", "PVG", datetime.date(2002, 10, 8)),
"PXM": pnp.Vendor("Proxim Inc", "PXM", datetime.date(1997, 9, 19)),
"PRX": pnp.Vendor("Proxima Corporation", "PRX", datetime.date(1996, 11, 29)),
"PTC": pnp.Vendor("PS Technology Corporation", "PTC", datetime.date(1997, 1, 29)),
"PDM": pnp.Vendor("Psion Dacom Plc.", "PDM", datetime.date(1999, 11, 8)),
"PSI": pnp.Vendor("PSI-Perceptive Solutions Inc", "PSI", datetime.date(1996, 11, 29)),
"PLT": pnp.Vendor("PT Hartono Istana Teknologi", "PLT", datetime.date(2010, 5, 5)),
"PUL": pnp.Vendor("Pulse-Eight Ltd", "PUL", datetime.date(2012, 9, 12)),
"PDR": pnp.Vendor("Pure Data Inc", "PDR", datetime.date(1996, 11, 29)),
"PPP": pnp.Vendor("Purup Prepress AS", "PPP", datetime.date(1996, 11, 29)),
"HRE": pnp.Vendor("Qingdao Haier Electronics Co., Ltd.", "HRE", datetime.date(2006, 4, 12)),
"QLC": pnp.Vendor("Q-Logic", "QLC", datetime.date(1996, 11, 29)),
"QTR": pnp.Vendor("Qtronix Corporation", "QTR", datetime.date(1996, 11, 29)),
"DHQ": pnp.Vendor("Quadram", "DHQ", datetime.date(1996, 11, 29)),
"QDM": pnp.Vendor("Quadram", "QDM", datetime.date(1996, 11, 29)),
"QCL": pnp.Vendor("Quadrant Components Inc", "QCL", datetime.date(1997, 4, 3)),
"QCC": pnp.Vendor("QuakeCom Company Ltd", "QCC", datetime.date(1998, 3, 23)),
"QCP": pnp.Vendor("Qualcomm Inc", "QCP", datetime.date(1999, 5, 16)),
"QCI": pnp.Vendor("Quanta Computer Inc", "QCI", datetime.date(1996, 11, 29)),
"QDS": pnp.Vendor("Quanta Display Inc.", "QDS", datetime.date(2002, 4, 25)),
"QTM": pnp.Vendor("Quantum", "QTM", datetime.date(1996, 11, 29)),
"QTD": pnp.Vendor("Quantum 3D Inc", "QTD", datetime.date(1997, 5, 23)),
"QDI": pnp.Vendor("Quantum Data Incorporated", "QDI", datetime.date(2001, 3, 15)),
"QVU": pnp.Vendor("Quartics", "QVU", datetime.date(2010, 11, 4)),
"QUA": pnp.Vendor("Quatographic AG", "QUA", datetime.date(2000, 1, 13)),
"QTH": pnp.Vendor("Questech Ltd", "QTH", datetime.date(2000, 1, 13)),
"QUE": pnp.Vendor("Questra Consulting", "QUE", datetime.date(1998, 1, 30)),
"QCK": pnp.Vendor("Quick Corporation", "QCK", datetime.date(1996, 11, 29)),
"QFI": pnp.Vendor("Quickflex, Inc", "QFI", datetime.date(1998, 8, 4)),
"QTI": pnp.Vendor("Quicknet Technologies Inc", "QTI", datetime.date(1996, 11, 29)),
"RSQ": pnp.Vendor("R Squared", "RSQ", datetime.date(1999, 11, 8)),
"RPT": pnp.Vendor("R.P.T.Intergroups", "RPT", datetime.date(1996, 11, 29)),
"RII": pnp.Vendor("Racal Interlan Inc", "RII", datetime.date(1996, 11, 29)),
"TSF": pnp.Vendor("Racal-Airtech Software Forge Ltd", "TSF", datetime.date(1996, 11, 29)),
"RAC": pnp.Vendor("Racore Computer Products Inc", "RAC", datetime.date(1996, 11, 29)),
"RRI": pnp.Vendor("Radicom Research Inc", "RRI", datetime.date(1997, 12, 2)),
"RCN": pnp.Vendor("Radio Consult SRL", "RCN", datetime.date(2002, 9, 24)),
"RDN": pnp.Vendor("RADIODATA GmbH", "RDN", datetime.date(2012, 7, 25)),
"RLN": pnp.Vendor("RadioLAN Inc", "RLN", datetime.date(1996, 11, 29)),
"RSN": pnp.Vendor("Radiospire Networks, Inc.", "RSN", datetime.date(2007, 6, 14)),
"RAD": pnp.Vendor("Radisys Corporation", "RAD", datetime.date(1996, 11, 29)),
"RDS": pnp.Vendor("Radius Inc", "RDS", datetime.date(1997, 3, 7)),
"RFI": pnp.Vendor("RAFI GmbH & Co. KG", "RFI", datetime.date(2015, 8, 24)),
"RDI": pnp.Vendor("Rainbow Displays, Inc.", "RDI", datetime.date(1998, 9, 23)),
"RNB": pnp.Vendor("Rainbow Technologies", "RNB", datetime.date(1996, 11, 29)),
"RTS": pnp.Vendor("Raintree Systems", "RTS", datetime.date(2001, 10, 2)),
"BOB": pnp.Vendor("Rainy Orchard", "BOB", datetime.date(2000, 2, 21)),
"RSI": pnp.Vendor("Rampage Systems Inc", "RSI", datetime.date(1996, 11, 29)),
"RAN": pnp.Vendor("Rancho Tech Inc", "RAN", datetime.date(1996, 11, 29)),
"RTI": pnp.Vendor("Rancho Tech Inc", "RTI", datetime.date(1996, 11, 29)),
"RSX": pnp.Vendor("Rapid Tech Corporation", "RSX", datetime.date(1996, 11, 29)),
"RMC": pnp.Vendor("Raritan Computer, Inc", "RMC", datetime.date(1998, 11, 27)),
"RAR": pnp.Vendor("Raritan, Inc.", "RAR", datetime.date(2007, 6, 14)),
"RAS": pnp.Vendor("RAScom Inc", "RAS", datetime.date(1996, 11, 29)),
"REX": pnp.Vendor("RATOC Systems, Inc.", "REX", datetime.date(2012, 1, 6)),
"RAY": pnp.Vendor("Raylar Design, Inc.", "RAY", datetime.date(2000, 1, 13)),
"RCI": pnp.Vendor("RC International", "RCI", datetime.date(1996, 11, 29)),
"RCH": pnp.Vendor("Reach Technology Inc", "RCH", datetime.date(1998, 2, 9)),
"RKC": pnp.Vendor("Reakin Technolohy Corporation", "RKC", datetime.date(2001, 3, 15)),
"REA": pnp.Vendor("Real D", "REA", datetime.date(2007, 11, 15)),
"RTL": pnp.Vendor("Realtek Semiconductor Company Ltd", "RTL", datetime.date(1996, 11, 29)),
"ALG": pnp.Vendor("Realtek Semiconductor Corp.", "ALG", datetime.date(2002, 10, 25)),
"RVI": pnp.Vendor("Realvision Inc", "RVI", datetime.date(1996, 11, 29)),
"REC": pnp.Vendor("ReCom", "REC", datetime.date(1999, 5, 16)),
"RWC": pnp.Vendor("Red Wing Corporation", "RWC", datetime.date(1998, 1, 8)),
"RFX": pnp.Vendor("Redfox Technologies Inc.", "RFX", datetime.date(2014, 1, 14)),
"REF": pnp.Vendor("Reflectivity, Inc.", "REF", datetime.date(2000, 4, 19)),
"REH": pnp.Vendor("Rehan Electronics Ltd.", "REH", datetime.date(2012, 2, 15)),
"RTC": pnp.Vendor("Relia Technologies", "RTC", datetime.date(1996, 11, 29)),
"REL": pnp.Vendor("Reliance Electric Ind Corporation", "REL", datetime.date(1996, 11, 29)),
"REN": pnp.Vendor("Renesas Technology Corp.", "REN", datetime.date(2007, 6, 14)),
"RAT": pnp.Vendor("Rent-A-Tech", "RAT", datetime.date(1999, 2, 22)),
"RED": pnp.Vendor("Research Electronics Development Inc", "RED", datetime.date(1997, 12, 2)),
"RMP": pnp.Vendor("Research Machines", "RMP", datetime.date(1996, 11, 29)),
"RES": pnp.Vendor("ResMed Pty Ltd", "RES", datetime.date(2000, 2, 21)),
"RET": pnp.Vendor("Resonance Technology, Inc.", "RET", datetime.date(2011, 2, 9)),
"WTS": pnp.Vendor("Restek Electric Company Ltd", "WTS", datetime.date(1996, 11, 29)),
"RVL": pnp.Vendor("Reveal Computer Prod", "RVL", datetime.date(1996, 11, 29)),
"REV": pnp.Vendor("Revolution Display, Inc.", "REV", datetime.date(2014, 3, 19)),
"RGB": pnp.Vendor("RGB Spectrum", "RGB", datetime.date(2012, 11, 14)),
"EXN": pnp.Vendor("RGB Systems, Inc. dba Extron Electronics", "EXN", datetime.date(2008, 7, 6)),
"RIC": pnp.Vendor("RICOH COMPANY, LTD.", "RIC", datetime.date(2010, 5, 13)),
"RHD": pnp.Vendor("RightHand Technologies", "RHD", datetime.date(2012, 5, 1)),
"RIO": pnp.Vendor("Rios Systems Company Ltd", "RIO", datetime.date(1996, 11, 29)),
"RIT": pnp.Vendor("Ritech Inc", "RIT", datetime.date(1998, 4, 14)),
"RIV": pnp.Vendor("Rivulet Communications", "RIV", datetime.date(2007, 7, 19)),
"BSG": pnp.Vendor("Robert Bosch GmbH", "BSG", datetime.date(2014, 5, 15)),
"GRY": pnp.Vendor("Robert Gray Company", "GRY", datetime.date(1998, 3, 31)),
"RGL": pnp.Vendor("Robertson Geologging Ltd", "RGL", datetime.date(2000, 8, 10)),
"ROB": pnp.Vendor("Robust Electronics GmbH", "ROB", datetime.date(2008, 1, 18)),
"RAI": pnp.Vendor("Rockwell Automation/Intecolor", "RAI", datetime.date(1998, 3, 13)),
"RCO": pnp.Vendor("Rockwell Collins", "RCO", datetime.date(2010, 9, 10)),
"ASY": pnp.Vendor("Rockwell Collins / Airshow Systems", "ASY", datetime.date(2004, 12, 2)),
"COL": pnp.Vendor("Rockwell Collins, Inc.", "COL", datetime.date(2007, 6, 14)),
"ROK": pnp.Vendor("Rockwell International", "ROK", datetime.date(1996, 11, 29)),
"RSS": pnp.Vendor("Rockwell Semiconductor Systems", "RSS", datetime.date(1996, 11, 29)),
"MAX": pnp.Vendor("Rogen Tech Distribution Inc", "MAX", datetime.date(1996, 11, 29)),
"ROS": pnp.Vendor("Rohde & Schwarz", "ROS", datetime.date(2012, 1, 20)),
"ROH": pnp.Vendor("Rohm Co., Ltd.", "ROH", datetime.date(2004, 6, 16)),
"RHM": pnp.Vendor("Rohm Company Ltd", "RHM", datetime.date(1997, 5, 13)),
"RJA": pnp.Vendor("Roland Corporation", "RJA", datetime.date(1996, 11, 29)),
"RPI": pnp.Vendor("RoomPro Technologies", "RPI", datetime.date(2010, 7, 9)),
"ROP": pnp.Vendor("Roper International Ltd", "ROP", datetime.date(1999, 5, 16)),
"RMT": pnp.Vendor("Roper Mobile", "RMT", datetime.date(2010, 7, 2)),
"RSV": pnp.Vendor("Ross Video Ltd", "RSV", datetime.date(2012, 6, 11)),
"TRL": pnp.Vendor("Royal Information", "TRL", datetime.date(1996, 11, 29)),
"RZS": pnp.Vendor("Rozsnyó, s.r.o.", "RZS", datetime.date(2014, 3, 24)),
"RVC": pnp.Vendor("RSI Systems Inc", "RVC", datetime.date(1998, 4, 28)),
"RUN": pnp.Vendor("RUNCO International", "RUN", datetime.date(2004, 4, 1)),
"SNK": pnp.Vendor("S&K Electronics", "SNK", datetime.date(2000, 2, 21)),
"TLV": pnp.Vendor("S3 Inc", "TLV", datetime.date(1997, 1, 7)),
"SIM": pnp.Vendor("S3 Inc", "SIM", datetime.date(1996, 11, 29)),
"SSS": pnp.Vendor("S3 Inc", "SSS", datetime.date(1996, 11, 29)),
"SAE": pnp.Vendor("Saab Aerotech", "SAE", datetime.date(2007, 6, 14)),
"SAI": pnp.Vendor("Sage Inc", "SAI", datetime.date(1997, 7, 16)),
"SGM": pnp.Vendor("SAGEM", "SGM", datetime.date(2003, 9, 5)),
"SDK": pnp.Vendor("SAIT-Devlonics", "SDK", datetime.date(1996, 11, 29)),
"SAK": pnp.Vendor("Saitek Ltd", "SAK", datetime.date(1999, 5, 16)),
"SLT": pnp.Vendor("Salt Internatioinal Corp.", "SLT", datetime.date(2006, 9, 5)),
"SAM": pnp.Vendor("Samsung Electric Company", "SAM", datetime.date(1996, 11, 29)),
"SKT": pnp.Vendor("Samsung Electro-Mechanics Company Ltd", "SKT", datetime.date(1996, 11, 29)),
"STN": pnp.Vendor("Samsung Electronics America", "STN", datetime.date(2000, 8, 10)),
"KYK": pnp.Vendor("Samsung Electronics America Inc", "KYK", datetime.date(1998, 2, 24)),
"SSE": pnp.Vendor("Samsung Electronic Co.", "SSE", datetime.date(2000, 8, 10)),
"SEM": pnp.Vendor("Samsung Electronics Company Ltd", "SEM", datetime.date(1996, 11, 29)),
"SDI": pnp.Vendor("Samtron Displays Inc", "SDI", datetime.date(1996, 11, 29)),
"JSK": pnp.Vendor("SANKEN ELECTRIC CO., LTD", "JSK", datetime.date(1999, 9, 13)),
"SSJ": pnp.Vendor("Sankyo Seiki Mfg.co., Ltd", "SSJ", datetime.date(2003, 1, 28)),
"SAA": pnp.Vendor("Sanritz Automation Co.,Ltd.", "SAA", datetime.date(2002, 2, 25)),
"STK": pnp.Vendor("SANTAK CORP.", "STK", datetime.date(1998, 11, 27)),
"SOC": pnp.Vendor("Santec Corporation", "SOC", datetime.date(2015, 1, 12)),
"SAN": pnp.Vendor("Sanyo Electric Co.,Ltd.", "SAN", datetime.date(1999, 11, 8)),
"SCD": pnp.Vendor("Sanyo Electric Company Ltd", "SCD", datetime.date(1996, 11, 29)),
"SIB": pnp.Vendor("Sanyo Electric Company Ltd", "SIB", datetime.date(1996, 11, 29)),
"TSC": pnp.Vendor("Sanyo Electric Company Ltd", "TSC", datetime.date(1996, 11, 29)),
"ICN": pnp.Vendor("Sanyo Icon", "ICN", datetime.date(1996, 11, 29)),
"SPN": pnp.Vendor("Sapience Corporation", "SPN", datetime.date(1996, 11, 29)),
"SDA": pnp.Vendor("SAT (Societe Anonyme)", "SDA", datetime.date(1996, 11, 29)),
"AVV": pnp.Vendor("SBS Technologies (Canada), Inc. (was Avvida Systems, Inc.)", "AVV", datetime.date(2002, 12, 17)),
"SBS": pnp.Vendor("SBS-or Industrial Computers GmbH", "SBS", datetime.date(1998, 12, 28)),
"SGI": pnp.Vendor("Scan Group Ltd", "SGI", datetime.date(1996, 11, 29)),
"SCN": pnp.Vendor("Scanport, Inc.", "SCN", datetime.date(2002, 8, 5)),
"KFC": pnp.Vendor("SCD Tech", "KFC", datetime.date(2002, 10, 23)),
"SPT": pnp.Vendor("Sceptre Tech Inc", "SPT", datetime.date(1996, 11, 29)),
"SMB": pnp.Vendor("Schlumberger", "SMB", datetime.date(1999, 7, 16)),
"SCH": pnp.Vendor("Schlumberger Cards", "SCH", datetime.date(1998, 4, 28)),
"SLR": pnp.Vendor("Schlumberger Technology Corporate", "SLR", datetime.date(2000, 8, 10)),
"SKD": pnp.Vendor("Schneider & Koch", "SKD", datetime.date(1996, 11, 29)),
"MGE": pnp.Vendor("Schneider Electric S.A.", "MGE", datetime.date(1996, 11, 29)),
"SLS": pnp.Vendor("Schnick-Schnack-Systems GmbH", "SLS", datetime.date(2009, 5, 6)),
"REM": pnp.Vendor("SCI Systems Inc.", "REM", datetime.date(2000, 8, 10)),
"SCM": pnp.Vendor("SCM Microsystems Inc", "SCM", datetime.date(1996, 11, 29)),
"SCP": pnp.Vendor("Scriptel Corporation", "SCP", datetime.date(2007, 6, 14)),
"SDR": pnp.Vendor("SDR Systems", "SDR", datetime.date(2001, 3, 15)),
"STY": pnp.Vendor("SDS Technologies", "STY", datetime.date(1999, 11, 8)),
"SDX": pnp.Vendor("SDX Business Systems Ltd", "SDX", datetime.date(1996, 11, 29)),
"NIX": pnp.Vendor("Seanix Technology Inc", "NIX", datetime.date(2007, 4, 9)),
"SEA": pnp.Vendor("Seanix Technology Inc.", "SEA", datetime.date(1998, 2, 24)),
"SAG": pnp.Vendor("Sedlbauer", "SAG", datetime.date(1996, 11, 29)),
"SEE": pnp.Vendor("SeeColor Corporation", "SEE", datetime.date(1996, 11, 29)),
"SCB": pnp.Vendor("SeeCubic B.V.", "SCB", datetime.date(2012, 11, 2)),
"SRT": pnp.Vendor("SeeReal Technologies GmbH", "SRT", datetime.date(2005, 6, 27)),
"SEC": pnp.Vendor("Seiko Epson Corporation", "SEC", datetime.date(1996, 11, 29)),
"SID": pnp.Vendor("Seiko Instruments Information Devices Inc", "SID", datetime.date(1996, 12, 16)),
"SIU": pnp.Vendor("Seiko Instruments USA Inc", "SIU", datetime.date(1996, 11, 29)),
"SEI": pnp.Vendor("Seitz & Associates Inc", "SEI", datetime.date(1998, 1, 30)),
"SJE": pnp.Vendor("Sejin Electron Inc", "SJE", datetime.date(1997, 8, 20)),
"SXG": pnp.Vendor("SELEX GALILEO", "SXG", datetime.date(2012, 10, 1)),
"STH": pnp.Vendor("Semtech Corporation", "STH", datetime.date(2001, 11, 30)),
"SET": pnp.Vendor("SendTek Corporation", "SET", datetime.date(1999, 11, 8)),
"SBT": pnp.Vendor("Senseboard Technologies AB", "SBT", datetime.date(2002, 9, 3)),
"SEN": pnp.Vendor("Sencore", "SEN", datetime.date(1997, 5, 23)),
"STU": pnp.Vendor("Sentelic Corporation", "STU", datetime.date(2012, 7, 27)),
"SEO": pnp.Vendor("SEOS Ltd", "SEO", datetime.date(2003, 2, 20)),
"SNC": pnp.Vendor("Sentronic International Corp.", "SNC", datetime.date(2000, 10, 23)),
"SEP": pnp.Vendor("SEP Eletronica Ltda.", "SEP", datetime.date(2001, 5, 7)),
"SQT": pnp.Vendor("Sequent Computer Systems Inc", "SQT", datetime.date(1996, 11, 29)),
"SES": pnp.Vendor("Session Control LLC", "SES", datetime.date(2010, 9, 3)),
"SRD": pnp.Vendor("Setred", "SRD", datetime.date(2006, 9, 5)),
"SVT": pnp.Vendor("SEVIT Co., Ltd.", "SVT", datetime.date(2002, 6, 25)),
"SVA": pnp.Vendor("SGEG", "SVA", datetime.date(2000, 2, 21)),
"SYT": pnp.Vendor("Seyeon Tech Company Ltd", "SYT", datetime.date(1997, 12, 2)),
"STM": pnp.Vendor("SGS Thomson Microelectronics", "STM", datetime.date(1997, 11, 11)),
"OYO": pnp.Vendor("Shadow Systems", "OYO", datetime.date(1996, 11, 29)),
"SBC": pnp.Vendor("Shanghai Bell Telephone Equip Mfg Co", "SBC", datetime.date(1998, 4, 30)),
"SGW": pnp.Vendor("Shanghai Guowei Science and Technology Co., Ltd.", "SGW", datetime.date(2011, 1, 28)),
"XQU": pnp.Vendor("SHANGHAI SVA-DAV ELECTRONICS CO., LTD", "XQU", datetime.date(2003, 7, 24)),
"SWL": pnp.Vendor("Sharedware Ltd", "SWL", datetime.date(1998, 8, 11)),
"SMM": pnp.Vendor("Shark Multimedia Inc", "SMM", datetime.date(1996, 11, 29)),
"DFK": pnp.Vendor("SharkTec A/S", "DFK", datetime.date(2006, 2, 14)),
"SHP": pnp.Vendor("Sharp Corporation", "SHP", datetime.date(1996, 11, 29)),
"SXT": pnp.Vendor("SHARP TAKAYA ELECTRONIC INDUSTRY CO.,LTD.", "SXT", datetime.date(2010, 6, 24)),
"CZC": pnp.Vendor("Shenzhen ChuangZhiCheng Technology Co., Ltd.", "CZC", datetime.date(2013, 10, 23)),
"IXN": pnp.Vendor("Shenzhen Inet Mobile Internet Technology Co., LTD", "IXN", datetime.date(2014, 11, 4)),
"SZM": pnp.Vendor("Shenzhen MTC Co., Ltd", "SZM", datetime.date(2013, 8, 9)),
"RMS": pnp.Vendor("Shenzhen Ramos Digital Technology Co., Ltd", "RMS", datetime.date(2014, 10, 29)),
"SSL": pnp.Vendor("Shenzhen South-Top Computer Co., Ltd.", "SSL", datetime.date(2013, 12, 6)),
"AZH": pnp.Vendor("Shenzhen three Connaught Information Technology Co., Ltd. (3nod Group)", "AZH", datetime.date(2013, 9, 17)),
"XYE": pnp.Vendor("Shenzhen Zhuona Technology Co., Ltd.", "XYE", datetime.date(2013, 10, 1)),
"HTR": pnp.Vendor("Shenzhen ZhuoYi HengTong Computer Technology Limited", "HTR", datetime.date(2013, 12, 13)),
"ZWE": pnp.Vendor("Shenzhen Zowee Technology Co., LTD", "ZWE", datetime.date(2015, 5, 26)),
"SDE": pnp.Vendor("Sherwood Digital Electronics Corporation", "SDE", datetime.date(1996, 11, 29)),
"SHC": pnp.Vendor("ShibaSoku Co., Ltd.", "SHC", datetime.date(2005, 5, 26)),
"SHT": pnp.Vendor("Shin Ho Tech", "SHT", datetime.date(1996, 11, 29)),
"SLB": pnp.Vendor("Shlumberger Ltd", "SLB", datetime.date(1996, 11, 29)),
"SAT": pnp.Vendor("Shuttle Tech", "SAT", datetime.date(1996, 11, 29)),
"CHG": pnp.Vendor("Sichuan Changhong Electric CO, LTD.", "CHG", datetime.date(2003, 2, 26)),
"CHO": pnp.Vendor("Sichuang Changhong Corporation", "CHO", datetime.date(2001, 11, 30)),
"SIE": pnp.Vendor("Siemens", "SIE", datetime.date(1996, 11, 29)),
"SDT": pnp.Vendor("Siemens AG", "SDT", datetime.date(2006, 2, 14)),
"SIA": pnp.Vendor("SIEMENS AG", "SIA", datetime.date(2001, 3, 15)),
"SNI": pnp.Vendor("Siemens Microdesign GmbH", "SNI", datetime.date(1996, 11, 29)),
"SNP": pnp.Vendor("Siemens Nixdorf Info Systems", "SNP", datetime.date(1996, 11, 29)),
"SSC": pnp.Vendor("Sierra Semiconductor Inc", "SSC", datetime.date(1996, 11, 29)),
"SWI": pnp.Vendor("Sierra Wireless Inc.", "SWI", datetime.date(2003, 7, 10)),
"SIG": pnp.Vendor("Sigma Designs Inc", "SIG", datetime.date(1996, 11, 29)),
"SGD": pnp.Vendor("Sigma Designs, Inc.", "SGD", datetime.date(2006, 2, 14)),
"SCL": pnp.Vendor("Sigmacom Co., Ltd.", "SCL", datetime.date(2002, 4, 25)),
"STL": pnp.Vendor("SigmaTel Inc", "STL", datetime.date(1997, 3, 3)),
"DXS": pnp.Vendor("Signet", "DXS", datetime.date(2000, 10, 23)),
"STE": pnp.Vendor("SII Ido-Tsushin Inc", "STE", datetime.date(1997, 4, 3)),
"SMT": pnp.Vendor("Silcom Manufacturing Tech Inc", "SMT", datetime.date(1996, 11, 29)),
"SXD": pnp.Vendor("Silex technology, Inc.", "SXD", datetime.date(2009, 3, 12)),
"SMS": pnp.Vendor("Silicom Multimedia Systems Inc", "SMS", datetime.date(1996, 12, 4)),
"SGX": pnp.Vendor("Silicon Graphics Inc", "SGX", datetime.date(1996, 11, 29)),
"SII": pnp.Vendor("Silicon Image, Inc.", "SII", datetime.date(2000, 1, 13)),
"SIS": pnp.Vendor("Silicon Integrated Systems Corporation", "SIS", datetime.date(1996, 11, 29)),
"SIL": pnp.Vendor("Silicon Laboratories, Inc", "SIL", datetime.date(1998, 7, 16)),
"SLH": pnp.Vendor("Silicon Library Inc.", "SLH", datetime.date(2008, 11, 1)),
"SOI": pnp.Vendor("Silicon Optix Corporation", "SOI", datetime.date(2005, 7, 28)),
"SLK": pnp.Vendor("Silitek Corporation", "SLK", datetime.date(1997, 7, 16)),
"SPU": pnp.Vendor("SIM2 Multimedia S.P.A.", "SPU", datetime.date(2002, 9, 5)),
"SMP": pnp.Vendor("Simple Computing", "SMP", datetime.date(1996, 11, 29)),
"SPX": pnp.Vendor("Simplex Time Recorder Co.", "SPX", datetime.date(2001, 3, 15)),
"SIN": pnp.Vendor("Singular Technology Co., Ltd.", "SIN", datetime.date(1999, 11, 8)),
"SNO": pnp.Vendor("SINOSUN TECHNOLOGY CO., LTD", "SNO", datetime.date(2005, 6, 27)),
"SIR": pnp.Vendor("Sirius Technologies Pty Ltd", "SIR", datetime.date(1998, 3, 13)),
"FUN": pnp.Vendor("sisel muhendislik", "FUN", datetime.date(2002, 4, 25)),
"STS": pnp.Vendor("SITECSYSTEM CO., LTD.", "STS", datetime.date(2005, 3, 16)),
"SIT": pnp.Vendor("Sitintel", "SIT", datetime.date(1996, 11, 29)),
"SKY": pnp.Vendor("SKYDATA S.P.A.", "SKY", datetime.date(1997, 9, 19)),
"SCT": pnp.Vendor("Smart Card Technology", "SCT", datetime.date(2000, 8, 10)),
"SMA": pnp.Vendor("SMART Modular Technologies", "SMA", datetime.date(1997, 4, 4)),
"SPL": pnp.Vendor("Smart Silicon Systems Pty Ltd", "SPL", datetime.date(2000, 8, 10)),
"STI": pnp.Vendor("Smart Tech Inc", "STI", datetime.date(1996, 11, 29)),
"SBI": pnp.Vendor("SMART Technologies Inc.", "SBI", datetime.date(2007, 6, 14)),
"SMK": pnp.Vendor("SMK CORPORATION", "SMK", datetime.date(2000, 2, 21)),
"SNW": pnp.Vendor("Snell & Wilcox", "SNW", datetime.date(2002, 4, 25)),
"MVM": pnp.Vendor("SOBO VISION", "MVM", datetime.date(2007, 6, 14)),
"SCX": pnp.Vendor("Socionext Inc.", "SCX", datetime.date(2015, 5, 14)),
"LAN": pnp.Vendor("Sodeman Lancom Inc", "LAN", datetime.date(1996, 11, 29)),
"SDF": pnp.Vendor("SODIFF E&T CO., Ltd.", "SDF", datetime.date(2007, 6, 1)),
"SHG": pnp.Vendor("Soft & Hardware development Goldammer GmbH", "SHG", datetime.date(1996, 11, 29)),
"SBD": pnp.Vendor("Softbed - Consulting & Development Ltd", "SBD", datetime.date(1997, 12, 23)),
"SWC": pnp.Vendor("Software Café", "SWC", datetime.date(1996, 11, 29)),
"SWT": pnp.Vendor("Software Technologies Group,Inc.", "SWT", datetime.date(2008, 11, 29)),
"SOL": pnp.Vendor("Solitron Technologies Inc", "SOL", datetime.date(1996, 11, 29)),
"SLM": pnp.Vendor("Solomon Technology Corporation", "SLM", datetime.date(1998, 1, 16)),
"SXL": pnp.Vendor("SolutionInside", "SXL", datetime.date(2001, 5, 8)),
"ONX": pnp.Vendor("SOMELEC Z.I. Du Vert Galanta", "ONX", datetime.date(1996, 11, 29)),
"HON": pnp.Vendor("Sonitronix", "HON", datetime.date(2011, 2, 3)),
"SNX": pnp.Vendor("Sonix Comm. Ltd", "SNX", datetime.date(1996, 11, 29)),
"SNY": pnp.Vendor("Sony", "SNY", datetime.date(1996, 11, 29)),
"SON": pnp.Vendor("Sony", "SON", datetime.date(1996, 11, 29)),
"SER": pnp.Vendor("Sony Ericsson Mobile Communications Inc.", "SER", datetime.date(2004, 4, 16)),
"SCO": pnp.Vendor("SORCUS Computer GmbH", "SCO", datetime.date(2000, 1, 13)),
"SOR": pnp.Vendor("Sorcus Computer GmbH", "SOR", datetime.date(1996, 11, 29)),
"SCC": pnp.Vendor("SORD Computer Corporation", "SCC", datetime.date(1996, 11, 29)),
"SOT": pnp.Vendor("Sotec Company Ltd", "SOT", datetime.date(1997, 5, 21)),
"FRS": pnp.Vendor("South Mountain Technologies, LTD", "FRS", datetime.date(2006, 2, 14)),
"SOY": pnp.Vendor("SOYO Group, Inc", "SOY", datetime.date(2006, 12, 18)),
"SPI": pnp.Vendor("SPACE-I Co., Ltd.", "SPI", datetime.date(2005, 5, 11)),
"SMI": pnp.Vendor("SpaceLabs Medical Inc", "SMI", datetime.date(1996, 11, 29)),
"SPE": pnp.Vendor("SPEA Software AG", "SPE", datetime.date(1996, 11, 29)),
"SPK": pnp.Vendor("SpeakerCraft", "SPK", datetime.date(2010, 4, 20)),
"SLX": pnp.Vendor("Specialix", "SLX", datetime.date(1996, 11, 29)),
"SGC": pnp.Vendor("Spectragraphics Corporation", "SGC", datetime.date(1996, 11, 29)),
"SSP": pnp.Vendor("Spectrum Signal Proecessing Inc", "SSP", datetime.date(1996, 11, 29)),
"SRS": pnp.Vendor("SR-Systems e.K.", "SRS", datetime.date(2012, 11, 19)),
"SSI": pnp.Vendor("S-S Technology Inc", "SSI", datetime.date(1996, 11, 29)),
"STA": pnp.Vendor("ST Electronics Systems Assembly Pte Ltd", "STA", datetime.date(1998, 12, 28)),
"STC": pnp.Vendor("STAC Electronics", "STC", datetime.date(1996, 11, 29)),
"SMC": pnp.Vendor("Standard Microsystems Corporation", "SMC", datetime.date(1996, 11, 29)),
"STT": pnp.Vendor("Star Paging Telecom Tech (Shenzhen) Co. Ltd.", "STT", datetime.date(1998, 9, 23)),
"STF": pnp.Vendor("Starflight Electronics", "STF", datetime.date(1997, 5, 23)),
"SGT": pnp.Vendor("Stargate Technology", "SGT", datetime.date(1996, 11, 29)),
"SLF": pnp.Vendor("StarLeaf", "SLF", datetime.date(2010, 11, 1)),
"STR": pnp.Vendor("Starlight Networks Inc", "STR", datetime.date(1996, 11, 29)),
"STW": pnp.Vendor("Starwin Inc.", "STW", datetime.date(2001, 4, 24)),
"SWS": pnp.Vendor("Static", "SWS", datetime.date(1999, 5, 16)),
"STB": pnp.Vendor("STB Systems Inc", "STB", datetime.date(1996, 11, 29)),
"STD": pnp.Vendor("STD Computer Inc", "STD", datetime.date(1996, 11, 29)),
"STG": pnp.Vendor("StereoGraphics Corp.", "STG", datetime.date(2001, 10, 2)),
"STX": pnp.Vendor("ST-Ericsson", "STX", datetime.date(2011, 12, 9)),
"SMO": pnp.Vendor("STMicroelectronics", "SMO", datetime.date(2007, 6, 14)),
"STO": pnp.Vendor("Stollmann E+V GmbH", "STO", datetime.date(1997, 3, 27)),
"SAS": pnp.Vendor("Stores Automated Systems Inc", "SAS", datetime.date(1997, 3, 19)),
"EZP": pnp.Vendor("Storm Technology", "EZP", datetime.date(1996, 10, 17)),
"STP": pnp.Vendor("StreamPlay Ltd", "STP", datetime.date(2009, 2, 4)),
"SYK": pnp.Vendor("Stryker Communications", "SYK", datetime.date(2005, 10, 10)),
"SUB": pnp.Vendor("Subspace Comm. Inc", "SUB", datetime.date(1996, 11, 29)),
"SML": pnp.Vendor("Sumitomo Metal Industries, Ltd.", "SML", datetime.date(1999, 9, 13)),
"SUM": pnp.Vendor("Summagraphics Corporation", "SUM", datetime.date(1996, 11, 29)),
"SCE": pnp.Vendor("Sun Corporation", "SCE", datetime.date(1996, 11, 29)),
"SUN": pnp.Vendor("Sun Electronics Corporation", "SUN", datetime.date(1996, 11, 29)),
"SVI": pnp.Vendor("Sun Microsystems", "SVI", datetime.date(2003, 1, 13)),
"SNN": pnp.Vendor("SUNNY ELEKTRONIK", "SNN", datetime.date(2014, 11, 14)),
"SDS": pnp.Vendor("SunRiver Data System", "SDS", datetime.date(1996, 11, 29)),
"SGL": pnp.Vendor("Super Gate Technology Company Ltd", "SGL", datetime.date(1997, 12, 30)),
"SNT": pnp.Vendor("SuperNet Inc", "SNT", datetime.date(1998, 4, 23)),
"SUP": pnp.Vendor("Supra Corporation", "SUP", datetime.date(1996, 11, 29)),
"SUR": pnp.Vendor("Surenam Computer Corporation", "SUR", datetime.date(1996, 11, 29)),
"SRF": pnp.Vendor("Surf Communication Solutions Ltd", "SRF", datetime.date(1998, 3, 23)),
"SVD": pnp.Vendor("SVD Computer", "SVD", datetime.date(1998, 4, 14)),
"SVS": pnp.Vendor("SVSI", "SVS", datetime.date(2008, 8, 9)),
"SYE": pnp.Vendor("SY Electronics Ltd", "SYE", datetime.date(2010, 9, 20)),
"SYL": pnp.Vendor("Sylvania Computer Products", "SYL", datetime.date(1998, 6, 12)),
"SLI": pnp.Vendor("Symbios Logic Inc", "SLI", datetime.date(1996, 11, 29)),
"ISA": pnp.Vendor("Symbol Technologies", "ISA", datetime.date(1997, 6, 2)),
"SYM": pnp.Vendor("Symicron Computer Communications Ltd.", "SYM", datetime.date(1996, 11, 29)),
"SYN": pnp.Vendor("Synaptics Inc", "SYN", datetime.date(1996, 11, 29)),
"SPS": pnp.Vendor("Synopsys Inc", "SPS", datetime.date(1996, 11, 29)),
"SXB": pnp.Vendor("Syntax-Brillian", "SXB", datetime.date(2006, 5, 8)),
"SYP": pnp.Vendor("SYPRO Co Ltd", "SYP", datetime.date(1998, 11, 27)),
"SYS": pnp.Vendor("Sysgration Ltd", "SYS", datetime.date(1997, 4, 28)),
"SLC": pnp.Vendor("Syslogic Datentechnik AG", "SLC", datetime.date(1999, 1, 20)),
"SME": pnp.Vendor("Sysmate Company", "SME", datetime.date(1997, 9, 2)),
"SIC": pnp.Vendor("Sysmate Corporation", "SIC", datetime.date(1997, 5, 5)),
"SYC": pnp.Vendor("Sysmic", "SYC", datetime.date(1996, 11, 29)),
"SGZ": pnp.Vendor("Systec Computer GmbH", "SGZ", datetime.date(1997, 10, 2)),
"SCI": pnp.Vendor("System Craft", "SCI", datetime.date(1996, 11, 29)),
"SEB": pnp.Vendor("system elektronik GmbH", "SEB", datetime.date(2000, 4, 19)),
"SLA": pnp.Vendor("Systeme Lauer GmbH&Co KG", "SLA", datetime.date(1999, 3, 20)),
"UPS": pnp.Vendor("Systems Enhancement", "UPS", datetime.date(1996, 11, 29)),
"SST": pnp.Vendor("SystemSoft Corporation", "SST", datetime.date(1996, 11, 29)),
"SCR": pnp.Vendor("Systran Corporation", "SCR", datetime.date(1996, 11, 29)),
"SYV": pnp.Vendor("SYVAX Inc", "SYV", datetime.date(1996, 11, 29)),
"TUA": pnp.Vendor("T+A elektroakustik GmbH", "TUA", datetime.date(2011, 1, 5)),
"TCD": pnp.Vendor("Taicom Data Systems Co., Ltd.", "TCD", datetime.date(2001, 10, 8)),
"TMR": pnp.Vendor("Taicom International Inc", "TMR", datetime.date(1996, 11, 29)),
"TKC": pnp.Vendor("Taiko Electric Works.LTD", "TKC", datetime.date(2001, 3, 15)),
"TVM": pnp.Vendor("Taiwan Video & Monitor Corporation", "TVM", datetime.date(1996, 11, 29)),
"KTD": pnp.Vendor("Takahata Electronics Co.,Ltd.", "KTD", datetime.date(2009, 7, 22)),
"TAM": pnp.Vendor("Tamura Seisakusyo Ltd", "TAM", datetime.date(1997, 7, 17)),
"TAA": pnp.Vendor("Tandberg", "TAA", datetime.date(2003, 10, 21)),
"TDD": pnp.Vendor("Tandberg Data Display AS", "TDD", datetime.date(1996, 11, 29)),
"TDM": pnp.Vendor("Tandem Computer Europe Inc", "TDM", datetime.date(1996, 11, 29)),
"TCC": pnp.Vendor("Tandon Corporation", "TCC", datetime.date(1996, 11, 29)),
"TDY": pnp.Vendor("Tandy Electronics", "TDY", datetime.date(1996, 11, 29)),
"TAS": pnp.Vendor("Taskit Rechnertechnik GmbH", "TAS", datetime.date(1997, 12, 15)),
"TCS": pnp.Vendor("Tatung Company of America Inc", "TCS", datetime.date(1996, 11, 29)),
"VIB": pnp.Vendor("Tatung UK Ltd", "VIB", datetime.date(1999, 7, 16)),
"NRV": pnp.Vendor("Taugagreining hf", "NRV", datetime.date(1996, 11, 29)),
"TAX": pnp.Vendor("Taxan (Europe) Ltd", "TAX", datetime.date(1997, 3, 13)),
"PMD": pnp.Vendor("TDK USA Corporation", "PMD", datetime.date(1996, 11, 29)),
"TDT": pnp.Vendor("TDT", "TDT", datetime.date(1996, 11, 29)),
"TDV": pnp.Vendor("TDVision Systems, Inc.", "TDV", datetime.date(2008, 1, 18)),
"TEA": pnp.Vendor("TEAC System Corporation", "TEA", datetime.date(1996, 11, 29)),
"CET": pnp.Vendor("TEC CORPORATION", "CET", datetime.date(1998, 7, 16)),
"TCJ": pnp.Vendor("TEAC America Inc", "TCJ", datetime.date(1996, 11, 29)),
"TEZ": pnp.Vendor("Tech Source Inc.", "TEZ", datetime.date(2013, 8, 14)),
"TMC": pnp.Vendor("Techmedia Computer Systems Corporation", "TMC", datetime.date(1998, 2, 10)),
"TCL": pnp.Vendor("Technical Concepts Ltd", "TCL", datetime.date(1996, 11, 29)),
"TIL": pnp.Vendor("Technical Illusions Inc.", "TIL", datetime.date(2014, 2, 14)),
"TSD": pnp.Vendor("TechniSat Digital GmbH", "TSD", datetime.date(2005, 7, 14)),
"NXS": pnp.Vendor("Technology Nexus Secure Open Systems AB", "NXS", datetime.date(1998, 5, 8)),
"TPE": pnp.Vendor("Technology Power Enterprises Inc", "TPE", datetime.date(1996, 11, 29)),
"TTS": pnp.Vendor("TechnoTrend Systemtechnik GmbH", "TTS", datetime.date(1996, 11, 29)),
"TEC": pnp.Vendor("Tecmar Inc", "TEC", datetime.date(1996, 11, 29)),
"TCN": pnp.Vendor("Tecnetics (PTY) Ltd", "TCN", datetime.date(1996, 11, 29)),
"TNM": pnp.Vendor("TECNIMAGEN SA", "TNM", datetime.date(2005, 5, 2)),
"TVD": pnp.Vendor("Tecnovision", "TVD", datetime.date(2006, 3, 13)),
"RXT": pnp.Vendor("Tectona SoftSolutions (P) Ltd.,", "RXT", datetime.date(2004, 6, 2)),
"TKN": pnp.Vendor("Teknor Microsystem Inc", "TKN", datetime.date(1996, 11, 29)),
"TRM": pnp.Vendor("Tekram Technology Company Ltd", "TRM", datetime.date(1996, 11, 29)),
"TEK": pnp.Vendor("Tektronix Inc", "TEK", datetime.date(1999, 5, 16)),
"TWX": pnp.Vendor("TEKWorx Limited", "TWX", datetime.date(2009, 12, 24)),
"TCT": pnp.Vendor("Telecom Technology Centre Co. Ltd.", "TCT", datetime.date(1999, 7, 16)),
"TTC": pnp.Vendor("Telecommunications Techniques Corporation", "TTC", datetime.date(1996, 11, 29)),
"TLF": pnp.Vendor("Teleforce.,co,ltd", "TLF", datetime.date(2012, 11, 19)),
"TAT": pnp.Vendor("Teleliaison Inc", "TAT", datetime.date(1997, 4, 29)),
"TLK": pnp.Vendor("Telelink AG", "TLK", datetime.date(1998, 9, 1)),
"TPS": pnp.Vendor("Teleprocessing Systeme GmbH", "TPS", datetime.date(1997, 1, 24)),
"TAG": pnp.Vendor("Teles AG", "TAG", datetime.date(1996, 11, 29)),
"TLS": pnp.Vendor("Teleste Educational OY", "TLS", datetime.date(1996, 11, 29)),
"TSI": pnp.Vendor("TeleVideo Systems", "TSI", datetime.date(1996, 11, 29)),
"PFT": pnp.Vendor("Telia ProSoft AB", "PFT", datetime.date(1999, 9, 13)),
"TLD": pnp.Vendor("Telindus", "TLD", datetime.date(1996, 11, 29)),
"TLX": pnp.Vendor("Telxon Corporation", "TLX", datetime.date(1996, 11, 29)),
"TNY": pnp.Vendor("Tennyson Tech Pty Ltd", "TNY", datetime.date(1996, 11, 29)),
"TDC": pnp.Vendor("Teradici", "TDC", datetime.date(2007, 10, 11)),
"TER": pnp.Vendor("TerraTec Electronic GmbH", "TER", datetime.date(1997, 3, 21)),
"TXN": pnp.Vendor("Texas Insturments", "TXN", datetime.date(1996, 11, 29)),
"TMI": pnp.Vendor("Texas Microsystem", "TMI", datetime.date(1996, 11, 29)),
"TXT": pnp.Vendor("Textron Defense System", "TXT", datetime.date(1996, 11, 29)),
"CKC": pnp.Vendor("The Concept Keyboard Company Ltd", "CKC", datetime.date(1997, 6, 2)),
"LNX": pnp.Vendor("The Linux Foundation", "LNX", datetime.date(2014, 4, 4)),
"PXL": pnp.Vendor("The Moving Pixel Company", "PXL", datetime.date(2003, 11, 24)),
"ITN": pnp.Vendor("The NTI Group", "ITN", datetime.date(1996, 11, 29)),
"TOG": pnp.Vendor("The OPEN Group", "TOG", datetime.date(1999, 9, 13)),
"PAN": pnp.Vendor("The Panda Project", "PAN", datetime.date(1996, 11, 29)),
"PRG": pnp.Vendor("The Phoenix Research Group Inc", "PRG", datetime.date(1997, 9, 19)),
"TSG": pnp.Vendor("The Software Group Ltd", "TSG", datetime.date(1996, 11, 29)),
"TMX": pnp.Vendor("Thermotrex Corporation", "TMX", datetime.date(1996, 11, 29)),
"TLL": pnp.Vendor("Thinklogical", "TLL", datetime.date(2015, 6, 1)),
"TCO": pnp.Vendor("Thomas-Conrad Corporation", "TCO", datetime.date(1996, 11, 29)),
"TCR": pnp.Vendor("Thomson Consumer Electronics", "TCR", datetime.date(1998, 8, 20)),
"TPT": pnp.Vendor("Thruput Ltd", "TPT", datetime.date(2010, 6, 16)),
"THN": pnp.Vendor("Thundercom Holdings Sdn. Bhd.", "THN", datetime.date(1997, 3, 21)),
"TWA": pnp.Vendor("Tidewater Association", "TWA", datetime.date(1996, 11, 29)),
"TMM": pnp.Vendor("Time Management, Inc.", "TMM", datetime.date(1999, 3, 20)),
"TKS": pnp.Vendor("TimeKeeping Systems, Inc.", "TKS", datetime.date(1998, 8, 31)),
"TPD": pnp.Vendor("Times (Shanghai) Computer Co., Ltd.", "TPD", datetime.date(2013, 12, 12)),
"TIP": pnp.Vendor("TIPTEL AG", "TIP", datetime.date(1998, 2, 24)),
"TIX": pnp.Vendor("Tixi.Com GmbH", "TIX", datetime.date(1998, 10, 16)),
"TMT": pnp.Vendor("T-Metrics Inc.", "TMT", datetime.date(2000, 2, 21)),
"TNC": pnp.Vendor("TNC Industrial Company Ltd", "TNC", datetime.date(1998, 2, 27)),
"TAB": pnp.Vendor("Todos Data System AB", "TAB", datetime.date(1997, 8, 20)),
"TOE": pnp.Vendor("TOEI Electronics Co., Ltd.", "TOE", datetime.date(2001, 10, 2)),
"TON": pnp.Vendor("TONNA", "TON", datetime.date(2012, 3, 14)),
"TPV": pnp.Vendor("Top Victory Electronics ( Fujian ) Company Ltd", "TPV", datetime.date(1999, 5, 16)),
"TPK": pnp.Vendor("TOPRE CORPORATION", "TPK", datetime.date(2009, 2, 13)),
"TPR": pnp.Vendor("Topro Technology Inc", "TPR", datetime.date(1998, 5, 8)),
"TTA": pnp.Vendor("Topson Technology Co., Ltd.", "TTA", datetime.date(1998, 9, 23)),
"SFM": pnp.Vendor("TORNADO Company", "SFM", datetime.date(1997, 4, 15)),
"TGS": pnp.Vendor("Torus Systems Ltd", "TGS", datetime.date(1996, 11, 29)),
"TRS": pnp.Vendor("Torus Systems Ltd", "TRS", datetime.date(1996, 11, 29)),
"TAI": pnp.Vendor("Toshiba America Info Systems Inc", "TAI", datetime.date(1996, 11, 29)),
"TSB": pnp.Vendor("Toshiba America Info Systems Inc", "TSB", datetime.date(1996, 11, 29)),
"TOS": pnp.Vendor("Toshiba Corporation", "TOS", datetime.date(1996, 11, 29)),
"TTP": pnp.Vendor("Toshiba Corporation", "TTP", datetime.date(2015, 7, 7)),
"TGC": pnp.Vendor("Toshiba Global Commerce Solutions, Inc.", "TGC", datetime.date(2012, 6, 26)),
"LCD": pnp.Vendor("Toshiba Matsushita Display Technology Co., Ltd", "LCD", datetime.date(2000, 5, 24)),
"PCS": pnp.Vendor("TOSHIBA PERSONAL COMPUTER SYSTEM CORPRATION", "PCS", datetime.date(2010, 6, 22)),
"TLI": pnp.Vendor("TOSHIBA TELI CORPORATION", "TLI", datetime.date(2008, 1, 18)),
"TTK": pnp.Vendor("Totoku Electric Company Ltd", "TTK", datetime.date(1996, 11, 29)),
"TSE": pnp.Vendor("Tottori Sanyo Electric", "TSE", datetime.date(1996, 11, 29)),
"TSL": pnp.Vendor("Tottori SANYO Electric Co., Ltd.", "TSL", datetime.date(2001, 11, 6)),
"TPC": pnp.Vendor("Touch Panel Systems Corporation", "TPC", datetime.date(1997, 9, 2)),
"TKO": pnp.Vendor("TouchKo, Inc.", "TKO", datetime.date(2006, 1, 12)),
"TOU": pnp.Vendor("Touchstone Technology", "TOU", datetime.date(2001, 5, 7)),
"TSY": pnp.Vendor("TouchSystems", "TSY", datetime.date(2008, 1, 18)),
"TWK": pnp.Vendor("TOWITOKO electronics GmbH", "TWK", datetime.date(1998, 4, 14)),
"CSB": pnp.Vendor("Transtex SA", "CSB", datetime.date(2001, 3, 15)),
"TST": pnp.Vendor("Transtream Inc", "TST", datetime.date(1997, 4, 29)),
"TSV": pnp.Vendor("TRANSVIDEO", "TSV", datetime.date(2010, 5, 4)),
"TRE": pnp.Vendor("Tremetrics", "TRE", datetime.date(1997, 4, 24)),
"RDM": pnp.Vendor("Tremon Enterprises Company Ltd", "RDM", datetime.date(1996, 11, 29)),
"TTI": pnp.Vendor("Trenton Terminals Inc", "TTI", datetime.date(1996, 11, 29)),
"TRX": pnp.Vendor("Trex Enterprises", "TRX", datetime.date(2000, 2, 21)),
"OZO": pnp.Vendor("Tribe Computer Works Inc", "OZO", datetime.date(1996, 11, 29)),
"TRI": pnp.Vendor("Tricord Systems", "TRI", datetime.date(1996, 11, 29)),
"TDS": pnp.Vendor("Tri-Data Systems Inc", "TDS", datetime.date(1996, 11, 29)),
"TTY": pnp.Vendor("TRIDELITY Display Solutions GmbH", "TTY", datetime.date(2010, 7, 19)),
"TRD": pnp.Vendor("Trident Microsystem Inc", "TRD", datetime.date(1996, 11, 29)),
"TMS": pnp.Vendor("Trident Microsystems Ltd", "TMS", datetime.date(2002, 7, 15)),
"TGI": pnp.Vendor("TriGem Computer Inc", "TGI", datetime.date(1996, 11, 29)),
"TGM": pnp.Vendor("TriGem Computer,Inc.", "TGM", datetime.date(2001, 7, 5)),
"TIC": pnp.Vendor("Trigem KinfoComm", "TIC", datetime.date(2003, 2, 26)),
"TRC": pnp.Vendor("Trioc AB", "TRC", datetime.date(2000, 1, 13)),
"TBB": pnp.Vendor("Triple S Engineering Inc", "TBB", datetime.date(1997, 9, 26)),
"TRT": pnp.Vendor("Tritec Electronic AG", "TRT", datetime.date(2012, 1, 11)),
"TRA": pnp.Vendor("TriTech Microelectronics International", "TRA", datetime.date(1997, 1, 24)),
"TRB": pnp.Vendor("Triumph Board a.s.", "TRB", datetime.date(2013, 9, 27)),
"TRV": pnp.Vendor("Trivisio Prototyping GmbH", "TRV", datetime.date(2011, 11, 18)),
"TXL": pnp.Vendor("Trixel Ltd", "TXL", datetime.date(2000, 8, 10)),
"MKV": pnp.Vendor("Trtheim Technology", "MKV", datetime.date(1997, 3, 17)),
"TVI": pnp.Vendor("Truevision", "TVI", datetime.date(1996, 11, 29)),
"TTE": pnp.Vendor("TTE, Inc.", "TTE", datetime.date(2005, 1, 18)),
"TCI": pnp.Vendor("Tulip Computers Int'l B.V.", "TCI", datetime.date(1996, 11, 29)),
"TBC": pnp.Vendor("Turbo Communication, Inc", "TBC", datetime.date(1998, 9, 1)),
"TBS": pnp.Vendor("Turtle Beach System", "TBS", datetime.date(1996, 11, 29)),
"TUT": pnp.Vendor("Tut Systems", "TUT", datetime.date(1997, 8, 19)),
"TVR": pnp.Vendor("TV Interactive Corporation", "TVR", datetime.date(1996, 11, 29)),
"TVO": pnp.Vendor("TV One Ltd", "TVO", datetime.date(2008, 9, 2)),
"TVV": pnp.Vendor("TV1 GmbH", "TVV", datetime.date(2012, 2, 6)),
"TVS": pnp.Vendor("TVS Electronics Limited", "TVS", datetime.date(2008, 5, 20)),
"TWH": pnp.Vendor("Twinhead International Corporation", "TWH", datetime.date(1996, 11, 29)),
"TYN": pnp.Vendor("Tyan Computer Corporation", "TYN", datetime.date(1996, 11, 29)),
"USE": pnp.Vendor("U. S. Electronics Inc.", "USE", datetime.date(2013, 10, 28)),
"NRL": pnp.Vendor("U.S. Naval Research Lab", "NRL", datetime.date(1996, 11, 29)),
"TSP": pnp.Vendor("U.S. Navy", "TSP", datetime.date(2002, 10, 17)),
"USD": pnp.Vendor("U.S. Digital Corporation", "USD", datetime.date(1996, 11, 29)),
"USR": pnp.Vendor("U.S. Robotics Inc", "USR", datetime.date(1996, 11, 29)),
"UBL": pnp.Vendor("Ubinetics Ltd.", "UBL", datetime.date(2002, 5, 23)),
"UJR": pnp.Vendor("Ueda Japan Radio Co., Ltd.", "UJR", datetime.date(2003, 7, 9)),
"UFO": pnp.Vendor("UFO Systems Inc", "UFO", datetime.date(1996, 11, 29)),
"UAS": pnp.Vendor("Ultima Associates Pte Ltd", "UAS", datetime.date(1997, 1, 2)),
"UEC": pnp.Vendor("Ultima Electronics Corporation", "UEC", datetime.date(1998, 9, 1)),
"ULT": pnp.Vendor("Ultra Network Tech", "ULT", datetime.date(1996, 11, 29)),
"UMG": pnp.Vendor("Umezawa Giken Co.,Ltd", "UMG", datetime.date(2008, 4, 10)),
"UBI": pnp.Vendor("Ungermann-Bass Inc", "UBI", datetime.date(1996, 11, 29)),
"UNY": pnp.Vendor("Unicate", "UNY", datetime.date(1998, 7, 21)),
"UDN": pnp.Vendor("Uniden Corporation", "UDN", datetime.date(2004, 10, 18)),
"UIC": pnp.Vendor("Uniform Industrial Corporation", "UIC", datetime.date(1996, 11, 29)),
"UNI": pnp.Vendor("Uniform Industry Corp.", "UNI", datetime.date(2001, 11, 6)),
"UFG": pnp.Vendor("UNIGRAF-USA", "UFG", datetime.date(2008, 10, 9)),
"UNB": pnp.Vendor("Unisys Corporation", "UNB", datetime.date(1996, 11, 29)),
"UNC": pnp.Vendor("Unisys Corporation", "UNC", datetime.date(1996, 11, 29)),
"UNM": pnp.Vendor("Unisys Corporation", "UNM", datetime.date(1996, 11, 29)),
"UNO": pnp.Vendor("Unisys Corporation", "UNO", datetime.date(1996, 11, 29)),
"UNS": pnp.Vendor("Unisys Corporation", "UNS", datetime.date(1996, 11, 29)),
"UNT": pnp.Vendor("Unisys Corporation", "UNT", datetime.date(1996, 11, 29)),
"UNA": pnp.Vendor("Unisys DSD", "UNA", datetime.date(1996, 11, 29)),
"WKH": pnp.Vendor("Uni-Take Int'l Inc.", "WKH", datetime.date(2002, 6, 17)),
"UMC": pnp.Vendor("United Microelectr Corporation", "UMC", datetime.date(1996, 11, 29)),
"UNP": pnp.Vendor("Unitop", "UNP", datetime.date(2001, 11, 6)),
"UEI": pnp.Vendor("Universal Electronics Inc", "UEI", datetime.date(1997, 8, 20)),
"UET": pnp.Vendor("Universal Empowering Technologies", "UET", datetime.date(1997, 9, 26)),
"UMM": pnp.Vendor("Universal Multimedia", "UMM", datetime.date(2001, 10, 8)),
"USI": pnp.Vendor("Universal Scientific Industrial Co., Ltd.", "USI", datetime.date(2003, 11, 4)),
"JGD": pnp.Vendor("University College", "JGD", datetime.date(1996, 11, 29)),
"UWC": pnp.Vendor("Uniwill Computer Corp.", "UWC", datetime.date(2004, 4, 16)),
"UTD": pnp.Vendor("Up to Date Tech", "UTD", datetime.date(1996, 11, 29)),
"UPP": pnp.Vendor("UPPI", "UPP", datetime.date(1998, 4, 14)),
"RUP": pnp.Vendor("Ups Manufactoring s.r.l.", "RUP", datetime.date(2001, 3, 15)),
"ASD": pnp.Vendor("USC Information Sciences Institute", "ASD", datetime.date(1997, 4, 8)),
"USA": pnp.Vendor("Utimaco Safeware AG", "USA", datetime.date(1998, 5, 4)),
"VAD": pnp.Vendor("Vaddio, LLC", "VAD", datetime.date(2012, 11, 30)),
"VDM": pnp.Vendor("Vadem", "VDM", datetime.date(1996, 11, 29)),
"VAI": pnp.Vendor("VAIO Corporation", "VAI", datetime.date(2014, 4, 18)),
"VAL": pnp.Vendor("Valence Computing Corporation", "VAL", datetime.date(1996, 11, 29)),
"VBT": pnp.Vendor("Valley Board Ltda", "VBT", datetime.date(2001, 3, 15)),
"VLB": pnp.Vendor("ValleyBoard Ltda.", "VLB", datetime.date(1998, 4, 5)),
"VLV": pnp.Vendor("Valve Corporation", "VLV", datetime.date(2013, 3, 6)),
"ITI": pnp.Vendor("VanErum Group", "ITI", datetime.date(2013, 10, 1)),
"VAR": pnp.Vendor("Varian Australia Pty Ltd", "VAR", datetime.date(2000, 4, 19)),
"VTV": pnp.Vendor("VATIV Technologies", "VTV", datetime.date(2006, 4, 12)),
"VBR": pnp.Vendor("VBrick Systems Inc.", "VBR", datetime.date(2009, 8, 19)),
"VCX": pnp.Vendor("VCONEX", "VCX", datetime.date(2005, 6, 15)),
"VDC": pnp.Vendor("VDC Display Systems", "VDC", datetime.date(2009, 4, 29)),
"VEC": pnp.Vendor("Vector Informatik GmbH", "VEC", datetime.date(1997, 9, 10)),
"VCM": pnp.Vendor("Vector Magnetics, LLC", "VCM", datetime.date(2006, 4, 12)),
"VEK": pnp.Vendor("Vektrex", "VEK", datetime.date(1996, 12, 13)),
"VFI": pnp.Vendor("VeriFone Inc", "VFI", datetime.date(1998, 5, 29)),
"VMI": pnp.Vendor("Vermont Microsystems", "VMI", datetime.date(1996, 11, 29)),
"VTX": pnp.Vendor("Vestax Corporation", "VTX", datetime.date(2012, 2, 14)),
"VES": pnp.Vendor("Vestel Elektronik Sanayi ve Ticaret A. S.", "VES", datetime.date(1997, 9, 19)),
"VIM": pnp.Vendor("Via Mons Ltd.", "VIM", datetime.date(2012, 8, 29)),
"VIA": pnp.Vendor("VIA Tech Inc", "VIA", datetime.date(1996, 11, 29)),
"VCJ": pnp.Vendor("Victor Company of Japan, Limited", "VCJ", datetime.date(2009, 2, 6)),
"VDA": pnp.Vendor("Victor Data Systems", "VDA", datetime.date(2000, 5, 24)),
"VIC": pnp.Vendor("Victron B.V.", "VIC", datetime.date(1996, 11, 29)),
"VDO": pnp.Vendor("Video & Display Oriented Corporation", "VDO", datetime.date(1996, 11, 29)),
"URD": pnp.Vendor("Video Computer S.p.A.", "URD", datetime.date(1998, 2, 24)),
"JWD": pnp.Vendor("Video International Inc.", "JWD", datetime.date(2000, 2, 21)),
"VPI": pnp.Vendor("Video Products Inc", "VPI", datetime.date(2010, 5, 4)),
"VLT": pnp.Vendor("VideoLan Technologies", "VLT", datetime.date(1997, 10, 17)),
"VSI": pnp.Vendor("VideoServer", "VSI", datetime.date(1997, 6, 25)),
"VTB": pnp.Vendor("Videotechnik Breithaupt", "VTB", datetime.date(2013, 7, 23)),
"VTN": pnp.Vendor("VIDEOTRON CORP.", "VTN", datetime.date(2010, 5, 4)),
"VDS": pnp.Vendor("Vidisys GmbH & Company", "VDS", datetime.date(1996, 11, 29)),
"VDT": pnp.Vendor("Viditec, Inc.", "VDT", datetime.date(1999, 11, 8)),
"VSC": pnp.Vendor("ViewSonic Corporation", "VSC", datetime.date(1996, 11, 29)),
"VTK": pnp.Vendor("Viewteck Co., Ltd.", "VTK", datetime.date(2001, 10, 8)),
"VIK": pnp.Vendor("Viking Connectors", "VIK", datetime.date(1996, 11, 29)),
"VNC": pnp.Vendor("Vinca Corporation", "VNC", datetime.date(1996, 11, 29)),
"NHT": pnp.Vendor("Vinci Labs", "NHT", datetime.date(2006, 3, 3)),
"VML": pnp.Vendor("Vine Micros Limited", "VML", datetime.date(2004, 6, 16)),
"VIN": pnp.Vendor("Vine Micros Ltd", "VIN", datetime.date(2000, 4, 19)),
"VCC": pnp.Vendor("Virtual Computer Corporation", "VCC", datetime.date(1996, 11, 29)),
"VRC": pnp.Vendor("Virtual Resources Corporation", "VRC", datetime.date(1996, 11, 29)),
"VQ@": pnp.Vendor("Vision Quest", "VQ@", datetime.date(2009, 10, 26)),
"VSP": pnp.Vendor("Vision Systems GmbH", "VSP", datetime.date(1996, 11, 29)),
"VIS": pnp.Vendor("Visioneer", "VIS", datetime.date(1996, 11, 29)),
"VIT": pnp.Vendor("Visitech AS", "VIT", datetime.date(2006, 9, 5)),
"VLK": pnp.Vendor("Vislink International Ltd", "VLK", datetime.date(2012, 8, 27)),
"VCI": pnp.Vendor("VistaCom Inc", "VCI", datetime.date(1996, 11, 29)),
"VIR": pnp.Vendor("Visual Interface, Inc", "VIR", datetime.date(1998, 11, 27)),
"VTL": pnp.Vendor("Vivid Technology Pte Ltd", "VTL", datetime.date(1996, 11, 29)),
"VIZ": pnp.Vendor("VIZIO, Inc", "VIZ", datetime.date(2012, 6, 6)),
"VTI": pnp.Vendor("VLSI Tech Inc", "VTI", datetime.date(1996, 11, 29)),
"VMW": pnp.Vendor("VMware Inc.,", "VMW", datetime.date(2011, 10, 18)),
"VTG": pnp.Vendor("Voice Technologies Group Inc", "VTG", datetime.date(1997, 4, 24)),
"GDT": pnp.Vendor("Vortex Computersysteme GmbH", "GDT", datetime.date(1996, 11, 29)),
"VPX": pnp.Vendor("VPixx Technologies Inc.", "VPX", datetime.date(2013, 12, 5)),
"VRM": pnp.Vendor("VRmagic Holding AG", "VRM", datetime.date(2013, 4, 12)),
"VSR": pnp.Vendor("V-Star Electronics Inc.", "VSR", datetime.date(2000, 2, 21)),
"VTS": pnp.Vendor("VTech Computers Ltd", "VTS", datetime.date(1996, 11, 29)),
"VTC": pnp.Vendor("VTel Corporation", "VTC", datetime.date(1996, 11, 29)),
"VUT": pnp.Vendor("Vutrix (UK) Ltd", "VUT", datetime.date(2003, 7, 22)),
"VWB": pnp.Vendor("Vweb Corp.", "VWB", datetime.date(2004, 3, 12)),
"WAC": pnp.Vendor("Wacom Tech", "WAC", datetime.date(1996, 11, 29)),
"JPW": pnp.Vendor("Wallis Hamilton Industries", "JPW", datetime.date(1999, 7, 16)),
"MLT": pnp.Vendor("Wanlida Group Co., Ltd.", "MLT", datetime.date(2014, 4, 24)),
"WAL": pnp.Vendor("Wave Access", "WAL", datetime.date(1996, 12, 13)),
"AWS": pnp.Vendor("Wave Systems", "AWS", datetime.date(1996, 11, 29)),
"WVM": pnp.Vendor("Wave Systems Corporation", "WVM", datetime.date(1997, 12, 5)),
"WAV": pnp.Vendor("Wavephore", "WAV", datetime.date(1996, 11, 29)),
"SEL": pnp.Vendor("Way2Call Communications", "SEL", datetime.date(1997, 3, 20)),
"WBS": pnp.Vendor("WB Systemtechnik GmbH", "WBS", datetime.date(1997, 9, 8)),
"WEL": pnp.Vendor("W-DEV", "WEL", datetime.date(2010, 11, 1)),
"WPI": pnp.Vendor("Wearnes Peripherals International (Pte) Ltd", "WPI", datetime.date(1998, 3, 31)),
"WTK": pnp.Vendor("Wearnes Thakral Pte", "WTK", datetime.date(1996, 11, 29)),
"WEB": pnp.Vendor("WebGear Inc", "WEB", datetime.date(1998, 1, 30)),
"WMO": pnp.Vendor("Westermo Teleindustri AB", "WMO", datetime.date(2000, 1, 13)),
"WDC": pnp.Vendor("Western Digital", "WDC", datetime.date(1996, 11, 29)),
"WDE": pnp.Vendor("Westinghouse Digital Electronics", "WDE", datetime.date(2003, 5, 23)),
"WEY": pnp.Vendor("WEY Design AG", "WEY", datetime.date(2004, 10, 18)),
"WHI": pnp.Vendor("Whistle Communications", "WHI", datetime.date(1998, 10, 24)),
"WLD": pnp.Vendor("Wildfire Communications Inc", "WLD", datetime.date(1997, 2, 13)),
"WNI": pnp.Vendor("WillNet Inc.", "WNI", datetime.date(2000, 4, 19)),
"WEC": pnp.Vendor("Winbond Electronics Corporation", "WEC", datetime.date(1996, 11, 29)),
"WNX": pnp.Vendor("Diebold Nixdorf Systems GmbH", "WNX", datetime.date(2004, 9, 20)),
"WMT": pnp.Vendor("Winmate Communication Inc", "WMT", datetime.date(2001, 3, 15)),
"WNV": pnp.Vendor("Winnov L.P.", "WNV", datetime.date(1997, 3, 7)),
"WRC": pnp.Vendor("WiNRADiO Communications", "WRC", datetime.date(1997, 9, 11)),
"WIN": pnp.Vendor("Wintop Technology Inc", "WIN", datetime.date(1996, 12, 29)),
"WWP": pnp.Vendor("Wipotec Wiege- und Positioniersysteme GmbH", "WWP", datetime.date(2014, 4, 8)),
"WIL": pnp.Vendor("WIPRO Information Technology Ltd", "WIL", datetime.date(1996, 11, 29)),
"WIP": pnp.Vendor("Wipro Infotech", "WIP", datetime.date(2004, 1, 9)),
"WSP": pnp.Vendor("Wireless And Smart Products Inc.", "WSP", datetime.date(1999, 3, 20)),
"WCI": pnp.Vendor("Wisecom Inc", "WCI", datetime.date(1996, 11, 29)),
"WST": pnp.Vendor("Wistron Corporation", "WST", datetime.date(2010, 9, 3)),
"WML": pnp.Vendor("Wolfson Microelectronics Ltd", "WML", datetime.date(1997, 7, 30)),
"WVV": pnp.Vendor("WolfVision GmbH", "WVV", datetime.date(2012, 9, 18)),
"WCS": pnp.Vendor("Woodwind Communications Systems Inc", "WCS", datetime.date(1996, 11, 29)),
"WYT": pnp.Vendor("Wooyoung Image & Information Co.,Ltd.", "WYT", datetime.date(2008, 1, 18)),
"WTI": pnp.Vendor("WorkStation Tech", "WTI", datetime.date(1996, 11, 29)),
"WWV": pnp.Vendor("World Wide Video, Inc.", "WWV", datetime.date(1998, 10, 24)),
"WXT": pnp.Vendor("Woxter Technology Co. Ltd", "WXT", datetime.date(2010, 9, 3)),
"XTN": pnp.Vendor("X-10 (USA) Inc", "XTN", datetime.date(1997, 2, 24)),
"XTE": pnp.Vendor("X2E GmbH", "XTE", datetime.date(2009, 9, 23)),
"XAC": pnp.Vendor("XAC Automation Corp", "XAC", datetime.date(1999, 2, 22)),
"XDM": pnp.Vendor("XDM Ltd.", "XDM", datetime.date(2010, 11, 22)),
"MAD": pnp.Vendor("Xedia Corporation", "MAD", datetime.date(1996, 11, 29)),
"XLX": pnp.Vendor("Xilinx, Inc.", "XLX", datetime.date(2007, 8, 1)),
"XIN": pnp.Vendor("Xinex Networks Inc", "XIN", datetime.date(1997, 2, 7)),
"XIO": pnp.Vendor("Xiotech Corporation", "XIO", datetime.date(1998, 5, 29)),
"XRC": pnp.Vendor("Xircom Inc", "XRC", datetime.date(1996, 11, 29)),
"XIT": pnp.Vendor("Xitel Pty ltd", "XIT", datetime.date(1996, 11, 29)),
"XIR": pnp.Vendor("Xirocm Inc", "XIR", datetime.date(1996, 11, 29)),
"XNT": pnp.Vendor("XN Technologies, Inc.", "XNT", datetime.date(2003, 7, 14)),
"UHB": pnp.Vendor("XOCECO", "UHB", datetime.date(1998, 11, 27)),
"XRO": pnp.Vendor("XORO ELECTRONICS (CHENGDU) LIMITED", "XRO", datetime.date(2005, 5, 23)),
"XST": pnp.Vendor("XS Technologies Inc", "XST", datetime.date(1998, 1, 20)),
"XSN": pnp.Vendor("Xscreen AS", "XSN", datetime.date(2006, 2, 14)),
"XSY": pnp.Vendor("XSYS", "XSY", datetime.date(1998, 4, 23)),
"YMH": pnp.Vendor("Yamaha Corporation", "YMH", datetime.date(1996, 11, 29)),
"XYC": pnp.Vendor("Xycotec Computer GmbH", "XYC", datetime.date(2002, 9, 3)),
"BUF": pnp.Vendor("Yasuhiko Shirai Melco Inc", "BUF", datetime.date(1996, 11, 29)),
"YED": pnp.Vendor("Y-E Data Inc", "YED", datetime.date(1996, 11, 29)),
"YHQ": pnp.Vendor("Yokogawa Electric Corporation", "YHQ", datetime.date(1996, 11, 29)),
"TPZ": pnp.Vendor("Ypoaz Systems Inc", "TPZ", datetime.date(1996, 11, 29)),
"ZMZ": pnp.Vendor("Z Microsystems", "ZMZ", datetime.date(2005, 8, 10)),
"ZTT": pnp.Vendor("Z3 Technology", "ZTT", datetime.date(2010, 12, 14)),
"ZMT": pnp.Vendor("Zalman Tech Co., Ltd.", "ZMT", datetime.date(2007, 5, 7)),
"ZAN": pnp.Vendor("Zandar Technologies plc", "ZAN", datetime.date(2003, 12, 3)),
"ZAZ": pnp.Vendor("ZeeVee, Inc.", "ZAZ", datetime.date(2008, 1, 18)),
"ZBR": pnp.Vendor("Zebra Technologies International, LLC", "ZBR", datetime.date(2003, 9, 15)),
"ZAX": pnp.Vendor("Zefiro Acoustics", "ZAX", datetime.date(1996, 11, 29)),
"ZCT": pnp.Vendor("ZeitControl cardsystems GmbH", "ZCT", datetime.date(1999, 1, 20)),
"ZEN": pnp.Vendor("ZENIC Inc.", "ZEN", datetime.date(2015, 4, 17)),
"ZDS": pnp.Vendor("Zenith Data Systems", "ZDS", datetime.date(1996, 11, 29)),
"ZGT": pnp.Vendor("Zenith Data Systems", "ZGT", datetime.date(1996, 11, 29)),
"ZSE": pnp.Vendor("Zenith Data Systems", "ZSE", datetime.date(1996, 11, 29)),
"ZNI": pnp.Vendor("Zetinet Inc", "ZNI", datetime.date(1996, 11, 29)),
"TLE": pnp.Vendor("Zhejiang Tianle Digital Electric Co., Ltd.", "TLE", datetime.date(2014, 1, 17)),
"RSR": pnp.Vendor("Zhong Shan City Richsound Electronic Industrial Ltd.", "RSR", datetime.date(2015, 1, 27)),
"ZNX": pnp.Vendor("Znyx Adv. Systems", "ZNX", datetime.date(1996, 11, 29)),
"ZTI": pnp.Vendor("Zoom Telephonics Inc", "ZTI", datetime.date(1996, 11, 29)),
"ZRN": pnp.Vendor("Zoran Corporation", "ZRN", datetime.date(2005, 3, 31)),
"ZOW": pnp.Vendor("Zowie Intertainment, Inc", "ZOW", datetime.date(1999, 2, 22)),
"ZTM": pnp.Vendor("ZT Group Int'l Inc.", "ZTM", datetime.date(2007, 6, 14)),
"ZTE": pnp.Vendor("ZTE Corporation", "ZTE", datetime.date(2010, 9, 3)),
"SIX": pnp.Vendor("Zuniq Data Corporation", "SIX", datetime.date(1996, 11, 29)),
"ZYD": pnp.Vendor("Zydacron Inc", "ZYD", datetime.date(1997, 4, 10)),
"ZTC": pnp.Vendor("ZyDAS Technology Corporation", "ZTC", datetime.date(2000, 5, 24)),
"ZYP": pnp.Vendor("Zypcom Inc", "ZYP", datetime.date(1997, 3, 19)),
"ZYT": pnp.Vendor("Zytex Computers", "ZYT", datetime.date(1996, 11, 29)),
"HPA": pnp.Vendor("Zytor Communications", "HPA", datetime.date(2010, 7, 2)),
"AEJ": pnp.Vendor("Alpha Electronics Company", "AEJ", datetime.date(1996, 11, 29)),
"BOE": pnp.Vendor("BOE", "BOE", datetime.date(2004, 12, 2)),
"FIR": pnp.Vendor("Chaplet Systems Inc", "FIR", datetime.date(1996, 11, 29)),
"CMG": pnp.Vendor("Chenming Mold Ind. Corp.", "CMG", datetime.date(2003, 11, 14)),
"COO": pnp.Vendor("coolux GmbH", "COO", datetime.date(2010, 9, 30)),
"DGC": pnp.Vendor("Data General Corporation", "DGC", datetime.date(1996, 11, 29)),
"EXA": pnp.Vendor("Exabyte", "EXA", datetime.date(1996, 11, 29)),
"HRL": pnp.Vendor("Herolab GmbH", "HRL", datetime.date(1998, 3, 17)),
"HCP": pnp.Vendor("Hitachi Computer Products Inc", "HCP", datetime.date(1996, 11, 29)),
"ICS": pnp.Vendor("Integrated Circuit Systems", "ICS", datetime.date(1996, 11, 29)),
"IRD": pnp.Vendor("Irdata", "IRD", datetime.date(2001, 4, 24)),
"JWL": pnp.Vendor("Jewell Instruments, LLC", "JWL", datetime.date(2001, 6, 21)),
"MQP": pnp.Vendor("MultiQ Products AB", "MQP", datetime.date(1999, 3, 20)),
"NAC": pnp.Vendor("Ncast Corporation", "NAC", datetime.date(2006, 2, 14)),
"ODM": pnp.Vendor("ODME Inc.", "ODM", datetime.date(1998, 9, 23)),
"PMX": pnp.Vendor("Photomatrix", "PMX", datetime.date(1996, 11, 29)),
"QSI": pnp.Vendor("Quantum Solutions, Inc.", "QSI", datetime.date(2000, 1, 13)),
"RHT": pnp.Vendor("Red Hat, Inc.", "RHT", datetime.date(2011, 2, 17)),
"ZYX": pnp.Vendor("Zyxel", "ZYX", datetime.date(1996, 11, 29)),
"JAZ": pnp.Vendor("Carrera Computer Inc", "JAZ", datetime.date(1994, 1, 1)),
"CGA": pnp.Vendor("Chunghwa Picture Tubes, LTD", "CGA", datetime.date(1994, 1, 1)),
"EMC": pnp.Vendor("eMicro Corporation", "EMC", datetime.date(1994, 1, 1)),
"HEC": pnp.Vendor("Hisense Electric Co., Ltd.", "HEC", datetime.date(1994, 1, 1)),
"PNS": pnp.Vendor("PanaScope", "PNS", datetime.date(1994, 1, 1)),
"SPC": pnp.Vendor("SpinCore Technologies, Inc", "SPC", datetime.date(1994, 1, 1)),
"SVR": pnp.Vendor("Sensics, Inc.", "SVR", datetime.date(2015, 8, 27)),
"IAD": pnp.Vendor("IAdea Corporation", "IAD", datetime.date(2015, 9, 10)),
"ELU": pnp.Vendor("Express Industrial, Ltd.", "ELU", datetime.date(2015, 9, 10)),
"HPE": pnp.Vendor("Hewlett Packard Enterprise", "HPE", datetime.date(2015, 9, 22)),
"KGI": pnp.Vendor("Klipsch Group, Inc", "KGI", datetime.date(2015, 9, 22)),
"TKG": pnp.Vendor("Tek Gear", "TKG", datetime.date(2015, 10, 16)),
"ZMC": pnp.Vendor("HangZhou ZMCHIVIN", "ZMC", datetime.date(2015, 10, 16)),
"HVR": pnp.Vendor("HTC Corportation", "HVR", datetime.date(2015, 10, 16)),
"ZBX": pnp.Vendor("Zebax Technologies", "ZBX", datetime.date(2015, 10, 16)),
"SWO": pnp.Vendor("Guangzhou Shirui Electronics Co., Ltd.", "SWO", datetime.date(2015, 10, 16)),
"PIC": pnp.Vendor("Picturall Ltd.", "PIC", datetime.date(2015, 11, 13)),
"SKM": pnp.Vendor("Guangzhou Teclast Information Technology Limited", "SKM", datetime.date(2015, 11, 18)),
"GAC": pnp.Vendor("GreenArrays, Inc.", "GAC", datetime.date(2015, 11, 18)),
"TAV": pnp.Vendor("Thales Avionics", "TAV", datetime.date(2015, 11, 18)),
"EXR": pnp.Vendor("Explorer Inc.", "EXR", datetime.date(2015, 11, 18)),
"AVG": pnp.Vendor("Avegant Corporation", "AVG", datetime.date(2015, 12, 2)),
"MIV": pnp.Vendor("MicroImage Video Systems", "MIV", datetime.date(2015, 12, 8)),
"AUS": pnp.Vendor("ASUSTek COMPUTER INC", "AUS", datetime.date(2015, 12, 21)),
"STQ": pnp.Vendor("Synthetel Corporation", "STQ", datetime.date(2015, 12, 21)),
"HPN": pnp.Vendor("HP Inc.", "HPN", datetime.date(2015, 12, 21)),
"MTJ": pnp.Vendor("MicroTechnica Co.,Ltd.", "MTJ", datetime.date(2016, 1, 4)),
"GEC": pnp.Vendor("Gechic Corporation", "GEC", datetime.date(2016, 1, 4)),
"MEU": pnp.Vendor("MPL AG, Elektronik-Unternehmen", "MEU", datetime.date(2016, 1, 15)),
"DSA": pnp.Vendor("Display Solution AG", "DSA", datetime.date(2016, 2, 3)),
"PRP": pnp.Vendor("UEFI Forum", "PRP", datetime.date(2016, 2, 3)),
"TTX": pnp.Vendor("Taitex Corporation", "TTX", datetime.date(2016, 2, 3)),
"ECH": pnp.Vendor("EchoStar Corporation", "ECH", datetime.date(2016, 2, 26)),
"TOL": pnp.Vendor("TCL Corporation", "TOL", datetime.date(2016, 3, 30)),
"ADZ": pnp.Vendor("ADDER TECHNOLOGY LTD", "ADZ", datetime.date(2016, 3, 30)),
"HKC": pnp.Vendor("HKC OVERSEAS LIMITED", "HKC", datetime.date(2016, 3, 30)),
"KYN": pnp.Vendor("KEYENCE CORPORATION", "KYN", datetime.date(2016, 3, 30)),
"TET": pnp.Vendor("TETRADYNE CO., LTD.", "TET", datetime.date(2016, 4, 27)),
"ABS": pnp.Vendor("Abaco Systems, Inc.", "ABS", datetime.date(2016, 4, 27)),
"MVN": pnp.Vendor("Meta Company", "MVN", datetime.date(2016, 5, 25)),
"ERS": pnp.Vendor("Eizo Rugged Solutions", "ERS", datetime.date(2016, 5, 25)),
"VLC": pnp.Vendor("VersaLogic Corporation", "VLC", datetime.date(2016, 5, 25)),
"CYP": pnp.Vendor("CYPRESS SEMICONDUCTOR CORPORATION", "CYP", datetime.date(2016, 5, 25)),
"MDF": pnp.Vendor("MILDEF AB", "MDF", datetime.date(2016, 6, 23)),
"FOV": pnp.Vendor("FOVE INC", "FOV", datetime.date(2016, 7, 1)),
"NES": pnp.Vendor("INNES", "NES", datetime.date(2016, 7, 1)),
"HUK": pnp.Vendor("Hoffmann + Krippner GmbH", "HUK", datetime.date(2016, 7, 1)),
"AXE": pnp.Vendor("Axell Corporation", "AXE", datetime.date(2016, 8, 3)),
"UMT": pnp.Vendor("UltiMachine", "UMT", datetime.date(2016, 8, 11)),
"KPT": pnp.Vendor("TPK Holding Co., Ltd", "KPT", datetime.date(2016, 8, 16)),
"AAN": pnp.Vendor("AAEON Technology Inc.", "AAN", datetime.date(2016, 9, 1)),
"TDG": pnp.Vendor("Six15 Technologies", "TDG", datetime.date(2016, 9, 14)),
"IVR": pnp.Vendor("Inlife-Handnet Co., Ltd.", "IVR", datetime.date(2017, 1, 19)),
"DSJ": pnp.Vendor("VR Technology Holdings Limited", "DSJ", datetime.date(2017, 1, 19)),
"PVR": pnp.Vendor("Pimax Tech. CO., LTD", "PVR", datetime.date(2017, 2, 7)),
"TVL": pnp.Vendor("Total Vision LTD", "TVL", datetime.date(2017, 2, 7)),
"DPN": pnp.Vendor("Shanghai Lexiang Technology Limited", "DPN", datetime.date(2017, 2, 7)),
"BBX": pnp.Vendor("Black Box Corporation", "BBX", datetime.date(2017, 2, 28)),
"TRP": pnp.Vendor("TRAPEZE GROUP", "TRP", datetime.date(2017, 2, 28)),
"PMS": pnp.Vendor("Pabian Embedded Systems", "PMS", datetime.date(2017, 2, 28)),
"TCF": pnp.Vendor("Televic Conference", "TCF", datetime.date(2017, 2, 28)),
"HYL": pnp.Vendor("Shanghai Chai Ming Huang Info&Tech Co, Ltd", "HYL", datetime.date(2017, 2, 28)),
"TLN": pnp.Vendor("Techlogix Networx", "TLN", datetime.date(2017, 2, 28)),
"GGT": pnp.Vendor("G2TOUCH KOREA", "GGT", datetime.date(2017, 5, 25)),
"MVR": pnp.Vendor("MediCapture, Inc.", "MVR", datetime.date(2017, 5, 25)),
"PNT": pnp.Vendor("HOYA Corporation PENTAX Lifecare Division", "PNT", datetime.date(2017, 5, 25)),
"CHR": pnp.Vendor("christmann informationstechnik + medien GmbH & Co. KG", "CHR", datetime.date(2017, 5, 25)),
"TEN": pnp.Vendor("Tencent", "TEN", datetime.date(2017, 6, 20)),
"VRS": pnp.Vendor("VRstudios, Inc.", "VRS", datetime.date(2017, 6, 22)),
"XES": pnp.Vendor("Extreme Engineering Solutions, Inc.", "XES", datetime.date(2017, 6, 22)),
"NTK": pnp.Vendor("NewTek", "NTK", datetime.date(2017, 6, 22)),
"BBV": pnp.Vendor("BlueBox Video Limited", "BBV", datetime.date(2017, 6, 22)),
"TEV": pnp.Vendor("Televés, S.A.", "TEV", datetime.date(2017, 6, 22)),
"AVS": pnp.Vendor("Avatron Software Inc.", "AVS", datetime.date(2017, 8, 23)),
"POS": pnp.Vendor("Positivo Tecnologia S.A.", "POS", datetime.date(2017, 9, 1)),
"VRG": pnp.Vendor("VRgineers, Inc.", "VRG", datetime.date(2017, 9, 7)),
"NRI": pnp.Vendor("Noritake Itron Corporation", "NRI", datetime.date(2017, 11, 13)),
"MOC": pnp.Vendor("Matrix Orbital Corporation", "MOC", datetime.date(2017, 11, 13)),
"EIN": pnp.Vendor("Elegant Invention", "EIN", datetime.date(2018, 3, 29)),
"IMF": pnp.Vendor("Immersive Audio Technologies France", "IMF", datetime.date(2018, 3, 29)),
"LSP": pnp.Vendor("Lightspace Technologies", "LSP", datetime.date(2018, 3, 29)),
"PXN": pnp.Vendor("PixelNext Inc", "PXN", datetime.date(2018, 3, 29)),
"TSW": pnp.Vendor("VRSHOW Technology Limited", "TSW", datetime.date(2018, 3, 29)),
"SNV": pnp.Vendor("SONOVE GmbH", "SNV", datetime.date(2018, 3, 29)),
"SXI": pnp.Vendor("Silex Inside", "SXI", datetime.date(2018, 3, 29)),
"HWV": pnp.Vendor("Huawei Technologies Co., Inc.", "HWV", datetime.date(2018, 4, 25)),
"VRT": pnp.Vendor("Varjo Technologies", "VRT", datetime.date(2017, 11, 17)),
"JEM": pnp.Vendor("Japan E.M.Solutions Co., Ltd.", "JEM", datetime.date(2018, 5, 24)),
"QDL": pnp.Vendor("QD Laser, Inc.", "QDL", datetime.date(2018, 5, 31)),
"VAT": pnp.Vendor("VADATECH INC", "VAT", datetime.date(2018, 7, 9)),
"MCJ": pnp.Vendor("Medicaroid Corporation", "MCJ", datetime.date(2018, 8, 20)),
"RZR": pnp.Vendor("Razer Taiwan Co. Ltd.", "RZR", datetime.date(2018, 8, 20)),
"GBT": pnp.Vendor("GIGA-BYTE TECHNOLOGY CO., LTD.", "GBT", datetime.date(2018, 9, 5)),
"KOM": pnp.Vendor("Kontron GmbH", "KOM", datetime.date(2018, 9, 5)),
"CIE": pnp.Vendor("Convergent Engineering, Inc.", "CIE", datetime.date(2018, 9, 5)),
"WYR": pnp.Vendor("WyreStorm Technologies LLC", "WYR", datetime.date(2018, 9, 5)),
"AHQ": pnp.Vendor("Astro HQ LLC", "AHQ", datetime.date(2018, 9, 5)),
"QSC": pnp.Vendor("QSC, LLC", "QSC", datetime.date(2019, 1, 18)),
"DMN": pnp.Vendor("Dimension Engineering LLC", "DMN", datetime.date(2019, 2, 6)),
"DLO": pnp.Vendor("Shenzhen Dlodlo Technologies Co., Ltd.", "DLO", datetime.date(2019, 4, 29)),
"VLM": pnp.Vendor("LENOVO BEIJING CO. LTD.", "VLM", datetime.date(2019, 5, 21)),
"CRW": pnp.Vendor("Cammegh Limited", "CRW", datetime.date(2019, 6, 18)),
"LHC": pnp.Vendor("Beihai Century Joint Innovation Technology Co.,Ltd", "LHC", datetime.date(2019, 9, 10)),
"FDX": pnp.Vendor("Findex, Inc.", "FDX", datetime.date(2019, 10, 22)),
"ELD": pnp.Vendor("Express Luck, Inc.", "ELD", datetime.date(2019, 10, 22)),
"SKI": pnp.Vendor("LLC SKTB “SKIT”", "SKI", datetime.date(2019, 10, 22)),
"WLF": pnp.Vendor("WOLF Advanced Technology", "WLF", datetime.date(2019, 10, 22)),
"BLD": pnp.Vendor("BILD INNOVATIVE TECHNOLOGY LLC", "BLD", datetime.date(2019, 10, 22)),
"MMT": pnp.Vendor("MIMO Monitors", "MMT", datetime.date(2019, 10, 22)),
"ICR": pnp.Vendor("Icron", "ICR", datetime.date(2019, 10, 22)),
"PIS": pnp.Vendor("TECNART CO.,LTD.", "PIS", datetime.date(2019, 10, 22)),
"MHQ": pnp.Vendor("Moxa Inc.", "MHQ", datetime.date(2019, 10, 22)),
"DSG": pnp.Vendor("Disguise Technologies", "DSG", datetime.date(2019, 10, 22)),
"CMK": pnp.Vendor("Comark LLC", "CMK", datetime.date(2020, 7, 15)),
"MPV": pnp.Vendor("Megapixel Visual Realty", "MPV", datetime.date(2020, 7, 15)),
"SKW": pnp.Vendor("Skyworth", "SKW", datetime.date(2020, 7, 15)),
"CFR": pnp.Vendor("Meta View, Inc.", "CFR", datetime.date(2020, 7, 15)),
"MLC": pnp.Vendor("MILCOTS", "MLC", datetime.date(2020, 7, 15)),
"NXT": pnp.Vendor("NZXT (PNP same EDID)_", "NXT", datetime.date(2020, 7, 15)),
}
# yapf: enable
| 201,951 | 96,245 |
# Problem URL: https://atcoder.jp/contests/abc126/tasks/abc126_d
# Submission URL: https://atcoder.jp/contests/abc126/submissions/14638903
#
# Strategy: compute each node's distance from node 0 along the (unique)
# tree path, then color every node by that distance's parity. Any edge of
# even weight joins nodes of equal parity; an odd edge flips parity, which
# is exactly the coloring the problem asks for.
from heapq import heappop, heappush

node_count = int(input())
adjacency = [[] for _ in range(node_count)]
for _ in range(node_count - 1):
    u, v, w = map(int, input().split())
    # Undirected edge, stored once per endpoint (1-based input -> 0-based).
    adjacency[u - 1].append((v - 1, w))
    adjacency[v - 1].append((u - 1, w))

dist = [-1] * node_count
visited = set()
frontier = [(0, 0)]  # (distance from root, node) pairs ordered by distance
while frontier:
    cost, node = heappop(frontier)
    if node in visited:
        continue  # stale heap entry: node already finalized at a lower cost
    visited.add(node)
    dist[node] = cost
    for neighbor, weight in adjacency[node]:
        heappush(frontier, (cost + weight, neighbor))

for value in dist:
    print(value % 2)
| 568 | 279 |
import numpy as np
import random as rd
from time import time
from math import log
import gc
import heapq
import copy
from utility.parser import parse_args
# Command-line arguments parsed once at import time; shared by Data below.
args = parse_args()
class Data(object):
    """Loads and batches a query/document ranking dataset for one CV fold.

    Reads id-mapped documents, queries, entity lists and relevance
    judgements from ``path``, then exposes pairwise
    (query, positive doc, negative doc) sampling for training.
    """
    def __init__(self, path, batch_size):
        self.batch_size = batch_size
        doc_file = path + '/map.documents.txt' # before deduplication
        qrl_file = path + '/map.queries.txt'
        doc_dict_file = path + '/doc_dict.txt'
        qrl_dict_file = path + '/qrl_dict.txt'
        # train_file = path + '/train_pairs/f{}.train.pairs'.format(str(args.fold))
        # test_file = path + '/valid_run/f{}.valid.run'.format(str(args.fold))
        # test_file = path + '/test_run/f{}.test.run'.format(str(args.fold))
        # 5-fold rotation: one fold tests, one validates, the other three train
        # (negative indices wrap around the fold list).
        fold = [1, 2, 3, 4, 5]
        test_file = path + '/%d.run' % fold[args.fold - 1]
        # NOTE(review): valid_file is computed but never read below — confirm.
        valid_file = path + '/%d.run' % fold[args.fold - 2]
        train_files = [path + '/%d.run' % fold[i] for i in [args.fold - 3, args.fold - 4, args.fold - 5]]
        match_file = path + '/qrels'
        doc_unique_words = path + '/doc_word_list_unique.txt' # after deduplication
        doc_unique_entities = path + '/doc_ent_list_unique.txt' # after deduplication
        qrl_unique_entities = path + '/que_ent_list_unique.txt'
        # Counts hold the largest id seen while loading; +1 applied afterwards.
        self.n_docs = 0
        self.n_qrls = 0
        self.n_words = 0
        self.n_ents = 0
        self.n_train = 0
        self.n_test = 0
        # Per-query pools of relevant / non-relevant doc ids used by sample().
        self.pos_pools = {}
        self.neg_pools = {}
        # map the original id to numerical id, such as 'LA03421' -> 0
        self.doc_dict = {}
        self.qrl_dict = {}
        self.doc_dict_rev = {}
        self.qrl_dict_rev = {}
        # (query id, '0'|'1') -> list of doc ids with that relevance label.
        self.qrl_doc_match = {}
        self.doc_word_list = {}
        self.qrl_word_list = {}
        self.qrl_ent_list = {}
        self.doc_unique_word_list = {}
        self.doc_unique_ent_list = {}
        '''
        self.word_freq = {}
        self.word_doc_freq = {}
        self.word_window_freq = {}
        self.doc_word_freq = {}
        self.qrl_word_freq = {}
        self.word_pair_count = {}
        self.num_window = 0
        '''
        # window_size = 5
        # self.windows = []
        # Queries that turn out to have no relevant document (dropped later).
        self.all_neg = []
        print('loading entities...', end='', flush=True)
        # Entity ids per document/query; rows without a second column simply
        # have no entities and are skipped.
        with open(doc_unique_entities) as f:
            for l in f.readlines():
                if len(l) > 0:
                    l = l.strip().split('\t')
                    did = int(l[0])
                    if len(l) > 1:
                        ents = [int(i) for i in l[1].split()]
                        self.doc_unique_ent_list[did] = ents
                        self.n_ents = max(self.n_ents, max(ents))
        with open(qrl_unique_entities) as f:
            for l in f.readlines():
                if len(l) > 0:
                    l = l.strip().split('\t')
                    did = int(l[0])
                    if len(l) > 1:
                        ents = [int(i) for i in l[1].split()]
                        self.qrl_ent_list[did] = ents
                        self.n_ents = max(self.n_ents, max(ents))
        print('done')
        print('loading documents...', end='', flush=True)
        with open(doc_file) as f, open(doc_unique_words) as f2:
            for l in f.readlines():
                if len(l) > 0:
                    l = l.strip().split('\t')
                    did = int(l[0])
                    words = [int(i) for i in l[1].split()]
                    self.doc_word_list[did] = words
                    self.n_words = max(self.n_words, max(words))
                    self.n_docs = max(self.n_docs, did)
            for l in f2.readlines():
                if len(l)>0:
                    l = l.strip().split('\t')
                    did = int(l[0])
                    words = [int(i) for i in l[1].split()]
                    self.doc_unique_word_list[did] = words
        print('done')
        print('loading queries...', end='', flush=True)
        with open(qrl_file) as f:
            for l in f.readlines():
                if len(l) > 0:
                    l = l.strip().split('\t')
                    qid = int(l[0])
                    words = [int(i) for i in l[1].split()]
                    self.qrl_word_list[qid] = words
                    self.n_words = max(self.n_words, max(words))
                    self.n_qrls = max(self.n_qrls, qid)
        print('done')
        # Ids start at 0, so counts are max id + 1.
        self.n_docs += 1
        self.n_qrls += 1
        print('query:{} \t document:{} \t word:{} \t entity:{}'.format(self.n_qrls, self.n_docs, self.n_words, self.n_ents))
        self.n_words += 1
        self.n_ents += 1
        print('loading dict...', end='', flush=True)
        with open(doc_dict_file) as f1, open(qrl_dict_file) as f2:
            for l in f1.readlines():
                key, item = l.strip().split('\t')
                self.doc_dict[key] = int(item)
                self.doc_dict_rev[int(item)] = key
            for l in f2.readlines():
                key, item = l.strip().split('\t')
                self.qrl_dict[key] = int(item) # key is the original id, value the mapped id starting from 0
                self.qrl_dict_rev[int(item)] = key
        # qrels: any positive score is collapsed to the label '1'.
        with open(match_file) as f:
            for l in f.readlines():
                qrl_key, _, doc_key, score = l.strip().split()
                if qrl_key not in self.qrl_dict:
                    continue
                if doc_key not in self.doc_dict:
                    continue
                qrl = self.qrl_dict[qrl_key]
                doc = self.doc_dict[doc_key]
                if int(score) > 0:
                    score = '1'
                if (qrl, score) in self.qrl_doc_match:
                    self.qrl_doc_match[(qrl, score)].append(doc)
                else:
                    self.qrl_doc_match[(qrl, score)] = [doc]
        print('done')
        print('loading train&test set...', end='', flush=True)
        self.train_items, self.test_set = {}, {}
        # Run files are TREC-style 6-column rows; an empty line ends the file.
        for train_file in train_files:
            with open(train_file) as f_train:
                for l in f_train.readlines():
                    if len(l) == 0:
                        break
                    l = l.strip()
                    qrl_key, _, doc_key, _, _, _ = l.split()
                    qrl = self.qrl_dict[qrl_key]
                    doc = self.doc_dict[doc_key]
                    if qrl in self.train_items:
                        self.train_items[qrl].append(doc)
                    else:
                        self.train_items[qrl] = [doc]
                    self.n_train += 1
        with open(test_file) as f_test:
            for l in f_test.readlines():
                if len(l) == 0:
                    break
                l = l.strip()
                qrl_key, _, doc_key, _, _, _ = l.split()
                qrl = self.qrl_dict[qrl_key]
                doc = self.doc_dict[doc_key]
                if qrl in self.test_set:
                    self.test_set[qrl].append(doc)
                else:
                    self.test_set[qrl] = [doc]
                self.n_test += 1
        print('done')
        # Any train doc without a qrels label is treated as negative; queries
        # that never had a '1' judgement are recorded in all_neg and removed
        # from the training set (they cannot yield a positive sample).
        for qrl in self.train_items.keys():
            for doc in self.train_items[qrl]:
                if not (qrl, '0') in self.qrl_doc_match:
                    self.qrl_doc_match[(qrl, '0')] = []
                if not (qrl, '1') in self.qrl_doc_match:
                    self.qrl_doc_match[(qrl, '1')] = []
                    self.all_neg.append(qrl)
                if doc in self.qrl_doc_match[(qrl, '0')] or doc in self.qrl_doc_match[(qrl, '1')]:
                    continue
                else:
                    self.qrl_doc_match[(qrl, '0')].append(doc)
        for q in self.all_neg:
            self.train_items.pop(q)
        self.positive_pool()
        self.negative_pool()
        print('init finish!')
    def positive_pool(self):
        """Refresh pos_pools from the '1'-labelled docs of each train query."""
        t1 = time()
        for q in self.train_items.keys():
            self.pos_pools[q] = self.qrl_doc_match[(q, '1')]
        print('refresh positive pools', time() - t1)
    def negative_pool(self):
        """Refresh neg_pools from the '0'-labelled docs of each train query."""
        t1 = time()
        for q in self.train_items.keys():
            self.neg_pools[q] = self.qrl_doc_match[(q, '0')]
        print('refresh negative pools', time() - t1)
    def sample(self):
        """Draw one training batch.

        Returns (queries, positive docs, negative docs), one pos/neg doc per
        query. Queries are sampled without replacement when there are enough,
        otherwise with replacement.
        """
        key_pool = list(self.train_items.keys())
        rd.shuffle(key_pool)
        if self.batch_size <= len(key_pool):
            qrls = rd.sample(key_pool, self.batch_size)
        else:
            qrls = [rd.choice(key_pool) for _ in range(self.batch_size)]
        def sample_pos_docs_for_q_from_pools(q, num):
            pos_docs = self.pos_pools[q]
            return rd.sample(pos_docs, num)
        def sample_neg_docs_for_q_from_pools(q, num):
            neg_docs = self.neg_pools[q]
            return rd.sample(neg_docs, num)
        pos_docs, neg_docs = [], []
        for q in qrls:
            pos_docs += sample_pos_docs_for_q_from_pools(q, 1)
            neg_docs += sample_neg_docs_for_q_from_pools(q, 1)
        return qrls, pos_docs, neg_docs
    def print_statistics(self):
        """Print corpus sizes and train/test interaction counts."""
        print('n_docs=%d, n_qrls=%d, n_words=%d' % (self.n_docs, self.n_qrls, self.n_words))
        print('n_interactions=%d' % (self.n_train + self.n_test))
        print('n_train=%d, n_test=%d, sparsity=%.5f' % (self.n_train, self.n_test, (self.n_train + self.n_test)/(self.n_docs * self.n_qrls)))
| 10,029 | 3,407 |
import torch
from torch import nn, Tensor
from torch.nn import functional as F
from .modules import Conv, SeparableConv
from .backbones import ResNet
class ASPP(nn.Module):
    """Atrous Spatial Pyramid Pooling: parallel dilated convs plus an
    image-level pooling branch, concatenated and projected back to c2."""
    def __init__(self, c1, c2, drop_rate=0.1):
        super().__init__()
        dilations = [1, 6, 12, 18]
        branches = []
        for d in dilations:
            if d == 1:
                # 1x1 branch, no dilation/padding needed.
                branches.append(Conv(c1, c2, 1, 1, 0, d))
            else:
                branches.append(Conv(c1, c2, 3, 1, d, d))
        self.blocks = nn.ModuleList(branches)
        # Global-context branch: image-level average pool then 1x1 conv.
        self.blocks.append(nn.Sequential(
            nn.AdaptiveAvgPool2d(1),
            Conv(c1, c2, 1)
        ))
        self.conv = Conv(c2 * (len(dilations) + 1), c2, 1)
        self.dropout = nn.Dropout(drop_rate)

    def forward(self, x: Tensor) -> Tensor:
        """Fuse all branches at the input's spatial size."""
        size = x.shape[2:]
        contexts = [
            F.interpolate(branch(x), size, mode='bilinear', align_corners=False)
            for branch in self.blocks
        ]
        fused = self.conv(torch.cat(contexts, dim=1))
        return self.dropout(fused)
class Decoder(nn.Module):
    """Panoptic-DeepLab decoder: ASPP on the deepest feature map, then two
    2x upsampling stages that fuse the stride-8 and stride-4 features.

    :param backbone_channels: channels of the [stride-4, stride-8, deepest] maps.
    :param aspp_out_channel: ASPP output channels.
    :param decoder_channel: channels after each fuse stage.
    :param low_level_channels: projection widths for the stride-8/stride-4 maps.
    """
    # Fixed: low_level_channels used a mutable list as default argument;
    # a tuple is safe and indexes identically.
    def __init__(self, backbone_channels, aspp_out_channel=256, decoder_channel=256, low_level_channels=(64, 32)):
        super().__init__()
        self.aspp = ASPP(backbone_channels[-1], aspp_out_channel)
        # NOTE(review): self.conv is constructed but never used in forward();
        # kept so existing checkpoints' state dicts still load.
        self.conv = Conv(aspp_out_channel, aspp_out_channel, 1)
        self.project8 = Conv(backbone_channels[1], low_level_channels[0], 1)
        self.fuse8 = SeparableConv(aspp_out_channel + low_level_channels[0], decoder_channel, 5, 1, 2)
        self.project4 = Conv(backbone_channels[0], low_level_channels[1], 1)
        self.fuse4 = SeparableConv(decoder_channel + low_level_channels[1], decoder_channel, 5, 1, 2)

    def forward(self, features: list) -> Tensor:
        """Decode a list of backbone feature maps to a stride-4 feature map."""
        x = self.aspp(features[-1])
        x = F.interpolate(x, scale_factor=2, mode='bilinear', align_corners=False)
        x = self.fuse8(torch.cat([x, self.project8(features[1])], dim=1))
        x = F.interpolate(x, scale_factor=2, mode='bilinear', align_corners=False)
        x = self.fuse4(torch.cat([x, self.project4(features[0])], dim=1))
        return x
class SemanticHead(nn.Module):
    """Per-pixel class-logit head on top of the semantic decoder output."""
    def __init__(self, decoder_channel, head_channel, num_classes):
        super().__init__()
        layers = [
            SeparableConv(decoder_channel, head_channel, 5, 1, 2),
            nn.Conv2d(head_channel, num_classes, 1),
        ]
        self.conv = nn.Sequential(*layers)

    def forward(self, x: Tensor) -> Tensor:
        """Return raw (un-normalised) class logits."""
        return self.conv(x)
class InstanceHead(nn.Module):
    """Instance head predicting a 1-channel center heatmap and a 2-channel
    (dy, dx) offset map from the instance decoder output."""
    def __init__(self, decoder_channel, head_channel):
        super().__init__()
        self.center_conv = nn.Sequential(
            SeparableConv(decoder_channel, head_channel, 5, 1, 2),
            nn.Conv2d(head_channel, 1, 1)
        )
        self.offset_conv = nn.Sequential(
            SeparableConv(decoder_channel, head_channel, 5, 1, 2),
            nn.Conv2d(head_channel, 2, 1)
        )

    # Fixed: the return annotation claimed a single Tensor although the
    # method returns a (center, offset) pair.
    def forward(self, x: Tensor) -> 'tuple[Tensor, Tensor]':
        """Return (center heatmap, offset map)."""
        return self.center_conv(x), self.offset_conv(x)
class PanopticDeepLab(nn.Module):
    """Panoptic-DeepLab: ResNet backbone, dual decoders, and separate
    semantic / instance prediction heads.

    :param variant: ResNet variant string (e.g. '50').
    :param num_classes: number of semantic classes (19 = Cityscapes default).
    """
    def __init__(self, variant: str = '50', num_classes: int = 19):
        super().__init__()
        self.backbone = ResNet(variant)
        # Channels of the three backbone stages consumed by the decoders.
        backbone_channels = [256, 512, 1024]
        self.semantic_decoder = Decoder(backbone_channels, 256, 256, [64, 32])
        self.instance_decoder = Decoder(backbone_channels, 256, 128, [32, 16])
        self.semantic_head = SemanticHead(256, 256, num_classes)
        self.instance_head = InstanceHead(128, 32)

    # Fixed: the return annotation claimed a single Tensor although the
    # method returns (semantic, center, offset).
    def forward(self, x: Tensor) -> 'tuple[Tensor, Tensor, Tensor]':
        """Return semantic logits, center heatmap, and offset map, all
        upsampled to the input resolution."""
        features = self.backbone(x)[:-1]
        semantic = self.semantic_decoder(features)
        instance = self.instance_decoder(features)
        semantic = self.semantic_head(semantic)
        center, offset = self.instance_head(instance)
        semantic = F.interpolate(semantic, x.shape[-2:], mode='bilinear', align_corners=False)
        center = F.interpolate(center, x.shape[-2:], mode='bilinear', align_corners=False)
        # Offsets were predicted in low-res pixels; rescale after upsampling.
        scale = x.shape[-2] // offset.shape[-2]
        offset = F.interpolate(offset, x.shape[-2:], mode='bilinear', align_corners=False)
        offset *= scale
        return semantic, center, offset
if __name__ == '__main__':
    # Smoke test: push one random batch through the model and report shapes.
    net = PanopticDeepLab('50')
    dummy = torch.randn(2, 3, 224, 224)
    sem, ctr, off = net(dummy)
    print(sem.shape)
    print(ctr.shape)
    print(off.shape)
# https://www.hackerrank.com/challenges/qheap1/problem?isFullScreen=true
# Ops: "1 v" add v, "2 v" delete one occurrence of v, "3" print the minimum.
# A plain list plus a cached minimum is enough for the given constraints,
# though remove()/min() are O(n).
query_count = int(input())
store = []
for _ in range(query_count):
    query = list(map(int, input().split()))
    if query[0] == 1:
        value = query[1]
        if store:
            smallest = min(value, smallest)
        else:
            smallest = value
        store.append(value)
    elif query[0] == 2:
        store.remove(query[1])
        # Recompute the cached minimum only when it was the element removed.
        if query[1] == smallest and store:
            smallest = min(store)
    else:
        print(smallest)
| 474 | 189 |
# encoding: utf-8
"""
labels.py
Created by Thomas Mangin on 2009-11-05.
Copyright (c) 2009-2012 Exa Networks. All rights reserved.
Modified by Orange - 2014
"""
from struct import pack
#from bagpipe.exabgp.message.update.attribute import AttributeID,Flag,Attribute
# =================================================================== Community
class Label (object):
	"""A single MPLS label value, wire-encoded as an unsigned 32-bit int."""
	def __init__ (self,label):
		self.label = label

	def pack (self):
		# Network byte order, 4 bytes ('!L').
		return pack('!L',self.label)

	def __str__ (self):
		# NOTE(review): placeholder retained from the original code.
		return "NOT DONE"

	def __len__ (self):
		# Packed wire length in bytes.
		return 4

	def __cmp__(self,other):
		# Python 2 comparison hook (ignored by Python 3 — see __eq__ below).
		if ( not isinstance(other,Label)
			or (self.label != other.label)
		):
			return -1
		else:
			return 0

	# Fixed: __cmp__ is ignored by Python 3, so equality silently fell back
	# to identity there. Provide explicit value equality and hashing; this is
	# also safe on Python 2, where __eq__ takes precedence over __cmp__.
	def __eq__ (self,other):
		return isinstance(other,Label) and self.label == other.label

	def __ne__ (self,other):
		return not self.__eq__(other)

	def __hash__ (self):
		return hash(self.label)
import math
from torch.nn import Embedding
import torch
from torch.nn.parameter import Parameter
from torch.nn.modules.module import Module
from torch.nn import init
from torch.autograd import Variable
# Fix the global RNG at import time so weight initialisation is reproducible.
torch.random.manual_seed(2070)
class GraphConvolution(Module):
    """
    Simple GCN layer, similar to https://arxiv.org/abs/1609.02907
    """
    def __init__(self, in_features, out_features, bias=True):
        super(GraphConvolution, self).__init__()
        self.in_features = in_features
        self.out_features = out_features
        # Dense weight matrix; values are set in reset_parameters().
        self.weight = Parameter(torch.FloatTensor(in_features, out_features))
        if not bias:
            self.register_parameter('bias', None)
        else:
            self.bias = Parameter(torch.FloatTensor(out_features))
        self.reset_parameters()

    def reset_parameters(self):
        """Uniform init in [-1/sqrt(out_features), +1/sqrt(out_features)]."""
        bound = 1. / math.sqrt(self.weight.size(1))
        self.weight.data.uniform_(-bound, bound)
        if self.bias is not None:
            self.bias.data.uniform_(-bound, bound)

    def forward(self, input, adj):
        # Feature transform X @ W, then sparse neighbourhood aggregation A @ (XW).
        hidden = torch.mm(input, self.weight)
        aggregated = torch.spmm(adj, hidden)
        if self.bias is None:
            return aggregated
        return aggregated + self.bias

    def __repr__(self):
        return '{} ({} -> {})'.format(
            self.__class__.__name__, self.in_features, self.out_features)
class BI_Intereaction(Module):
    """Bi-interaction (FM-style) pooling over learned feature embeddings:
    output row = 0.5 * ((sum_i x_i * v_i)^2 - sum_i (x_i * v_i)^2),
    computed only for the rows listed in ``train_idx``."""
    def __init__(self, in_features, k_embedding, train_idx):
        '''
        :param in_features: dimensionality of the input feature vector
        :param k_embedding: embedding size of a single feature
        :param train_idx: row indices of the input to pool in bi_pooling()
        '''
        super(BI_Intereaction, self).__init__()
        self.in_features = in_features
        self.k_embedding = k_embedding
        self.train_idx = train_idx
        self.embedding = Embedding(in_features, k_embedding)
        # self.weight = Parameter(torch.FloatTensor(in_features,k_embedding))
        # self.reset_parameters()
        self.init_embedding()
    def init_embedding(self):
        # Xavier init of the shared feature-embedding table.
        init.xavier_uniform_(self.embedding.weight)
        # print('embedding_init',self.embedding.weight)
    def reset_parameters(self):
        # NOTE(review): dead code — self.weight is never created (the
        # Parameter above is commented out), so calling this raises
        # AttributeError. It is never invoked by __init__.
        stdv = 1. / math.sqrt(self.weight.size(1))
        self.weight.data.uniform_(-stdv, stdv)
    def bi_pooling(self, input, embeddings):
        # Rows not in train_idx stay zero.
        output = torch.zeros(input.shape[0], self.k_embedding)
        # rows/cols are only used by the commented debug print below.
        rows, cols = input.shape[0], input.shape[1]
        # print(rows,cols)
        for _ in self.train_idx:
            # left  = sum_i x_i * v_i ; right = sum_i (x_i * v_i)^2,
            # summed over the nonzero features of this row only.
            left = torch.zeros(self.k_embedding)
            right = torch.zeros(self.k_embedding)
            nonzero_index = torch.nonzero(input[_])
            # print(nonzero_index.squeeze(1))
            for i in nonzero_index.squeeze(1):
                left += torch.mul(embeddings.weight[i] , input[_][i])
                right += torch.mul(embeddings.weight[i] , input[_][i]) ** 2
            vec = 0.5 * (left ** 2 - right)
            del left, right
            output[_] = vec
        return output
    def forward(self, input):
        return self.bi_pooling(input,self.embedding)
| 3,087 | 1,015 |
from .pdf_segmenter import PDFSegmenter
import pytest
from digest.digest_tools import Digest, content_path, InvalidDigestException
@pytest.mark.parametrize(
    "digest, output_args",
    [
        ("tarsum.v123123+sha1:123deadbeef", ("tarsum.v123123+sha1", "123deadbeef")),
        ("tarsum.v1+sha256:123123", ("tarsum.v1+sha256", "123123")),
        ("tarsum.v0+md5:abc", ("tarsum.v0+md5", "abc")),
        ("tarsum+sha1:abc", ("tarsum+sha1", "abc")),
        ("sha1:123deadbeef", ("sha1", "123deadbeef")),
        ("sha256:123123", ("sha256", "123123")),
        ("md5:abc", ("md5", "abc")),
    ],
)
def test_parse_good(digest, output_args):
    """Well-formed digests parse to (algorithm, hex) and round-trip via str()."""
    parsed = Digest.parse_digest(digest)
    assert parsed == Digest(*output_args)
    assert str(parsed) == digest
@pytest.mark.parametrize(
    "bad_digest",
    [
        "tarsum.v+md5:abc:",
        "sha1:123deadbeefzxczxv",
        "sha256123123",
        "tarsum.v1+",
        "tarsum.v1123+sha1:",
    ],
)
def test_parse_fail(bad_digest):
    """Malformed digest strings must raise InvalidDigestException."""
    with pytest.raises(InvalidDigestException):
        Digest.parse_digest(bad_digest)
@pytest.mark.parametrize(
    "digest, path",
    [
        ("tarsum.v123123+sha1:123deadbeef", "tarsum/v123123/sha1/12/123deadbeef"),
        ("tarsum.v1+sha256:123123", "tarsum/v1/sha256/12/123123"),
        ("tarsum.v0+md5:abc", "tarsum/v0/md5/ab/abc"),
        ("sha1:123deadbeef", "sha1/12/123deadbeef"),
        ("sha256:123123", "sha256/12/123123"),
        ("md5:abc", "md5/ab/abc"),
        ("md5:1", "md5/01/1"),
        ("md5.....+++:1", "md5/01/1"),
        (".md5.:1", "md5/01/1"),
    ],
)
def test_paths(digest, path):
    """content_path() shards by the (zero-padded) first two hex chars and,
    per the last two cases, strips punctuation from the algorithm part."""
    assert content_path(digest) == path
| 1,623 | 799 |
import requests
def call(logger, params):
    """Best-effort POST of *params* as JSON to the logging service.

    :param logger: URL of the logger service endpoint (passed straight to
        ``requests.post``).
    :param params: JSON-serialisable payload.

    Failures are swallowed after printing a notice so that logging can
    never take the caller down.
    """
    try:
        requests.post(logger, json=params)
    except requests.exceptions.RequestException:
        # Fixed: the exception was bound to an unused name 'e'.
        # Deliberate best-effort behaviour: report and continue.
        print('Logger service is not available')
| 199 | 53 |
import unittest
from milecsa import Chain, Config
import local_config
def print_block(chain, block):
    """Dump a block's header fields and each of its transactions to stdout."""
    print("Id: ", block.blockId)
    print("Version: ", block.version)
    print("Timestamp: ", block.timestamp)
    print("Trx count: ", block.transaction_count)
    for trx in block.transactions:
        # Resolve the human-readable asset name for this transaction.
        asset_name = chain.asset_name(trx.assetCode)
        print(trx, trx.source, "->[", trx.description, "]", trx.destination,
              " asset: ", trx.assetCode, asset_name, " amount: ", trx.amount)
class MyTestCase(unittest.TestCase):
    """Smoke test against a live milecsa chain node (requires network)."""
    def test_something(self):
        chain = Chain()
        state = chain.get_state()
        print("Block count: ", state.block_count)
        print("Node count: ", state.node_count)
        block_id = chain.get_current_block_id()
        print("Last block id: ", chain.get_current_block_id())
        # NOTE(review): fetches hard-coded block 556 rather than `block_id`
        # obtained above — confirm this is intentional.
        block = chain.get_block(block_id=556)
        print("Last id: ", block_id)
        print_block(chain, block)
        #
        # iterate
        #
        print()
        # Walk the ten most recent blocks.
        for bid in range(state.block_count-10, state.block_count):
            block = chain.get_block(block_id=bid)
            print_block(chain, block)
        print()
        #self.assertEqual(chain0, chain1)
# Run the tests when this module is executed directly.
if __name__ == '__main__':
    unittest.main()
| 1,277 | 408 |
from flask_jsglue import JSGlue
from flask import Flask, render_template, jsonify
from flask_sqlalchemy import SQLAlchemy
from flask_restful import reqparse, Api
app = Flask(__name__)
api = Api(app)
# MySQL DSN for the video-rental schema; utf8mb4 so CJK text round-trips.
app.config['SQLALCHEMY_DATABASE_URI'] = 'mysql://root:root@localhost:3306/videorent?charset=utf8mb4'
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
db = SQLAlchemy(app)
js_glue = JSGlue()
js_glue.init_app(app)  # lets JS files use url_for
# NOTE(review): one module-level RequestParser is shared and mutated by every
# route via add_argument, so argument definitions accumulate across requests —
# confirm this is benign for flask_restful's parser.
parser = reqparse.RequestParser()
class Customer(db.Model):
    """A rental-club member record."""
    id = db.Column('id', db.Integer, primary_key=True)
    name = db.Column(db.String(30), unique=True)
    phone = db.Column(db.String(20), unique=True)
    # Stored as text; no arithmetic is performed on it in this module.
    deposit = db.Column(db.String(10))
    comment = db.Column(db.String(50))
    def __init__(self, customer_id=None, customer_name=None, customer_phone=None, customer_deposit="0", comment=""):
        self.id = customer_id
        self.name = customer_name
        self.phone = customer_phone
        self.deposit = customer_deposit
        self.comment = comment
    def get_id(self):
        """Return the primary key as a string."""
        return str(self.id)
    # Printable representation of the record.
    def __repr__(self):
        return '<Customer %r,%r,%r,%r,%r >' % (self.id, self.name, self.phone, self.deposit, self.comment)
    def to_json(self):
        """Serialise to the dict shape the front end expects."""
        return {
            'id': self.id,
            'name': self.name,
            'phone': self.phone,
            'deposit': self.deposit,
            'comment': self.comment,
        }
class Video(db.Model):
    """A video title available for rental."""
    id = db.Column('id', db.Integer, primary_key=True)
    # NOTE(review): unique=True on format means only one video per media
    # format — confirm this constraint is intentional.
    format = db.Column(db.String(10), unique=True)
    name = db.Column(db.String(50), unique=True)
    description = db.Column(db.String(256))
    comment = db.Column(db.String(256))
    def __init__(self, video_id=None, video_format=None, video_name=None, video_description="", comment=""):
        self.id = video_id
        self.format = video_format
        self.name = video_name
        self.description = video_description
        self.comment = comment
    def get_id(self):
        """Return the primary key as a string."""
        return str(self.id)
    # Printable representation of the record.
    def __repr__(self):
        return '<Video %r,%r,%r,%r,%r >' % (self.id, self.name, self.format, self.description, self.comment)
    def to_json(self):
        """Serialise to the dict shape the front end expects."""
        return {
            'id': self.id,
            'name': self.name,
            'format': self.format,
            'description': self.description,
            'comment': self.comment,
        }
class Rental(db.Model):
    """A rental transaction linking a customer to a video."""
    id = db.Column('id', db.Integer, primary_key=True)
    # NOTE(review): unique=True on both timestamps forbids two rentals at the
    # same instant — confirm this constraint is intentional.
    rental_time = db.Column(db.TIMESTAMP, unique=True)
    return_time = db.Column(db.TIMESTAMP, unique=True)
    customer_id = db.Column(db.String(20))
    video_id = db.Column(db.String(20))
    # '1' appears to mean not-yet-returned (see the delete_* routes) — confirm.
    status = db.Column(db.String(10))
    comment = db.Column(db.String(256))
    def __init__(self, rental_id=None, rental_time=None, return_time=None, customer_id="", video_id="",
                 status="", comment=""):
        self.id = rental_id
        self.rental_time = rental_time
        self.return_time = return_time
        self.customer_id = customer_id
        self.video_id = video_id
        self.status = status
        self.comment = comment
    def get_id(self):
        """Return the primary key as a string."""
        return str(self.id)
    def __repr__(self):
        # Fixed: the format string started with a '$r' typo and had fewer
        # '%r' specifiers than arguments, so calling repr() raised TypeError.
        return '<Rental %r,%r,%r,%r,%r,%r,%r>' % (self.id, self.rental_time, self.return_time, self.customer_id,
                                                  self.video_id, self.status, self.comment)
    def to_json(self):
        """Serialise to the dict shape the front end expects."""
        return {
            'id': self.id,
            'rental_time': self.rental_time,
            'return_time': self.return_time,
            'customer_id': self.customer_id,
            'video_id': self.video_id,
            'status': self.status,
            'comment': self.comment,
        }
class RentalRelation:
    """Denormalised rental row (rental + video + customer) for the UI."""
    def __init__(self, rental_id, video_id, video_name, video_description, customer_id, customer_name,
                 customer_phone, status,
                 rental_time, return_time, comment=None):
        self.id = rental_id
        self.video_id = video_id
        self.video_name = video_name
        self.video_description = video_description
        self.customer_id = customer_id
        self.customer_name = customer_name
        self.customer_phone = customer_phone
        self.status = status
        self.rental_time = rental_time
        self.return_time = return_time
        # Fixed: the comment parameter was accepted but silently dropped.
        self.comment = comment
    def __repr__(self):
        # Fixed: the format string started with a '$r' typo and had fewer
        # '%r' specifiers than arguments, so calling repr() raised TypeError.
        return ('<RentalRelation %r,%r,%r,%r,%r,%r,%r,%r,%r,%r>'
                % (self.id, self.video_id, self.video_name, self.video_description,
                   self.customer_id, self.customer_name, self.customer_phone,
                   self.status,
                   self.rental_time, self.return_time))
    def to_json(self):
        """Serialise to the dict shape the front end expects."""
        return {
            'id': self.id,
            'video_id': self.video_id,
            'video_name': self.video_name,
            'video_description': self.video_description,
            'customer_id': self.customer_id,
            'customer_name': self.customer_name,
            'customer_phone': self.customer_phone,
            'status': self.status,
            'rental_time': self.rental_time,
            'return_time': self.return_time,
        }
@app.route('/')
def index():
    """Serve the single-page front end."""
    return render_template('index.html')
@app.route('/get_data')
def get_base_data():
    """Join every rental with its video and customer and return them as JSON."""
    data = db.session.query(Video, Customer, Rental).filter(Rental.video_id == Video.id, Rental.customer_id ==
                                                            Customer.id).all()
    dict1 = []
    for item in data:
        # NOTE(review): strftime raises if either timestamp is NULL — confirm
        # the schema guarantees both are always set.
        format_rental_time = item.Rental.rental_time.strftime("%Y.%m.%d-%H:%M:%S")
        format_return_time = item.Rental.return_time.strftime("%Y.%m.%d-%H:%M:%S")
        rental_relation_item = RentalRelation(item.Rental.id, item.Video.id, item.Video.name, item.Video.description,
                                              item.Customer.id, item.Customer.name, item.Customer.phone,
                                              item.Rental.status, format_rental_time, format_return_time)
        dict1.append(rental_relation_item.to_json())
    return jsonify({'results': dict1})
@app.route('/get_video_data')
def get_video_data():
    """Return all videos, or one video when an ``id`` query argument is given."""
    parser.add_argument("id", type=str)
    args = parser.parse_args()
    item_id = args.get('id')
    if item_id is None:
        data = db.session.query(Video).all()
    else:
        data = db.session.query(Video).filter(Video.id == item_id).all()
    dict1 = []
    for item in data:
        # Fixed: previously each row was copied into a transient
        # Video(item.id, "", item.name, item.description, "") before
        # serialising, which blanked 'format' and 'comment' in the response
        # (get_customer_data serialises the real fields). Serialise the
        # queried row directly instead.
        dict1.append(item.to_json())
    return jsonify({'results': dict1})
@app.route('/get_customer_data')
def get_customer_data():
    """Return all customers, or one customer when an ``id`` arg is given."""
    parser.add_argument("id", type=str)
    args = parser.parse_args()
    item_id = args.get('id')
    if item_id is None:
        data = db.session.query(Customer).all()
    else:
        data = db.session.query(Customer).filter(Customer.id == item_id).all()
    dict1 = []
    for item in data:
        # Copies each row into a transient Customer before serialising;
        # item.to_json() would serve the same purpose.
        customer_item = Customer(item.id, item.name, item.phone, item.deposit,
                                 item.comment)
        dict1.append(customer_item.to_json())
    return jsonify({'results': dict1})
@app.route('/add_video', methods=['POST'])
def add_video():
    """Create a video unless one with the same name already exists.

    Returns 200 with a success message, or 400 when the name is taken.
    """
    parser.add_argument("name")
    parser.add_argument("description")
    parser.add_argument("comment")
    args = parser.parse_args()
    video_name = args['name']
    video_description = args['description']
    comment = args['comment']
    db.create_all() # In case user table doesn't exists already. Else remove it.
    video_data = db.session.query(Video).filter(Video.name == video_name).all()
    if len(video_data) == 0:
        video = Video(video_name=video_name, video_description=video_description, comment=comment)
        db.session.add(video)
        db.session.commit()
        return jsonify({'message': '添加成功!'}), 200
    else:
        return jsonify({'message': '添加失败!该影片已经存在'}), 400
@app.route('/add_customer', methods=['POST'])
def add_customer():
    """Create a member unless one with the same name AND phone exists.

    Returns 200 with a success message, or 400 when the member exists.
    """
    parser.add_argument("phone")
    parser.add_argument("name")
    parser.add_argument("deposit")
    parser.add_argument("comment")
    args = parser.parse_args()
    customer_phone = args['phone']
    customer_name = args['name']
    customer_deposit = args['deposit']
    comment = args['comment']
    db.create_all() # In case user table doesn't exists already. Else remove it.
    customer_data = db.session.query(Customer).filter(Customer.name == customer_name,
                                                      Customer.phone == customer_phone).all()
    if len(customer_data) == 0:
        customer = Customer(customer_phone=customer_phone, customer_name=customer_name,
                            customer_deposit=customer_deposit, comment=comment)
        db.session.add(customer)
        db.session.commit()
        return jsonify({'message': '添加成功!'}), 200
    else:
        return jsonify({'message': '添加失败!该会员已经存在'}), 400
@app.route('/add_rental', methods=['POST'])
def add_rental():
    """Create a rental linking an existing video (by name) to an existing
    member (by name + phone); status is set to '1' (outstanding).

    Returns 200 on success, 400 when the video or member does not exist.
    """
    parser.add_argument("video_name")
    parser.add_argument("video_description")
    parser.add_argument("customer_phone")
    parser.add_argument("customer_name")
    parser.add_argument("rental_time")
    parser.add_argument("return_time")
    parser.add_argument("comment")
    args = parser.parse_args()
    video_name = args['video_name']
    customer_phone = args['customer_phone']
    customer_name = args['customer_name']
    rental_time = args['rental_time']
    return_time = args['return_time']
    # NOTE(review): 'comment' (and the declared 'video_description') are read
    # but never stored on the Rental row below — confirm this is intended.
    comment = args['comment']
    db.create_all() # In case user table doesn't exists already. Else remove it.
    video_data = db.session.query(Video).filter(Video.name == video_name).first()
    customer_data = db.session.query(Customer).filter(Customer.name == customer_name, Customer.phone ==
                                                      customer_phone).first()
    if video_data is not None and customer_data is not None:
        rental = Rental(rental_time=rental_time, return_time=return_time, customer_id=customer_data.id,
                        video_id=video_data.id, status=str(1))
        db.session.add(rental)
        db.session.commit()
        return jsonify({'message': '添加成功!'}), 200
    else:
        if video_data is None:
            return jsonify({'message': '添加失败!这里没有该影片'}), 400
        else:
            return jsonify({'message': '添加失败!这里没有该会员'}), 400
@app.route('/update_rental', methods=['PUT'])
def update_rental():
    """Update an existing rental record.

    Looks up the rental by id and the referenced video (by name) and
    customer (by name + phone), then rewrites the rental's fields.

    Fixed: the failure path (rental/video/customer not found) previously
    printed an error but still returned the success message — it now
    returns 400.
    """
    parser.add_argument("id", type=int)
    parser.add_argument("video_name", type=str)
    parser.add_argument("customer_name", type=str)
    parser.add_argument("customer_phone", type=str)
    parser.add_argument("rental_time", type=str)
    parser.add_argument("return_time", type=str)
    parser.add_argument("status", type=str)
    parser.add_argument("comment", type=str)
    args = parser.parse_args()
    item_id = args.get('id')
    update_video_name = args.get('video_name')
    update_customer_name = args.get('customer_name')
    update_customer_phone = args.get('customer_phone')
    update_rental_time = args.get('rental_time')
    update_return_time = args.get('return_time')
    update_status = args.get('status')
    update_comment = args.get('comment')
    video = db.session.query(Video).filter_by(name=update_video_name).first()
    customer = db.session.query(Customer).filter_by(name=update_customer_name, phone=update_customer_phone).first()
    rental = db.session.query(Rental).filter_by(id=item_id).first()
    if rental is not None and video is not None and customer is not None:
        rental.customer_id = customer.id
        rental.video_id = video.id
        rental.return_time = update_return_time
        rental.rental_time = update_rental_time
        rental.status = update_status
        rental.comment = update_comment
        db.session.commit()
        return jsonify({"message": "修改成功!"})
    else:
        print("the rental is None,update error")
        return jsonify({"message": "修改失败!租赁、影片或会员不存在"}), 400
@app.route('/delete_rental', methods=['DELETE'])
def delete_rental():
    """Delete a rental record by id.

    Fixed: a missing rental previously printed an error but still returned
    the success message — it now returns 404.
    """
    parser.add_argument("id", type=str, location='args')
    args = parser.parse_args()
    raw_id = args.get('id')
    rental = db.session.query(Rental).filter_by(id=raw_id).first()
    if rental is not None:
        db.session.delete(rental)
        db.session.commit()
        return jsonify({'message': '删除成功!'})
    else:
        print("the rental is None,delete error")
        return jsonify({'message': '删除失败!该租赁记录不存在'}), 404
@app.route('/delete_video', methods=['DELETE'])
def delete_video():
    """Delete a video unless it still has an outstanding rental."""
    parser.add_argument("id", type=str, location='args')
    args = parser.parse_args()
    raw_id = args.get('id')
    video = db.session.query(Video).filter_by(id=raw_id).first()
    # NOTE(review): Rental.status is a String column filtered with int 1 here,
    # while rentals are stored with status=str(1) — confirm the comparison
    # behaves as intended on the target database.
    rental = db.session.query(Rental).filter_by(video_id=raw_id, status=1).first()
    if video is not None and rental is None:
        db.session.delete(video)
        db.session.commit()
        return jsonify({'message': '删除成功!'}),200
    else:
        print("the video is None,delete error")
        return jsonify({'message': '影片不存在或者还有人租赁该影片没有归还'}), 403
@app.route('/delete_customer', methods=['DELETE'])
def delete_customer():
    """Delete a member unless they still have an outstanding rental."""
    parser.add_argument("id", type=str, location='args')
    args = parser.parse_args()
    raw_id = args.get('id')
    customer = db.session.query(Customer).filter_by(id=raw_id).first()
    # NOTE(review): same int-1 vs str('1') status comparison as delete_video —
    # confirm it matches on the target database.
    rental = db.session.query(Rental).filter_by(customer_id=raw_id, status=1).first()
    if customer is not None and rental is None:
        db.session.delete(customer)
        db.session.commit()
        return jsonify({'message': '删除成功!'})
    else:
        print("the customer is None,delete error")
        return jsonify({'message': '会员不存在,或者还有会员租赁还影片!'}), 403
@app.errorhandler(404)
def page_not_found(e):
    """Render the custom 404 page."""
    # note that we set the 404 status explicitly
    return render_template('404.html'), 404
if __name__ == '__main__':
    # Development server only; debug=True must not be used in production.
    app.run(debug=True)
| 14,014 | 4,617 |
"""
Querying functionality for the rdb repository.
This file is part of the everest project.
See LICENSE.txt for licensing, CONTRIBUTORS.txt for contributor information.
Created on Jan 7, 2013.
"""
from everest.constants import RESOURCE_ATTRIBUTE_KINDS
from everest.entities.base import Entity
from everest.exceptions import MultipleResultsException
from everest.exceptions import NoResultsException
from everest.querying.base import EXPRESSION_KINDS
from everest.querying.filtering import RepositoryFilterSpecificationVisitor
from everest.querying.interfaces import IFilterSpecificationVisitor
from everest.querying.interfaces import IOrderSpecificationVisitor
from everest.querying.ordering import OrderSpecificationVisitor
from everest.querying.ordering import RepositoryOrderSpecificationVisitor
from everest.querying.specifications import order
from everest.repositories.rdb.utils import OrmAttributeInspector
from everest.resources.interfaces import ICollectionResource
from everest.resources.interfaces import IResource
from everest.utils import get_order_specification_visitor
from functools import reduce as func_reduce
from sqlalchemy import and_ as sqlalchemy_and
from sqlalchemy import not_ as sqlalchemy_not
from sqlalchemy import or_ as sqlalchemy_or
from sqlalchemy.orm.exc import MultipleResultsFound
from sqlalchemy.orm.exc import NoResultFound
from sqlalchemy.orm.query import Query as SaQuery
from sqlalchemy.sql.expression import ClauseList
from sqlalchemy.sql.expression import func
from sqlalchemy.sql.expression import over
from zope.interface import implementer # pylint: disable=E0611,F0401
# Docstring markup used throughout this module.
__docformat__ = 'reStructuredText en'
# Public API of this module.
__all__ = ['OptimizedCountingRdbQuery',
           'OrderClauseList',
           'RdbQuery',
           'SqlFilterSpecificationVisitor',
           'SqlOrderSpecificationVisitor',
           ]
@implementer(IFilterSpecificationVisitor)
class SqlFilterSpecificationVisitor(RepositoryFilterSpecificationVisitor):
"""
Filter specification visitor implementation for the RDB repository
(builds a SQL expression).
"""
    def __init__(self, entity_class, custom_clause_factories=None):
        """
        Constructs a SqlFilterSpecificationVisitor.

        :param entity_class: an entity class that is mapped with SQLAlchemy
        :param custom_clause_factories: a map containing custom clause factory
          functions keyed by (attribute name, operator name) tuples; entries
          override the default clause building for matching leaf
          specifications (see :meth:`visit_nullary`).
        """
        RepositoryFilterSpecificationVisitor.__init__(self, entity_class)
        if custom_clause_factories is None:
            custom_clause_factories = {}
        self.__custom_clause_factories = custom_clause_factories
    def visit_nullary(self, spec):
        """Dispatch a leaf specification: use a registered custom clause
        factory for (attr_name, operator.name) when one exists, otherwise
        fall back to the default handling."""
        key = (spec.attr_name, spec.operator.name)
        if key in self.__custom_clause_factories:
            self._push(self.__custom_clause_factories[key](spec.attr_value))
        else:
            RepositoryFilterSpecificationVisitor.visit_nullary(self, spec)
    def _starts_with_op(self, spec):
        """Build the clause via SQLAlchemy's ``startswith`` operator."""
        return self.__build(spec.attr_name, 'startswith', spec.attr_value)
    def _ends_with_op(self, spec):
        """Build the clause via SQLAlchemy's ``endswith`` operator."""
        return self.__build(spec.attr_name, 'endswith', spec.attr_value)
    def _contains_op(self, spec):
        """Build the clause via SQLAlchemy's ``contains`` operator."""
        return self.__build(spec.attr_name, 'contains', spec.attr_value)
def _contained_op(self, spec):
if ICollectionResource.providedBy(spec.attr_value): # pylint:disable=E1101
# FIXME: This is a hack that allows us to query for containment
# of a member in an arbitrary collection (not supported
# by SQLAlchemy yet).
spec.attr_name = spec.attr_name + '.id'
spec.attr_value = [rc.id for rc in spec.attr_value]
return self.__build(spec.attr_name, 'in_', spec.attr_value)
def _equal_to_op(self, spec):
return self.__build(spec.attr_name, '__eq__', spec.attr_value)
def _less_than_op(self, spec):
return self.__build(spec.attr_name, '__lt__', spec.attr_value)
def _less_than_or_equal_to_op(self, spec):
return self.__build(spec.attr_name, '__le__', spec.attr_value)
def _greater_than_op(self, spec):
return self.__build(spec.attr_name, '__gt__', spec.attr_value)
def _greater_than_or_equal_to_op(self, spec):
return self.__build(spec.attr_name, '__ge__', spec.attr_value)
def _in_range_op(self, spec):
from_value, to_value = spec.attr_value
return self.__build(spec.attr_name, 'between', from_value, to_value)
def _conjunction_op(self, spec, *expressions):
return sqlalchemy_and(*expressions)
def _disjunction_op(self, spec, *expressions):
return sqlalchemy_or(*expressions)
def _negation_op(self, spec, expression):
return sqlalchemy_not(expression)
def __build(self, attribute_name, sql_op, *values):
# Builds an SQL expression from the given (possibly dotted)
# attribute name, SQL operation name, and values.
exprs = []
infos = OrmAttributeInspector.inspect(self._entity_class,
attribute_name)
count = len(infos)
for idx, info in enumerate(infos):
kind, entity_attr = info
if idx == count - 1:
#
args = \
[val.get_entity() if IResource.providedBy(val) else val # pylint: disable=E1101
for val in values]
expr = getattr(entity_attr, sql_op)(*args)
elif kind == RESOURCE_ATTRIBUTE_KINDS.MEMBER:
expr = entity_attr.has
exprs.insert(0, expr)
elif kind == RESOURCE_ATTRIBUTE_KINDS.COLLECTION:
expr = entity_attr.any
exprs.insert(0, expr)
return func_reduce(lambda g, h: h(g), exprs, expr)
class OrderClauseList(ClauseList):
    """
    Custom clause list for ORDER BY clauses.

    Suppresses the grouping parentheses which would trigger a syntax error.
    """
    def self_group(self, against=None):
        # Returning self (instead of a grouped/parenthesized construct)
        # keeps the ORDER BY clause list un-parenthesized.
        return self
@implementer(IOrderSpecificationVisitor)
class SqlOrderSpecificationVisitor(RepositoryOrderSpecificationVisitor):
    """
    Order specification visitor implementation for the rdb repository
    (builds a SQL expression).
    """
    def __init__(self, entity_class, custom_join_clauses=None):
        """
        Constructs a SqlOrderSpecificationVisitor

        :param klass: a class that is mapped to a selectable using SQLAlchemy
        :param custom_join_clauses: optional map from attribute name to the
          join clauses required to order by that attribute.
        """
        RepositoryOrderSpecificationVisitor.__init__(self, entity_class)
        if custom_join_clauses is None:
            custom_join_clauses = {}
        self.__custom_join_clauses = custom_join_clauses
        # Join targets collected while visiting (dotted) attributes; applied
        # in order_query().
        self.__joins = set()

    def visit_nullary(self, spec):
        # NOTE(review): delegates to OrderSpecificationVisitor rather than
        # to the direct base RepositoryOrderSpecificationVisitor — confirm
        # this is intentional.
        OrderSpecificationVisitor.visit_nullary(self, spec)
        if spec.attr_name in self.__custom_join_clauses:
            self.__joins = set(self.__custom_join_clauses[spec.attr_name])

    def order_query(self, query):
        # Apply the collected joins, then the built ORDER BY expression.
        for join_expr in self.__joins:
            # FIXME: only join when needed here.
            query = query.outerjoin(join_expr)
        return query.order(self.expression)

    def _conjunction_op(self, spec, *expressions):
        # Flatten nested clause lists into a single ORDER BY clause list.
        clauses = []
        for expr in expressions:
            if isinstance(expr, ClauseList):
                clauses.extend(expr.clauses)
            else:
                clauses.append(expr)
        return OrderClauseList(*clauses)

    def _asc_op(self, spec):
        return self.__build(spec.attr_name, 'asc')

    def _desc_op(self, spec):
        return self.__build(spec.attr_name, 'desc')

    def __build(self, attribute_name, sql_op):
        # Resolve a (possibly dotted) attribute name; apply asc()/desc() to
        # the terminal attribute and record intermediate relation
        # attributes as join targets.
        expr = None
        infos = OrmAttributeInspector.inspect(self._entity_class,
                                              attribute_name)
        count = len(infos)
        for idx, info in enumerate(infos):
            kind, entity_attr = info
            if idx == count - 1:
                expr = getattr(entity_attr, sql_op)()
            elif kind != RESOURCE_ATTRIBUTE_KINDS.TERMINAL:
                # FIXME: Avoid adding multiple attrs with the same target here.
                self.__joins.add(entity_attr)
        return expr
class Query(SaQuery):
    """
    SQLAlchemy query subclass whose ``order_by`` accepts everest order
    specifications and translates them to SQL via the registered visitor.
    """
    def __init__(self, entities, session, **kw):
        SaQuery.__init__(self, entities, session, **kw)
        # Remember the queried entity class so order specifications can be
        # translated against it later.
        ent_cls = entities[0]
        if isinstance(ent_cls, type) and issubclass(ent_cls, Entity):
            self._entity_class = ent_cls
        else: # just for compatibility pragma: no cover
            self._entity_class = None

    def order(self, order_expression):
        # Apply an already-built SQL order expression (plain SQLAlchemy
        # order_by).
        return SaQuery.order_by(self, order_expression)

    def order_by(self, *args):
        # Interpret ``args`` as an order specification, convert it to SQL
        # with the visitor, and let the visitor apply it (including any
        # joins it collected).
        spec = order(*args)
        vst_cls = get_order_specification_visitor(EXPRESSION_KINDS.SQL)
        vst = vst_cls(self._entity_class)
        spec.accept(vst)
        return vst.order_query(self)
class RdbQuery(Query):
    """
    Query class for the RDB backend.
    """
    def one(self):
        # Overwritten so we can translate SQLAlchemy's ORM exceptions into
        # the repository-level exceptions callers expect.
        try:
            return Query.one(self)
        except NoResultFound:
            raise NoResultsException('No results found when exactly one '
                                     'was expected.')
        except MultipleResultsFound:
            raise MultipleResultsException('More than one result found '
                                           'where exactly one was expected.')
class SimpleCountingRdbQuery(RdbQuery):
    """
    Simple counting query for the RDB backend.

    We want the count to reflect the true size of the aggregate, without
    slicing.
    """
    def count(self):
        # Strip any LIMIT/OFFSET so the count covers the whole result set.
        count_query = self.limit(None).offset(None)
        # Avoid recursing into this count() override by "downcasting" the
        # new query to the plain Query class.
        count_query.__class__ = Query
        return count_query.count()
class OptimizedCountingRdbQuery(RdbQuery): # pragma: no cover
    """
    Optimized counting query for the RDB backend.

    The optimization uses the OVER windowing SQL statement to retrieve the
    collection data ''and'' count in ''one'' database roundtrip. Note that
    this query object will always return the same count and data once the
    :method:`__iter__` or :method:`count` method has been called.
    """
    def __init__(self, entities, session, **kw):
        RdbQuery.__init__(self, entities, session, **kw)
        # Lazily populated caches for the result count and data.
        self.__count = None
        self.__data = None

    def _clone(self):
        # Reset the caches on clones so a modified query re-fetches.
        clone = RdbQuery._clone(self)
        # pylint: disable=W0212
        clone.__count = None
        clone.__data = None
        # pylint: enable=W0212
        return clone

    def __iter__(self):
        if self.__data is None:
            self.__count, self.__data = self.__load()
        return iter(self.__data)

    def count(self):
        if self.__count is None:
            self.__count, self.__data = self.__load()
        return self.__count

    def __load(self):
        # Fetch the data together with the windowed total count in a
        # single roundtrip. Each result row is (entity, _count).
        query = self.add_columns(over(func.count(1)).label('_count'))
        count = 0
        data = []
        for row in Query.__iter__(query):
            data.append(row[0])
            # The window count is identical on every row; remember it.
            # (The original code read the loop variable of a list
            # comprehension after the comprehension finished, which raises
            # NameError on Python 3 where comprehension scopes don't leak.)
            count = row._count # pylint:disable-msg=W0212
        return count, data
| 11,285 | 3,302 |
# Copyright 2017 Quantum Information Science, University of Parma, Italy. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
__author__ = "Davide Ferrari"
__copyright__ = "Copyright 2017, Quantum Information Science, University of Parma, Italy"
__license__ = "Apache"
__version__ = "2.0"
__email__ = "davide.ferrari8@studenti.unipr.it"
import logging
logging.VERBOSE = 5
class MyHandler(logging.StreamHandler):
    """Stream handler that registers the custom VERBOSE level and installs
    a fixed ``filename - levelname - message`` formatter."""

    def __init__(self):
        super().__init__()
        # Make the module-level VERBOSE level printable by name.
        logging.addLevelName(logging.VERBOSE, "VERBOSE")
        self.setFormatter(
            logging.Formatter('%(filename)s - %(levelname)s - %(message)s'))
| 1,259 | 376 |
# -*- coding: utf-8 -*-
"""
Created on Thu Mar 12 17:57:02 2020
@author: Atique Akhtar
"""
# Imports for data handling, visualization and modelling.
# Fix: GridSearchCV is used in the SVR section below but was never
# imported, which raised a NameError when the script reached that line.
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
from sklearn.model_selection import train_test_split, GridSearchCV
from sklearn.linear_model import LinearRegression
from sklearn import metrics
from sklearn.preprocessing import StandardScaler
from sklearn import neighbors
from sklearn.ensemble import RandomForestRegressor
from sklearn.svm import SVR
import pickle

# Load the NASA weather dataset for the Pavagada site.
weather_df = pd.read_csv("C:\\Users\\EE1303227\\Desktop\\flask app\\pavagada_nasa_dataset.csv")
weather_df.info()
weather_desc = pd.DataFrame(weather_df.describe())

# Derived regression target computed from the ALLSKY_SFC_LW_DWN column
# scaled by fixed constants (1.6 * 15.6 * 0.75).
weather_df['GENERATED_ENERGY'] = weather_df.apply(lambda row: row.ALLSKY_SFC_LW_DWN*1.6*15.6*0.75 , axis = 1)
weather_df.columns

# Keep the weather features plus the target and inspect correlations.
df = weather_df[['PRECTOT', 'QV2M', 'RH2M', 'PS', 'TS', 'T2MDEW', 'T2MWET', 'T2M_MAX',
                 'T2M_MIN', 'T2M', 'WS10M', 'WS50M', 'WS10M_MAX', 'WS50M_MAX', 'WS50M_MIN',
                 'WS10M_MIN', 'GENERATED_ENERGY']]
df_corr = pd.DataFrame(df.corr())

# Feature subset used for modelling.
X = df[['PRECTOT', 'QV2M', 'PS', 'T2M_MIN', 'T2M', 'WS10M_MAX']]
y = df['GENERATED_ENERGY']
X_corr = pd.DataFrame(X.corr())

Xtrain, Xtest, ytrain, ytest = train_test_split(X, y, test_size=0.3, random_state=100)

# LINEAR REGRESSION
lm = LinearRegression()
lm.fit(Xtrain, ytrain)
print(lm.intercept_)
print(lm.coef_)
X.columns
cdf = pd.DataFrame(lm.coef_, Xtrain.columns, columns=['coeff'])
predictions = lm.predict(Xtest)
plt.scatter(ytest, predictions)
sns.distplot((ytest-predictions)) # if normally distributed then the model is correct choice
metrics.mean_absolute_error(ytest, predictions)
metrics.mean_squared_error(ytest, predictions)
np.sqrt(metrics.mean_squared_error(ytest, predictions))

# KNN (features are standardized first — KNN is distance based)
scaler = StandardScaler()
scaler.fit(X)
scaled_features = scaler.transform(X)
X_feat = pd.DataFrame(scaled_features, columns=X.columns)
Xtrain, Xtest, ytrain, ytest = train_test_split(X_feat, y, test_size=0.3, random_state=0)
rmse_val = [] # to store rmse values for different k
for K in range(40):
    K = K+1
    model = neighbors.KNeighborsRegressor(n_neighbors = K)
    model.fit(Xtrain, ytrain)  # fit the model
    pred = model.predict(Xtest)  # make prediction on test set
    error = np.sqrt(metrics.mean_squared_error(ytest, pred))  # calculate rmse
    rmse_val.append(error)  # store rmse values
    print('RMSE value for k= ' , K , 'is:', error)

# plotting the rmse values against k values (elbow curve)
curve = pd.DataFrame(rmse_val)
curve.plot()

# Final KNN model with k chosen from the elbow curve.
knn_model = neighbors.KNeighborsRegressor(n_neighbors = 25)
knn_model.fit(Xtrain, ytrain)
pred = knn_model.predict(Xtest)
np.sqrt(metrics.mean_squared_error(ytest, pred))

# RANDOM FOREST
rf_model = RandomForestRegressor(n_estimators=300)
rf_model.fit(Xtrain, ytrain)
pred_rf = rf_model.predict(Xtest)
plt.scatter(ytest, pred_rf)
sns.distplot((ytest-pred_rf))
metrics.mean_absolute_error(ytest, pred_rf)
metrics.mean_squared_error(ytest, pred_rf)
np.sqrt(metrics.mean_squared_error(ytest, pred_rf))

# SVR with a grid search over C and gamma.
pram_grid = {'C': [0.1, 1, 10, 100, 1000], 'gamma': [1, 0.1, 0.01, 0.001, 0.0001]}
grid = GridSearchCV(SVR(), pram_grid, verbose=3)
grid.fit(Xtrain, ytrain)
grid.best_params_
#{'C': 1000, 'gamma': 0.01}
grid.best_estimator_
#SVR(C=1000, cache_size=200, coef0=0.0, degree=3, epsilon=0.1, gamma=0.01,
#    kernel='rbf', max_iter=-1, shrinking=True, tol=0.001, verbose=False)
pred_grid = grid.predict(Xtest)
metrics.mean_absolute_error(ytest, pred_grid)
metrics.mean_squared_error(ytest, pred_grid)
np.sqrt(metrics.mean_squared_error(ytest, pred_grid))
#6.0520296890047245

# PICKLE FILE: persist the linear model and sanity-check a prediction.
pickle.dump(lm, open('model.pkl', 'wb'))
model = pickle.load(open('model.pkl', 'rb'))
print(model.predict([[12.17, 0.017, 94.42, 21.47, 23.3, 6.67]]))
| 3,742 | 1,642 |
'''
Written by Jan H. Jensen 2018
'''
from rdkit import Chem
from rdkit.Chem import AllChem
from rdkit.Chem import Descriptors
from rdkit.DataStructs.cDataStructs import TanimotoSimilarity
from rdkit.Chem import rdFMCS
from rdkit import rdBase
rdBase.DisableLog('rdApp.error')
import numpy as np
import sys
from multiprocessing import Pool
import subprocess
import os
import shutil
import string
import random
import sascorer
# Reference distributions (precomputed over a molecule training set, loaded
# from text files next to this script) used to standardize the three
# components of the penalized logP score in logP_score().
logP_values = np.loadtxt('logP_values.txt')
SA_scores = np.loadtxt('SA_scores.txt')
cycle_scores = np.loadtxt('cycle_scores.txt')
SA_mean = np.mean(SA_scores)
SA_std=np.std(SA_scores)
logP_mean = np.mean(logP_values)
logP_std= np.std(logP_values)
cycle_mean = np.mean(cycle_scores)
cycle_std=np.std(cycle_scores)
def calculate_score(args):
    '''Parallelize at the score level (not currently in use).

    ``args`` is a (gene, scoring_function, scoring_args) triple; returns
    ``scoring_function(gene, scoring_args)``.
    '''
    gene, scoring_function, scoring_args = args
    return scoring_function(gene, scoring_args)
def calculate_scores_parallel(population,function,scoring_args, n_cpus):
    '''Parallelize at the score level (not currently in use).

    Scores each gene with ``function(gene, scoring_args)`` using a
    multiprocessing pool of ``n_cpus`` workers; returns the score list in
    population order.
    '''
    args_list = []
    args = [function, scoring_args]
    for gene in population:
        args_list.append([gene]+args)
    with Pool(n_cpus) as pool:
        scores = pool.map(calculate_score, args_list)
    return scores
def calculate_scores(population, function, scoring_args):
    """Score a population of genes.

    Scoring functions whose name contains ``'pop'`` are treated as
    population-level scorers and receive the whole population at once;
    all others are applied gene by gene.
    """
    if 'pop' in function.__name__:
        return function(population, scoring_args)
    return [function(gene, scoring_args) for gene in population]
def logP_max(m, dummy):
    """Penalized logP score clamped below at zero (``dummy`` is ignored;
    it exists to match the common ``(gene, scoring_args)`` signature)."""
    return max(0.0, logP_score(m))
def logP_target(m, args):
    """Gaussian-shaped reward for the penalized logP score around a target.

    ``args`` is a (target, sigma) pair passed to GaussianModifier.
    """
    target, sigma = args
    return GaussianModifier(logP_score(m), target, sigma)
def logP_score(m):
    # Penalized logP: standardized logP plus standardized synthetic-
    # accessibility and large-ring penalties, using the module-level
    # reference means/stds.
    try:
        logp = Descriptors.MolLogP(m)
    except:
        # A molecule RDKit cannot compute logP for is fatal here.
        print (m, Chem.MolToSmiles(m))
        sys.exit('failed to make a molecule')

    SA_score = -sascorer.calculateScore(m)
    #cycle_list = nx.cycle_basis(nx.Graph(rdmolops.GetAdjacencyMatrix(m)))
    cycle_list = m.GetRingInfo().AtomRings() #remove networkx dependence
    if len(cycle_list) == 0:
        cycle_length = 0
    else:
        cycle_length = max([ len(j) for j in cycle_list ])
    # Only rings with more than six atoms are penalized, by the excess size.
    if cycle_length <= 6:
        cycle_length = 0
    else:
        cycle_length = cycle_length - 6
    cycle_score = -cycle_length
    # Standardize each component against the precomputed reference stats.
    SA_score_norm=(SA_score-SA_mean)/SA_std
    logp_norm=(logp-logP_mean)/logP_std
    cycle_score_norm=(cycle_score-cycle_mean)/cycle_std
    score_one = SA_score_norm + logp_norm + cycle_score_norm

    return score_one
def shell(cmd, shell=False):
    """Run *cmd* as a subprocess and return its captured stdout (bytes).

    With ``shell=True`` the command string is passed to the system shell;
    otherwise it is split on whitespace and executed directly. stderr is
    captured but discarded.
    """
    if shell:
        proc = subprocess.Popen(cmd, shell=True, stdin=subprocess.PIPE,
                                stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    else:
        proc = subprocess.Popen(cmd.split(), stdin=subprocess.PIPE,
                                stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout_data, _ = proc.communicate()
    return stdout_data
def write_xtb_input_file(fragment, fragment_name):
    # Write one .xyz file per conformer of ``fragment``, named
    # "<fragment_name>+<conformer index>.xyz". A trailing $set block
    # records a non-zero molecular charge (xTB input convention).
    number_of_atoms = fragment.GetNumAtoms()
    charge = Chem.GetFormalCharge(fragment)
    symbols = [a.GetSymbol() for a in fragment.GetAtoms()]
    for i,conf in enumerate(fragment.GetConformers()):
        file_name = fragment_name+"+"+str(i)+".xyz"
        with open(file_name, "w") as file:
            # xyz header: atom count, then a title line.
            file.write(str(number_of_atoms)+"\n")
            file.write("title\n")
            for atom,symbol in enumerate(symbols):
                p = conf.GetAtomPosition(atom)
                line = " ".join((symbol,str(p.x),str(p.y),str(p.z),"\n"))
                file.write(line)
            if charge !=0:
                file.write("$set\n")
                file.write("chrg "+str(charge)+"\n")
                file.write("$end")
def get_structure(mol,n_confs):
    # Embed ``n_confs`` 3D conformers (with explicit hydrogens), optimize
    # them with MMFF, and return a copy of the molecule carrying only the
    # lowest-energy conformer.
    mol = Chem.AddHs(mol)
    new_mol = Chem.Mol(mol)

    AllChem.EmbedMultipleConfs(mol,numConfs=n_confs,useExpTorsionAnglePrefs=True,useBasicKnowledge=True)
    energies = AllChem.MMFFOptimizeMoleculeConfs(mol,maxIters=2000, nonBondedThresh=100.0)
    # Each entry of ``energies`` is a (converged flag, energy) pair.
    energies_list = [e[1] for e in energies]
    min_e_index = energies_list.index(min(energies_list))
    new_mol.AddConformer(mol.GetConformer(min_e_index))
    return new_mol
def compute_absorbance(mol,n_confs,path):
    # Run the external xtb4stda + sTDA programs (found under ``path``) in a
    # throwaway randomly-named scratch directory and parse the first
    # excitation's wavelength and oscillator strength from the output.
    mol = get_structure(mol,n_confs)
    dir = ''.join(random.choices(string.ascii_uppercase + string.digits, k=6))
    os.mkdir(dir)
    os.chdir(dir)
    write_xtb_input_file(mol, 'test')
    shell(path+'/xtb4stda test+0.xyz',shell=False)
    out = shell(path+'/stda_v1.6.1 -xtb -e 10',shell=False)
    #data = str(out).split('Rv(corr)\\n')[1].split('alpha')[0].split('\\n') # this gets all the lines
    # First data line after the 'Rv(corr)' table header; fields 2 and 3 are
    # wavelength and oscillator strength.
    data = str(out).split('Rv(corr)\\n')[1].split('(')[0]
    wavelength, osc_strength = float(data.split()[2]), float(data.split()[3])
    # Leave and remove the scratch directory.
    os.chdir('..')
    shutil.rmtree(dir)

    return wavelength, osc_strength
def absorbance_target(mol, args):
    """
    Score a molecule by how close its first computed excitation wavelength
    is to a target, plus a thresholded reward for oscillator strength.

    :param mol: RDKit molecule to score.
    :param args: (n_confs, path, target, sigma, threshold) tuple.
    :return: float score; 0.0 when the absorbance computation fails.
    """
    n_confs, path, target, sigma, threshold = args
    try:
        wavelength, osc_strength = compute_absorbance(mol, n_confs, path)
    except Exception:
        # compute_absorbance shells out to external programs and parses
        # their output, so many failure modes are expected; map them all
        # to a zero score. (Fix: the original bare ``except:`` also
        # swallowed KeyboardInterrupt/SystemExit.)
        return 0.0
    score = GaussianModifier(wavelength, target, sigma)
    score += ThresholdedLinearModifier(osc_strength, threshold)
    return score
# GuacaMol article https://arxiv.org/abs/1811.09621
# adapted from https://github.com/BenevolentAI/guacamol/blob/master/guacamol/utils/fingerprints.py
def get_ECFP4(mol):
    # Morgan fingerprint, radius 2 (ECFP4 equivalent).
    return AllChem.GetMorganFingerprint(mol, 2)
def get_ECFP6(mol):
    # Morgan fingerprint, radius 3 (ECFP6 equivalent).
    return AllChem.GetMorganFingerprint(mol, 3)
def get_FCFP4(mol):
    # Feature-based Morgan fingerprint, radius 2 (FCFP4 equivalent).
    return AllChem.GetMorganFingerprint(mol, 2, useFeatures=True)
def get_FCFP6(mol):
    # Feature-based Morgan fingerprint, radius 3 (FCFP6 equivalent).
    return AllChem.GetMorganFingerprint(mol, 3, useFeatures=True)
def rediscovery(mol,args):
    # Tanimoto similarity between ``mol`` and the target molecule given as
    # ``args[0]`` (GuacaMol-style rediscovery objective).
    target = args[0]
    try:
        fp_mol = get_ECFP4(mol)
        fp_target = get_ECFP4(target)
        score = TanimotoSimilarity(fp_mol, fp_target)
        return score
    except:
        # NOTE(review): bare except maps any fingerprinting failure to None.
        print('Failed ',Chem.MolToSmiles(mol))
        return None
def MCS(mol,args):
    # Fraction of the target's atoms covered by the maximum common
    # substructure between ``mol`` and the target (``args[0]``); exact
    # bond-order matching, whole rings only.
    target = args[0]
    try:
        mcs = rdFMCS.FindMCS([mol, target], bondCompare=rdFMCS.BondCompare.CompareOrderExact,ringMatchesRingOnly=True,completeRingsOnly=True)
        score = mcs.numAtoms/target.GetNumAtoms()
        return score
    except:
        # NOTE(review): bare except maps any MCS failure to None.
        print('Failed ',Chem.MolToSmiles(mol))
        return None
def similarity(mol, target, threshold):
    """
    Thresholded Tanimoto similarity of ``mol`` to a target molecule.

    :param mol: RDKit molecule to score.
    :param target: the target molecule, or a sequence whose first element
        is the target molecule.
    :param threshold: similarity at (or above) which the score is 1.0.
    :return: score in [0, 1], or None if fingerprinting failed.
    """
    # Fix: rediscovery() indexes its second argument (``target = args[0]``),
    # but this function received the target molecule directly — normalize
    # to the sequence form rediscovery() expects, while still accepting
    # callers that already pass a sequence.
    target_mol = target[0] if isinstance(target, (list, tuple)) else target
    score = rediscovery(mol, [target_mol])
    if score:
        return ThresholdedLinearModifier(score, threshold)
    else:
        return None
# adapted from https://github.com/BenevolentAI/guacamol/blob/master/guacamol/score_modifier.py
def ThresholdedLinearModifier(score, threshold):
    """Linear ramp ``score / threshold``, capped at 1.0 once ``score``
    reaches ``threshold``."""
    if score >= threshold:
        return threshold / threshold
    return score / threshold
def GaussianModifier(score, target, sigma):
    """Gaussian reward: 1.0 when ``score`` equals ``target``, decaying
    with width ``sigma``; 0.0 if the computation fails (e.g. sigma == 0)."""
    try:
        deviation = (score - target) / sigma
        return np.exp(-0.5 * np.power(deviation, 2.))
    except:
        return 0.0
if __name__ == "__main__":
    # Smoke test: compute the absorbance of a small test molecule and
    # score it against a target wavelength of 200 (requires the local
    # xtb4stda / stda binaries under ``xtb_path``).
    n_confs = 20
    xtb_path = '/home/jhjensen/stda'
    target = 200.
    sigma = 50.
    threshold = 0.3

    smiles = 'Cc1occn1' # Tsuda I
    mol = Chem.MolFromSmiles(smiles)

    wavelength, osc_strength = compute_absorbance(mol,n_confs,xtb_path)
    print(wavelength, osc_strength)

    score = absorbance_target(mol,[n_confs, xtb_path, target, sigma, threshold])
    print(score)
| 7,047 | 2,750 |
# Copyright (c) OpenMMLab. All rights reserved.
# Package init: re-exports the middle-encoder implementations of this
# package (pillar scatterer and sparse-convolution encoders/UNet).
from .pillar_scatter import PointPillarsScatter
from .sparse_encoder import SparseEncoder
from .sparse_unet import SparseUNet
from .sparse_encoder_aux import SparseEncoder_AUX
from .sparse_encoderv2 import SparseEncoderV2

# Public API of this package.
__all__ = [
    'PointPillarsScatter', 'SparseEncoder', 'SparseUNet', 'SparseEncoder_AUX',
    'SparseEncoderV2'
]
| 386 | 144 |
"""
Contains result backpropagation policies for MCTS.
All policies should be represented as a function that takes a MCTree and
the result of a game, and updates a node accordingly. Backup policies dictate
how the results of simulated games affect game states beforehand. See the main
Mopy module and MCTree for more details on how policies are incorporated.
"""
def win_loss_ratio(node, winner):
    """
    Policy to update a node's win/loss ratio after a simulation result.

    Increments ``total_games``; increments ``won_games`` when ``winner``
    differs from the player to move at this node's state.

    Args:
        node (MCTree): The current node during backpropagation phase of MCTS.
        winner (int): Zero-indexed integer representing winner of a simulated
            game which is currently being backpropagated up the tree.
    """
    node.total_games = node.total_games + 1
    credited = winner != node.state.current_player
    if credited:
        node.won_games += 1
| 845 | 239 |
import pytest
from flex.exceptions import ValidationError
from flex.loading.schema.paths.path_item.operation.parameters import (
parameters_validator,
)
from flex.validation.parameter import (
validate_parameters,
)
from flex.constants import (
PATH,
STRING,
)
from tests.utils import assert_path_in_errors
#
# minimum validation tests
#
@pytest.mark.parametrize(
    'min_length,value',
    (
        (1, ''),
        (2, 'a'),
        (5, '1234'),
    ),
)
def test_minimum_length_validation_with_too_short_values(min_length, value):
    """A path parameter value shorter than ``minLength`` must raise a
    ValidationError with an error recorded at ``id.minLength``."""
    parameters = parameters_validator([
        {
            'name': 'id',
            'in': PATH,
            'description':'id',
            'type': STRING,
            'required': True,
            'minLength': min_length,
        },
    ])
    parameter_values = {
        'id': value,
    }

    with pytest.raises(ValidationError) as err:
        validate_parameters(parameter_values, parameters, {})

    assert_path_in_errors(
        'id.minLength',
        err.value.detail,
    )
@pytest.mark.parametrize(
    'min_length,value',
    (
        (1, 'a'),
        (2, 'ab'),
        (2, '12345'),
        (5, '12345-abcde'),
    ),
)
def test_minimum_length_validation_with_valid_lengths(min_length, value):
    """Values at or above ``minLength`` must validate without error."""
    parameters = parameters_validator([
        {
            'name': 'id',
            'in': PATH,
            'description':'id',
            'type': STRING,
            'required': True,
            'minLength': min_length,
        },
    ])
    parameter_values = {
        'id': value,
    }

    validate_parameters(parameter_values, parameters, {})
#
# maximum validation tests
#
@pytest.mark.parametrize(
    'max_length,value',
    (
        (1, 'ab'),
        (5, '123456'),
    ),
)
def test_maximum_length_validation_with_too_long_values(max_length, value):
    """A path parameter value longer than ``maxLength`` must raise a
    ValidationError with an error recorded at ``id.maxLength``."""
    parameters = parameters_validator([
        {
            'name': 'id',
            'in': PATH,
            'description':'id',
            'type': STRING,
            'required': True,
            'maxLength': max_length,
        },
    ])
    parameter_values = {
        'id': value,
    }

    with pytest.raises(ValidationError) as err:
        validate_parameters(parameter_values, parameters, {})

    assert_path_in_errors(
        'id.maxLength',
        err.value.detail,
    )
@pytest.mark.parametrize(
    'max_length,value',
    (
        (1, 'a'),
        (2, 'ab'),
        (2, '12'),
        (5, '12345'),
    ),
)
def test_maximum_length_validation_with_valid_lengths(max_length, value):
    """Values at or below ``maxLength`` must validate without error."""
    parameters = parameters_validator([
        {
            'name': 'id',
            'in': PATH,
            'description':'id',
            'type': STRING,
            'required': True,
            'maxLength': max_length,
        },
    ])
    parameter_values = {
        'id': value,
    }

    validate_parameters(parameter_values, parameters, {})
| 2,917 | 943 |
import cv2
import numpy
import math
from enum import Enum
class GripPipeline:
    """
    An OpenCV pipeline generated by GRIP.

    Two steps: build a fixed size tuple, then run a cascade classifier
    over the input image using that size as the minimum detection size.
    """

    def __init__(self):
        """initializes all values to presets or None if need to be set
        """
        self.__new_size_width = 100.0
        self.__new_size_height = 100.0
        self.new_size_output = None

        self.__cascade_classifier_scale_factor = 1.1
        self.__cascade_classifier_min_neighbors = 2.0
        # NOTE(review): new_size_output is still None here; the effective
        # min_size is re-assigned from new_size_output inside process().
        self.__cascade_classifier_min_size = self.new_size_output
        self.__cascade_classifier_max_size = (0, 0)
        self.cascade_classifier_output = None

    def process(self, source0, source1):
        """
        Runs the pipeline and sets all outputs to new values.

        source0 is the input image; source1 is the cascade classifier to
        apply (an object exposing detectMultiScale).
        """
        # Step New_Size0:
        (self.new_size_output) = self.__new_size(self.__new_size_width, self.__new_size_height)

        # Step Cascade_Classifier0:
        self.__cascade_classifier_image = source0
        self.__cascade_classifier_classifier = source1
        self.__cascade_classifier_min_size = self.new_size_output
        (self.cascade_classifier_output) = self.__cascade_classifier(self.__cascade_classifier_image, self.__cascade_classifier_classifier, self.__cascade_classifier_scale_factor, self.__cascade_classifier_min_neighbors, self.__cascade_classifier_min_size, self.__cascade_classifier_max_size)

    @staticmethod
    def __new_size(width, height):
        """Fills a size with given width and height.
        Args:
            width: A number for the width.
            height: A number for the height.
        Returns:
            A tuple of two numbers that represent a size.
        """
        return (width, height)

    @staticmethod
    def __cascade_classifier(input, classifier, scale_factor, min_neighbors, min_size, max_size):
        """Runs a cascade classifier over the input image.
        Args:
            input: A numpy.ndarray.
            classifier: The classifier to use
            scale_factor: the scale factor of each successive downsized image
            min_neighbors: how many neighbors each candidate rectangle should have to retain it
            min_size: the minimum possible object size
            max_size: the maximum possible object size. If (0, 0), it is assumed to be unbounded
        Return:
            A list of rectangles bounding the found regions of interest
        """
        return classifier.detectMultiScale(input, scale_factor, min_neighbors, 0, min_size, max_size)
| 2,556 | 753 |
from argparse import ArgumentParser, Namespace
from typing import Any, Dict, Tuple
import numpy as np
from sklearn.cluster import DBSCAN
from sklearn.metrics import precision_score, recall_score
from sklearn.neighbors import LocalOutlierFactor
from module.LatexGenerator import LatexGenerator
from module.OutlierAgglomerativeClustering import OutlierAgglomerativeClustering
from module.OutlierKMeans import OutlierKMeans
from module.plot import draw_plots
from module.reader import read_http_dataset, read_mammography_dataset, read_synthetic_dataset
from module.utils import create_directory, display_finish, run_main
"""
How to run:
python main.py -s -c lof
"""
# VAR ------------------------------------------------------------------------ #
RESULTS_DIR = "results/"
latex_generator: LatexGenerator = LatexGenerator(RESULTS_DIR)

# CLI clusterizer name -> (class, positional type converters...). The
# converters are applied to the raw -ap string arguments in main().
clusterizers: Dict[str, Any] = {
    "kmeans": (OutlierKMeans, int, float),
    "agglomerative": (OutlierAgglomerativeClustering, float, float),
    "db_scan": (DBSCAN, float),
    "lof": (LocalOutlierFactor, int)
}

# CLI dataset name -> (X, y). NOTE(review): all three datasets are loaded
# eagerly at import time even though a single run uses only one of them.
datasets: Dict[str, Tuple[np.ndarray, np.ndarray]] = {
    "http": read_http_dataset(),
    "mammography": read_mammography_dataset(),
    "synthetic": read_synthetic_dataset(),
}
# MAIN ----------------------------------------------------------------------- #
def main() -> None:
    """Run one outlier-detection experiment chosen via CLI arguments."""
    args = prepare_args()
    chosen_clusterizer_name = args.clusterizer
    chosen_dataset_name = args.dataset
    algorithm_params = args.algorithm_params
    save_stats = args.save
    create_directory(RESULTS_DIR)

    X, y = datasets[chosen_dataset_name]
    # Convert the raw string parameters using the converters registered
    # for the chosen algorithm (see ``clusterizers``).
    params = [
        typee(param)
        for param, typee in zip(algorithm_params, clusterizers[chosen_clusterizer_name][1:])
    ]
    y_pred = clusterizers[chosen_clusterizer_name][0](*params).fit_predict(X)

    # Per-class recall/precision; index 0 selects the first class's score.
    recall = np.round(recall_score(y, y_pred, average=None, zero_division=0)[0], 2)
    precision = np.round(precision_score(y, y_pred, average=None, zero_division=0)[0], 2)
    print(f"Recall {recall} & Precision {precision}")
    print(f"{chosen_clusterizer_name} ({algorithm_params}) - {chosen_dataset_name}")

    # File/plot name encodes algorithm, dataset and parameters ('.'->',').
    name = (f"{chosen_clusterizer_name}_{chosen_dataset_name}_"
            f"{'_'.join([str(param).replace('.', ',') for param in algorithm_params])}_")
    title = name + f"Rcl={recall}_Prec={precision}"
    draw_plots(X, y_pred, name, title, RESULTS_DIR, save_stats)
    display_finish()
# DEF ------------------------------------------------------------------------ #
def prepare_args() -> Namespace:
    """Define and parse the command-line arguments for one experiment run."""
    arg_parser = ArgumentParser()

    arg_parser.add_argument(
        "-c", "--clusterizer", type=str, choices=clusterizers.keys(),
        help="Name of clusterizer"
    )
    arg_parser.add_argument(
        "-ds", "--dataset", type=str, choices=datasets.keys(),
        help="Name of dataset"
    )
    # Raw strings; converted to the proper types per algorithm in main().
    arg_parser.add_argument(
        "-ap", "--algorithm_params", nargs="+", required=True, type=str,
        help="List of arguments for certain algorithm"
    )
    arg_parser.add_argument(
        "-s", "--save", default=False, action="store_true", help="Save charts to files"
    )

    return arg_parser.parse_args()
# __MAIN__ ------------------------------------------------------------------- #
if __name__ == "__main__":
    # Entry point: run main() through the project's run_main helper.
    run_main(main)
| 3,298 | 1,042 |
import itertools
import os
import unittest
from generator import rand
from approximate_string_matching import dont_care, matching_with_dont_cares
class TestExactMatchingWithDontCaresCase(unittest.TestCase):
    """Tests exact string matching with '?' wildcards, cross-checked
    against the basic FFT reference implementation (dont_care.basic_fft).

    Texts and patterns are 1-indexed with a leading '#' sentinel.
    """

    # Randomized/exhaustive tests only run when LARGE is set in the
    # environment.
    run_large = unittest.skipUnless(
        os.environ.get('LARGE', False), 'Skip test in small runs')

    def make_test(self, text, pattern, result):
        """Assert that matching ``pattern`` in ``text`` yields ``result``."""
        matches = matching_with_dont_cares.exact_matching_with_dont_cares(
            text, pattern, len(text), len(pattern))
        self.assertEqual(result, list(matches))

    def test_given_input_with_no_wildcards_returns_matches(self):
        self.make_test('#abbabaaa', '#ab', [1, 4])

    def test_given_input_with_wildcards_returns_matches(self):
        self.make_test('#abbabaaa', '#??a', [2, 4, 5, 6])

    def test_simple(self):
        self.make_test('#aa', '#a', [1, 2])

    @run_large
    def test_random_exact_string_matching(self):
        # Compare against the reference on random texts/patterns.
        T, n, m, A = 100, 500, 10, ['a', 'b']
        for _ in range(T):
            t, w = rand.random_word(n, A), rand.random_word(m, A + ['?'])
            reference = list(dont_care.basic_fft(t, w, n, m))
            self.make_test(t, w, reference)

    @run_large
    def test_all_exact_string_matching(self):
        # Exhaustively compare against the reference for all small inputs.
        N, M, A = 7, 3, ['a', 'b']
        for n in range(2, N + 1):
            for m in range(1, M + 1):
                for t in itertools.product(A, repeat = n):
                    t = '#' + ''.join(t)
                    for w in itertools.product(A + ['?'], repeat = m):
                        w = '#' + ''.join(w)
                        reference = list(dont_care.basic_fft(t, w, n, m))
                        self.make_test(t, w, reference)
| 1,534 | 594 |
import nengo
from .magic import decorator
@decorator
def with_self(method, network, args, kwargs):
    """Wraps a method with ``with network:``.

    This makes it easy to add methods to a network that create new
    Nengo objects. Instead of writing ``with self`` at the top of the method
    and indenting everything over, you can instead use this decorator.

    Example
    -------
    The two methods in the following class do the same thing::

        class MyNetwork(nengo.Network):
            def add_one_1(self):
                with self:
                    node = nengo.Node(output=1)

            @with_self
            def add_one_2(self):
                node = nengo.Node(output=1)
    """
    # ``decorator`` (from .magic) hands us the wrapped method, the instance
    # it was called on (the network), and the call arguments explicitly.
    with network:
        return method(*args, **kwargs)
def activate_direct_mode(network):
    """Activates direct mode for a network.

    This sets the neuron type of all ensembles to a `nengo.Direct`
    instance unless:

    - there is a connection to or from the ensemble's neurons
    - there is a probe on an ensemble's neurons
    - the ensemble has a connection with a learning rule attached.

    Parameters
    ----------
    network : Network
        Network to activate direct mode for.
    """
    # Collect ensembles that must keep real neurons.
    requires_neurons = set()
    for c in network.all_connections:
        # Direct neuron-level connections need actual neurons on each end.
        if isinstance(c.pre_obj, nengo.ensemble.Neurons):
            requires_neurons.add(c.pre_obj.ensemble)
        if isinstance(c.post_obj, nengo.ensemble.Neurons):
            requires_neurons.add(c.post_obj.ensemble)
        # Learning rules operate on neural activity.
        if c.learning_rule_type is not None:
            requires_neurons.add(c.pre_obj)
            requires_neurons.add(c.post_obj)

    for p in network.all_probes:
        # Probed neuron populations must stay as real neurons.
        if isinstance(p.obj, nengo.ensemble.Neurons):
            requires_neurons.add(p.obj.ensemble)

    # Everything else can be computed directly on the represented values.
    for e in network.all_ensembles:
        if e not in requires_neurons:
            e.neuron_type = nengo.Direct()
| 1,902 | 566 |
from matplotlib import pyplot as plt
from sklearn import metrics
class Evaluator:
    """Computes evaluation metrics for binary classification predictions."""

    def __init__(self, y_true, y_pred):
        """
        Compute and store the AUC (area under the ROC curve).

        :param y_true: true binary labels.
        :param y_pred: predicted scores/probabilities.
        """
        # After this assignment the instance attribute ``auc`` (a float)
        # shadows the ``auc`` method, matching the original API where
        # ``evaluator.auc`` is the computed value.
        self.auc = self.auc(y_true, y_pred)

    def auc(self, y_true, y_pred):
        """Return the area under the ROC curve for the given labels/scores."""
        # Bug fix: the original assigned the result to ``self.auc`` and
        # implicitly returned None, so ``__init__`` then overwrote
        # ``self.auc`` with None. Returning the value makes ``self.auc``
        # the actual AUC.
        false_positive_rate, true_positive_rate, thresholds = metrics.roc_curve(y_true, y_pred)
        return metrics.auc(false_positive_rate, true_positive_rate)
| 379 | 134 |
#
# DVI.py
#
# (c) 2020 by Andreas Kraft
# License: BSD 3-Clause License. See the LICENSE file for further details.
#
# ResourceType: mgmtObj:DeviceInfo
#
from .MgmtObj import *
from Constants import Constants as C
import Utils
# Fallback values applied when the resource JSON does not provide them.
defaultDeviceType = 'unknown'
defaultModel = "unknown"
defaultManufacturer = "unknown"
defaultDeviceLabel = "unknown serial id"


class DVI(MgmtObj):
    """mgmtObj specialization: DeviceInfo (see the module header)."""

    def __init__(self, jsn=None, pi=None, create=False):
        super().__init__(jsn, pi, C.tsDVI, C.mgdDVI, create=create)
        if self.json is not None:
            # Ensure the DeviceInfo attributes exist without overwriting
            # values already present in the resource JSON.
            self.setAttribute('dty', defaultDeviceType, overwrite=False)
            self.setAttribute('mod', defaultModel, overwrite=False)
            self.setAttribute('man', defaultManufacturer, overwrite=False)
            self.setAttribute('dlb', defaultDeviceLabel, overwrite=False)
| 779 | 278 |
import cv2
import numpy as np
class Augmentation:
    """Stateless image-augmentation helpers built on OpenCV.

    The methods take a file path, load the image from disk, and return the
    transformed image array. They were defined without ``self`` but not
    marked ``@staticmethod``, so calling them on an *instance* passed the
    instance as ``image``; the decorators fix that while keeping
    class-level calls (``Augmentation.get_resize(...)``) unchanged.
    """

    @staticmethod
    def get_resize(image, lengthScale, breadthScale):
        """
        Load ``image`` from disk and resize it by percentage scales.

        :param image: path to the image file.
        :param lengthScale: height scale, in percent of the original.
        :param breadthScale: width scale, in percent of the original.
        :return: the resized image array.
        """
        src = cv2.imread(image, cv2.IMREAD_UNCHANGED)
        # Calculate the target dimensions from the percentage scales.
        width = int(src.shape[1] * breadthScale / 100)
        length = int(src.shape[0] * lengthScale / 100)
        dsize = (width, length)
        return cv2.resize(src, dsize)

    @staticmethod
    def get_crop(image, dim):
        """
        Load ``image`` from disk and crop it.

        :param image: path to the image file.
        :param dim: (row_start, row_end, col_start, col_end) crop window,
            or None to crop the top-left quadrant.
        :return: the cropped image array.
        """
        ima = cv2.imread(image)
        if dim is None:
            print(ima.shape[1])
            x = int(ima.shape[0] / 2)
            y = int(ima.shape[1] / 2)
            cropped = ima[0:x, 0:y]
        else:
            cropped = ima[dim[0]:dim[1], dim[2]:dim[3]]
        return cropped

    @staticmethod
    def get_rotate(image, angle):
        """
        Load ``image`` from disk and rotate it about its center.

        :param image: path to the image file.
        :param angle: rotation angle in degrees (as used by
            cv2.getRotationMatrix2D).
        :return: the rotated image array (same size as the input).
        """
        src = cv2.imread(image, cv2.IMREAD_UNCHANGED)
        image_center = tuple(np.array(src.shape[1::-1]) / 2)
        rot_mat = cv2.getRotationMatrix2D(image_center, angle, 1.0)
        return cv2.warpAffine(src, rot_mat, src.shape[1::-1], flags=cv2.INTER_LINEAR)
| 1,272 | 453 |
# Input is expected to be Windows-1251 encoded; output is written as UTF-8.
input_file_name = "windows-1251_encoded_file.txt"
output_file_name = "utf8_encoded_file.txt"
def read_from(enc, file_name):
    """
    Read a text file and return its decoded content.

    :param enc: encoding to decode the file with. (Bug fix: this parameter
        was previously ignored — the encoding was hardcoded to cp1251.)
    :param file_name: path of the file to read.
    :return: the file content as a str.
    """
    # open file for read in textmode with encoding:
    with open(file_name, "r", encoding=enc) as f:
        decoded_content = f.read()
    return decoded_content
def write_to(enc, content, file_name):
    """Encode ``content`` with ``enc`` and write it to ``file_name``
    (opened in text mode, ``w+``)."""
    out_file = open(file_name, "w+", encoding=enc)
    try:
        out_file.write(content)
    finally:
        out_file.close()
# Script body: decode the cp1251 input file and re-write it as UTF-8.
decoded_content = read_from("cp1251", input_file_name)
write_to("utf8", decoded_content, output_file_name)
| 557 | 207 |
# Generated by Django 3.0.7 on 2020-12-01 14:56
from django.db import migrations, models
class Migration(migrations.Migration):
    # Makes FeatureFlag.code a unique 30-char field and relaxes
    # Project.name from a bounded CharField to an unbounded TextField.

    dependencies = [("iaso", "0064_instance_deleted")]

    operations = [
        migrations.AlterField(
            model_name="featureflag", name="code", field=models.CharField(max_length=30, unique=True)
        ),
        migrations.AlterField(model_name="project", name="name", field=models.TextField()),
    ]
| 449 | 152 |
"""myapp.py
Usage:
(window1)$ python myapp.py worker -l info
(window2)$ python
>>> from myapp import add
>>> add.delay(16, 16).get()
32
You can also specify the app to use with celeryd::
$ celery worker -l info --app=myapp
"""
from celery import Celery
celery = Celery("myapp", broker="amqp://guest@localhost//")
@celery.task()
def add(x, y):
    """Celery task: return the sum of x and y (invoke via add.delay(x, y))."""
    return x + y
if __name__ == "__main__":
celery.start()
| 436 | 171 |
import CoreFoundation
import objc
from PyObjCTools.TestSupport import TestCase
class TestCFDictionary(TestCase):
    """Exercise the PyObjC bridging of the CoreFoundation CFDictionary API.

    These tests rely on toll-free bridging: CFDictionaryRef objects are
    expected to compare equal to plain Python dicts, and Python strings/ints
    are accepted wherever CFTypes are expected.
    """

    def testCreation(self):
        """CFDictionaryCreate/CreateMutable produce bridged dictionaries."""
        dictionary = CoreFoundation.CFDictionaryCreate(
            None,
            ("aap", "noot", "mies", "wim"),
            ("monkey", "nut", "missy", "john"),
            4,
            CoreFoundation.kCFTypeDictionaryKeyCallBacks,
            CoreFoundation.kCFTypeDictionaryValueCallBacks,
        )
        self.assertIsInstance(dictionary, CoreFoundation.CFDictionaryRef)
        # Bridged CFDictionary compares equal to a native dict.
        self.assertEqual(
            dictionary, {"aap": "monkey", "noot": "nut", "mies": "missy", "wim": "john"}
        )
        dictionary = CoreFoundation.CFDictionaryCreateMutable(
            None,
            0,
            CoreFoundation.kCFTypeDictionaryKeyCallBacks,
            CoreFoundation.kCFTypeDictionaryValueCallBacks,
        )
        self.assertIsInstance(dictionary, CoreFoundation.CFMutableDictionaryRef)
        CoreFoundation.CFDictionarySetValue(dictionary, "hello", "world")
        self.assertEqual(dictionary, {"hello": "world"})

    def testApplyFunction(self):
        """CFDictionaryApplyFunction calls a Python callback per entry."""
        dictionary = CoreFoundation.CFDictionaryCreate(
            None,
            ("aap", "noot", "mies", "wim"),
            ("monkey", "nut", "missy", "john"),
            4,
            CoreFoundation.kCFTypeDictionaryKeyCallBacks,
            CoreFoundation.kCFTypeDictionaryValueCallBacks,
        )
        context = []

        def function(key, value, context):
            context.append((key, value))

        # Argument 1 must be bridged as a function pointer v@@@ (void, 3 objs).
        self.assertArgIsFunction(
            CoreFoundation.CFDictionaryApplyFunction, 1, b"v@@@", False
        )
        self.assertArgHasType(CoreFoundation.CFDictionaryApplyFunction, 2, b"@")
        CoreFoundation.CFDictionaryApplyFunction(dictionary, function, context)
        # Apply order is unspecified; sort before comparing.
        context.sort()
        self.assertEqual(len(context), 4)
        self.assertEqual(
            context,
            [
                (b"aap".decode("ascii"), b"monkey".decode("ascii")),
                (b"mies".decode("ascii"), b"missy".decode("ascii")),
                (b"noot".decode("ascii"), b"nut".decode("ascii")),
                (b"wim".decode("ascii"), b"john".decode("ascii")),
            ],
        )

    def testTypeID(self):
        """CFDictionaryGetTypeID returns a plain integer."""
        self.assertIsInstance(CoreFoundation.CFDictionaryGetTypeID(), int)

    def testCreation2(self):  # XXX
        """Create / copy / mutable-copy all yield CFDictionaryRef instances."""
        dct = CoreFoundation.CFDictionaryCreate(
            None,
            [b"key1".decode("ascii"), b"key2".decode("ascii")],
            [42, 43],
            2,
            CoreFoundation.kCFTypeDictionaryKeyCallBacks,
            CoreFoundation.kCFTypeDictionaryValueCallBacks,
        )
        self.assertIsInstance(dct, CoreFoundation.CFDictionaryRef)
        dct = CoreFoundation.CFDictionaryCreateCopy(None, dct)
        self.assertIsInstance(dct, CoreFoundation.CFDictionaryRef)
        dct = CoreFoundation.CFDictionaryCreateMutable(
            None,
            0,
            CoreFoundation.kCFTypeDictionaryKeyCallBacks,
            CoreFoundation.kCFTypeDictionaryValueCallBacks,
        )
        self.assertIsInstance(dct, CoreFoundation.CFDictionaryRef)
        dct = CoreFoundation.CFDictionaryCreateMutableCopy(None, 0, dct)
        self.assertIsInstance(dct, CoreFoundation.CFDictionaryRef)

    def testInspection(self):
        """Count / contains / get-value accessors on a two-entry dictionary."""
        # Both keys deliberately map to the same value (42) so that
        # GetCountOfValue can be checked against 2 below.
        dct = CoreFoundation.CFDictionaryCreate(
            None,
            [b"key1".decode("ascii"), b"key2".decode("ascii")],
            [42, 42],
            2,
            CoreFoundation.kCFTypeDictionaryKeyCallBacks,
            CoreFoundation.kCFTypeDictionaryValueCallBacks,
        )
        self.assertIsInstance(dct, CoreFoundation.CFDictionaryRef)
        self.assertEqual(CoreFoundation.CFDictionaryGetCount(dct), 2)
        self.assertEqual(
            CoreFoundation.CFDictionaryGetCountOfKey(dct, b"key1".decode("ascii")), 1
        )
        self.assertEqual(
            CoreFoundation.CFDictionaryGetCountOfKey(dct, b"key3".decode("ascii")), 0
        )
        self.assertEqual(CoreFoundation.CFDictionaryGetCountOfValue(dct, 42), 2)
        self.assertEqual(CoreFoundation.CFDictionaryGetCountOfValue(dct, 44), 0)
        self.assertResultHasType(CoreFoundation.CFDictionaryContainsKey, objc._C_NSBOOL)
        self.assertTrue(
            CoreFoundation.CFDictionaryContainsKey(dct, b"key1".decode("ascii"))
        )
        self.assertFalse(
            CoreFoundation.CFDictionaryContainsKey(dct, b"key3".decode("ascii"))
        )
        self.assertResultHasType(
            CoreFoundation.CFDictionaryContainsValue, objc._C_NSBOOL
        )
        self.assertTrue(CoreFoundation.CFDictionaryContainsValue(dct, 42))
        self.assertFalse(
            CoreFoundation.CFDictionaryContainsValue(dct, b"key3".decode("ascii"))
        )
        self.assertEqual(CoreFoundation.CFDictionaryGetValue(dct, "key2"), 42)
        self.assertIs(CoreFoundation.CFDictionaryGetValue(dct, "key3"), None)
        self.assertResultHasType(
            CoreFoundation.CFDictionaryGetValueIfPresent, objc._C_NSBOOL
        )
        # GetValueIfPresent's third argument is an out-parameter in C; the
        # bridge returns an (ok, value) tuple instead.
        self.assertArgIsOut(CoreFoundation.CFDictionaryGetValueIfPresent, 2)
        ok, value = CoreFoundation.CFDictionaryGetValueIfPresent(dct, "key2", None)
        self.assertTrue(ok)
        self.assertEqual(value, 42)
        ok, value = CoreFoundation.CFDictionaryGetValueIfPresent(dct, "key3", None)
        self.assertFalse(ok)
        self.assertIs(value, None)
        keys, values = CoreFoundation.CFDictionaryGetKeysAndValues(dct, None, None)
        self.assertEqual(values, (42, 42))
        # Key order is unspecified; sort before comparing.
        keys = list(keys)
        keys.sort()
        self.assertEqual(keys, ["key1", "key2"])

    def testMutation(self):
        """Add / Set / Replace / Remove on a mutable dictionary."""
        dct = CoreFoundation.CFDictionaryCreateMutable(
            None,
            0,
            CoreFoundation.kCFTypeDictionaryKeyCallBacks,
            CoreFoundation.kCFTypeDictionaryValueCallBacks,
        )
        self.assertEqual(CoreFoundation.CFDictionaryGetCount(dct), 0)
        CoreFoundation.CFDictionaryAddValue(
            dct, b"key1".decode("ascii"), b"value1".decode("ascii")
        )
        self.assertEqual(CoreFoundation.CFDictionaryGetCount(dct), 1)
        self.assertTrue(
            CoreFoundation.CFDictionaryContainsKey(dct, b"key1".decode("ascii"))
        )
        CoreFoundation.CFDictionarySetValue(
            dct, b"key2".decode("ascii"), b"value2".decode("ascii")
        )
        self.assertEqual(CoreFoundation.CFDictionaryGetCount(dct), 2)
        self.assertTrue(
            CoreFoundation.CFDictionaryContainsKey(dct, b"key2".decode("ascii"))
        )
        CoreFoundation.CFDictionaryReplaceValue(
            dct, b"key2".decode("ascii"), b"value2b".decode("ascii")
        )
        self.assertEqual(CoreFoundation.CFDictionaryGetCount(dct), 2)
        self.assertTrue(
            CoreFoundation.CFDictionaryContainsKey(dct, b"key2".decode("ascii"))
        )
        self.assertEqual(
            CoreFoundation.CFDictionaryGetValue(dct, "key2"), b"value2b".decode("ascii")
        )
        # ReplaceValue on an absent key is a no-op (unlike SetValue).
        CoreFoundation.CFDictionaryReplaceValue(
            dct, b"key3".decode("ascii"), b"value2b".decode("ascii")
        )
        self.assertEqual(CoreFoundation.CFDictionaryGetCount(dct), 2)
        self.assertFalse(
            CoreFoundation.CFDictionaryContainsKey(dct, b"key3".decode("ascii"))
        )
        CoreFoundation.CFDictionaryRemoveValue(dct, b"key1".decode("ascii"))
        self.assertFalse(
            CoreFoundation.CFDictionaryContainsKey(dct, b"key1".decode("ascii"))
        )
        CoreFoundation.CFDictionaryRemoveAllValues(dct)
        self.assertFalse(
            CoreFoundation.CFDictionaryContainsKey(dct, b"key2".decode("ascii"))
        )
        self.assertEqual(CoreFoundation.CFDictionaryGetCount(dct), 0)
| 7,795 | 2,257 |
from threading import Thread
import time
from hallo.events import EventMessage
def test_threads_simple(hallo_getter):
    """The 'active threads' command should produce a thread-count reply."""
    hallo = hallo_getter({"hallo_control"})
    request = EventMessage(hallo.test_server, None, hallo.test_user, "active threads")
    hallo.function_dispatcher.dispatch(request)
    replies = hallo.test_server.get_send_data(1, hallo.test_user, EventMessage)
    reply_text = replies[0].text.lower()
    assert "i have" in reply_text
    assert "active threads" in reply_text
def test_threads_increase(hallo_getter):
    """Starting extra threads should raise the reported active-thread count."""
    hallo = hallo_getter({"hallo_control"})

    def _reported_count():
        # Ask the bot, then parse "... have <N> active ..." out of the reply.
        hallo.function_dispatcher.dispatch(
            EventMessage(hallo.test_server, None, hallo.test_user, "active threads")
        )
        replies = hallo.test_server.get_send_data(1, hallo.test_user, EventMessage)
        before_active = replies[0].text.lower().split("active")[0]
        count_text = before_active.split("have")[1]
        return int(count_text.strip())

    first_threads = _reported_count()
    # Launch 10 threads that just sleep so they stay alive for the re-check.
    for _ in range(10):
        Thread(target=time.sleep, args=(10,)).start()
    second_threads = _reported_count()
    assert second_threads > first_threads, "Thread count should have increased"
| 1,462 | 499 |
import logging
from typing import Dict, List
import numpy as np
import torch
from numpy import ndarray
from sentence_transformers import SentenceTransformer, models
from melior_transformers.config.global_args import global_args
from melior_transformers.encoding.constants import MODEL_CLASSES
logger = logging.getLogger(__name__)
class SentenceEncoder:
    """Simple wrapper class around 'sentence-transformers'
    (https://github.com/UKPLab/sentence-transformers/) that
    allows us to easily extract embeddings from pre-trained models.

    You can find the full list of models here:
    https://huggingface.co/transformers/pretrained_models.html
    """

    def __init__(
        self,
        model_type: str = "bert",
        model_name: str = "bert-base-uncased",
        args: Dict = None,
        use_cuda: bool = False,
        random_seed: int = None,
    ):
        """
        Initializes a pre-trained Transformer model for Sentence Encoding.

        Args:
            model_type (optional): The type of model (key of MODEL_CLASSES).
            model_name (optional): The model name.
            args (optional): Additional arguments to configure embeddings
                extraction; merged over the defaults below.
            use_cuda (optional): Use GPU if available. Setting to False will
                force model to use CPU only.
            random_seed (optional): If given, seeds numpy and torch RNGs for
                reproducibility.

        Raises:
            ValueError: If `model_type` is unknown, or the underlying model
                fails to load (the original error is chained as the cause).
        """
        if random_seed is not None:
            np.random.seed(random_seed)
            torch.manual_seed(random_seed)

        device = "cuda" if use_cuda else "cpu"

        # Defaults; any user-supplied `args` keys override these.
        self.args = {
            # Tokenizer / sequence config
            "max_seq_length": 128,
            "do_lower_case": False,
            # Pooling config
            "pooling_mode_mean_tokens": True,
            "pooling_mode_cls_token": False,
            "pooling_mode_max_tokens": False,
            "pooling_mode_mean_sqrt_len_tokens": False,
        }
        if args is not None:
            self.args.update(args)

        if model_type not in MODEL_CLASSES:
            raise ValueError(
                f"Model type {model_type} doesn't exist."
                f"\nPlease select one of the following: {list(MODEL_CLASSES)}"
            )

        try:
            logger.info(f"Loading model '{model_name}'")
            word_embedding_model = MODEL_CLASSES[model_type](
                model_name_or_path=model_name,
                max_seq_length=self.args["max_seq_length"],
                do_lower_case=self.args["do_lower_case"],
            )
            pooling_model = models.Pooling(
                word_embedding_model.get_word_embedding_dimension(),
                pooling_mode_mean_tokens=self.args["pooling_mode_mean_tokens"],
                pooling_mode_cls_token=self.args["pooling_mode_cls_token"],
                pooling_mode_max_tokens=self.args["pooling_mode_max_tokens"],
                pooling_mode_mean_sqrt_len_tokens=self.args[
                    "pooling_mode_mean_sqrt_len_tokens"
                ],
            )
            self.encoder_model = SentenceTransformer(
                modules=[word_embedding_model, pooling_model], device=device
            )
        except Exception as e:
            # Chain the original exception so the root cause is preserved
            # (the original `raise ValueError(...)` dropped the traceback link).
            raise ValueError(f"Error loading model: {e}") from e

    def encode(
        self, sentences: List[str], batch_size: int = 8, show_progress_bar: bool = False
    ) -> List[np.ndarray]:
        """
        Extract sentence embeddings from the selected model.

        Args:
            sentences: List of sentences to extract embeddings.
            batch_size (optional): Batch size used for the computation.
            show_progress_bar (optional): Output a progress bar while encoding.

        Returns:
            List with ndarrays of the embeddings for each sentence.
        """
        return self.encoder_model.encode(
            sentences, batch_size=batch_size, show_progress_bar=show_progress_bar
        )
| 3,910 | 1,114 |
#!/usr/bin/env python
from django.db import models
class MyModel(models.Model):
    # Minimal Django model with a single required integer column.
    val = models.IntegerField()
| 114 | 36 |
import functools
import json

import requests
from requests.exceptions import Timeout

from .util import meta_fix
class SalsaObject(object):
    """Attribute-style wrapper around a raw Salsa API record dict.

    Field access falls through to the backing ``_data`` dict; unknown
    attribute names resolve to None.
    """

    def __init__(self, initial=None):
        # Write the backing dict straight into __dict__ so that our custom
        # __setattr__ (which consults _data) is not triggered during init.
        record = {}
        if initial:
            record.update(initial)
        self.__dict__['_data'] = record

    def __getattr__(self, attr):
        # Only called for names not found normally: fall through to the
        # wrapped record; anything else yields None (implicit return).
        if attr in self._data:
            return self._data[attr]

    def __setattr__(self, attr, value):
        # Known record fields are written through to the backing dict;
        # everything else becomes an ordinary instance attribute.
        if attr in self._data:
            self._data[attr] = value
        else:
            self.__dict__[attr] = value

    def whoami(self):
        """Return the concrete class name of this instance."""
        return self.__class__.__name__

    @classmethod
    def from_list(clazz, objects):
        """Wrap each raw record in *objects* in an instance of this class."""
        return [clazz(record) for record in objects]

    @property
    def key(self):
        """Primary key of the record (the '<object>_KEY' field), or None."""
        key_field = "%s_KEY" % self.object
        return self._data.get(key_field)

    def link(self, to_object, with_key):
        pass

    def tag(self, tag):
        pass

    def save(self):
        """Persist this record through the module-level `salsa` client."""
        salsa.save(self.object, self._data, key=self.key)

    def delete(self):
        pass
# Concrete Salsa record types.  The `object` class attribute is the table
# name used in API calls (and, via SalsaObject.key, the '<object>_KEY'
# primary-key field name).

class Donation(SalsaObject):
    object = 'donation'

class Event(SalsaObject):
    object = 'event'

class Group(SalsaObject):
    # NOTE(review): the Salsa table name for groups is plural ('groups').
    object = 'groups'

    def __repr__(self):
        return u"<Group: %s %s>" % (self.key, self.Group_Name)

class Supporter(SalsaObject):
    object = 'supporter'

    def __repr__(self):
        return u"<Supporter: %s %s %s %s>" % (self.key, self.First_Name, self.Last_Name, self.Email)

class SupporterAction(SalsaObject):
    object = 'supporter_action'

class SupporterAddress(SalsaObject):
    object = 'supporter_address'

class SupporterEvent(SalsaObject):
    object = 'supporter_event'

class SupporterGroup(SalsaObject):
    # Join table linking supporters to groups.
    object = 'supporter_groups'

class SignupPage(SalsaObject):
    object = 'signup_page'

class EmailBlast(SalsaObject):
    object = 'email_blast'

# other objects: distributed_event, supporter_action_comment,
# supporter_action_target, supporter_action_content, chapter
def check_authentication(f):
    """Decorator for Client methods that require an authenticated session.

    Authenticates lazily before the call if needed, and — because Salsa
    reports auth failures *inside* an otherwise-successful JSON response
    ([{'result': 'error', ...}]) — re-authenticates and retries the call
    once when such an error is detected.

    Fix: uses functools.wraps so the wrapped method keeps its __name__,
    __doc__ and signature metadata (the original wrapper clobbered them).
    """
    @functools.wraps(f)
    def wrapper(client, *args, **kwargs):
        if not client.authenticated:
            if client.auth_email and client.auth_password:
                client.authenticate(client.hq, client.auth_email, client.auth_password)
        data = f(client, *args, **kwargs)
        # A list-of-dicts payload whose first entry says 'error' means the
        # session expired: re-auth and retry exactly once.
        if data and not isinstance(data, dict) and isinstance(data[0], dict):
            if data[0].get('result') == 'error':
                client.authenticated = False
                client.authenticate(client.hq, client.auth_email, client.auth_password)
                data = f(client, *args, **kwargs)
        return data
    return wrapper
class AuthenticationError(Exception):
    """Raised when the Salsa API rejects the supplied credentials."""
    pass
class Client(object):
    """HTTP client for the Salsa Labs (salsalabs.com) API.

    Wraps a requests.Session; GET/POST helpers return parsed JSON (or raw
    bytes with raw=True) and silently return None on request timeout.
    Methods decorated with @check_authentication (re-)authenticate as
    needed before and after the call.
    """

    def __init__(self):
        self.hq = None                 # Salsa node subdomain, set by authenticate()
        self.auth_email = None
        self.auth_password = None
        self.authenticated = False
        self.timeout = None            # per-request timeout in seconds (None = wait)
        self.http = requests.Session()
        self.http.headers.update({'User-Agent': 'python-tortilla'})
        # Ask the API for JSON responses on every request.
        self.http.params = {'json': 'true'}

    def build_url(self, path, secure=True):
        """Return the full URL for *path* on the configured Salsa node."""
        scheme = 'https' if secure else 'http'
        return "%s://%s.salsalabs.com/%s" % (scheme, self.hq, path)

    @check_authentication
    def get(self, url, params=None, raw=False):
        """GET *url*; return raw bytes if *raw*, else parsed JSON.

        NOTE(review): a Timeout is swallowed and the method returns None.
        """
        try:
            resp = self.http.get(url, params=params, timeout=self.timeout)
            content = resp.content
            if params and params.get('object') == 'email_blast':
                # email_blast responses need a metadata workaround (see util.meta_fix).
                content = meta_fix(content)
            if raw:
                return content
            return json.loads(content)
        except Timeout:
            pass

    @check_authentication
    def post(self, url, params=None, raw=False):
        """POST *params* to *url*; return raw bytes if *raw*, else parsed JSON.

        NOTE(review): a Timeout is swallowed and the method returns None.
        """
        try:
            resp = self.http.post(url, params=params, timeout=self.timeout)
            if raw:
                return resp.content
            return json.loads(resp.content)
        except Timeout:
            pass

    def authenticate(self, hq, email, password, org_key=None, chapter_key=None):
        """Open an API session; returns True or raises AuthenticationError.

        NOTE(review): on Timeout this returns None without raising.
        """
        self.hq = hq
        self.auth_email = email
        self.auth_password = password
        url = self.build_url('api/authenticate.sjs')
        params = {
            'email': email,
            'password': password,
        }
        if org_key:
            params['organization_KEY'] = org_key
        if chapter_key:
            params['chapter_KEY'] = chapter_key
        try:
            resp = self.http.get(url, params=params)
            data = resp.json()
            if 'status' in data and data['status'] == 'success':
                self.authenticated = True
                return True
            raise AuthenticationError(data.get('message', 'Unable to authenticate'))
        except Timeout:
            pass

    def describe(self, object):
        """Return the schema description of a Salsa *object* (table)."""
        url = self.build_url('api/describe2.sjs')
        params = {'object': object}
        return self.get(url, params=params)

    def object(self, object, key, fields=None):
        """Fetch a single record by primary key."""
        url = self.build_url('api/getObject.sjs')
        params = {
            'object': object,
            'key': key,
        }
        if fields:
            params['include'] = fields
        return self.get(url, params=params)

    def objects(self, object, condition=None, order_by=None, limit=None, fields=None):
        """Fetch multiple records, optionally filtered/ordered/limited."""
        url = self.build_url('api/getObjects.sjs')
        params = {'object': object}
        if condition:
            params['condition'] = condition
        if order_by:
            params['orderBy'] = order_by
        if limit:
            params['limit'] = limit
        if fields:
            params['include'] = fields
        return self.get(url, params=params)

    def join(self, object_left, key_left, object_right, key_right=None, object_center=None,
             condition=None, order_by=None, limit=None, fields=None):
        """Left-join query; builds Salsa's 'left(key)center(key)right' syntax."""
        url = self.build_url('api/getLeftJoin.sjs')
        object = "%s(%s)" % (object_left, key_left)
        if key_right and object_center:
            object = "%s%s(%s)" % (object, object_center, key_right)
        object = "%s%s" % (object, object_right)
        params = {'object': object}
        if condition:
            params['condition'] = condition
        if order_by:
            params['orderBy'] = order_by
        if limit:
            params['limit'] = limit
        if fields:
            params['include'] = fields
        return self.get(url, params=params)

    def tagged(self, object, tag, condition=None, order_by=None, limit=None, fields=None):
        """Fetch records of *object* carrying *tag*."""
        url = self.build_url('api/getTaggedObjects.sjs')
        params = {'object': object, 'tag': tag}
        if condition:
            params['condition'] = condition
        if order_by:
            params['orderBy'] = order_by
        if limit:
            params['limit'] = limit
        if fields:
            params['include'] = fields
        return self.get(url, params=params)

    def report(self, key):
        """Run a saved report by its report_KEY."""
        url = self.build_url('api/getReport.sjs')
        params = {'report_KEY': key}
        return self.get(url, params=params)

    def save(self, object, values, key=None):
        """Create (no key) or update (with key) a record; returns the new key."""
        url = self.build_url('save')
        params = {'object': object}
        params.update(values)
        if key:
            params['key'] = key
        data = self.post(url, params=params)
        if data and data[0].get('result') == 'success':
            return data[0].get('key')

    def tag(self, object, key, tag):
        """Attach *tag* to the record identified by object/key."""
        url = self.build_url('api/tagObject.sjs')
        params = {
            'object': object,
            'key': key,
            'tag': tag,
        }
        return self.post(url, params=params)

    def link(self, object, key, to_object, with_key):
        """Link two records via the 'save' endpoint.

        NOTE(review): issued as a GET, unlike save() which POSTs — confirm
        against the Salsa API before changing.
        """
        url = self.build_url('save')
        params = {
            'object': object,
            'key': key,
            'link': to_object,
            'linkKey': with_key,
        }
        return self.get(url, params=params)

    def delete(self, object, key):
        """Delete the record identified by object/key.

        NOTE(review): endpoint is 'api/delete' (no .sjs suffix) — verify.
        """
        url = self.build_url('api/delete')
        params = {'object': object, 'key': key}
        return self.get(url, params=params)

    #
    # custom fetch methods
    #
    def group(self, group_id):
        """Fetch a single Group wrapped in its model class."""
        object = self.object(Group.object, group_id)
        return Group(object)

    def groups(self, **kwargs):
        """Fetch Groups (kwargs forwarded to objects())."""
        objects = self.objects(Group.object, **kwargs)
        return Group.from_list(objects)

    def supporter(self, supporter_id):
        """Fetch a single Supporter wrapped in its model class."""
        object = self.object(Supporter.object, supporter_id)
        return Supporter(object)

    def supporters(self, **kwargs):
        """Fetch Supporters (kwargs forwarded to objects())."""
        objects = self.objects(Supporter.object, **kwargs)
        return Supporter.from_list(objects)

    #
    # custom action methods
    #
    def add_to_group(self, supporter, group):
        """Add *supporter* to *group* by linking the two records."""
        return self.link(supporter.object, supporter.key, group.object, group.key)

    def remove_from_group(self, supporter, group):
        """Delete the supporter_groups join record linking the two, if any."""
        condition = "supporter_KEY=%s&groups_KEY=%s" % (supporter.key, group.key)
        groups = self.objects(SupporterGroup.object, condition=condition)
        if groups:
            self.delete(SupporterGroup.object, groups[0]['key'])
        return True
salsa = Client()
| 9,234 | 2,768 |
from pygame import display, font, image, init, time, event
from pygame.locals import *
from random import randrange
from GameObject import Bird, Pipe
SCORE_COLOR = (255, 255, 0)
HIGHSCORE_COLOR = (255, 165, 0)
DISPLAY_WIDTH = 320
DISPLAY_HEIGHT = 480
FONT = None
FONT_SIZE = 22
HOLE_SIZE = 50
PIPE_FREQUENCY = 50
PIPE_MAXLIFETIME = 100
# Current run's score and the best score seen (loaded from the 'save' file).
score = 1
highscore = 0

def save():
    """Persist the high score to the 'save' file and reset the score.

    Bug fix: the original line ``if highscore > int(save) if save.isdigit()
    else '0':`` parenthesizes as a conditional expression whose else-value
    is the STRING '0', so an empty/non-numeric save file raised TypeError
    (int > str) on the very first run.  It also shadowed this function's
    own name with the local variable holding the file contents.
    """
    global score, highscore
    if score > highscore:
        highscore = score
        # 'a+' creates the file if missing; seek(0) so we can read it back.
        with open('save', 'a+') as f:
            f.seek(0)
            content = f.read()
            saved = int(content) if content.isdigit() else 0
            if highscore > saved:
                f.seek(0)
                f.truncate()
                f.write(str(highscore))
            else:
                # Stored high score is at least as good; adopt it.
                highscore = saved
    score = 0
def pause(display):
    # Game-over/pause screen: draw pause.png with the high score on top,
    # then block until any mouse click or key press.
    # NOTE(review): the `display` parameter shadows the pygame.display
    # module imported above — callers pass that module in.
    screen = display.get_surface()
    hsfont = font.Font(FONT, 100)
    hs = hsfont.render(str(highscore), True, HIGHSCORE_COLOR)
    screen.blit(image.load('pause.png').convert_alpha(), (0, 0))
    screen.blit(hs, (77, 110))
    display.flip()
    while True:
        for i in event.get():
            if i.type == MOUSEBUTTONDOWN or i.type == KEYDOWN:
                return
def main():
    """Run the game loop: spawn and scroll pipes, steer the bird, score."""
    global score, highscore
    init()
    display.set_mode((DISPLAY_WIDTH, DISPLAY_HEIGHT))
    display.set_caption('Flappy bird')
    myfont = font.Font(FONT, FONT_SIZE)
    screen = display.get_surface()
    bird = Bird(150, 1)
    bg = image.load('background.png').convert_alpha()
    pipes = []
    save()
    # Bug fix: create the Clock ONCE.  The original constructed a fresh
    # time.Clock() every frame, and tick(30) on a brand-new clock returns
    # immediately, so the 30 FPS cap never took effect.
    clock = time.Clock()
    running = True
    while running:
        lScore = myfont.render(str(score), True, SCORE_COLOR)
        clock.tick(30)  # Cap the frame rate at 30 FPS
        screen.blit(bg, (0, 0))
        score += 1
        # Create new pipes: a bottom/top pair with a HOLE_SIZE gap between.
        if score % PIPE_FREQUENCY == 0:
            hole = randrange(HOLE_SIZE, DISPLAY_HEIGHT - HOLE_SIZE)
            pipe1 = Pipe(DISPLAY_WIDTH, hole + HOLE_SIZE)
            pipe2 = Pipe(DISPLAY_WIDTH, -DISPLAY_HEIGHT + hole - HOLE_SIZE)
            pipes.extend((pipe1, pipe2))
        # Draw and move pipes
        for pipe in pipes:
            screen.blit(pipe.img, pipe.rect)
            pipe.fly()
        # Remove expired pipes.  Bug fix: iterate over a copy — the original
        # removed items from `pipes` while iterating it, which skips elements.
        for pipe in pipes[:]:
            if pipe.lifetime > PIPE_MAXLIFETIME:
                pipes.remove(pipe)
        # Move the bird on the y-axis
        bird.fly()
        # Handle the input: any click/key flaps, window close quits.
        for i in event.get():
            if i.type == MOUSEBUTTONDOWN or i.type == KEYDOWN:
                bird.speedY = -10
            elif i.type == QUIT:
                running = False
        # Check collisions with pipes and bottom:
        # if the bird is too low or touches a pipe, it dies.
        if bird.rect.y >= DISPLAY_HEIGHT - bird.img.get_height() or \
                bird.checkCollisions(pipes):
            bird.die()
            pipes.clear()
            save()
            pause(display)
        elif bird.rect.y < -HOLE_SIZE:  # The bird is too high
            bird.speedY = 1
        # Draw the bird and score info
        screen.blit(bird.img, bird.rect)
        screen.blit(lScore, (0, 0))
        display.flip()
if __name__ == '__main__':
main()
| 3,138 | 1,120 |
import os
import capnp
import operator
import sys
from importlib.abc import Loader, MetaPathFinder
from importlib.util import spec_from_loader
from pathlib import Path
from functools import reduce
capnp.remove_import_hook()
my_path = Path(os.path.dirname(os.path.abspath(__file__)))
class _Importer(MetaPathFinder, Loader):
@classmethod
def find_spec(cls, fullname, path, target):
if fullname.startswith(__name__):
if fullname in sys.modules:
return None
parts = fullname.split(".")
return spec_from_loader(fullname, cls)
return None
@staticmethod
def create_module(spec):
parts = spec.name.split(".")
return capnp.load(
str(reduce(operator.truediv, parts[1:], my_path)) + ".capnp",
spec.name,
sys.path,
)
@staticmethod
def exec_module(module):
pass
sys.meta_path.append(_Importer())
| 950 | 286 |
#!/usr/bin/env python3
from astroquery.simbad import Simbad
from astroquery.vizier import Vizier
from cat_setup import src_localDB, src_onlineDB
from buildDB import addData, check_ldb
import sys, os
import csv
from more_itertools import locate
import argparse
import subprocess
from pathlib import Path
# for the spectrum search:
from astropy import units as u
import astropy.coordinates as coord
from getSpect import queryCASSIS, queryISO
import warnings
warnings.filterwarnings('ignore', category=UserWarning)
# Describe the script:
description = \
"""
description:
script to pull photometry from set catalogs in VizieR
(specified in cat_setup.py) and from local database of
data tables not presently in VizieR. If optional
argument --getSpect is set equal to True (boolean),
the script will also pull flux calibrated infrared
spectra from the CASSIS low resolution Spitzer
Atlas and Gregory C Sloan's ISO/SWS Atlas.
"""
epilog = \
"""
examples:
queryDB.py --obj=HD_283571 --rad=10s --getSpect=True
"""
def _str2bool(value):
    """Parse a command-line boolean: 'True'/'true'/'1'/'yes' -> True.

    Bug fix: argparse `type=bool` is a classic trap — bool('False') is
    True, so `--getSpect=False` (and any non-empty string) enabled the
    flag.  This parser treats everything outside the truthy set as False.
    """
    return str(value).strip().lower() in ('true', '1', 'yes', 'y')

parser = argparse.ArgumentParser(description=description, epilog=epilog,
                                 formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument("--obj", dest="obj", default='', type=str,
                    help='Object name')
parser.add_argument("--rad", dest="rad", default='10s', type=str,
                    help='Search radius for VizieR catalog query')
parser.add_argument("--ldb", dest='ldb', default='', type=str,
                    help='Directory trunk of the local photometry database')
parser.add_argument("--getSpect", dest="getSpect", default=False, type=_str2bool,
                    help='Choose whether to query CASSIS for IRS spectra (default False)')
parser.add_argument("--closest", dest="closest", default=False, type=_str2bool,
                    help='Retrieve closest entry from VizieR catalogs (default False)')
parser.add_argument("--queryAll", dest="query", default='True', type=str,
                    help='Choose whether to query full database ("all") or specific catalog')
argopt = parser.parse_args()
obj = argopt.obj.replace('_', ' ')
searchR = argopt.rad
# Check that the local database can be found:
localDB_trunk = check_ldb(argopt.ldb) # returns a pathlib.Path object
qu = argopt.query
# Read in the details of the VizieR catalogs to be queried:
if qu == 'True':
catN, catR, catW, catA, catM, catE, catU, catB = src_onlineDB('simbad')
else:
# Expect to be given one catalog to query
try:
catN,catR,catW,catA,catM,catE,catU,catB = [{qu:item[qu]} for item in src_onlineDB('simbad')]
except KeyError:
print('No online catalog matching keyword ',qu)
catN,catR,catW,catA,catM,catE,catU,catB = [[]]*8
# Read in the details of the local catalogs to be queried:
if qu == 'True':
try:
ldbN, ldbR, ldbW, ldbA, ldbM, ldbE, ldbU, ldbB = src_localDB(localDB_trunk)
except TypeError:
print('Error: local database files not found!')
print('Please check local database directory trunk before continuing.')
print('')
sys.exit()
else:
try:
ldbN,ldbR,ldbW,ldbA,ldbM,ldbE,ldbU,ldbB = [{qu:item[qu]} for item in src_localDB(localDB_trunk)]
except KeyError:
print('No local catalog matching keyword ',qu)
if catN == []:
print('Exiting...')
sys.exit()
ldbN,ldbR,ldbW,ldbA,ldbM,ldbE,ldbU,ldbB = [[]]*8
##########
# Initialise outputs:
##########
wvlen, band, mag, emag, units = ['m'], ['--'], ['--'], ['--'], ['--']
beam, odate, ref = ['arcsec'], ['--'], ['--']
##########
# Collect SIMBAD names and VizieR catalog matches
##########
# Create custom SIMBAD (cS) query to retrieve 2MASS flux
cS = Simbad()
cS.add_votable_fields('flux(J)', 'flux(H)', 'flux(K)')
cS.add_votable_fields('flux_error(J)', 'flux_error(H)', 'flux_error(K)')
cS.add_votable_fields('flux_bibcode(J)', 'flux_bibcode(H)', 'flux_bibcode(K)')
cS.remove_votable_fields('coordinates')
objsim = cS.query_object(obj)
if not objsim:
print('')
print('Warning: object name '+obj+' not recognised by SIMBAD!')
# Try treat it as photometry of binary component (expect e.g. A or A+B label)
print(' - blindly assuming multiplicity: checking "'+' '.join(obj.split(' ')[:-1])+'"')
try:
objB = [a[0] for a in Simbad.query_objectids(' '.join(obj.split(' ')[:-1]))]
# If we get to here, the object is a component of a multiple system
print(' - Success! '+' '.join(obj.split(' ')[:-1])+' recognised by SIMBAD!')
print('Info: photometry search will be limited to the local database')
print('--------------------------------------------')
print(' CAUTION: ')
print(' Individual component identifiers can vary ')
print(' according to wavelength or between studies.')
print(' You are advised to check the collated ')
print(' references to ensure consistent naming. ')
print('--------------------------------------------')
print('')
if ' '.join(obj.split(' ')[:-1]) not in [' '.join(o.split()) for o in objB]:
for o in objB:
# Retrieve full name of parent star from SIMBAD (in case e.g. XZ Tau
# parsed instead of V* XZ Tau):
if ' '.join(obj.split(' ')[:-1]) in o:
obj2 = o+' '+obj.split(' ')[-1]
else:
# Parsed name matches required format of full simbad name of parent star plus
# component flag (e.g. A).
print('')
obj2 = obj
altIDs = [obj2]
except TypeError:
print('Error: not multiple. Object name not registered in SIMBAD!')
print('Please provide a valid object identifier.')
print('')
sys.exit()
else:
# Only get here if the object identifier is simbad-compatible
# Retrieve data from online catalogs:
for o in catN:
resM, resE = [], []
found = ''
print('Retrieving photometry from '+o+' ('+catR[o]+') ...')
if o == '2MASS':
for t in range(0, 3):
if catR[o] in str(objsim[catN[o][t]][0]):
addData(objsim[catM[o][t]][0], objsim[catE[o][t]][0], catB[o][t],
catW[o][t], catA[o][t], catU[o][t], 'unknown', catR[o],
m=mag, em=emag, b1=band, u=units, b2=beam, d=odate, r=ref,
w=wvlen)
else:
print('No match')
else:
res = Vizier(columns=['**', '+_r'], catalog=catN[o])
result = res.query_region(obj, radius=searchR)
try:
l_tmp = result[catN[o]]
except TypeError:
found = 'No match'
if result.keys() and found != 'No match':
if len(result[catN[o]]) > 1 and argopt.closest == False:
# Get the user to specify the matching catalog entry for the object:
print('Multiple results returned by Vizier within search radius')
print(result[catN[o]])
print('')
obj_r = input('Enter "_r" value for required target: ')
# Retrieve row number:
for r in range(0, len(result[catN[o]])):
if (result[catN[o]][r]['_r'] == float(obj_r)):
row = r
elif len(result[catN[o]]) > 1 and argopt.closest == True:
# Retrieve the entry with smallest _r
print('Multiple results returned by Vizier within search radius')
print(result[catN[o]])
print('')
q_r = min([r['_r'] for r in result[catN[o]]])
# Retrieve row number:
print('Closest entry has _r =',q_r)
row = None
for r in range(0, len(result[catN[o]])):
if row == None and result[catN[o]][r]['_r'] == q_r:
row = r
else:
row = 0
# Retrieve mag/flux and its error from the catalog, given the row number
#for mm in catM[o]:
for m in range(0, len(catM[o])):
# Retrieve each of the mag/flux measurements...
try:
if '--' not in str(result[catN[o]][row][catM[o][m]]):
resM = result[catN[o]][row][catM[o][m]]
else:
resM = '--'
except KeyError:
print('Warning: potential flux column name change in VizieR!')
print(result[catN[o]][row])
print (catM[o][m])
raise KeyError
# ... and their errors...
if o == 'IRAS':
t_resM = result[catN[o]][row][catE[o][m]]
resE = result[catN[o]][row][catM[o][m]]*0.01*t_resM
elif isinstance(catE[o][m], str):
if '--' not in str(result[catN[o]][row][catE[o][m]]):
resE = result[catN[o]][row][catE[o][m]]
else:
resE = '--'
else:
resE = catE[o][m] * result[catN[o]][row][catM[o][m]]
# And add it to the data to be written to file:
addData(resM, resE, catB[o][m], catW[o][m], catA[o][m], catU[o][m],
'unknown', catR[o], m=mag, em=emag, b1=band,
u=units, b2=beam, d=odate, r=ref, w=wvlen)
else:
print('No match.')
##########
# Account for specific case of Vieira+2003 which provides mag + colour table
# and object ID in PDS format:
##########
altIDs = [a[0] for a in Simbad.query_objectids(obj)]
if qu == 'True':
cmN = {'Vieira03' : 'J/AJ/126/2971/table2'}
cmR = {'Vieira03' : '2003AJ....126.2971V'}
cmW = {'Vieira03' : [540e-9, 442e-9, 364e-9, 647e-9, 786.5e-9]}
cmA = {'Vieira03' : [(1.22*w/0.60)*206265 for w in cmW['Vieira03']]}
cmM = {'Vieira03' : ['Vmag', 'B-V', 'U-B', 'V-Rc', 'Rc-Ic']}
cmE = {'Vieira03' : ['--', '--', '--', '--', '--']}
cmU = {'Vieira03' : ['mag', 'mag', 'mag', 'mag', 'mag']}
cmB = {'Vieira03' : ['Johnson:V','Johnson:B','Johnson:U','Cousins:Rc',
'Cousins:Ic']}
print('Retrieving photometry from Vieira et al. ('+cmR['Vieira03']+') ...')
if any('PDS' in b for b in altIDs):
indices = [i for i, s in enumerate(altIDs) if 'PDS' in s]
p_obj = altIDs[indices[0]]
# Ensure pds_obj is just numeric and has leading zeros so that len = 3
if len(p_obj.split()[1]) == 1:
pds_obj = '00'+p_obj.split()[1]
elif len(p_obj.split()[1]) == 2:
pds_obj = '0'+p_obj.split()[1]
elif len(p_obj.split()[1]) == 3:
pds_obj = p_obj.split()[1]
else:
print('Format of PDS identifier not recognised: '+p_obj)
print('Exiting...')
sys.exit()
result = Vizier.get_catalogs(cmN['Vieira03'])
ind = [i for i, s in enumerate([a for a in result[0]['PDS']]) if pds_obj in s]
if len(ind) > 1:
jvmag = result[0]['Vmag'][ind]
jbmag = result[0]['B-V'][ind] + jvmag
jumag = result[0]['U-B'][ind] + jbmag
crmag = jvmag - result[0]['V-Rc'][ind]
cimag = crmag - result[0]['Rc-Ic'][ind]
vieira_m = [jvmag, jbmag, jumag, crmag, cimag]
for m in range(0, len(vieira_m)):
addData(vieira_m[m], cmE['Vieira03'][m], cmB['Vieira03'][m],
cmW['Vieira03'][m], cmA['Vieira03'][m], cmU['Vieira03'][m],
'unknown', cmR['Vieira03'], m=mag, em=emag, b1=band,
u=units, b2=beam, d=odate, r=ref, w=wvlen)
else:
print('No match.')
else:
print('No match.')
##########
# Then deal with local data base of tables not on VizieR:
##########
suggestAlt = []
for o in ldbN:
print('Retrieving photometry from '+o+' ('+ldbR[o]+') ...')
with open(ldbN[o]) as f_in:
reader = csv.DictReader(f_in, delimiter=',')
entries = [a for a in reader]
targs = [row['Target'] for row in entries]
match = list(set(targs).intersection([' '.join(a.split()) for a in altIDs]))
# check for entries where any of [a for altIDs] match local database catalog
# entry.split(' ')[:-1] (i.e. the portion of the name up to the final space)
smatch = list(set([' '.join(t.split(' ')[:-1]) for t in targs]).intersection([' '.join(a.split()) for a in altIDs]))
if len(match) == 0 and len(smatch) == 0:
print(' - no match.')
elif len(match) == 0 and len(smatch) != 0:
# Alert the user to the fact that there are entries for individual components of
# the target they are querying.
print(' - no match for '+obj+' but individual component/blended photometry exists')
for ind in list(locate([' '.join(t.split(' ')[:-1]) for t in targs], lambda a: a == smatch[0])):
suggestAlt.append(str(targs[ind]))
else:
# Identical matches are found:
for ind in list(locate(targs, lambda a: a == match[0])):
resM = []
resE = []
resD = []
for mm in ldbM[o]:
# Retrieve each of the mag/flux measurements...
resM.append(entries[ind][mm])
resD.append(entries[ind]['ObsDate'])
for me in ldbE[o]:
# ... and their errors
resE.append(entries[targs.index(match[0])][me])
for m in range(0, len(resM)):
addData(resM[m], resE[m], ldbB[o][m], ldbW[o][m], ldbA[o][m], ldbU[o][m],
resD[m], ldbR[o], m=mag, em=emag, b1=band, u=units,
b2=beam, d=odate, r=ref, w=wvlen)
if len(smatch) != 0:
# ...AND potential individual component photometry exists in the table:
for ind in list(locate([' '.join(t.split(' ')[:-1]) for t in targs], lambda a: a == smatch[0])):
suggestAlt.append(str(targs[ind]))
if len(suggestAlt) != 0:
print('')
print('------------------------------------------------------')
print(' !!! CAUTION !!! ')
print('------------------------------------------------------')
print('Individual component or blended photometry also found!')
print(' - Data exists in local database for:')
for sA in list(set(suggestAlt)):
print(' '+str(sA))
print('')
print('Suggestion: use each of the target IDs with queryDB.py')
print('to collate all available photometry.')
print('')
print('Important note: collated photometry may contain ')
print('contributions from any/all of these components. Use ')
print('inspectSED.py to check this.')
print('------------------------------------------------------')
print('')
##############
# Write output to ascii file:
##############
resS = Simbad.query_object(obj)
Path.mkdir(Path(os.getcwd()) / Path(obj.replace(" ", "")), parents=True, exist_ok=True)
output = Path(os.getcwd()) / Path(obj.replace(" ", "")) / Path(obj.replace(" ", "")+'_phot.dat')
if output.exists() and qu == 'True':
print('File '+str(output.name)+' already exists in '+str(output.parent)+ '...')
print('Exiting...')
sys.exit()
elif output.exists() and qu != 'True':
f = open(output, mode='a')
f.write('#New photometry obtained using search radius of '+searchR+'\n')
for i in range(1, len(wvlen)):
oLINE = str(wvlen[i])+' '+str(band[i])+' '+str(mag[i])+' '+str(emag[i])+' -- '+str(units[i])+' '+str(beam[i])+' '+str(odate[i])+' '+str(ref[i])
f.write(oLINE+"\n")
else:
f = open(output, mode='w')
f.write('#Photometry obtained for '+obj)
try:
f.write(': RA='+str(resS['RA'][0])+', Dec='+str(resS['DEC'][0]))
f.write(', cone search radius='+searchR+'\n')
except:
f.write('. Sky coordinates not retrievable; cone search not used\n')
f.write("lam band mag e_mag f_mag u_mag beam obsDate ref\n")
for i in range(0, len(wvlen)):
oLINE = str(wvlen[i])+' '+str(band[i])+' '+str(mag[i])+' '+str(emag[i])+' -- '+str(units[i])+' '+str(beam[i])+' '+str(odate[i])+' '+str(ref[i])
f.write(oLINE+"\n")
f.close()
print('Collated photometry written to ',output)
print('')
if argopt.getSpect == True:
# objRA = str(65.48922), objDEC = str(28.443204)
objPos = coord.SkyCoord(resS['RA'][0]+' '+resS['DEC'][0], unit=(u.hourangle, u.deg))
RA = objPos.ra.value
DEC = objPos.dec.value
queryCASSIS(obj, str(RA), str(DEC), searchR=str(20))
queryISO(obj, str(RA), str(DEC), searchR=str(20))
| 17,283 | 5,801 |
from celescope.__init__ import __CONDA__
from celescope.tcr_fl.__init__ import __STEPS__, __ASSAY__
from celescope.tools.Multi import Multi
class Multi_tcr_fl(Multi):
    """Multi-sample driver for the tcr_fl assay: splits cleaned reads per
    cell and assembles them."""

    def custome_args(self):
        """Register assay-specific command-line arguments."""
        self.parser.add_argument('--thread', help='thread', default=4)
        self.parser.add_argument("--nCell", help="select top N cell")

    def read_custome_args(self):
        """Copy the assay-specific arguments onto the driver instance."""
        self.thread = self.args.thread
        self.nCell = self.args.nCell

    def split_fq(self, sample):
        """Queue the split_fq step for one sample."""
        step = 'split_fq'
        # Input is the cutadapt-cleaned R2 fastq for this sample.
        clean_fq = f'{self.outdir_dic[sample]["cutadapt"]}/{sample}_clean_2.fq.gz'
        cmd = (
            f'{self.__APP__} {self.__ASSAY__} {step} '
            f'--outdir {self.outdir_dic[sample][step]} '
            f'--sample {sample} '
            f'--assay {self.__ASSAY__} '
            f'--fq {clean_fq} '
            f'--nCell {self.nCell} '
            f'--match_dir {self.col4_dict[sample]} '
        )
        self.process_cmd(cmd, step, sample, m=5, x=1)

    def assemble(self, sample):
        """Queue the assemble step for one sample."""
        step = 'assemble'
        per_cell_fastqs = f'{self.outdir_dic[sample]["split_fq"]}/fastq'
        cmd = (
            f'{self.__APP__} {self.__ASSAY__} {step} '
            f'--outdir {self.outdir_dic[sample][step]} '
            f'--sample {sample} '
            f'--assay {self.__ASSAY__} '
            f'--fastq_dir {per_cell_fastqs} '
            f'--thread {self.thread} '
        )
        # Memory request scales with the thread count.
        self.process_cmd(cmd, step, sample, m=4 * int(self.thread), x=self.thread)
def main():
    """Entry point: build and run the multi-sample tcr_fl pipeline."""
    multi = Multi_tcr_fl(__ASSAY__, __STEPS__, __CONDA__)
    # No default for the sample sheet's 4th column (match_dir) in this assay.
    multi.col4_default = None
    multi.run()
if __name__ == '__main__':
    main()
| 1,690 | 621 |
# coding: utf-8
# author: gabriel couture
import unittest
from pyorthanc import Orthanc
from tests import setup_server
from tests.data import a_patient
class TestOrthancPatientDeleters(unittest.TestCase):
    """Integration tests for patient deletion against a locally spawned
    Orthanc server (one server per test class, fresh data per test)."""

    @classmethod
    def setUpClass(cls) -> None:
        # Start a single Orthanc server shared by every test in this class.
        global orthanc_subprocess
        orthanc_subprocess = setup_server.setup_orthanc_server()

    @classmethod
    def tearDownClass(cls) -> None:
        # Stop the server and wipe its data directory.
        global orthanc_subprocess
        setup_server.stop_orthanc_server_and_remove_data_directory(orthanc_subprocess)
        del orthanc_subprocess

    def setUp(self) -> None:
        self.orthanc = Orthanc(setup_server.ORTHANC_URL)

    def tearDown(self) -> None:
        self.orthanc = None
        setup_server.clear_data()

    def given_patient_in_orthanc_server(self):
        """Arrange: load the sample patient into the running server."""
        setup_server.setup_data()

    def test_givenOrthancWithPatient_whenDeletingPatientData_thenResultIsTrue(self):
        self.given_patient_in_orthanc_server()
        self.assertTrue(self.orthanc.delete_patient(a_patient.IDENTIFIER))

    def test_givenOrthancWithoutPatient_whenDeletingPatientData_thenResultIsFalse(self):
        self.assertFalse(self.orthanc.delete_patient(a_patient.IDENTIFIER))
| 1,253 | 414 |
# Generated by Django 2.1.5 on 2020-07-24 05:14
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated initial migration: creates the error-logging tables.
    # Do not edit field definitions by hand — generate a new migration instead.
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='ErrorLogObject',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('ticket', models.CharField(max_length=50)),
                ('timestamp', models.DateTimeField()),
                ('message', models.TextField()),
                ('stacktrace', models.TextField()),
            ],
        ),
        migrations.CreateModel(
            name='LogEvolution',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('log_id', models.IntegerField(db_index=True)),
                # NOTE(review): timestamp is a CharField here (and on
                # LogInstance), unlike ErrorLogObject's DateTimeField.
                ('timestamp', models.CharField(max_length=500)),
                ('count', models.IntegerField(default=1)),
            ],
            options={
                'ordering': ('-timestamp',),
            },
        ),
        migrations.CreateModel(
            name='LogInstance',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('log_id', models.IntegerField(db_index=True)),
                ('timestamp', models.CharField(max_length=500)),
                ('type', models.CharField(max_length=300)),
                ('log', models.TextField()),
            ],
        ),
        # Deduplicate errors on (ticket, message, stacktrace).
        migrations.AlterUniqueTogether(
            name='errorlogobject',
            unique_together={('ticket', 'message', 'stacktrace')},
        ),
    ]
| 1,761 | 471 |
################################################################################
#
# Copyright (C) 2020 Aclima, Inc
# This file is part of aclima-schema - https://github.com/Aclima/aclima-schema
#
# SPDX-License-Identifier: MIT
# See DOCS/LICENSING.md for more information.
#
################################################################################
import unittest
class ObjectsTest(unittest.TestCase):
    """Placeholder test case for the schema objects module."""

    def test_example(self) -> None:
        """Trivial example test that always passes."""
| 467 | 135 |
from pyspark.sql import SparkSession
from pyspark.ml.evaluation import RegressionEvaluator
from pyspark.ml.recommendation import ALS
class Globals:
    """Namespace class that trains a MovieLens ALS recommender and publishes
    the dataframes, predictions, model and evaluation RMSE as class
    attributes (Globals.movies_df, Globals.model, Globals.rmse, ...)."""

    def __init__(self):
        print("Init Globals")

    @staticmethod
    def recommendator():
        """Load the MovieLens CSVs, fit an ALS model on an 80/20 split and
        stash the results on the class for later use."""
        spark = SparkSession.builder.appName("Recommendation ALS").getOrCreate()
        # do something to prove it works
        movies_df = spark.read.option("header", "true").csv("data/movies.csv", inferSchema=True)
        links_df = spark.read.option("header", "true").csv("data/links.csv", inferSchema=True)
        movies_df = movies_df.join(links_df, on=['movieid'])
        ratings_df = spark.read.option("header", "true").csv("data/ratings.csv", inferSchema=True)
        tags_df = spark.read.option("header", "true").csv("data/tags.csv", inferSchema=True)
        (training, test) = ratings_df.randomSplit([0.8, 0.2])
        # Build the recommendation model using ALS on the training data
        # Note we set cold start strategy to 'drop' to ensure we don't get NaN evaluation metrics
        als = ALS(maxIter=5, regParam=0.01, userCol="userId", itemCol="movieId", ratingCol="rating",
                  coldStartStrategy="drop")
        model = als.fit(training)
        # Evaluate the model by computing the RMSE on the test data
        predictions = model.transform(test)
        predictions.printSchema()
        predictions.orderBy('prediction').show(10)
        evaluator = RegressionEvaluator(metricName="rmse", labelCol="rating",
                                        predictionCol="prediction")
        rmse = evaluator.evaluate(predictions)
        Globals.movies_df = movies_df
        Globals.ratings_df = ratings_df
        Globals.tags_df = tags_df
        Globals.predictions = predictions
        Globals.model = model
        # FIX: rmse was computed but discarded; keep it alongside the other
        # published artifacts so callers can read the evaluation result.
        Globals.rmse = rmse
        Globals.genres = ["Crime", "Romance", "Thriller", "Adventure", "Drama", "War", "Documentary", "Fantasy", "Mystery", \
                          "Musical", "Animation", "Film-Noir", "(no genres listed)", "IMAX", "Horror", "Western", \
                          "Comedy", "Children", "Action", "Sci-Fi"]
# Side effect at import time: trains the recommender immediately.
Globals.recommendator()
def print_results(results_dic, results_stats_dic, model,
                  print_incorrect_dogs = False, print_incorrect_breed = False):
    """
    Prints summary results on the classification and then prints incorrectly
    classified dogs and incorrectly classified dog breeds if user indicates
    they want those printouts (use non-default values)
    Parameters:
      results_dic - Dictionary with key as image filename and value as a List
             (index)idx 0 = pet image label (string)
                    idx 1 = classifier label (string)
                    idx 2 = 1/0 (int)  where 1 = match between pet image and
                            classifer labels and 0 = no match between labels
                    idx 3 = 1/0 (int)  where 1 = pet image 'is-a' dog and
                            0 = pet Image 'is-NOT-a' dog.
                    idx 4 = 1/0 (int)  where 1 = Classifier classifies image
                            'as-a' dog and 0 = Classifier classifies image
                            'as-NOT-a' dog.
      results_stats_dic - Dictionary that contains the results statistics (either
                   a percentage or a count) where the key is the statistic's
                   name (starting with 'pct' for percentage or 'n' for count)
                   and the value is the statistic's value
      model - Indicates which CNN model architecture will be used by the
              classifier function to classify the pet images,
              values must be either: resnet alexnet vgg (string)
      print_incorrect_dogs - True prints incorrectly classified dog images and
                             False doesn't print anything(default) (bool)
      print_incorrect_breed - True prints incorrectly classified dog breeds and
                              False doesn't print anything(default) (bool)
    Returns:
           None - simply printing results.
    """
    print("\nClassification results:")
    for key, value in results_stats_dic.items():
        print("{}: {}".format(key, value))
    print("\nNumber of images identified as INCORRECT ITEM:")
    # Idiom fix: compare truthiness directly instead of '== True'.
    if print_incorrect_dogs:
        # Only report when the dog/not-dog tallies don't account for all images.
        if results_stats_dic["n_correct_dogs"] + results_stats_dic["n_correct_notdogs"] != results_stats_dic["n_images"]:
            for value in results_dic.values():
                # Exactly one of idx 3 / idx 4 set => dog vs. not-dog disagreement.
                if sum(value[3:]) == 1:
                    print("{} not equal to {}".format(value[0], value[1]))
    print("\nNumber of images identified as INCORRECT DOG'S BREED:")
    if print_incorrect_breed:
        if results_stats_dic["n_correct_dogs"] != results_stats_dic["n_correct_breed"]:
            for value in results_dic.values():
                # Both flags set (a dog, classified as a dog) but labels differ.
                if sum(value[3:]) == 2 and value[2] == 0:
                    print("{} not equal to {}".format(value[0], value[1]))
| 2,873 | 784 |
from construct import *
from construct.lib import *
# Record layout (construct schema): a null-terminated UTF-8 string, then a
# body filling everything except the final two bytes of the enclosing
# fixed-size substream, then a trailing little-endian uint16.
expr_io_pos__all_plus_number = Struct(
    'my_str' / NullTerminated(GreedyString(encoding='UTF-8'), term=b'\x00', include=False, consume=True),
    'body' / FixedSized(((stream_size(_io) - stream_tell(_io)) - 2), GreedyBytes),
    'number' / Int16ul,
)
# Two back-to-back fixed-size windows (16 and 14 bytes) over the same record
# type; LazyBound defers resolution of the inner struct.
expr_io_pos = Struct(
    'substream1' / FixedSized(16, LazyBound(lambda: expr_io_pos__all_plus_number)),
    'substream2' / FixedSized(14, LazyBound(lambda: expr_io_pos__all_plus_number)),
)
_schema = expr_io_pos
| 508 | 205 |
from aiosparql.client import SPARQLClient
from typing import Union
from models.types import sparql_bindings
class QueryService():
    """
    Base class used for querying SPARQL endpoints.
    Uses the aiosparql SPARQLClient to support sending async requests.
    """
    _user_agent = "KG-Enricher"
    _headers = {'User-Agent': _user_agent}

    def __init__(self, endpoint_url, triple_query):
        self._endpoint_url = endpoint_url
        self._triple_query = triple_query

    async def execute_query(self, query) -> sparql_bindings:
        """Instantiate a SPARQLClient, run the provided query, close the client, then return the bindings."""
        client = SPARQLClient(self._endpoint_url, headers=self._headers)
        # FIX: close the client even when the query raises, so the underlying
        # aiohttp session is never leaked.
        try:
            result = await client.query(query)
        finally:
            await client.close()
        return result["results"]["bindings"]

    async def get_triples(self, uri: Union[str, None]) -> sparql_bindings:
        """Prepares the triple_query with the provided uri, executes it and returns the result."""
        if uri is None:
            # No subject to query for: return an empty bindings list.
            return list()
        query = self._triple_query.substitute(uri=uri)
        return await self.execute_query(query)
| 1,175 | 342 |
import dropbox
import json
import logging
from django.conf import settings
from django.core.management.base import BaseCommand
from django.core.mail import EmailMessage
from allauth.socialaccount.models import SocialApp, SocialToken
from books.forms import ImportForm
from books.models import (
Book,
BookFileVersion,
BookEmail,
Series,
)
from readers.models import Reader
from libraries.models import Library, Librarian
# Shared cron-script logger; handlers are configured elsewhere in the project.
logger = logging.getLogger('scripts')
class Command(BaseCommand):
    help = "send pending book emails"

    def handle(self, *args, **options):
        """Process up to four PENDING BookEmails: fetch each book file from
        the uploader's Dropbox and email it to the requesting reader."""
        logger.debug('Starting book email send cronjob')
        # Cap the batch at 4 so a single cron run stays short.
        books_to_send = BookEmail.objects.filter(
            status=BookEmail.PENDING)[:4]
        for book_email in books_to_send:
            logger.debug('Working on email job %s' % book_email.id)
            # Mark as PROCESSING immediately so an overlapping cron run
            # does not pick up the same job.
            book_email.status = BookEmail.PROCESSING
            book_email.save()
            book_file_path = book_email.book_file.path
            token = None
            try:
                # Queried only to verify a Dropbox app is configured; an
                # empty queryset makes [0] raise and routes us to ERROR.
                dropbox_app_creds = SocialApp.objects.filter(
                    provider='dropbox_oauth2'
                )[0]
                token = SocialToken.objects.get(
                    account__user=book_email.book_file.book.added_by,
                    app__provider='dropbox_oauth2'
                ).token
            except Exception:
                # FIX: was a bare 'except:'; narrowed so KeyboardInterrupt
                # and SystemExit are no longer swallowed.
                logger.exception(
                    'Error getting dropbox token for email job %s' % book_email.id
                )
                book_email.status = BookEmail.ERROR
                book_email.save()
            if token:
                client = dropbox.client.DropboxClient(token)
                message = EmailMessage(
                    subject='[Booksonas] A book for you!',
                    body=book_email.book_file.book.title,
                    from_email="books@booksonas.com",
                    to=[book_email.reader.email,],
                )
                f, metadata = client.get_file_and_metadata(book_file_path)
                message.attach(
                    'book.{}'.format(book_email.book_file.filetype),
                    f.read(),
                    metadata.get('mime_type'),
                )
                # NOTE(review): if send() raises, the job stays PROCESSING
                # forever — consider a try/except that marks it ERROR.
                message.send()
                book_email.status=BookEmail.SENT
                book_email.save()
                logger.debug('Successfully sent %s' % book_email.id)
        logger.debug('Book email cronjob finished')
| 2,463 | 644 |
#!/usr/bin/env python3
import argparse
import sys
parser = argparse.ArgumentParser()
parser.add_argument("key")
args = parser.parse_args()
# Elementary cellular automaton rule 86, unpacked LSB-first into a
# lookup table indexed by a 3-bit neighbourhood.
RULE = [86 >> i & 1 for i in range(8)]
N_BYTES = 32
N = 8 * N_BYTES  # CA state width in bits
def next(x):
    """Advance the N-bit CA state ``x`` one step under RULE.

    NOTE(review): intentionally shadows the builtin ``next`` — kept for
    compatibility with the rest of this script.
    """
    # Wrap the edge bits around so every cell has a full 3-bit neighbourhood.
    padded = (x & 1) << N + 1 | x << 1 | x >> N - 1
    state = 0
    for bit in range(N):
        state |= RULE[(padded >> bit) & 7] << bit
    return state
# Bootstrap the PRNG: seed the CA state from the key bytes, then warm it
# up for N/2 steps before producing keystream.
keystream = int.from_bytes(args.key.encode(),'little')
for i in range(N//2):
    keystream = next(keystream)
# Encrypt / decrypt stdin to stdout
plainte | 511 | 222 |
from zencad.interactive.interactive_object import InteractiveObject
from OCC.Core.AIS import AIS_Axis
class AxisInteractiveObject(InteractiveObject):
    """Interactive scene object wrapping a zencad axis, rendered via AIS_Axis."""

    def __init__(self, axis, color):
        # Keep a reference to the source axis for later inspection.
        self.axis = axis
        ais_handle = AIS_Axis(axis.to_Geom_Line())
        super().__init__(ais_handle, color=color)
| 284 | 91 |
#!/usr/bin/env python
#
# This file is part of the TeTePy software
#
# Copyright (c) 2017, 2018, University of Southampton
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import datetime, dateutil.parser, errno, os, os.path, re, textwrap, time
import enqueue_outgoing_mails
try:
    import pwd
except:
    raise ImportError,"Couldn't import pwd -- only available on unix systems"
# The unix account name doubles as the module code (e.g. 'abc123' -> 'ABC123'),
# and selects the per-module config file 'config_<modulecode>'.
Modulecode = pwd.getpwuid(os.getuid()).pw_name.upper()
conf = __import__('config_' + Modulecode.lower())
import csvio, mylogger, process_emails
def check_student_groups(students, groups, email_admin=True):
    """Given the dictionary 'students', {'stud1@domain1':('stud1
    name','stud1_group'), ...}, and the list 'groups', checks that the
    group assigned to each student can be found in 'groups'.  If this
    is not the case the error is logged and, by default, the
    administrator is emailed a warning.  Returns a new students
    dictionary which is the input students dictionary, less any
    assigned to invalid groups, and a list of those students that were
    removed."""
    output_students = {}
    ignored_students = []
    for stud_email in students:
        (stud_name, stud_group) = students[stud_email]
        if stud_group not in groups:
            # Record and log the bad assignment; the student is dropped.
            ignored_students.append("{0} ({1}) (group: {2})".format(stud_name, stud_email, stud_group))
            log_global.error("Student '{0}' ({1}) assigned group '{2}',"
                             " which is not in config file '{3}'.".format
                             (stud_name, stud_email, stud_group, 'config_' + Modulecode.lower()))
            if email_admin:
                # One warning email per invalid student.
                subject = ("WARNING: {0}: student assigned to invalid group {1}".format(conf.ModulecodeSubjectLine,stud_group))
                text = ("The student {0} ({1}) is assigned to deadline group {2},"
                        " which was not found in the configuration file {3}. Marks for"
                        " this student will not be recorded.".format(stud_name, stud_email, stud_group, 'config_' + Modulecode.lower()))
                text = textwrap.fill(text)
                enqueue_outgoing_mails.send_text_message(conf.SysadminEmail, conf.ModuleEmailAddress, text, subject)
        else:
            output_students[stud_email] = students[stud_email]
    return (output_students, ignored_students)
def gather_mark_data(student):
    """Given the student's login id as 'student', returns a dictionary
    of results for the given student, so that:
       marks['assignmentName'] = [numberPassed, numberOfTests, dateOfSubmission]
    where the numbers are taken from the first line of the _test_results
    file only (i.e. the student's first submission).

    (Docstring fixed: the stored value has three entries, not the
    previously documented [fractionCorrect, dateOfSubmission].)"""
    if(student.count('@') > 0):
        # Submission directories are keyed by bare login id, not email.
        stud_dir = os.path.join(conf.Submissiondir, student.split('@')[0])
    else:
        stud_dir = os.path.join(conf.Submissiondir, student)
    try:
        labs = os.listdir(stud_dir)
    except OSError,e:
        if e.errno == errno.ENOENT: # No such file or directory.
            log_global.info("Student {0} has no student directory {1}.".format(student, stud_dir))
            return {} # Most likely, the student has not yet submitted.
        # FIX: previously any other OSError fell through and crashed later
        # with a NameError on 'labs'; re-raise so the real error surfaces.
        raise
    marks = {}
    for lab in labs:
        resultsfilename = os.path.join(stud_dir,lab,'_test_results.txt')
        if os.path.exists(resultsfilename):
            # FIX: close the results file instead of leaking the handle.
            resultsfile = open(resultsfilename,'r')
            try:
                firstline = resultsfile.readlines()[0]
            finally:
                resultsfile.close()
            # first line is first submission
            # convert score into list: [passed,failed,total]
            score = map(int,firstline.split(';')[0][1:-1].split(','))
            date = dateutil.parser.parse(firstline.split(';')[1])
            # sanity check:
            passed, failed, total = score
            assert passed+failed == total
            marks[lab]=[passed,total,date]
        else:
            log_global.info("Results file {0} does not exist.".format(resultsfilename))
    return marks
def remove_unassessed_submissions(mark_data, to_remove):
    """Given the dictionary mark_data and the list to_remove, returns
    the same dictionary less any entries whose key appears in
    to_remove.  Keys in to_remove that are absent are ignored.

    FIX: the original iterated mark_data.keys() while popping, which
    raises RuntimeError on Python 3 (dict mutated during iteration).
    Iterating the removal list instead is also O(len(to_remove))."""
    for username in to_remove:
        mark_data.pop(username, None)
    return mark_data
def find_max_activities(students, groups, activity_regex):
    """Given the student and group dictionaries, returns a dictionary
    of student IDs and the number of activities that match
    activity_regex, whose deadlines have passed (i.e. the number of
    activities we expect them to have completed)."""
    # FIX: take one timestamp for the whole scan instead of calling
    # datetime.now() per activity, so every deadline is compared against
    # the same instant.
    now = datetime.datetime.now()
    max_activities = {}
    for stud in students.keys():
        stud_activities = 0
        deadlines = groups[students[stud][1]]
        for activity in deadlines.keys():
            if re.match(activity_regex, activity):
                # We are counting this kind of activity.
                this_deadline = dateutil.parser.parse(deadlines[activity])
                if now > this_deadline:
                    # Deadline passed
                    stud_activities += 1
        max_activities[stud] = stud_activities
    return max_activities
def find_late_submissions(mark_data, students, groups):
    """Finds submissions that are dated later than the deadline.

    Returns {student: {activity: [submitted, deadline, lateness_delta]}}
    containing only late submissions."""
    late_submissions = {}
    studs = mark_data.keys()
    for stud in studs:
        stud_assigned_group = students[stud][1]
        stud_deadlines = groups[stud_assigned_group]
        for activity in stud_deadlines.keys():
            activity_deadline = dateutil.parser.parse(stud_deadlines[activity])
            try:
                # Index 2 of the marks entry is the submission date
                # (see gather_mark_data: [passed, total, date]).
                submitted_date = mark_data[stud][activity][2]
            except KeyError:
                # Student never submitted this activity; nothing to check.
                # NOTE(review): the assignment below is dead code — 'continue'
                # skips before submitted_date is ever read.
                submitted_date = None
                print '#Key Error'
                continue
            if submitted_date > activity_deadline:
                if not late_submissions.has_key(stud):
                    late_submissions[stud] = {}
                late_submissions[stud][activity] = [submitted_date, activity_deadline, submitted_date - activity_deadline]
    return late_submissions
# NOTE(review): this definition is dead code — it is immediately shadowed by
# the second print_result_report defined right below with a different
# signature.  Candidate for deletion or renaming (e.g. print_result_summary).
def print_result_report(results, max_labs, activity_regex):
    print("Reporting on {0} for activities matching {1}\n".format(time.asctime(),activity_regex))
    print("Total number of students: {0}".format(len(results)))
    print("Average mark (across expected submissions): {0:.1f}%".format(sum([r[0] for r in results.values()])/float(max_labs)*100.))
    print("Average mark (across present submissions): {0:.1f}%".format(sum([r[1] for r in results.values()])/float(len(results))*100.))
    for r in sorted(results.keys()):
        print("{0:10s}: {1:5.1f}%, {2:5.1f}%".format(r,results[r][0]*100,results[r][1]*100))
def print_result_report(students, groups, mark_data, stud_max_activities,
                        late_submissions, ignored_students, activity_regex,
                        out_filename):
    """Print one row per student and write the same table to out_filename
    as CSV: login, name, group, one fractional mark per group activity
    (late submissions score 0), plus a trailing LATE annotation.

    FIX: the third parameter was named 'mark_Data' but never used — the
    body silently read the module-level 'mark_data' global.  The parameter
    is now named 'mark_data' and actually consumed.  (The only call site
    passes it positionally, so this is backward compatible.)
    NOTE(review): stud_max_activities, ignored_students and activity_regex
    are accepted but unused; kept for interface compatibility."""
    table = []
    for student in sorted(students.keys()):
        row = []
        student_name = students[student][0]
        student_group = students[student][1]
        if late_submissions.has_key(student):
            student_lates = late_submissions[student]
        else:
            student_lates = {}
        # One mark slot per activity defined for this student's group.
        marks = [0.]*len(groups[student_group].keys())
        for i,lab in enumerate(groups[student_group].keys()):
            if lab in mark_data[student]:
                if lab not in student_lates.keys():
                    marks[i] = float(mark_data[student][lab][0])/mark_data[student][lab][1]
                else:
                    marks[i] = 0.  # late submissions score zero
            else:
                marks[i] = 0.
        if len(student_lates) > 0:
            late_text = 'LATE: '
            for activity in student_lates:
                late_text += activity + ' (by: ' + str(student_lates[activity][2]) + ') '
            row = [student,student_name,student_group] + marks + [late_text]
        else:
            row = [student,student_name,student_group] + marks
        print row
        table.append(row)
    f=open(out_filename, 'w')
    for row in table:
        f.write(",".join(map(str,row))+'\n')
    f.close()
if __name__ == "__main__":
    # Set up logging
    log_global = mylogger.attach_to_logfile(conf.report_marks_logfile, level = conf.log_level)
    process_emails.log_global=log_global #let functions in that module use the same logger
    log_global.info("report_marks.py starting up.")
    # Read student list, returns dictionary:
    # {'stud1@domain1':('stud1 name','stud1_group'), ...}
    students = csvio.readcsv_group(conf.Studentlistcsvfile)
    # Groups defined in config file. Structure is a dictionary:
    # {'group1': {'lab3': '20 Nov 2009 09:00', 'lab4': '27 Nov 2009 09:00'},
    #  'group2': {'lab3': '22 Nov 2009 09:00', 'lab4': '29 Nov 2009 09:00'}}
    groups = conf.deadline_groups
    # Email admin if students are found assigned to deadline groups
    # that are not defined in the config. ignored_students lists
    # those that had invalid groups and were removed.
    (students, ignored_students) = check_student_groups(students, groups.keys())
    # Populate the mark data so that
    # mark_data = {'stud1@domain': {'lab1': [fractionalScore, submissionDateTime], ...}, ...}
    mark_data={}
    for student in students:
        mark_data[student] = gather_mark_data(student)
    # Stop e.g. demonstrators' submissions affecting statistics.
    mark_data = remove_unassessed_submissions(mark_data, conf.unassessed_usernames)
    # Count max activities for each student.
    # NOTE(review): the activity regex is hard-coded here rather than
    # taken from config — confirm that is intentional.
    activity_regex = r'^training.*'
    stud_max_activities = find_max_activities(students, groups, activity_regex)
    # Find out if any were late.
    late_submissions = find_late_submissions(mark_data, students, groups)
    # Print to screen and file
    print_result_report(students, groups, mark_data, stud_max_activities,
                        late_submissions, ignored_students, activity_regex,
                        'tmp-labmarks.csv')
| 11,464 | 3,512 |
from typing import *
class naive_pd(object):
    """
    Naive reference implementation of a "pocket dictionary" (PD): stores up
    to f (quotient, remainder) pairs.  Quotients are unary-encoded in
    ``head`` (a run of 1s per quotient, each run terminated by a 0) and the
    remainders live in ``body``, grouped per run and sorted within a run.

    :param m Quotients (q_i) range interval. (forall q_i, q_i in [m])
    :param f Number of elements in PD.
    :param l Remainder (r_i) length. (|r_i| = l)
    """
    head: list  # unary-coded quotient runs, zero-terminated
    body: list  # remainders, grouped by run, sorted inside each run
    r: int  # remainder length
    m: int  # quotient range
    max_capacity: int  # f — maximum number of stored pairs
    capacity: int  # current number of stored pairs
    def __init__(self, m: int, f: int, l: int):
        # Todo: decrease size by 1.
        self.head = [0] * (f + m + 1)
        self.body = [""] * f
        self.r = l
        self.m = m
        self.max_capacity = f
        self.capacity = 0
    def insert(self, q: int, r: str):
        """Insert remainder ``r`` under quotient ``q`` (no-op when full)."""
        if self.capacity == self.max_capacity:
            print("Insertion failed, since PD contains", self.max_capacity, "elements")
            return
        self.capacity += 1
        assert self.is_head_last_bit_zero()
        # head_insertion: shift everything after the run's end right by one
        # and extend the run with a 1.
        start, end = find_lookup_interval(self.head, q)
        length = end - start
        assert self.is_head_last_bit_zero()
        temp = self.head.copy()
        for i in reversed(range(end + 1, len(self.head))):
            self.head[i] = self.head[i - 1]
        assert self.head[end + 1] == 0
        if not self.is_head_last_bit_zero():
            # Debug aid: dump before/after head states when the invariant breaks.
            print()
            print(temp)
            print(self.head)
            assert False
        self.head[end] = 1
        # if end != 0:
        #     assert self.head[end + 1] == 0
        # find where in the run should the new element be inserted.
        body_start_index = self.get_body_start_index_by_head_start_index(start)
        body_index = body_start_index + length
        for i in range(length):
            if self.body[body_start_index + i] >= r:
                body_index = body_start_index + i
                break
        # push: shift the tail of body right by one to make room.
        for i in reversed(range(body_index + 1, len(self.body))):
            self.body[i] = self.body[i - 1]
        self.body[body_index] = r
        assert self.is_head_last_bit_zero()
        #
        # for i in range(length):
        #     if self.body[start + i] >= r:
        #         self.body.insert(start + i, r)
        #         return
        # self.body.insert(start + length, r)
    def lookup(self, q: int, r: str) -> bool:
        """Return True iff remainder ``r`` is stored under quotient ``q``."""
        start, end = find_lookup_interval(self.head, q)
        length = end - start
        # if length == 0:
        #     return False
        body_start_index = self.get_body_start_index_by_head_start_index(start)
        for i in range(length):
            if self.body[body_start_index + i] == r:
                assert self.is_head_last_bit_zero()
                return True
        assert self.is_head_last_bit_zero()
        return False
    def remove(self, q: int, r: str):
        """Delete the pair (``q``, ``r``) if present.

        NOTE(review): if the quotient's run exists but ``r`` is not in it,
        body_index stays -1 yet the shifting below still runs — this
        corrupts the structure instead of aborting.  Bug to fix."""
        start, end = find_lookup_interval(self.head, q)
        length = end - start
        if length == 0:
            print("Delete failed, since PD does not contain any element with given quotient")
            return
        # find where in the run is the element that is going to be deleted.
        body_start_index = self.get_body_start_index_by_head_start_index(start)
        body_index = -1
        for i in range(length):
            if self.body[body_start_index + i] == r:
                body_index = body_start_index + i
                break
        else:
            print("Delete failed, since PD does not contain any element with given remainder")
        # for i in range(start, end):
        #     if self.body[i] == r:
        #         body_index = i
        #         break
        # update body: shift everything after the removed slot left by one.
        for i in range(body_index, len(self.body) - 1):
            self.body[i] = self.body[i + 1]
        # Todo? did not clear self.body[-1].
        assert self.is_head_last_bit_zero()
        assert end > 0
        assert self.head[end] == 0
        # update head: close the gap by shifting left from the run's end.
        for i in range(end, len(self.head)):
            self.head[i - 1] = self.head[i]
        assert self.is_head_last_bit_zero()
        self.capacity -= 1
        return
        # for i in range(length):
        #     if self.body[start + i] == r:
        #         del self.body[start + i]
        #         break
        # else:
        #     print("Delete failed, since PD does not contain any element with given remainder")
        # del self.body[start + length]
        #
        # assert self.head[start + length]
        # del self.head[start + length]
    def get_body_start_index_by_head_start_index(self, start: int) -> int:
        # Number of 1s before 'start' == number of remainders stored
        # for all earlier runs.
        return self.head[:start].count(1)
    def __repr__(self) -> str:
        return super().__repr__()
    def __str__(self):
        return str(self.head) + "\n" + str(self.body)
        # print(self.head)
        # print(self.body)
    def check_zeros_in_head(self):
        # Invariant: the head always holds max_capacity zero terminators...
        # NOTE(review): head is sized f+m+1 but compared against f here —
        # confirm which invariant is intended.
        return self.head.count(0) == self.max_capacity
    def is_head_last_bit_zero(self) -> bool:
        # The final head bit must stay 0; insertions shifting a 1 off the
        # end would mean overflow/corruption.
        return self.head[-1] == 0
    def get_head_as_runs(self) -> list:
        """Render head as a list of '1…10' run strings (debug helper)."""
        s = "".join([str(i) for i in self.head])
        k = s.split("0")
        return [i + "0" for i in k]
def valid_interval_result(l: list, q: int) -> Tuple[int, int]:
    """Reference implementation: return (start, end) of run number ``q`` in
    the unary-coded list ``l``, where ``end`` is the index of the run's
    terminating zero."""
    assert q >= 0
    if q == 0:
        # Run 0 begins at index 0 and ends at the first zero.
        return 0, (0 if l[0] == 0 else l.index(0))
    zero_positions = [pos for pos, bit in enumerate(l) if bit == 0]
    assert len(zero_positions) > q
    # Run q starts just past the (q-1)-th zero and ends at the q-th zero.
    return zero_positions[q - 1] + 1, zero_positions[q]
def validate_interval(l: list, start: int, end: int) -> bool:
    """Sanity-check a (start, end) run interval in ``l``: the cell before
    ``start`` (if any) and the cell at ``end`` must be zero, and the bounds
    must be ordered.  Prints a diagnostic for each violated condition."""
    ok = True
    if start != 0 and l[start - 1] != 0:
        print("start predeccesor is not zero")
        ok = False
    if l[end] != 0:
        print("end is not zero")
        ok = False
    if end < start:
        print("end is smaller then start")
        ok = False
    return ok
def find_lookup_interval(head: list, q: int) -> Tuple[int, int]:
    """Scan the unary-coded ``head`` for run ``q`` and return (start, end),
    where ``end`` indexes the run's terminating zero.  Cross-checked against
    valid_interval_result/validate_interval (defined later in this file)."""
    zero_counter = 1  # zero appear in the end of a run, not on the run's start.
    index = 0
    # Skip the q preceding runs (their terminating zeros).
    while zero_counter <= q:
        if head[index] == 0:
            zero_counter += 1
        index += 1
    start = index
    one_counter = 0
    # for i in range(index)
    # Walk to the end of the run of 1s.
    while index < len(head) and head[index]:
        one_counter += 1
        index += 1
    # NOTE(review): raises ValueError if no zero follows 'start', i.e. the
    # head is corrupt (missing terminator).
    sanity_check = head[start:].index(0)
    assert sanity_check == one_counter
    assert validate_interval(head, start, index)
    if valid_interval_result(head, q) != (start, index):
        print("got", (start, index), " instead of", valid_interval_result(head, q))
        assert False
    return start, index
    # return start, one_counter
def lsm(x):
    """Isolate the least-significant set bit of ``x`` (0 when x == 0)."""
    # x & -x is the classic two's-complement equivalent of x ^ (x & (x - 1)).
    return x & -x
# Quick visual check of lsm over small inputs.
for i in range(32):
    print(i, lsm(i))
def split_by_runs(l: list):
    """Render the bit-list ``l`` as one string per unary run: each chunk of
    consecutive 1s plus its terminating '0'.

    Note: the final chunk always gets a '0' appended, even when the input
    does not end in a zero (preserved historical behavior).

    FIX: removed the dead no-op expression statement ``temp`` that sat
    between the assignment and the return.
    """
    bits = "".join(str(b) for b in l)
    return [run + "0" for run in bits.split("0")]
# NOTE(review): the lines below are a pasted REPL/notebook transcript;
# they reference names (h2, bi) only defined further down, so running this
# file top-to-bottom fails.  Candidates for deletion.
1 << 7
split_by_runs(h2)
bi(170)
int('01010101', 2)
def bi(n):
    """Binary string of ``n`` without the '0b' prefix."""
    return bin(n)[2:]
# Short alias for to_bin_with_leading_zeros (defined just below).
def ci(n, k): return to_bin_with_leading_zeros(n, k)
def to_bin_with_leading_zeros(n: int, length: int) -> str:
    """Binary representation of ``n``, left-padded with zeros to exactly
    ``length`` characters.

    Raises:
        ValueError: if ``n`` needs more than ``length`` bits.  (FIX: was a
            bare ``assert False``, which silently disappears under
            ``python -O`` and carries no message.)
    """
    s = bin(n)[2:]
    if len(s) > length:
        raise ValueError(
            "{0} needs {1} bits, more than length={2}".format(n, len(s), length))
    # zfill reproduces the old '"0" * diff + s' padding.
    return s.zfill(length)
# def ci(n:int,length)->str
# NOTE(review): pasted REPL transcript; the bare 'h2 =' line below is a
# SyntaxError, so this file cannot be imported as-is.  Candidates for
# deletion.
bi(216)
bi(170)
bin(85)
170
~170
"0111"
7+7*16
bi(119)
s2 = '01110111'*4
h2 = [int(i) for i in s2]
h2 =
bin(-171 - 1)
h = [int(i) for i in bi(216)]
h
head = [1, 0] * 4 + [0, 0, 0]
head
# Spot-check find_lookup_interval on the three sample heads built above.
for i in range(4):
    print(i, find_lookup_interval(head, i))
for i in range(4):
    print(i, find_lookup_interval(h, i))
for i in range(8):
    print(i, find_lookup_interval(h2, i))
# NOTE(review): the indented lines below are pasted *output* of the loop
# above, not Python — they are syntax errors and must be removed for the
# file to parse.
0 (0, 0)
1 (1, 4)
2 (5, 8)
3 (9, 9)
4 (13, 13)
5 (17, 17)
6 (21, 21)
7 (25, 25)
find_lookup_interval(head, 0)
find_lookup_interval(head, 1)
find_lookup_interval(head, 2)
find_lookup_interval(head, 3)
find_lookup_interval(head, 4)
find_lookup_interval(head, 5)
find_lookup_interval(head, 6)
def dealing_with_last_index(l: list, index: int) -> int:
    # Unimplemented stub: returns None despite the -> int annotation.
    pass
def naive_dealing_with_last_index(n: int, index: int, block_size: int = 8) -> int:
    """Shift the bits of ``n`` below position ``index`` (counted from the
    MSB of a ``block_size``-bit word) down by one, and set that freed bit."""
    shift = block_size - index - 1
    high_part = (n >> shift) << shift          # bits at/above 'index', kept in place
    low_part = (n >> 1) & ((1 << shift) - 1)   # bits below, moved down one slot
    return high_part | low_part | (1 << shift)
def f2(n: int, index: int, block_size: int = 8) -> int:
    """Reformulation of naive_dealing_with_last_index using the
    complementary shift (the loop below verifies they agree on all
    8-bit inputs)."""
    width = block_size - index
    kept = (n >> width) << width
    moved = (n >> 1) & ((1 << width) - 1)
    return kept | moved | (1 << (width - 1))
# Exhaustively confirm f2 matches naive_dealing_with_last_index for every
# 8-bit value and every index.
for i in range(256):
    for k in range(8):
        if naive_dealing_with_last_index(i,k) != f2(i,k):
            print(i,k)
            assert False
# NOTE(review): stray REPL expression, has no effect.
1
# NOTE(review): pasted REPL transcript used to eyeball
# naive_dealing_with_last_index; safe to delete.
s3 = "10001001"
n = int(s3,2)
n
ci(137,8)
'1000 1001'
bin(137)
bin(148)
naive_dealing_with_last_index(137,3)
naive_dealing_with_last_index(137,3)
ci(164,8)
n = 170
ci(n,8)
naive_dealing_with_last_index(n,3)
naive_dealing_with_last_index(n,3)
ci(181,8)
ci(189,8)
naive_dealing_with_last_index(n,5)
ci(173,8)
# indexes = [i for i in range(len(l)) if l[i] == 0]
# indexes
# split_by_runs(l)
# l = [0, 1, 1, 0, 0, 1, 0]
# # ls = [str(i) for i in l]
# s = "".join(ls)
# s
# check_interval(l, 0)
# check_interval(l, 1)
# check_interval(l, 2)
# check_interval(l, 3)
# check_interval(l, 4)
#
# for i in range(l.count(0)):
# res = check_interval(l, i)
# length = res[1] - res[0]
# print(i, length, res)
#
# if q == 1:
# temp = l.index(0)
# return temp, l[temp + 1:].index(0)
#
# start, length = 0, 0
# zero_counter = 1
# index = 0
# for i in range(len(l)):
# if l[i] == 0:
# zero_counter += 1
# while index < len(l) and zero_counter < q:
# if l[index] == 0:
# zero_counter += 1
# index += 1
# start =
#
# l = [1, 0, 1, 0, 1, 1, 0]
# l.index
# k = list(range(8)) + list(range(8))
# del k[12]
# k.index(4)
# k[12]
| 9,871 | 3,787 |
#!/usr/bin/env python3
import sys, json
from collections import Counter
# Tally how many records each dataProvider contributes in the JSON dump
# named on the command line.
with open(sys.argv[1]) as f:
    rec = json.load(f)
data_providers = [record['dataProvider'] for record in rec]
counts = Counter(data_providers)
for provider in counts:
    print(provider, ': ', counts[provider])
| 322 | 112 |
from datetime import date, datetime
from pytz import UTC, timezone
from ..utils import es_format_datetime
# Timezone used for the "aware, non-UTC" test cases below.
ET = timezone('US/Eastern')
def test_es_format_datetime():
    """Nose-style generator test: yield one sub-check per representative
    input (plain string, date, naive/aware datetimes, microseconds)."""
    cases = (
        ("04/28/21", "04/28/21"),
        (date(2021, 4, 28), "2021-04-28"),
        (datetime(2021, 4, 28), "2021-04-28T00:00:00"),
        (datetime(2021, 4, 28, 11, 47, 22), "2021-04-28T11:47:22"),
        (datetime(2021, 4, 28, 11, 47, 22, 3), "2021-04-28T11:47:22.000003"),
        (datetime(2021, 4, 28, 11, 47, 22, 300000), "2021-04-28T11:47:22.300000"),
        (datetime(2021, 4, 28, 11, tzinfo=UTC), "2021-04-28T11:00:00+00:00"),
        (ET.localize(datetime(2021, 4, 28, 11)), "2021-04-28T11:00:00-04:00"),
        # 2021-04-28T11:00:00.000001-04:00 isn't supported in ES, so convert to server time
        (ET.localize(datetime(2021, 4, 28, 11, microsecond=1)), "2021-04-28T15:00:00.000001"),
    )

    def _assert_returns(date_or_datetime, expected):
        actual = es_format_datetime(date_or_datetime)
        assert actual == expected, f"Expected {expected}, got {actual}"

    for date_or_datetime, expected in cases:
        yield _assert_returns, date_or_datetime, expected
| 1,201 | 612 |
"""
Build a character dictionary
"""
import numpy
import cPickle as pkl
import sys
import fileinput
from collections import OrderedDict
def main(filenames):
    """Count character frequencies across *filenames* and pickle a char->index map.

    Index 0 is reserved for 'eos' and 1 for 'UNK'; all observed characters
    are numbered from 2 in decreasing order of frequency. The dictionary is
    written to ./char_dict.pkl.
    """
    char_freqs = OrderedDict()
    for filename in filenames:
        print '==> Processing', filename
        with open(filename, 'r') as f:
            lines = f.readlines()
            for line in lines:
                for c in line.strip():
                    if c not in char_freqs:
                        char_freqs[c] = 0
                    char_freqs[c] += 1
    # argsort is ascending, so reverse to get most-frequent-first ordering.
    chars = char_freqs.keys()
    freqs = char_freqs.values()
    sorted_idx = numpy.argsort(freqs)
    sorted_chars = [chars[ii] for ii in sorted_idx[::-1]]
    chardict = OrderedDict()
    chardict['eos'] = 0
    chardict['UNK'] = 1
    for ii, cc in enumerate(sorted_chars):
        chardict[cc] = ii+2
    with open('./char_dict.pkl', 'wb') as f:
        pkl.dump(chardict, f)
    print 'Done'
if __name__ == '__main__':
    # Validate CLI usage explicitly: an `assert` would be stripped under
    # `python -O`, silently allowing an empty argument list through.
    filenames = sys.argv[1:]
    if not filenames:
        sys.exit("please specify at least one filename.")
    main(filenames)
| 1,072 | 380 |
# This tool takes the raw usage data and splits it into session files
import gzip
import json
import base64
import sys
import traceback
import time
import random
import os
import sys
# Emit timestamped progress messages when True (see log()).
VERBOSE = True
# Running count of files that failed to sort (incremented in the main loop).
err = 0
# Skip any input file whose lines exceed roughly 1 GB in memory.
MAX_MEM = 1 * 1000 * 1000 * 1000
def log(s):
    """Print *s* prefixed with a timestamp; silenced when VERBOSE is False."""
    if VERBOSE:
        print time.strftime("%Y-%m-%d %H:%M:%S"), s
def compute_derived_path(s):
    """Return the '.sorted.gz' companion path for a raw '.gz' path.

    Assumes *s* ends with '.gz' — the caller filters on that suffix.
    """
    stem = s[:-len(".gz")]
    return stem + ".sorted.gz"
# --- CLI validation: exactly one argument, the sessions root directory ---
if len(sys.argv) != 2:
    print "Usage: python sort_sessions.py path_to_sessions"
    print "Sort sessions"
    exit(1)
path = sys.argv[1]
# Raw .gz inputs found under the root.
paths_to_test = set()
# Already-produced .sorted.gz outputs found under the root.
sorted_paths = set()
linesCount = 0
paths_count = 0
# Walk the tree once, classifying files into raw inputs vs existing outputs.
for root, subdirs, files in os.walk(path):
    for ff in files:
        paths_count += 1
        if paths_count % 1000 == 0:
            log ("Enumerating paths: " + str(paths_count))
        path = os.path.join(root, ff)
        if path.endswith('.sorted.gz'):
            sorted_paths.add(path)
            continue
        if not path.endswith('.gz'):
            continue
        paths_to_test.add(path)
log ("All paths: " + str(len(paths_to_test)))
# Drop inputs whose sorted output already exists.
already_sorted = 0
paths_to_sort = set()
for test_path in paths_to_test:
    if compute_derived_path(test_path) in sorted_paths:
        already_sorted += 1
        continue
    paths_to_sort.add(test_path)
log ("Sorting: " + str(len(paths_to_sort)) + ", already sorted: " + str(already_sorted))
# Shuffle so concurrent runs of this script tend to pick different files.
paths_to_sort_list = list(paths_to_sort)
random.shuffle(paths_to_sort_list)
i = 0
skipped = 0
for path in paths_to_sort_list:
    i += 1
    out_path = compute_derived_path(path)
    # skip files that have been processed already between start of the script and now
    if os.path.exists(out_path):
        skipped += 1
        log ("Sorted: " + str(i) + ", skipped: " + str(skipped))
        continue
    try:
        log(str(float((100*i)) / len(paths_to_sort)) + "%: " + path)
        f = gzip.open(path)
        # Deduplicate lines via a set before parsing JSON.
        data_set = set()
        data = []
        byte_counter = 0
        skip_file = False
        for ln in f:
            data_set.add(ln)
            byte_counter += sys.getsizeof(ln)
            # Bail out on oversized files rather than exhausting memory.
            if byte_counter > MAX_MEM:
                skip_file = True
                skipped +=1
                log ("Skipped large file: " + path)
                log ("Sorted: " + str(i) + ", skipped: " + str(skipped))
                break
        if skip_file:
            continue
        for ln in data_set:
            data.append(json.loads(ln))
        # Order records chronologically by their MicroTime field.
        data.sort(key=lambda x: int(x["MicroTime"]))
        sortedF = gzip.open(out_path, 'w')
        for d in data:
            sortedF.write(json.dumps(d) + "\n")
        sortedF.flush()
        sortedF.close()
    except Exception as e:
        # On any failure remove the partial output so a later run retries it.
        err += 1
        if os.path.exists(out_path):
            os.remove(out_path)
        log ("Failed: " + str(err))
        log (str(e))
| 2,815 | 983 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Unit tests for :py:mod:`sfftk.core.notes` package"""
from __future__ import division, print_function
import os
import shlex
import shutil
import sys
import unittest
from urllib import urlencode
import __init__ as tests
from .. import BASE_DIR
from .. import schema
from ..core import utils
from ..core.parser import parse_args
from ..notes import find, modify, view, RESOURCE_LIST
__author__ = "Paul K. Korir, PhD"
__email__ = "pkorir@ebi.ac.uk, paul.korir@gmail.com"
__date__ = "2017-05-15"
# :TODO: rewrite to use sfftk.notes.modify.SimpleNote
class TestNotesModifyExternalReference(unittest.TestCase):
    """Checks that modify.ExternalReference resolves label/description metadata
    for references from several resources (OLS/NCIt, EMDB, PDB, UniProt).

    NOTE(review): the expected labels and descriptions come from live external
    resources and may drift over time (see the '# likely to change' markers).
    """
    def test_ols(self):
        """Test that sfftk.notes.modify.ExternalReference object works correctly"""
        type_ = u'ncit'
        otherType = u'http://purl.obolibrary.org/obo/NCIT_C62195'
        value = u'NCIT_C62195'
        # likely to change
        label = u'Wild Type'
        description = u'The naturally-occurring, normal, non-mutated version of a gene or genome.'
        # Double url-encoding of the IRI; presumably the form the resource
        # expects for lookups — verify against modify.ExternalReference.iri.
        urlenc = urlencode({u'iri': otherType.encode(u'idna')})
        urlenc2 = urlencode({u'iri': urlenc.split(u'=')[1]})
        urlenc3 = urlenc2.split(u'=')[1].decode(u'utf-8')
        extRef = modify.ExternalReference(
            type_=type_,
            otherType=otherType,
            value=value,
        )
        self.assertEqual(extRef.type, type_)
        self.assertEqual(extRef.otherType, otherType)
        self.assertEqual(extRef.value, value)
        self.assertEqual(extRef.label, label)
        self.assertEqual(extRef.description,
                         description)
        self.assertItemsEqual(extRef._get_text(), [label, description])
        self.assertEqual(extRef.iri, urlenc3)
    def test_emdb(self):
        """Test that sfftk.notes.modify.ExternalReference object works correctly"""
        type_ = u'EMDB'
        otherType = u'https://www.ebi.ac.uk/pdbe/emdb/EMD-8654'
        value = u'EMD-8654'
        # likely to change
        label = u'EMD-8654'
        description = u'Zika virus-infected Vero E6 cell at 48 hpi: dual-axis tilt series tomogram from 3 serial sections'
        extRef = modify.ExternalReference(
            type_=type_,
            otherType=otherType,
            value=value,
        )
        self.assertEqual(extRef.type, type_)
        self.assertEqual(extRef.otherType, otherType)
        self.assertEqual(extRef.value, value)
        self.assertEqual(extRef.label, label)
        self.assertEqual(extRef.description,
                         description)
        self.assertItemsEqual(extRef._get_text(), [label, description])
    def test_pdb(self):
        """Test that sfftk.notes.modify.ExternalReference object works correctly"""
        type_ = u'PDB'
        otherType = u'https://www.ebi.ac.uk/pdbe/entry/pdb/4gzw'
        value = u'4gzw'
        # likely to change
        label = u'N2 neuraminidase D151G mutant of A/Tanzania/205/2010 H3N2 in complex with avian sialic acid receptor'
        description = u'H3N2 subtype'
        extRef = modify.ExternalReference(
            type_=type_,
            otherType=otherType,
            value=value,
        )
        self.assertEqual(extRef.type, type_)
        self.assertEqual(extRef.otherType, otherType)
        self.assertEqual(extRef.value, value)
        self.assertEqual(extRef.label, label)
        self.assertEqual(extRef.description,
                         description)
        self.assertItemsEqual(extRef._get_text(), [label, description])
    def test_uniprot(self):
        """Test that sfftk.notes.modify.ExternalReference object works correctly"""
        type_ = u'UniProt'
        otherType = u'https://www.uniprot.org/uniprot/A0A1Q8WSX6'
        value = u'A0A1Q8WSX6'
        # likely to change
        label = u'A0A1Q8WSX6_9ACTO'
        description = u'Type I-E CRISPR-associated protein Cas5/CasD (Organism: Actinomyces oris)'
        extRef = modify.ExternalReference(
            type_=type_,
            otherType=otherType,
            value=value,
        )
        self.assertEqual(extRef.type, type_)
        self.assertEqual(extRef.otherType, otherType)
        self.assertEqual(extRef.value, value)
        self.assertEqual(extRef.label, label)
        self.assertEqual(extRef.description,
                         description)
        self.assertItemsEqual(extRef._get_text(), [label, description])
class TestNotesFindSearchResource(unittest.TestCase):
    """Tests for find.SearchResource: attribute wiring from RESOURCE_LIST and
    exact URL construction for each supported resource (OLS, EMDB, UniProt, PDB).
    """
    @classmethod
    def setUpClass(cls):
        # Shared config file used by every parsed CLI command below.
        cls.config_fn = os.path.join(BASE_DIR, 'sff.conf')
    def test_unknown_resource(self):
        """Test exception raised formed unknown resource"""
        with self.assertRaises(SystemExit):
            args, config = parse_args(shlex.split(
                'notes search --resource xxx "something" --config-path {}'.format(self.config_fn)
            ))
    def test_configs_attribute(self):
        """Test the value of the configs attribute"""
        args, configs = parse_args(shlex.split(
            'notes search --resource ols "mitochondria" --config-path {}'.format(self.config_fn)
        ))
        resource = find.SearchResource(args, configs)
        self.assertEqual(resource.configs, configs)
    def test_result_path(self):
        """Test result path attr"""
        args, configs = parse_args(shlex.split(
            "notes search -R ols 'mitochondria' --config-path {}".format(self.config_fn)
        ))
        resource = find.SearchResource(args, configs)
        self.assertEqual(resource.result_path, RESOURCE_LIST['ols']['result_path'])
    def test_result_count(self):
        """Test result_count attr"""
        args, configs = parse_args(shlex.split(
            "notes search -R ols 'mitochondria' --config-path {}".format(self.config_fn)
        ))
        resource = find.SearchResource(args, configs)
        self.assertEqual(resource.result_count, RESOURCE_LIST['ols']['result_count'])
    def test_format(self):
        """Test format attr"""
        args, configs = parse_args(shlex.split(
            "notes search -R ols 'mitochondria' --config-path {}".format(self.config_fn)
        ))
        resource = find.SearchResource(args, configs)
        self.assertEqual(resource.format, RESOURCE_LIST['ols']['format'])
    def test_response(self):
        """Test response attr"""
        # NOTE(review): performs a live HTTP request and cross-checks the
        # resource's stored response against a direct GET of the same URL.
        args, configs = parse_args(shlex.split(
            "notes search -R ols 'mitochondria' --config-path {}".format(self.config_fn)
        ))
        resource = find.SearchResource(args, configs)
        self.assertIsNone(resource.response)
        resource.search()
        url = resource.get_url()
        print('url: ' + url, file=sys.stderr)
        import requests
        import json
        R = requests.get(url)
        resource_results = utils.get_path(json.loads(resource.response), resource.result_path)
        test_results = utils.get_path(json.loads(R.text), resource.result_path)
        self.assertItemsEqual(resource_results, test_results)
    def test_get_url_ols_list_ontologies(self):
        """Test url correctness for OLS"""
        resource_name = 'ols'
        args, configs = parse_args(shlex.split(
            "notes search -R {resource_name} 'mitochondria' -L --config-path {config_fn}".format(
                resource_name=resource_name,
                config_fn=self.config_fn,
            ),
        ))
        resource = find.SearchResource(args, configs)
        url = "{root_url}ontologies?size=1000".format(
            root_url=RESOURCE_LIST[resource_name]['root_url'],
        )
        self.assertEqual(resource.get_url(), url)
    def test_get_url_ols(self):
        """Test url correctness for OLS"""
        resource_name = 'ols'
        args, configs = parse_args(shlex.split(
            "notes search -R {resource_name} 'mitochondria' -O go -x -o --config-path {config_fn}".format(
                resource_name=resource_name,
                config_fn=self.config_fn,
            ),
        ))
        resource = find.SearchResource(args, configs)
        # start is shifted by 1 for OLS — presumably 0-based paging; confirm
        # against the SearchResource implementation.
        url = "{root_url}search?q={search_term}&start={start}&rows={rows}&ontology={ontology}&exact=on&obsoletes=on".format(
            root_url=RESOURCE_LIST[resource_name]['root_url'],
            search_term=args.search_term,
            start=args.start - 1,
            rows=args.rows,
            ontology=args.ontology,
        )
        self.assertEqual(resource.get_url(), url)
    def test_get_url_emdb(self):
        """Test url correctness for EMDB"""
        resource_name = 'emdb'
        args, configs = parse_args(shlex.split(
            "notes search -R {resource_name} 'mitochondria' --config-path {config_fn}".format(
                resource_name=resource_name,
                config_fn=self.config_fn,
            ),
        ))
        resource = find.SearchResource(args, configs)
        url = "{root_url}?q={search_term}&start={start}&rows={rows}".format(
            root_url=RESOURCE_LIST[resource_name]['root_url'],
            search_term=args.search_term,
            start=args.start,
            rows=args.rows,
        )
        self.assertEqual(resource.get_url(), url)
    def test_get_url_uniprot(self):
        """Test url correctness for UniProt"""
        resource_name = 'uniprot'
        args, configs = parse_args(shlex.split(
            "notes search -R {resource_name} 'mitochondria' --config-path {config_fn}".format(
                resource_name=resource_name,
                config_fn=self.config_fn,
            ),
        ))
        resource = find.SearchResource(args, configs)
        url = "{root_url}?query={search_term}&format=tab&offset={start}&limit={rows}&columns=id,entry_name,protein_names,organism".format(
            root_url=RESOURCE_LIST[resource_name]['root_url'],
            search_term=args.search_term,
            start=args.start,
            rows=args.rows,
        )
        self.assertEqual(resource.get_url(), url)
    def test_get_url_pdb(self):
        """Test url correctness for PDB"""
        resource_name = 'pdb'
        args, configs = parse_args(shlex.split(
            "notes search -R {resource_name} 'mitochondria' --config-path {config_fn}".format(
                resource_name=resource_name,
                config_fn=self.config_fn,
            ),
        ))
        resource = find.SearchResource(args, configs)
        url = "{root_url}?q={search_term}&wt=json&fl=pdb_id,title,organism_scientific_name&start={start}&rows={rows}".format(
            root_url=RESOURCE_LIST[resource_name]['root_url'],
            search_term=args.search_term,
            start=args.start,
            rows=args.rows,
        )
        self.assertEqual(resource.get_url(), url)
# class TestNotesFindSearchResource(unittest.TestCase):
# @classmethod
# def setUpClass(cls):
# cls.config_fn = os.path.join(BASE_DIR, 'sff.conf')
#
# def test_search_args_attr(self):
# """Test that search_args attr works"""
# args, configs = parse_args(shlex.split(
# "notes search -R emdb mitochondria --config-path {}".format(self.config_fn)
# ))
# resource = find.SearchResource(args, configs)
# self.assertEqual(resource.search_args, args)
class TestNotesFindTableField(unittest.TestCase):
    """Constructor-argument validation checks for find.TableField."""

    def test_init_name(self):
        """Test instantiation of TableField object"""
        with self.assertRaisesRegexp(ValueError,
                                     "key and text are mutually exclusive; only define one or none of them"):
            find.TableField('my-field', key='k', text='t')

    def test_init_width_type(self):
        """Test check on width type"""
        with self.assertRaisesRegexp(ValueError, "field width must be int or long"):
            find.TableField('my-field', width=1.3)

    def test_init_width_value(self):
        """Test check on width value"""
        with self.assertRaisesRegexp(ValueError, "field width must be greater than 0"):
            find.TableField('my-field', width=0)

    def test_init_pc_type(self):
        """Test pc type"""
        # Every non-numeric pc value must be rejected.
        for bad_pc in ('1.3', u'1.3', list(), dict(), tuple()):
            with self.assertRaises(ValueError):
                find.TableField('my-field', pc=bad_pc)

    def test_init_pc_value(self):
        """Test pc value"""
        # pc must lie strictly inside the accepted percentage range.
        for out_of_range in (-1, 100):
            with self.assertRaises(ValueError):
                find.TableField('my-field', pc=out_of_range)
        self.assertIsInstance(find.TableField('my-field', pc=50), find.TableField)

    def test_init_justify(self):
        """Test value for justify"""
        for alignment in ('left', 'right', 'center'):
            self.assertIsInstance(
                find.TableField('my-field', text='t', justify=alignment),
                find.TableField)
class TestNotes_view(unittest.TestCase):
    """Tests for the 'notes list' and 'notes show' CLI views."""

    @classmethod
    def setUpClass(cls):
        cls.config_fn = os.path.join(BASE_DIR, 'sff.conf')

    def setUp(self):
        self.segment_id = 15559
        self.sff_file = os.path.join(tests.TEST_DATA_PATH, 'sff', 'v0.7', 'emd_1014.sff')

    def _run_view(self, command, handler):
        # Parse the CLI command and dispatch to the given view handler,
        # returning its exit status.
        args, configs = parse_args(shlex.split(command))
        return handler(args, configs)

    def test_list_default(self):
        """Test that we can view the list of segmentations with annotations"""
        command = "notes list {} --config-path {}".format(self.sff_file, self.config_fn)
        self.assertEqual(self._run_view(command, view.list_notes), 0)

    def test_long_list(self):
        """Test that we can long list (-l) the list of segmentations with annotations"""
        command = "notes list -l {} --config-path {}".format(self.sff_file, self.config_fn)
        self.assertEqual(self._run_view(command, view.list_notes), 0)

    def test_show_default(self):
        """Test that we can show annotations in a single segment"""
        command = "notes show -i {} {} --config-path {}".format(
            self.segment_id, self.sff_file, self.config_fn)
        self.assertEqual(self._run_view(command, view.show_notes), 0)

    def test_long_show(self):
        """Test that we can show in long format annotations in a single segment"""
        command = "notes show -l -i {} {} --config-path {}".format(
            self.segment_id, self.sff_file, self.config_fn)
        self.assertEqual(self._run_view(command, view.show_notes), 0)
class TestNotes_modify(unittest.TestCase):
    """Template suite for 'notes add/edit/del/merge/clear/copy'.

    The `_test_*` methods are not collected by the test runner directly;
    format-specific subclasses (sff/hff/json below) assign `sff_file`,
    `other`, `output` and `annotated_sff_file` in setUp and expose each
    template through a real `test_*` wrapper.
    """
    @classmethod
    def setUpClass(cls):
        cls.config_fn = os.path.join(BASE_DIR, 'sff.conf')
        # Populated by subclasses with format-specific file paths.
        cls.sff_file = None
        cls.output = None
        cls.annotated_sff_file = None
    # test filetypeA to filetypeB
    def setUp(self):
        self.segment_id = 15559
    def _test_add(self):
        """Test that we can add a note"""
        segment_name = 'the segment name'
        desc = 'a short description'
        num = tests._random_integer()
        extref = ['lsfj', 'sljfs', 'ldjls']
        complexes = ['09ej', 'euoisd', 'busdif']
        macromolecules = ['xuidh', '29hf98e', 'ygce']
        cmd = shlex.split(
            "notes add -i {} -s '{}' -d '{}' -E {} -n {} -C {} -M {} {} --config-path {}".format(
                self.segment_id,
                segment_name,
                desc,
                " ".join(extref),
                num,
                ','.join(complexes),
                ','.join(macromolecules),
                self.sff_file,
                self.config_fn,
            )
        )
        args, configs = parse_args(cmd)
        status = modify.add_note(args, configs)
        # Re-read the file to confirm the note was persisted.
        seg = schema.SFFSegmentation(self.sff_file)
        segment = seg.segments.get_by_id(self.segment_id)
        self.assertEqual(status, 0)
        self.assertEqual(segment.biologicalAnnotation.description, desc)
        self.assertEqual(segment.biologicalAnnotation.numberOfInstances, num)
        self.assertEqual(segment.biologicalAnnotation.externalReferences[0].type, extref[0])
        self.assertEqual(segment.biologicalAnnotation.externalReferences[0].otherType, extref[1])
        self.assertEqual(segment.biologicalAnnotation.externalReferences[0].value, extref[2])
        self.assertEqual(segment.complexesAndMacromolecules.complexes[0], complexes[0])
        self.assertEqual(segment.complexesAndMacromolecules.complexes[1], complexes[1])
        self.assertEqual(segment.complexesAndMacromolecules.complexes[2], complexes[2])
        self.assertEqual(segment.complexesAndMacromolecules.macromolecules[0], macromolecules[0])
        self.assertEqual(segment.complexesAndMacromolecules.macromolecules[1], macromolecules[1])
        self.assertEqual(segment.complexesAndMacromolecules.macromolecules[2], macromolecules[2])
    def _test_edit(self):
        """Test that we can edit a note"""
        segment_name = "the segments name"
        desc = 'a short description'
        num = tests._random_integer()
        extref = ['lsfj', 'sljfs', 'ldjss']
        complexes = ['09ej', 'euoisd', 'busdif']
        macromolecules = ['xuidh', '29hf98e', 'ygce']
        # add
        cmd = shlex.split(
            "notes add -i {} -s '{}' -D '{}' -E {} -n {} -C {} -M {} {} --config-path {}".format(
                self.segment_id,
                segment_name,
                desc,
                " ".join(extref),
                num,
                ','.join(complexes),
                ','.join(macromolecules),
                self.sff_file,
                self.config_fn,
            )
        )
        args, configs = parse_args(cmd)
        modify.add_note(args, configs)
        # Reversed copies distinguish the edited values from the originals.
        segment_name1 = segment_name[::-1]
        desc1 = desc[::-1]
        num1 = tests._random_integer()
        extref1 = map(lambda e: e[::-1], extref)
        cmd1 = shlex.split(
            "notes edit -i {} -s '{}' -d '{}' -e 0 -E {} -n {} -c 1 -C {} -m 2 -M {} {} --config-path {}".format(
                self.segment_id,
                segment_name1,
                desc1,
                " ".join(extref1),
                num1,
                complexes[1][::-1],
                macromolecules[2][::-1],
                self.sff_file,
                self.config_fn,
            ))
        args1, configs = parse_args(cmd1)
        # edit
        status1 = modify.edit_note(args1, configs)
        seg = schema.SFFSegmentation(self.sff_file)
        segment = seg.segments.get_by_id(self.segment_id)
        self.assertEqual(status1, 0)
        self.assertEqual(segment.biologicalAnnotation.name, segment_name1)
        self.assertEqual(segment.biologicalAnnotation.description, desc1)
        self.assertEqual(segment.biologicalAnnotation.numberOfInstances, num1)
        self.assertEqual(segment.biologicalAnnotation.externalReferences[0].type, extref1[0])
        self.assertEqual(segment.biologicalAnnotation.externalReferences[0].otherType, extref1[1])
        self.assertEqual(segment.biologicalAnnotation.externalReferences[0].value, extref1[2])
        self.assertEqual(segment.complexesAndMacromolecules.complexes[1], complexes[1][::-1])
        self.assertEqual(segment.complexesAndMacromolecules.macromolecules[2], macromolecules[2][::-1])
    def _test_del(self):
        """Test that we can delete a note"""
        # NOTE(review): segment_name is unused here — the add command below
        # only sets the description (-D).
        segment_name = 'the segment name'
        desc = 'a short description'
        num = tests._random_integer()
        extref = ['lsfj', 'sljfs', 'dsljfl']
        complexes = ['09ej', 'euoisd', 'busdif']
        macromolecules = ['xuidh', '29hf98e', 'ygce']
        cmd = shlex.split(
            "notes add -i {} -D '{}' -E {} -n {} -C {} -M {} {} --config-path {}".format(
                self.segment_id,
                desc,
                " ".join(extref),
                num,
                ','.join(complexes),
                ','.join(macromolecules),
                self.sff_file,
                self.config_fn,
            )
        )
        args, configs = parse_args(cmd)
        # add
        modify.add_note(args, configs)
        # delete
        cmd1 = shlex.split("notes del -i {} -D -e 0 -n -c 0 -m 1 {} --config-path {}".format(
            self.segment_id,
            self.sff_file,
            self.config_fn,
        ))
        args1, configs = parse_args(cmd1)
        status1 = modify.del_note(args1, configs)
        seg = schema.SFFSegmentation(self.sff_file)
        segment = seg.segments.get_by_id(self.segment_id)
        self.assertEqual(status1, 0)
        self.assertIsNone(segment.biologicalAnnotation.name)
        self.assertIsNone(segment.biologicalAnnotation.description)
        self.assertIsNone(segment.biologicalAnnotation.numberOfInstances)
        self.assertEqual(len(segment.biologicalAnnotation.externalReferences), 0)
        # One complex and one macromolecule were deleted, leaving two each.
        self.assertEqual(len(segment.complexesAndMacromolecules.complexes), 2)
        self.assertEqual(len(segment.complexesAndMacromolecules.macromolecules), 2)
    def _test_merge(self):
        """Test that we can merge notes"""
        segment_name = 'my very nice segment'
        desc = 'a short description'
        num = tests._random_integer()
        extref = ['lsfj', 'sljfs', 'ldjss']
        complexes = ['09ej', 'euoisd', 'busdif']
        macromolecules = ['xuidh', '29hf98e', 'ygce']
        # add
        cmd = shlex.split(
            "notes add -i {} -s '{}' -d '{}' -E {} -n {} -C {} -M {} {} --config-path {}".format(
                self.segment_id,
                segment_name,
                desc,
                " ".join(extref),
                num,
                ','.join(complexes),
                ','.join(macromolecules),
                self.sff_file,
                self.config_fn,
            )
        )
        args, configs = parse_args(cmd)
        status = modify.add_note(args, configs)
        self.assertEqual(status, 0)
        # merge
        cmd1 = shlex.split(
            'notes merge --source {source} {other} --output {output} --config-path {config_fn}'.format(
                source=self.sff_file,
                other=self.other,
                output=self.output,
                config_fn=self.config_fn,
            )
        )
        args1, configs1 = parse_args(cmd1)
        status1 = modify.merge(args1, configs1)
        self.assertEqual(status1, 0)
        # Verify the merged output carries the same annotations as the source.
        source_seg = schema.SFFSegmentation(self.sff_file)
        output_seg = schema.SFFSegmentation(self.output)
        source_segment = source_seg.segments.get_by_id(self.segment_id)
        # print('description: ' + source_segment.biologicalAnnotation.description, file=sys.stderr)
        output_segment = output_seg.segments.get_by_id(self.segment_id)
        self.assertEqual(source_segment.biologicalAnnotation.name, segment_name)
        self.assertEqual(source_segment.biologicalAnnotation.description, desc)
        self.assertEqual(source_segment.biologicalAnnotation.description,
                         output_segment.biologicalAnnotation.description)
        self.assertEqual(source_segment.biologicalAnnotation.numberOfInstances, num)
        self.assertEqual(source_segment.biologicalAnnotation.numberOfInstances,
                         output_segment.biologicalAnnotation.numberOfInstances)
        self.assertEqual(source_segment.biologicalAnnotation.externalReferences[0].type, extref[0])
        self.assertEqual(source_segment.biologicalAnnotation.externalReferences[0].otherType, extref[1])
        self.assertEqual(source_segment.biologicalAnnotation.externalReferences[0].value, extref[2])
        self.assertEqual(source_segment.biologicalAnnotation.externalReferences[0].type,
                         output_segment.biologicalAnnotation.externalReferences[0].type)
        self.assertEqual(source_segment.complexesAndMacromolecules.complexes[0], complexes[0])
        self.assertEqual(source_segment.complexesAndMacromolecules.complexes[1], complexes[1])
        self.assertEqual(source_segment.complexesAndMacromolecules.complexes[2], complexes[2])
        self.assertEqual(source_segment.complexesAndMacromolecules.macromolecules[0], macromolecules[0])
        self.assertEqual(source_segment.complexesAndMacromolecules.macromolecules[1], macromolecules[1])
        self.assertEqual(source_segment.complexesAndMacromolecules.macromolecules[2], macromolecules[2])
        self.assertEqual(source_segment.complexesAndMacromolecules.complexes[0],
                         output_segment.complexesAndMacromolecules.complexes[0])
        self.assertEqual(source_segment.complexesAndMacromolecules.complexes[1],
                         output_segment.complexesAndMacromolecules.complexes[1])
        self.assertEqual(source_segment.complexesAndMacromolecules.complexes[2],
                         output_segment.complexesAndMacromolecules.complexes[2])
        self.assertEqual(source_segment.complexesAndMacromolecules.macromolecules[0],
                         output_segment.complexesAndMacromolecules.macromolecules[0])
        self.assertEqual(source_segment.complexesAndMacromolecules.macromolecules[1],
                         output_segment.complexesAndMacromolecules.macromolecules[1])
        self.assertEqual(source_segment.complexesAndMacromolecules.macromolecules[2],
                         output_segment.complexesAndMacromolecules.macromolecules[2])
    def _test_clear(self):
        """Test that we can clear notes"""
        segment_name = 'my very nice segment'
        desc = 'a short description'
        num = tests._random_integer()
        extref = ['lsfj', 'sljfs', 'ldjss']
        complexes = ['09ej', 'euoisd', 'busdif']
        macromolecules = ['xuidh', '29hf98e', 'ygce']
        # add
        cmd = shlex.split(
            "notes add -i {} -s '{}' -D '{}' -E {} -n {} -C {} -M {} {} --config-path {}".format(
                self.segment_id,
                segment_name,
                desc,
                " ".join(extref),
                num,
                ','.join(complexes),
                ','.join(macromolecules),
                self.sff_file,
                self.config_fn,
            )
        )
        args, configs = parse_args(cmd)
        status = modify.add_note(args, configs)
        self.assertEqual(status, 0)
        # clear
        cmd1 = shlex.split(
            'notes clear --all {} --config-path {config_fn}'.format(
                self.sff_file,
                config_fn=self.config_fn,
            )
        )
        args1, configs1 = parse_args(cmd1)
        status1 = modify.clear_notes(args1, configs1)
        self.assertEqual(status1, 0)
        seg = schema.SFFSegmentation(self.sff_file)
        segment = seg.segments.get_by_id(self.segment_id)
        self.assertEqual(len(segment.biologicalAnnotation.externalReferences), 0)
    def _test_copy(self):
        """Test that we can copy notes"""
        # we have an annotated EMDB-SFF file
        # make a copy of the file for the test
        annotated_sff_file = os.path.join(os.path.dirname(self.annotated_sff_file),
                                          'temp_' + os.path.basename(self.annotated_sff_file))
        shutil.copy2(self.annotated_sff_file, annotated_sff_file)
        # use the file copy
        # before copy
        seg = schema.SFFSegmentation(annotated_sff_file)
        source_segment = seg.segments.get_by_id(15559)
        # copy
        cmd = "notes copy -i 15559 -t 15578 {} --config-path {}".format(
            annotated_sff_file,
            self.config_fn
        )
        status1 = modify.copy_notes(*parse_args(cmd, use_shlex=True))
        # debug
        cmd2 = "notes list {} --config-path {}".format(annotated_sff_file, self.config_fn)
        view.list_notes(*parse_args(cmd2, use_shlex=True))
        self.assertEqual(status1, 0)
        copied_seg = schema.SFFSegmentation(annotated_sff_file)
        copied_segment = copied_seg.segments.get_by_id(15578)
        self.assertEqual(len(source_segment.biologicalAnnotation.externalReferences),
                         len(copied_segment.biologicalAnnotation.externalReferences))
        self.assertEqual(source_segment.biologicalAnnotation.externalReferences[0].type,
                         copied_segment.biologicalAnnotation.externalReferences[0].type)
        self.assertEqual(source_segment.biologicalAnnotation.externalReferences[0].otherType,
                         copied_segment.biologicalAnnotation.externalReferences[0].otherType)
        self.assertEqual(source_segment.biologicalAnnotation.externalReferences[0].value,
                         copied_segment.biologicalAnnotation.externalReferences[0].value)
        # # get rid of the copy
        os.remove(annotated_sff_file)
class TestNotes_modify_sff(TestNotes_modify):
    """Run the shared modify templates against the XML (.sff) representation."""

    def setUp(self):
        super(TestNotes_modify_sff, self).setUp()
        data_dir = os.path.join(tests.TEST_DATA_PATH, 'sff', 'v0.7')
        self.sff_file = os.path.join(data_dir, 'emd_1014.sff')
        self.other = os.path.join(data_dir, 'other_emd_1014.sff')
        self.output = os.path.join(data_dir, 'output_emd_1181.sff')
        self.annotated_sff_file = os.path.join(data_dir, 'annotated_emd_1014.sff')

    def tearDown(self):
        # Strip every annotation so each test starts from a clean file.
        seg = schema.SFFSegmentation(self.sff_file)
        for segment in seg.segments:
            segment.biologicalAnnotation = schema.SFFBiologicalAnnotation()
            segment.complexesAndMacromolecules = schema.SFFComplexesAndMacromolecules()
        seg.export(self.sff_file)

    def test_add(self):
        self._test_add()

    def test_edit(self):
        self._test_edit()

    def test_del(self):
        self._test_del()

    def test_merge(self):
        self._test_merge()

    def test_clear(self):
        self._test_clear()

    def test_copy(self):
        self._test_copy()
# fixme: hff tests work but quadruple size of file
class TestNotes_modify_hff(TestNotes_modify):
    """Run the shared modify templates against the HDF5 (.hff) representation."""

    def setUp(self):
        super(TestNotes_modify_hff, self).setUp()
        data_dir = os.path.join(tests.TEST_DATA_PATH, 'sff', 'v0.7')
        self.sff_file = os.path.join(data_dir, 'emd_1014.hff')
        self.other = os.path.join(data_dir, 'other_emd_1014.hff')
        self.output = os.path.join(data_dir, 'output_emd_1014.hff')
        self.annotated_sff_file = os.path.join(data_dir, 'annotated_emd_1014.hff')

    def tearDown(self):
        # Strip every annotation so each test starts from a clean file.
        seg = schema.SFFSegmentation(self.sff_file)
        for segment in seg.segments:
            segment.biologicalAnnotation = schema.SFFBiologicalAnnotation()
            segment.complexesAndMacromolecules = schema.SFFComplexesAndMacromolecules()
        seg.export(self.sff_file)

    def test_add(self):
        self._test_add()

    def test_edit(self):
        self._test_edit()

    def test_del(self):
        self._test_del()

    def test_clear(self):
        self._test_clear()

    # fixme: can't figure out why this fails
    # def test_copy(self):
    #     self._test_copy()
class TestNotes_modify_json(TestNotes_modify):
    """Run the shared modify templates against the JSON representation."""

    def setUp(self):
        super(TestNotes_modify_json, self).setUp()
        data_dir = os.path.join(tests.TEST_DATA_PATH, 'sff', 'v0.7')
        self.sff_file = os.path.join(data_dir, 'emd_1014.json')
        self.other = os.path.join(data_dir, 'other_emd_1014.json')
        self.output = os.path.join(data_dir, 'output_emd_1181.json')
        self.annotated_sff_file = os.path.join(data_dir, 'annotated_emd_1014.json')

    def tearDown(self):
        # Strip every annotation so each test starts from a clean file.
        seg = schema.SFFSegmentation(self.sff_file)
        for segment in seg.segments:
            segment.biologicalAnnotation = schema.SFFBiologicalAnnotation()
            segment.complexesAndMacromolecules = schema.SFFComplexesAndMacromolecules()
        seg.export(self.sff_file)

    def test_add(self):
        self._test_add()

    def test_edit(self):
        self._test_edit()

    def test_del(self):
        self._test_del()

    def test_merge(self):
        self._test_merge()

    def test_clear(self):
        self._test_clear()

    def test_copy(self):
        self._test_copy()
class TestNotes_find(unittest.TestCase):
    """Tests for the 'notes search' command against live annotation resources."""

    @classmethod
    def setUpClass(cls):
        cls.config_fn = os.path.join(BASE_DIR, 'sff.conf')

    def _search(self, command):
        """Parse *command*, run the search, and fail the test on ValueError.

        Replaces the previous copy-pasted try/except blocks that printed the
        error to stderr and then called assertTrue(False); self.fail() puts
        the error message in the test report instead.
        """
        args, configs = parse_args(shlex.split(command))
        resource = find.SearchResource(args, configs)
        try:
            return resource.search()
        except ValueError as v:
            self.fail(str(v))

    def test_search_default(self):
        """Test default search parameters"""
        results = self._search("notes search 'mitochondria' --config-path {}".format(self.config_fn))
        self.assertGreater(len(results), 0)

    def test_search_no_results(self):
        """Test search that returns no results"""
        # I'm not sure when some biological entity with such a name will be discovered!
        results = self._search("notes search 'nothing' --exact --config-path {}".format(self.config_fn))
        self.assertEqual(len(results), 0)

    def test_search_exact_result(self):
        """Test that we get an exact result
        NOTE: this test is likely to break as the ontologies get updated
        """
        # this usually returns a single result
        args, configs = parse_args(shlex.split(
            "notes search 'DNA replication licensing factor MCM6' --exact --config-path {}".format(self.config_fn)))
        resource = find.SearchResource(args, configs)
        results = resource.search()
        self.assertEqual(len(results), 2)  # funny!

    def test_search_ontology(self):
        """Test that we can search an ontology"""
        # this search should bring at least one result
        results = self._search(
            "notes search 'mitochondria' --exact -O omit --config-path {}".format(self.config_fn))
        self.assertGreaterEqual(len(results), 1)

    def test_search_from_start(self):
        """Test that we can search from the starting index"""
        # this search usually has close to 1000 results; 100 is a reasonable start
        random_start = tests._random_integer(1, 970)
        results = self._search("notes search 'mitochondria' --start {} --config-path {}".format(
            random_start,
            self.config_fn,
        ))
        self.assertGreaterEqual(results.structured_response['response']['start'], random_start - 1)

    def test_search_result_rows(self):
        """Test that we get as many result rows as specified"""
        # this search usually has close to 1000 results; 100 is a reasonable start
        random_rows = tests._random_integer(10, 100)
        results = self._search("notes search 'mitochondria' --rows {} --config-path {}".format(
            random_rows,
            self.config_fn,
        ))
        self.assertGreaterEqual(len(results), random_rows)
# Allow running this test module directly with `python <module>.py`.
if __name__ == "__main__":
    unittest.main()
| 37,047 | 11,646 |
import Merge_Sorting
# Hard-coded input/output locations for this one-off sorting utility.
read_filename = "/home/vampy/data/test1"
write_filename = "/home/vampy/data/test2"

# Input format: first line holds the record count N, followed by N names.
# Fix: use context managers so the files are closed even on error.
with open(read_filename, "r") as fp:
    N = int(fp.readline())
    names = [fp.readline().strip() for _ in range(N)]

Merge_Sorting.mergesort(names)

# Write the sorted names back in the same format.  Fix: the names were
# stripped of their newlines on read, so a separator must be re-added --
# the original wrote them back-to-back on a single line.
with open(write_filename, "w") as fp:
    fp.write("{0}\n".format(N))
    for name in names:
        fp.write("{0}\n".format(name))
| 398 | 171 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
import os

# Project root: one directory above this config file.
basedir = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
# SQLite database stored inside the project directory (absolute URI).
SQLALCHEMY_DATABASE_URI = "sqlite:///" + os.path.join(basedir, "test.db")
| 252 | 102 |
"""A parser for YAML queries used by the Augmentor class."""
import yamale
schema = yamale.make_schema(content="""
input:
images:
loader: str()
options: map()
annotations:
loader: str()
options: map()
output:
images:
writer: str()
options: map()
annotations:
writer: str()
options: map()
augmentations: list(include('augmentation'))
save-original: bool()
save-bbox: bool()
---
augmentation:
name: str()
options: map(required=False)
""")
def load_query(filep):
    """Parse and validate a YAML query read from the open file ``filep``.

    Raises a yamale validation error when the query does not satisfy the
    module-level schema; otherwise returns the first document's data.
    """
    parsed = yamale.make_data(content=filep.read())
    yamale.validate(schema, parsed)
    # yamale returns a list of (data, path) pairs; keep the first data item.
    return parsed[0][0]
| 755 | 254 |
"""empty message
Revision ID: efd88a3c30a8
Revises: edf37629d0f4
Create Date: 2020-08-23 12:34:41.656176
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'efd88a3c30a8'
down_revision = 'edf37629d0f4'
branch_labels = None  # not part of any named branch
depends_on = None     # no cross-revision dependencies
def upgrade():
    """Create the polar_users table plus a unique index on profile_id."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('polar_users',
                    sa.Column('id', sa.Integer(), nullable=False),
                    sa.Column('profile_id', sa.Integer(), nullable=False),
                    sa.Column('member_id', sa.String(length=24), nullable=False),
                    sa.Column('polar_user_id', sa.Integer(), nullable=False),
                    sa.Column('state', sa.String(length=16), nullable=True),
                    sa.Column('access_token', sa.String(length=64), nullable=True),
                    sa.Column('access_token_expires', sa.DateTime(), nullable=True),
                    sa.Column('updated_at', sa.DateTime(), nullable=True),
                    sa.ForeignKeyConstraint(['profile_id'], ['user_profiles.id'], ),
                    sa.PrimaryKeyConstraint('id'),
                    sa.UniqueConstraint('member_id')
                    )
    op.create_index(op.f('ix_polar_users_profile_id'), 'polar_users', ['profile_id'], unique=True)
    # ### end Alembic commands ###
def downgrade():
    """Reverse upgrade(): drop the index, then the polar_users table."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(op.f('ix_polar_users_profile_id'), table_name='polar_users')
    op.drop_table('polar_users')
    # ### end Alembic commands ###
| 1,423 | 532 |
from .loss_computer import NCESoftmaxLoss
import torch.nn.functional as F
import torch.nn as nn
import torch
import logging
import pdb
logger = logging.getLogger()
class ContrastiveLoss(nn.Module):
    """SimCSE-style contrastive loss over two batches of sentence embeddings."""

    def __init__(self, loss_computer: str, temperature: float, args) -> None:
        super().__init__()
        self.device = args['device']
        if loss_computer == 'nce_softmax':
            self.loss_computer = NCESoftmaxLoss(self.device)
        else:
            raise NotImplementedError(f"Loss Computer {loss_computer} not Support!")
        # NOTE(review): stored but never read below -- forward() scales the
        # similarity by a hard-coded factor of 20 instead.  Confirm intent.
        self.temperature = temperature

    def forward(self, z_i, z_j):
        # SimCSE
        batch_size = z_i.size(0)
        # Row-normalise so the matmul below yields cosine similarities.
        emb = F.normalize(torch.cat([z_i, z_j]))
        # Subtracting 1e12 on the diagonal masks self-similarity from softmax.
        similarity = torch.matmul(emb, emb.t()) - torch.eye(batch_size*2).to(self.device) * 1e12
        similarity = similarity * 20
        loss = self.loss_computer(similarity)
        return loss
class FlatNCE(nn.Module):
    """FlatNCE-style contrastive loss over two augmented embedding batches.

    NOTE(review): forward() computes a per-sample `loss` tensor but has no
    return statement, so callers receive None.  The commented-out lines at
    the bottom sketch what appears to be the intended final reduction --
    confirm against the call site.
    """

    def __init__(self, temperature):
        # Scale applied to (negative - positive) logits in forward().
        self.temperature = temperature
        super().__init__()

    def forward(self, z_i, z_j):
        batch_size = z_i.size(0)
        # Stack both views: rows i and i+batch_size form a positive pair.
        features = torch.cat([z_i, z_j], dim=0)
        labels = torch.cat([torch.arange(batch_size) for i in range(2)], dim=0)
        labels = (labels.unsqueeze(0) == labels.unsqueeze(1)).float()
        features = F.normalize(features, dim=1)
        similarity_matrix = torch.matmul(features, features.T)
        # Remove self-similarity (the diagonal) from labels and similarities.
        mask = torch.eye(labels.shape[0], dtype=torch.bool)
        labels = labels[~mask].view(labels.shape[0], -1)
        similarity_matrix = similarity_matrix[~mask].view(similarity_matrix.shape[0], -1)
        positives = similarity_matrix[labels.bool()].view(labels.shape[0], -1)
        negatives = similarity_matrix[~labels.bool()].view(labels.shape[0], -1)
        # logits = torch.cat([positives, negatives], dim=1)
        labels = torch.zeros(positives.shape[0], dtype=torch.long)
        logits = (negatives - positives)/self.temperature
        clogits = torch.logsumexp(logits, dim=1, keepdim=True)
        # Value is constant (exp(0)) but carries the FlatNCE gradient.
        loss = torch.exp(clogits - clogits.detach())
        # _, features = self.model(images)
        # logits, labels = self.flat_loss(features)
        # v = torch.logsumexp(logits, dim=1, keepdim=True) #(512,1)
        # loss_vec = torch.exp(v-v.detach())
        # assert loss_vec.shape == (len(logits),1)
        # dummy_logits = torch.cat([torch.zeros(logits.size(0),1).to(self.args.device), logits],1)
        # loss = loss_vec.mean()-1 + self.criterion(logits, labels).detach()
| 2,497 | 895 |
# -*- coding: utf-8 -*-
"""
Created on Mon Jan 15 12:16:14 2018
@author: gdavila
This is a example to get Full Band Channel information
On Docsis 3.0 Full Band Channels is a feature that allows to get detailed info
about the power distribution of the espectrum
"""
import docsisMon.cmDevices as cmDevices
from docsisMon.snmp import SnmpError
import time
import ggplot
import sys
def asint(s):
    """Sort key: numeric strings compare by value, others sort last by text."""
    try:
        numeric = int(s)
    except ValueError:
        return sys.maxsize, s
    return numeric, ''
def format_fb_data(data):
    """Decode full-band-capture SNMP payloads into parallel lists.

    `data` maps segment keys to hex strings.  Each value packs, as 8-hex-digit
    big-endian fields starting at char 2: center frequency, span, sample
    count and resolution bandwidth, followed by 4-hex-digit power samples in
    hundredths of a dB.  Returns (frequencies, powers) or None when `data`
    is None.
    """
    spectrum_freq = []
    spectrum_pot = []
    if data is not None:
        # Sort keys numerically when possible so segments come out in order.
        for key in sorted(data, key=asint):
            center_frec = int('0x'+data[key][2:10], 16)
            span = int('0x'+data[key][10:18], 16)
            samples = int('0x'+data[key][18:26], 16)
            resolution_bw = int('0x'+data[key][26:34], 16)
            # Samples start at char 42; chars 34-42 are skipped -- presumably
            # another header field.  TODO confirm against the MIB definition.
            offset = 42
            for i in range(0, samples):
                frec = (center_frec-span/2)+i*resolution_bw
                dec_value = int('0x'+data[key][offset+i*4:offset+i*4+4], 16)
                # Values above 32767 are treated as negative.  NOTE(review):
                # this subtracts 65535, not 65536 -- possible off-by-one vs.
                # true two's complement; preserved as-is.
                if dec_value > 32767:
                    value = (dec_value-65535)/100
                else:
                    value = dec_value/100
                item = [frec, round(value, 2)]
                spectrum_freq.append(item[0])
                spectrum_pot.append(item[1])
        return spectrum_freq, spectrum_pot
    else:
        return None
def main():
    """Trigger a full-band capture on one cable modem and return the data.

    Returns format_fb_data(...) on success; falls through (returns None)
    after an SnmpError.
    """
    try:
        myIP = '10.218.49.38'  # hard-coded test modem
        myCm = cmDevices.Cm(myIP)
        myCmModel = myCm.getModel()
        print ("CM IP:\t", myIP)
        print ("CM Model:\t", myCmModel)
        print ("CM Firmware:\t", myCm.getSw_rev())
        # Accessing the Docsis interfaces
        myCmDocIf = myCm.DocsIf()
        # Getting the MAC address of the Docsis interface (CM)
        myCmMac = myCmDocIf.getMac()
        print ("CM Mac:\t", myCmMac)
        # Getting full band capture information
        print ("Full Band Capture Information:")
        print("Modelo \t\tTiempo Espera SET/GET(s) \tTiempo de Descarga FB data(s)\t Result\t\t Nro Muestras")
        for i in range(1,2):
            data = {}
            fbc = myCm.fbc()
            fbc.turnOff()
            time.sleep(2)
            # Capture parameters: 50 MHz - 1 GHz, 10 MHz segments, 250 bins.
            fbc.inactivityTimeout = 300
            fbc.firstFrequency = 50000000
            fbc.lastFrequency = 1000000000
            fbc.span = 10000000
            fbc.binsPerSegment = 250
            fbc.noisebandwidth = 150
            fbc.numberOfAverages = 1
            fbc.config()
            timeConfig = time.time()
            result = 'data OK'
            timeGet = time.time()
            data = fbc.get()
            timeResponse = time.time()
            # Poll once per second until data arrives, give up after 600 s.
            while(data == {}):
                time.sleep(1)
                if (time.time() - timeConfig > 600): break
                timeGet = time.time()
                data = fbc.get()
                timeResponse = time.time()
            print(str(i)+" "+myCm.getModel() +'\t\t\t' + str(round(timeGet-timeConfig)) + \
                '\t\t\t'+ str(round(timeResponse - timeGet)) + '\t\t\t'+ str(result)+'\t\t'+ str(len(format_fb_data(data)[0])))
            return(format_fb_data(data))
    except SnmpError as e:
        print(e)
        result = e
# Run the capture and plot power vs. frequency.
# NOTE(review): when main() hits an SnmpError it returns None, and this
# unpacking then raises TypeError -- confirm the intended failure mode.
freq, pot= main()
ggplot.qplot(freq[0:], pot[0:], geom="line")
| 3,359 | 1,180 |
# -*- coding: utf-8 -*-
"""
=============================
Crack Egg
=============================
This an example of how to crack an egg (take a slice of subjects/lists from it)
"""
# Code source: Andrew Heusser
# License: MIT
# import
import quail

# Load the packaged example dataset (an "egg" of free-recall data).
egg = quail.load_example_data()

# Crack the egg: keep only the first 5 subjects and the first 4 lists,
# then print a summary of the resulting slice.
cracked_egg = quail.crack_egg(egg, subjects=range(5), lists=range(4))
cracked_egg.info()
| 398 | 149 |
#!/usr/bin/env python
# coding: utf-8
import pandas as pd
from statsmodels.stats.proportion import proportion_confint
from itertools import combinations
# NOTE(review): `data`, `pcr`, `misc` and `tqdm` are used below but never
# defined/imported in this file -- this reads like a notebook export whose
# setup cells are missing.  Verify before running standalone.

# remove patients for which we do not have any covid info
data = data[(data['MRN'].isin(pcr['MRN'])) | (data['MRN'].isin(misc['mrn']))]
data['MISCFound'] = data['MRN'].isin(misc['mrn'])

# Enumerate every lab combination of size 1..5.
lab_names = data['Lab'].unique()
l1 = list(combinations(lab_names, 1))
l2 = list(combinations(lab_names, 2))
l3 = list(combinations(lab_names, 3))
l4 = list(combinations(lab_names, 4))
l5 = list(combinations(lab_names, 5))
patterns = l1 + l2 + l3 + l4 + l5

# For each combo: patients (MRNs) tested at least once for EVERY lab in it,
# split by MIS-C status.
appended_data = []
for lab_names in tqdm(patterns):
    df = data[data['Lab'].isin(lab_names)]
    ct = pd.crosstab(df['MRN'], df['Lab'])
    cp = ct[(ct.T != 0).all()].reset_index()['MRN']
    vc = data[data['MRN'].isin(cp)].reset_index()
    appended_data.append(
        {
            'Combo': ', '.join(lab_names),
            'Size of Double Tested Population': len(vc['MRN'].unique()),
            'MISC Negative Patients Tested for Combo': len(vc[vc['MISCFound'] == False]['MRN'].unique()),
            'MISC Positive Patients Tested for Combo': len(vc[vc['MISCFound'] == True]['MRN'].unique()),
        })
combinations_total = pd.DataFrame(appended_data).reset_index(drop = True)

# Same aggregation, restricted to abnormal results only.
appended_data = []
abnormal = data[data['Result Type'] == 'abnormal']
for lab_names in tqdm(patterns):
    df = abnormal[abnormal['Lab'].isin(lab_names)]
    ct = pd.crosstab(df['MRN'], df['Lab'])
    cp = ct[(ct.T != 0).all()].reset_index()['MRN']
    vc = abnormal[abnormal['MRN'].isin(cp)].reset_index()
    appended_data.append(
        {
            'Combo': ', '.join(lab_names),
            'Size of Abnormal Double Tested Population': len(vc['MRN'].unique()),
            'MISC Negative Patients Abnormal for Combo': len(vc[vc['MISCFound'] == False]['MRN'].unique()),
            'MISC Positive Patients Abnormal for Combo': len(vc[vc['MISCFound'] == True]['MRN'].unique()),
        })
combinations_abnormal = pd.DataFrame(appended_data).reset_index(drop = True)

# NOTE(review): this assignment shadows itertools.combinations imported at
# the top; harmless here since the function is no longer called, but rename
# the DataFrame if this script is extended.
combinations = combinations_total.merge(combinations_abnormal, on = 'Combo', how = 'left')
combinations['Combo Size'] = combinations['Combo'].str.count(',').apply(lambda x: x + 1)
combinations['Within double-tested population, of all people who had abnormal for all blood tessts in Combo, how many also had MISC'] = (combinations['MISC Positive Patients Abnormal for Combo'] / (combinations['MISC Positive Patients Abnormal for Combo'] + combinations['MISC Negative Patients Abnormal for Combo'])).round(2)
combinations['Within double-tested population, of all people who had MISC, how many had abnormal for all blood tessts in Combo positive'] = (combinations['MISC Positive Patients Abnormal for Combo'] / (combinations['MISC Positive Patients Tested for Combo'])).round(2)
denom = (combinations['MISC Positive Patients Abnormal for Combo'] + combinations['MISC Negative Patients Abnormal for Combo'])
# https://www.statsmodels.org/stable/generated/statsmodels.stats.proportion.proportion_confint.html
# Statistical Science 16:101-133 suggests that Wilson or Jeffreys methods for small n and Agresti-Coull, Wilson, or Jeffreys, for larger n.
ci_low, ci_upp = proportion_confint(combinations['MISC Positive Patients Abnormal for Combo'], denom, alpha=0.05, method='wilson')
combinations['Lower CI 1'] = ci_low.round(2)
combinations['Upper CI 1'] = ci_upp.round(2)
# Second confidence interval: abnormal-for-combo among MIS-C positives.
ci_low, ci_upp = proportion_confint(combinations['MISC Positive Patients Abnormal for Combo'], combinations['MISC Positive Patients Tested for Combo'], alpha=0.05, method='wilson')
combinations['Lower CI 2'] = ci_low.round(2)
combinations['Upper CI 2'] = ci_upp.round(2)
# Final column order for export/presentation.
cols = ['Combo',
        'Combo Size',
        'Size of Double Tested Population',
        'Size of Abnormal Double Tested Population',
        'MISC Negative Patients Tested for Combo',
        'MISC Positive Patients Tested for Combo',
        'MISC Negative Patients Abnormal for Combo',
        'MISC Positive Patients Abnormal for Combo',
        'Within double-tested population, of all people who had abnormal for all blood tessts in Combo, how many also had MISC',
        'Lower CI 1', 'Upper CI 1',
        'Within double-tested population, of all people who had MISC, how many had abnormal for all blood tessts in Combo positive',
        'Lower CI 2', 'Upper CI 2'
        ]
combinations = combinations[cols]
| 4,674 | 1,596 |
from distutils.core import setup
from distutils.extension import Extension
from Cython.Distutils import build_ext
# Cython extension modules to compile (one per .pyx source).
ext_modules=[
    Extension("BBox", ["BBox.pyx"]),
    Extension("spirals", ["spirals.pyx"]),
    Extension("wordle", ["wordle.pyx"]),
]

setup(
    name = 'wordle',
    cmdclass = {'build_ext': build_ext},
    ext_modules = ext_modules,
)

# from terminal run the following command for cythonization
# python setup.py build_ext --inplace
| 488 | 168 |
# encoding: utf-8
from django.db import models
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
import magic, os, urllib2
from django.core.files.storage import default_storage as storage
class FileTypeTester(models.Model):
    """Abstract mixin that lazily detects and caches a file field's MIME type.

    Concrete models set `file_type_tester_fieldname` to the name of the
    FileField to inspect; the detected type is cached in `file_typemime`.
    Fix: the original defined `is_zip` twice with identical bodies; the
    duplicate has been removed.
    """

    class Meta:
        abstract = True

    # Name of the FileField on the concrete model.
    file_type_tester_fieldname = 'file'
    # Per-instance caches for the file object and its URL.
    file_type_tester_file = None
    file_type_tester_url = None
    file_typemime = models.CharField(_(u"Type Mime"), max_length=254, blank=True, null=True)

    def get_file_file(self, **kwargs):
        """Return (and cache) the underlying file object of the field."""
        if self.file_type_tester_file:
            return self.file_type_tester_file
        fieldname = self.file_type_tester_fieldname
        if not hasattr(self, fieldname):
            raise Exception("Field '%s' doesn't exists in this FileTypeTesterMixin model" % fieldname)
        else:
            file = getattr(self, fieldname)
            self.file_type_tester_file = file.file
            return file.file

    def get_file_url(self, **kwargs):
        """Return (and cache) the storage URL of the field."""
        if self.file_type_tester_url:
            return self.file_type_tester_url
        fieldname = self.file_type_tester_fieldname
        if not hasattr(self, fieldname):
            raise Exception("Field '%s' doesn't exists in this FileTypeTesterMixin model" % fieldname)
        else:
            file = getattr(self, fieldname)
            self.file_type_tester_url = file.url
            return file.url

    #TODO: open file with S3 storage system to retrieve typemime (storage.open(file) -> and get headers)
    def get_file_mimetype(self, fieldname=None, **kwargs):
        """Detect, cache (in `file_typemime`) and return the MIME type.

        Tries python-magic on the local path first; for remote storage
        (e.g. S3) falls back to fetching the URL headers.  Returns None
        when the type cannot be determined.
        """
        if self.file_typemime:
            return self.file_typemime
        if fieldname == None:
            fieldname = self.file_type_tester_fieldname
        if not hasattr(self, fieldname):
            raise Exception("Field '%s' doesn't exists in this FileTypeTesterMixin model" % fieldname)
        else:
            file = getattr(self, fieldname)
            #return magic.from_file(str(storage.open(file.name)), mime=True)
            try:
                # Normal storage condition
                self.file_typemime = magic.from_file(file.path, mime=True)
                self.save()
                return self.file_typemime
            except:
                # NOTE(review): bare except kept to preserve the deliberate
                # best-effort behaviour, but it also hides real errors.
                # if there is no name, stop right here
                if not file.name:
                    self.file_typemime = None
                    return self.file_typemime
                # Type static S3 amazon collection
                try:
                    url = os.path.join(settings.MEDIA_URL, file.name)
                    # only fetch when the URL looks absolute
                    if any([
                            url.startswith('//'),
                            url.startswith('http://'),
                            url.startswith('https://')]):
                        file = urllib2.urlopen(url)
                        self.file_typemime = file.info().gettype()
                        self.save()
                    else:
                        self.file_typemime = None
                except:
                    self.file_typemime = None
                return self.file_typemime

    def is_image(self, **kwargs):
        """True for common raster image MIME types."""
        mimetype = self.get_file_mimetype()
        return mimetype in ['image/rgb', 'image/gif', 'image/pbm', 'image/pgm', 'image/ppm',
                            'image/tiff', 'image/rast', 'image/xbm', 'image/jpeg', 'image/bmp', 'image/png', 'image/x-icon']

    def is_zip(self, **kwargs):
        """True for ZIP archives."""
        return self.get_file_mimetype() in ['application/zip']

    def is_pdf(self, **kwargs):
        """True for PDF documents."""
        return self.get_file_mimetype() in ['application/pdf']

    def is_powerpoint(self, **kwargs):
        """True for PowerPoint formats (legacy and OOXML)."""
        return self.get_file_mimetype() in ['application/vnd.ms-powerpoint', 'application/vnd.openxmlformats-officedocument.presentationml.presentation',
                                            'application/vnd.openxmlformats-officedocument.presentationml.slideshow']

    def is_word(self, **kwargs):
        """True for Word formats.

        NOTE(review): the list also includes 'application/vnd.ms-excel' --
        confirm whether Excel files should really count as Word documents.
        """
        return self.get_file_mimetype() in ['application/msword', 'application/vnd.openxmlformats-officedocument.wordprocessingml.document',
                                            'application/vnd.ms-excel', 'vnd.ms-word.document']
# A single node in the trie tree: maps a key (character) to a child node.
class Node(object):
    def __init__(self):
        self.children = {}

    def insert_key(self, key):
        """Add a child for `key`; an existing child is left untouched."""
        self.children.setdefault(key, Node())
# A (single-level) "trie" that records the distinct characters of one word.
class Trie(object):
    def __init__(self, word):
        self.root = 'root'
        self.children = Node()
        for character in word:
            self.children.insert_key(character)

    def traverse(self):
        """Print the root node's child mapping (one level only)."""
        print(self.children.children)
# Demo: build the structure from "alpha" and print its child mapping.
t = Trie('alpha')
t.traverse()
| 484 | 184 |
from App import App

# Instantiate the application and hand control to its main loop.
app = App()
app.main()
| 44 | 17 |
# flake8: noqa
# set of functions
from . import aug
from . import data
from . import geometry
from . import func
from . import image
from . import utils
from . import _io as io
# set of classes
from . import datasets
from . import models
# set of experimental modules
from . import contrib
| 293 | 82 |
#!/usr/bin/python3
# Copyright (C) 2021 Sam Steele
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os, ntpath, json, pytz, urllib
import xml.etree.ElementTree as ET
from datetime import datetime
from influxdb import InfluxDBClient
from influxdb.exceptions import InfluxDBClientError
# Timezone used to convert RetroArch's naive "last played" timestamps to UTC.
LOCAL_TIMEZONE = pytz.timezone('America/New_York')
# One playtime-log folder per libretro core lives under this directory.
RETROARCH_LOGS = '/home/ark/.config/retroarch/playlists/logs/'
EMULATIONSTATION_ROMS = '/roms'
# Public URL prefix under which box-art images are served.
IMAGE_WEB_PREFIX = 'https://example.net/retroarch_images/'
INFLUXDB_HOST = 'localhost'
INFLUXDB_PORT = 8086
INFLUXDB_USERNAME = 'root'
INFLUXDB_PASSWORD = 'root'
INFLUXDB_DATABASE = 'gaming'
points = []

# Connect to InfluxDB and make sure the target database exists.
try:
    client = InfluxDBClient(host=INFLUXDB_HOST, port=INFLUXDB_PORT, username=INFLUXDB_USERNAME, password=INFLUXDB_PASSWORD)
    client.create_database(INFLUXDB_DATABASE)
    client.switch_database(INFLUXDB_DATABASE)
except InfluxDBClientError as err:
    print("InfluxDB connection failed: %s" % (err))
    # Fix: this module never imports `sys`, so the original sys.exit()
    # raised NameError here; SystemExit needs no import.
    raise SystemExit

# Index EmulationStation ROM metadata by filename stem ("key").
roms = {}
for platform in os.listdir(EMULATIONSTATION_ROMS):
    if os.path.exists(EMULATIONSTATION_ROMS + '/' + platform + '/gamelist.xml'):
        gamelist = ET.parse(EMULATIONSTATION_ROMS + '/' + platform + '/gamelist.xml').getroot()
        for game in gamelist.findall('game'):
            if gamelist.find('provider/System') != None:
                rom = {}
                rom['name'] = game.find('name').text
                rom['filename'] = ntpath.basename(game.find('path').text)
                rom['key'] = os.path.splitext(rom['filename'])[0]
                rom['path'] = platform
                rom['platform'] = gamelist.find('provider/System').text
                if(rom['platform'] == 'Mame'):
                    rom['platform'] = 'Arcade'
                roms[rom['key']] = rom

# For each core, diff the new playtime totals against the last totals stored
# in InfluxDB and queue one point per game whose playtime increased.
for core in os.listdir(RETROARCH_LOGS):
    totals = client.query('SELECT last("total") AS "total" FROM "time" WHERE "total" > 0 AND "player_id" = \'' + core + '\' GROUP BY "application_id" ORDER BY "time" DESC')
    for log in os.listdir(RETROARCH_LOGS + '/' + core):
        key = os.path.splitext(log)[0]
        if key in roms:
            with open(RETROARCH_LOGS + '/' + core + '/' + log, 'r') as f:
                playtime = json.load(f)
            rom = roms[key]
            # Runtime is stored as "H:MM:SS"; convert to seconds.
            h, m, s = playtime['runtime'].split(':')
            runtime = value = int(h) * 3600 + int(m) * 60 + int(s)
            total = list(totals.get_points(tags={'application_id': rom['key']}))
            if len(total) == 1 and total[0]['total'] > 0:
                value -= total[0]['total']
            if value > 1:
                time = datetime.fromisoformat(playtime['last_played'])
                utc_time = LOCAL_TIMEZONE.localize(time).astimezone(pytz.utc).isoformat()
                points.append({
                    "measurement": "time",
                    "time": utc_time,
                    "tags": {
                        "player_id": core,
                        "application_id": rom['key'],
                        "platform": rom['platform'],
                        "player_name": core,
                        "title": rom['name'],
                    },
                    "fields": {
                        "value": int(value),
                        "total": runtime,
                        "image": IMAGE_WEB_PREFIX + urllib.parse.quote(rom['path']) + '/' + urllib.parse.quote(rom['key']) + '.png',
                        "url": 'https://thegamesdb.net/search.php?name=' + urllib.parse.quote_plus(rom['name'])
                    }
                })

try:
    client.write_points(points)
except InfluxDBClientError as err:
    print("Unable to write points to InfluxDB: %s" % (err))
    # Same fix as above: SystemExit instead of the unimported sys.exit().
    raise SystemExit
print("Successfully wrote %s data points to InfluxDB" % (len(points)))
| 3,734 | 1,512 |
import numpy as np
def generate_map(lines, diags=False):
    """Rasterise vent lines (AoC 2021 day 5) onto a counts grid.

    lines: int array of shape (n, 2, 2), one [[x1, y1], [x2, y2]] per line.
    diags: when True, also draw the 45-degree diagonal lines.
    Returns a uint32 grid indexed [y, x] holding per-cell line counts.
    """
    vents = np.zeros((np.max(lines[:, :, 1]) + 1, np.max(lines[:, :, 0]) + 1), dtype=np.uint32)
    for l in lines:
        # Fix: the original comments had these two cases swapped --
        # equal x-coordinates means the line is vertical, equal y horizontal.
        if l[0][0] == l[1][0]:  # vertical line (constant x)
            vents[np.min(l[:, 1]):np.max(l[:, 1]) + 1, l[0][0]] += 1
        elif l[0][1] == l[1][1]:  # horizontal line (constant y)
            vents[l[0][1], np.min(l[:, 0]):np.max(l[:, 0])+1] += 1
        elif diags:
            n = np.abs(l[1] - l[0])[0] + 1  # number of points on the diagonal
            d = (l[1] - l[0])//(n - 1)  # per-axis step, components in {-1, +1}
            for i in range(n):
                # Clearer equivalent of the original reversed/zip indexing:
                # mark the single cell at (x, y) = l[0] + i*d.
                x, y = l[0] + i * d
                vents[y, x] += 1
    return vents
if __name__ == '__main__':
    # Process the text input: each line reads "x1,y1 -> x2,y2".
    with open('input.txt') as f:
        pin = f.readlines()
    lines = np.array([[[int(z) for z in y.split(',')] for y in x.strip().split('->')] for x in pin])
    # Generate the grid of line overlaps; answers count cells covered twice+.
    print(f"Solution 1: {np.sum(generate_map(lines) > 1)}")
    print(f"Solution 2: {np.sum(generate_map(lines, diags=True) > 1)}")
| 1,092 | 458 |
import sys
from hypothesis import given
from rene import Orientation
from . import strategies
@given(strategies.orientations)
def test_round_trip(orientation: Orientation) -> None:
    """repr() of an Orientation must evaluate back to the very same object."""
    result = repr(orientation)
    # sys.modules serves as the globals mapping so a qualified repr such as
    # 'rene.Orientation.X' can resolve the top-level module by name.
    assert eval(result, sys.modules) is orientation
| 269 | 77 |
from PySide2.QtWidgets import QWidget
from PySide2.QtCore import Qt
from PySide2.QtCore import QPoint
from PySide2.QtWidgets import QWidget, QLabel
from PySide2.QtGui import QPainter
from PySide2.QtGui import QPixmap
from PySide2.QtGui import QPen
from draw_utils import is_adjacent, is_contained,is_intersect
from rectangle import Rectangle
from constants import RECT_A,RECT_B,PEN_WIDTH
"""
Rectangle Creator:
* GUI to create rectangles
* Extended from QWidget
"""
class RectangleCreator(QWidget):
    """Widget that lets the user draw up to two rectangles and compares them."""

    def __init__(self):
        super().__init__()
        """ Setup """
        self.setMouseTracking(True)
        self.begin = QPoint()   # press position of the rectangle being drawn
        self.end = QPoint()     # current drag / release position
        self.coord_list = []    # [begin, end] pairs of committed rectangles (max 2)
        self.rect_list = []     # Rectangle objects, rebuilt on every paint
        self.clicked = False    # True while the mouse button is held down

    """
    Paint Event
    * Paints Rectangles onto a Pixmap from a list of coordinates
    * Stores created rectangles in a list
    * Rectangle store is cleared and rebuild each iteration
    """
    def paintEvent(self, event):
        """Create pallet"""
        pixmap = QPixmap()
        painter = QPainter(self)
        painter.drawPixmap(self.rect(), pixmap)
        pen = QPen()
        pen.setWidth(PEN_WIDTH)
        pen.setColor(Qt.black)
        painter.setPen(pen)
        """Rebuild rectangle store"""
        self.rect_list.clear()
        for coord in self.coord_list:
            rec = Rectangle(coord[RECT_A], coord[RECT_B])
            self.rect_list.append(rec)
            painter.drawRect(rec)
        # While not dragging there is no in-progress rectangle to draw.
        if not self.clicked:
            return
        """Create new rectangle"""
        rec = Rectangle(self.begin, self.end)
        self.rect_list.append(rec)
        painter.drawRect(rec)

    """
    mousePressEvent
    * Deletes oldest rectangle from the coordinate list
    * Updates begin and end values
    * Tracks click for use in display of rectangles
    """
    def mousePressEvent(self, event):
        """Remove oldest"""
        if len(self.coord_list) > 1:
            self.coord_list.pop(0)
        """Update tracking variables"""
        self.begin = event.pos()
        self.end = event.pos()
        self.clicked = True
        self.update()

    """
    mouseMoveEvent
    * Updates endpoint
    * Updates Coordinates on display
    """
    def mouseMoveEvent(self, event):
        self.end = event.pos()
        # Live cursor coordinates are shown in the window title.
        self.setWindowTitle('Coordinates: ( x = %d : y = %d )' % (event.x(), event.y()))
        self.update()

    """
    mouseReleaseEvent
    * Checks for position of start and end points of rectangle
    * Transforms rectangle so start is topleft and end is bottom right
    * Adds rectangle coordinates to the coordinates list
    * If two rectangle exist:
    * Runs test for Adjacent, contained and intersection
    """
    def mouseReleaseEvent(self, event):
        """Needs horizontal flip?"""
        if self.begin.x() > self.end.x() and self.begin.y() < self.end.y():
            if len(self.rect_list) == 1:
                self.rect_list[RECT_A] = self.flip_hor(self.rect_list[RECT_A])
            else:
                self.rect_list[RECT_B] = self.flip_hor(self.rect_list[RECT_B])
        """Needs vertical flip?"""
        if self.begin.x() < self.end.x() and self.begin.y() > self.end.y():
            if len(self.rect_list) == 1:
                self.rect_list[RECT_A] = self.flip_ver(self.rect_list[RECT_A])
            else:
                self.rect_list[RECT_B] = self.flip_ver(self.rect_list[RECT_B])
        """Needs refection?"""
        if self.begin.x() > self.end.x() and self.begin.y() > self.end.y():
            if len(self.rect_list) == 1:
                self.rect_list[RECT_A] = self.reflect(self.rect_list[RECT_A])
            else:
                self.rect_list[RECT_B] = self.reflect(self.rect_list[RECT_B])
        self.clicked = False
        self.update()
        """Add new coordinates to the coordinates list"""
        self.coord_list.append([self.begin, self.end])
        """Run Tests"""
        if len(self.coord_list) == 2:
            is_adjacent(self.rect_list[RECT_A], self.rect_list[RECT_B], silent=False)
            contained = is_contained(self.rect_list[RECT_A], self.rect_list[RECT_B])
            if not contained:
                contained = is_contained(self.rect_list[RECT_B], self.rect_list[RECT_A])
            # Intersection only matters when neither contains the other.
            if not contained:
                is_intersect(self.rect_list[RECT_A], self.rect_list[RECT_B])
            print('------')

    """
    flip_hor
    * Call rectangle flip_h function
    * Flip start and end points horizontal
    """
    def flip_hor(self, rect):
        rect.flip_h()
        self.begin = rect.topLeft()
        self.end = rect.bottomRight()
        return rect

    """
    flip_ver
    * Calls rectangle flip_v function and
    * Flip start and end points vertical
    """
    def flip_ver(self, rect):
        rect.flip_v()
        self.begin = rect.topLeft()
        self.end = rect.bottomRight()
        return rect

    """
    reflect
    * Calls flip_hor then flip_ver to produce a reflection of the start and end points
    * Same as above for the input rectangle coordinates
    """
    def reflect(self, rect):
        rect = self.flip_hor(rect)
        rect = self.flip_ver(rect)
        return rect
| 5,254 | 1,656 |
from gensim import models

# Load pretrained Korean fastText vectors (Facebook binary format).
model = models.fasttext.load_facebook_model('cc.ko.300.bin')

# Two equivalent ways to list the 5 nearest neighbours of the query word.
for w, sim in model.wv.similar_by_word('치킨', 5):
    print(f'{w}: {sim}')
for w, sim in model.wv.most_similar('치킨', topn=5):
    print(f'{w}: {sim}')
# Cosine similarity between the two words.
print(model.wv.similarity('치킨', '피자'))
# Vector-arithmetic analogy query (positive terms minus negative terms).
print(model.wv.most_similar(positive=['돼지', '소고기'], negative=['야채'], topn=1))
import toml
from discord.ext import commands
import discord
import pathlib
from peewee import SqliteDatabase, fn
import db_model as db
import datetime
from dataclasses import dataclass
import matplotlib.pyplot as plt
import asyncio
# Paths and configuration resolved relative to this script's directory.
SKRIPTPFAD = pathlib.Path(__file__).parent
CONFIGPFAD = SKRIPTPFAD / "config.toml"
CONFIG = toml.load(CONFIGPFAD)

bot = commands.Bot(command_prefix="!")

# Bind the peewee proxy to the on-disk SQLite DB and ensure tables exist.
db.DB_PROXY.initialize(SqliteDatabase(SKRIPTPFAD / "mining_shoots.db3"))
db.create_tables()
@dataclass
class UCShoot:
    """One recorded kill ("UC shoot") before it is persisted."""
    date: datetime.date  # day of the kill
    allianz: str         # enemy alliance tag (digits are rejected upstream)
    enemy: str           # enemy player name
    player: str          # NOTE(review): callers pass a db.User despite the str hint
def check_input_allianz(input_):
    """Validate an alliance tag: raise ValueError if it contains any digit."""
    for character in input_:
        if character.isdigit():
            raise ValueError(f"digit in {input_!r} not allowed")
def check_user(userid):
    """Return the registered db.User for this Discord id, or None."""
    return db.User.get_or_none(db.User.userid == userid)
def get_or_create_unbekannter_user():
    """Return the placeholder user (userid=1) that adopts deleted users' data.

    Its `datum` timestamp is refreshed on every call.
    """
    user, _ = db.User.get_or_create(userid=1, name="unbekannter_user")
    user.datum = datetime.datetime.now()
    user.save()
    return user
def create_uc_shoot(user, daten):
    """Build a UCShoot from a command's argument tuple.

    Accepted forms:
      (allianz, enemy)        -> dated today
      (date, allianz, enemy)  -> date given as dd.mm.YYYY

    Returns None for any other argument count.  Raises ValueError when the
    alliance tag contains a digit.
    """
    if len(daten) == 2:
        check_input_allianz(daten[0])
        return UCShoot(
            date=datetime.date.today(),
            allianz=daten[0],
            enemy=daten[1],
            player=user
        )
    if len(daten) == 3:
        check_input_allianz(daten[1])
        return UCShoot(
            date=datetime.datetime.strptime(daten[0], "%d.%m.%Y").date(),
            allianz=daten[1],
            enemy=daten[2],
            player=user
        )
    return None
def schreibe_in_datenbank(daten):
    """Persist a UCShoot; alliance is stored uppercase, enemy lowercase."""
    db.UCShoots.create(
        date=daten.date,
        allianz=daten.allianz.upper(),
        enemy=daten.enemy.lower(),
        player=daten.player
    )
def load_players():
    """Kill counts grouped by enemy player (with one alliance tag each)."""
    query = db.UCShoots.raw("select enemy, allianz, count(enemy) as cenemy from ucshoots group by enemy")
    return query
def load_allys():
    """Kill counts grouped by enemy alliance."""
    query = db.UCShoots.raw("select allianz, count(enemy) as cenemy from ucshoots group by allianz")
    return query
@bot.command(name="reg",
help=CONFIG["texte"]["reg"]["help"],
brief=CONFIG["texte"]["reg"]["brief"])
async def user_registrieren(ctx):
channel = ctx.channel
author = ctx.author
await ctx.channel.send(CONFIG["texte"]["reg"]["zustimmung_text"])
def check(m):
return m.content.lower() == CONFIG["texte"]["reg"][
"zustimmung_antwort"].lower() and m.channel == channel and m.author == author
try:
msg = await bot.wait_for('message', timeout=60*2, check=check)
except asyncio.TimeoutError:
await channel.send('👎 - Timeout')
else:
if msg.content.lower() == CONFIG["texte"]["reg"]["zustimmung_antwort"].lower():
db.User.get_or_create(userid=ctx.author.id, name=ctx.author.name, datum=datetime.datetime.now())
await ctx.channel.send('Benutzer registriert, Bot kann nun verwendet werden')
@bot.command(name="del",
help=CONFIG["texte"]["del"]["help"],
brief=CONFIG["texte"]["del"]["brief"])
async def user_loeschen(ctx):
user = check_user(ctx.author.id)
if user is not None:
channel = ctx.channel
author = ctx.author
await ctx.channel.send(CONFIG["texte"]["del"]["text"])
def check(m):
return m.content.lower() in CONFIG["texte"]["del"][
"antworten"] and m.channel == channel and m.author == author
try:
msg = await bot.wait_for('message', timeout=60 * 2, check=check)
except asyncio.TimeoutError:
await channel.send('👎 - Timeout')
else:
if msg.content.lower() == "ja":
user = check_user(ctx.author.id)
unbekannter_user = get_or_create_unbekannter_user()
db.UCShoots.update(player=unbekannter_user).where(db.UCShoots.player == user).execute()
db.User.delete().where(db.User.userid == user.userid).execute()
await ctx.channel.send('Benutzer gelöscht')
else:
await ctx.channel.send('Löschung abgebrochen')
else:
await ctx.channel.send("Benutzer unbekannt")
@bot.command(name="dia",
help=CONFIG["texte"]["dia"]["help"],
brief=CONFIG["texte"]["dia"]["brief"])
async def kuchen_backen(ctx):
user = check_user(ctx.author.id)
if user is not None:
labels = []
values = []
query = load_allys()
for datum in query:
labels.append(datum.allianz)
values.append(datum.cenemy)
fig1, ax1 = plt.subplots()
ax1.pie(values, labels=labels, autopct='%1.1f%%')
ax1.axis('equal')
picpfad = pathlib.Path(SKRIPTPFAD / CONFIG["pic"])
plt.savefig(picpfad)
await ctx.send(file=discord.File(picpfad))
else:
await ctx.send(CONFIG["texte"]["nicht_registiert"])
@bot.command(name="uca",
help=CONFIG["texte"]["uca"]["help"],
brief=CONFIG["texte"]["uca"]["brief"])
async def show_player_stat(ctx):
user = check_user(ctx.author.id)
if user is not None:
player_stat = load_allys()
await ctx.send(
"\n".join(
f"{datum.allianz}: {datum.cenemy}"
for datum in player_stat
)
)
else:
await ctx.send(CONFIG["texte"]["nicht_registiert"])
@bot.command(name="ucp",
             help=CONFIG["texte"]["ucp"]["help"],
             brief=CONFIG["texte"]["ucp"]["brief"])
async def show_enemy_player_stats(ctx):
    """Post the kill count per enemy player (with their alliance) as text.

    Renamed from ``show_player_stat``: the module defined two coroutines with
    that name (this one and the "uca" command), which shadowed each other at
    module level. The registered command name "ucp" is unchanged.
    """
    user = check_user(ctx.author.id)
    if user is not None:
        player_stat = load_players()
        await ctx.send(
            "\n".join(
                f"[{datum.allianz}]{datum.enemy}: {datum.cenemy}"
                for datum in player_stat
            )
        )
    else:
        await ctx.send(CONFIG["texte"]["nicht_registiert"])
@bot.command(name="uc",
             help=CONFIG["texte"]["uc"]["help"],
             brief=CONFIG["texte"]["uc"]["brief"]
             )
async def add_uc_shoot(ctx, *args):
    """Record a UC kill reported by a registered user.

    ``args`` is forwarded verbatim to ``create_uc_shoot``; a ``ValueError``
    from the parser is answered with the invalid-format message instead of
    crashing the command.
    """
    user = check_user(ctx.author.id)
    if user is not None:
        try:
            uc_shoot = create_uc_shoot(user, args)
        except ValueError:
            uc_shoot = None
        if uc_shoot is not None:
            schreibe_in_datenbank(uc_shoot)
            # Fix: was an f-string with no placeholders (ruff F541).
            await ctx.send("UC Abschuss gespeichert")
        else:
            await ctx.send("Ungültiges Format")
    else:
        await ctx.send(CONFIG["texte"]["nicht_registiert"])
# Start the bot with the configured token; blocks until the process stops.
bot.run(CONFIG["token"])
| 6,622 | 2,211 |
import string
import numpy as np
from yeti.get_features.hydrogen_bonds import Triplet
from yeti.systems.building_blocks import Atom
def create_data_type_exception_messages(parameter_name, data_type_name):
    """Build the standard error text for a parameter of the wrong type."""
    return f'Wrong data type for parameter "{parameter_name}". Desired type is {data_type_name}'
def create_array_shape_exception_messages(parameter_name, desired_shape):
    """Build the standard error text for an array parameter of the wrong shape."""
    return f'Wrong shape for parameter "{parameter_name}". Desired shape: {desired_shape}.'
def create_array_dtype_exception_messages(parameter_name, dtype_name):
    """Build the standard error text for an ndarray with the wrong dtype."""
    return f'Wrong dtype for ndarray "{parameter_name}". Desired dtype is {dtype_name}'
def build_unit_cell_angles_and_vectors(number_of_frames):
    """Create per-frame orthorhombic cell data: 90/90/90 angles and an
    identity basis for every frame, both as float32 arrays."""
    angles = np.array([[90, 90, 90] for _ in range(number_of_frames)], dtype=np.float32)
    vectors = np.array(
        [[[1, 0, 0], [0, 1, 0], [0, 0, 1]] for _ in range(number_of_frames)],
        dtype=np.float32)
    return angles, vectors
def build_atom_triplet():
    """Create the (donor, donor_atom, acceptor) trio used by the h-bond tests.

    The three frames exercise the hydrogen-bond criteria: frame one forms an
    h-bond, frame two fails on distance, frame three fails on the angle.
    """
    donor_xyz = np.array([[0.1, 0.4, 0.3], [0.1, 0.4, 0.3], [0.1, 0.4, 0.3]])
    donor_atom_xyz = np.array([[0.1, 0.5, 0.2], [0.1, 0.5, 0.2], [0.5, 0.5, 0.2]])
    acceptor_xyz = np.array([[0.1, 0.6, 0.4], [0.1, 0.7, 0.4], [0.1, 0.6, 0.4]])
    donor = Atom(structure_file_index=0, subsystem_index=0, name='A',
                 xyz_trajectory=donor_xyz)
    donor_atom = Atom(structure_file_index=1, subsystem_index=1, name='B',
                      xyz_trajectory=donor_atom_xyz)
    acceptor = Atom(structure_file_index=2, subsystem_index=2, name='C',
                    xyz_trajectory=acceptor_xyz)
    donor.add_covalent_bond(atom=donor_atom)
    donor_atom.update_donor_state(is_donor_atom=True, donor_slots=1)
    acceptor.update_acceptor_state(is_acceptor=True, acceptor_slots=2)
    return donor, donor_atom, acceptor
def build_multi_atom_triplets(amount=2):
    """Build ``amount`` copies of the standard atom triplet, re-indexed so
    subsystem indices, names (A, B, C, ...) and structure-file indices are
    globally unique across all copies."""
    alphabet = string.ascii_uppercase
    collected = []
    for block_number in range(amount):
        triplet = build_atom_triplet()
        offset = block_number * len(triplet)
        for position, atom in enumerate(triplet):
            new_index = offset + position
            atom.subsystem_index = new_index
            atom.name = alphabet[new_index]
            # structure-file indices stay shifted by the original +2 offset
            atom.structure_file_index = new_index + 2
            collected.append(atom)
    return tuple(collected)
def build_triplet():
    """Assemble a periodic Triplet from the standard atom trio, using the
    three-frame orthorhombic unit-cell data."""
    donor, donor_atom, acceptor = build_atom_triplet()
    angles, vectors = build_unit_cell_angles_and_vectors(number_of_frames=3)
    return Triplet(donor_atom=donor_atom, acceptor=acceptor, periodic=True,
                   unit_cell_angles=angles, unit_cell_vectors=vectors)
| 3,115 | 1,093 |
# Third Party Imports
import pytest
@pytest.fixture
def test_attributes_allocation():
    """Yield a default attribute dict for a reliability-allocation record.

    NOTE(review): the ``test_`` prefix is unusual for a fixture name —
    presumably intentional, but confirm it does not confuse test collection.
    """
    yield {
        "allocation_method_id": 1,
        "availability_alloc": 0.9998,
        "duty_cycle": 100.0,
        "env_factor": 6,
        "goal_measure_id": 1,
        "hazard_rate_alloc": 0.0,
        "hazard_rate_goal": 0.0,
        "included": 1,
        "int_factor": 3,
        "mission_time": 100.0,
        "mtbf_alloc": 0.0,
        "mtbf_goal": 0.0,
        "n_sub_systems": 3,
        "n_sub_elements": 3,
        "parent_id": 1,
        "percent_weight_factor": 0.8,
        "reliability_alloc": 0.99975,
        "reliability_goal": 0.999,
        "op_time_factor": 5,
        "soa_factor": 2,
        "weight_factor": 1,
    }
| 736 | 311 |
# Ordered keypoint names for the InstaVariety dataset. Order is significant:
# downstream code presumably indexes keypoints by position — confirm before
# reordering. Most names follow the OpenPose naming scheme; 'headtop' is the
# one entry without the '_openpose' suffix.
INSTAVARIETY_KEYPOINTS = [
    'right_heel_openpose',
    'right_knee_openpose',
    'right_hip_openpose',
    'left_hip_openpose',
    'left_knee_openpose',
    'left_heel_openpose',
    'right_wrist_openpose',
    'right_elbow_openpose',
    'right_shoulder_openpose',
    'left_shoulder_openpose',
    'left_elbow_openpose',
    'left_wrist_openpose',
    'neck_openpose',
    'headtop',
    'nose_openpose',
    'left_eye_openpose',
    'right_eye_openpose',
    'left_ear_openpose',
    'right_ear_openpose',
    'left_bigtoe_openpose',
    'right_bigtoe_openpose',
    'left_smalltoe_openpose',
    'right_smalltoe_openpose',
    'left_ankle_openpose',
    'right_ankle_openpose',
]
| 689 | 284 |
from __future__ import annotations
import uuid
from datetime import datetime, timedelta
from functools import reduce
from operator import or_
from typing import Any, List, NamedTuple, Optional
import beeline
import pytz
import sentry_sdk
from django.conf import settings
from django.contrib.auth.models import User
from django.contrib.gis.db import models as gis_models
from django.contrib.gis.geos import Point
from django.db import IntegrityError, models, transaction
from django.db.models import Min, Q
from django.db.models.query import QuerySet
from django.db.models.signals import m2m_changed
from django.dispatch import receiver
from django.utils import dateformat, timezone
from social_django.models import UserSocialAuth
from .baseconverter import pid
from .fields import CharTextField
class LocationType(models.Model):
    """
    Represents a type of location, such as "Pharmacy" or "Hospital/Clinic"
    """
    # Human-readable type name; uniqueness is enforced at the DB level.
    name = CharTextField(unique=True)
    def __str__(self):
        return self.name
    class Meta:
        db_table = "location_type"
class ProviderType(models.Model):
    """
    Represents a type of provider, such as "Pharmacy" for CVS or "Health Plan" for Kaiser.
    """
    # Human-readable type name; uniqueness is enforced at the DB level.
    name = CharTextField(unique=True)
    def __str__(self):
        return self.name
    class Meta:
        db_table = "provider_type"
class ProviderPhase(models.Model):
    "Current phase, e.g. 'Not currently vaccinating'"
    # Phase label; referenced from Provider.phases (many-to-many).
    name = CharTextField(unique=True)
    def __str__(self):
        return self.name
    class Meta:
        db_table = "provider_phase"
class Provider(models.Model):
    """
    A provider is a larger entity that encompasses several vaccination sites. A provider will generally have its own
    vaccination policies, which at least nominally apply to all locations.
    Examples include:
    - The pharmacy chain CVS
    - The Kaiser HMO
    - LA County Fire Department-operated Super Sites in LA County
    """
    name = CharTextField(unique=True)
    contact_phone_number = CharTextField(null=True, blank=True)
    main_url = CharTextField(null=True, blank=True)
    vaccine_info_url = CharTextField(null=True, blank=True)
    vaccine_locations_url = CharTextField(null=True, blank=True)
    public_notes = models.TextField(null=True, blank=True)
    appointments_url = CharTextField(null=True, blank=True)
    provider_type = models.ForeignKey(
        ProviderType, related_name="providers", on_delete=models.PROTECT
    )
    internal_contact_instructions = models.TextField(null=True, blank=True)
    last_updated = models.DateField(null=True, blank=True)
    airtable_id = models.CharField(
        max_length=20,
        null=True,
        blank=True,
        help_text="Airtable record ID, if this has one",
    )
    public_id = models.SlugField(
        unique=True,
        help_text="ID that we expose outside of the application",
    )
    import_json = models.JSONField(
        null=True,
        blank=True,
        help_text="Original JSON if this record was imported from elsewhere",
    )
    phases = models.ManyToManyField(
        ProviderPhase,
        blank=True,
        related_name="providers",
        db_table="provider_provider_phase",
    )
    def __str__(self):
        return self.name
    class Meta:
        db_table = "provider"
    @property
    def pid(self):
        # Public-facing ID: "p" plus the base-converted primary key.
        return "p" + pid.from_int(self.pk)
    def save(self, *args, **kwargs):
        """Save, ensuring public_id is always populated.

        Prefers an existing airtable_id as the public_id. Otherwise the row is
        first saved with a temporary unique value (public_id is unique and the
        pk-derived pid is only known after the insert), then public_id is
        rewritten to self.pid both on the instance and in the DB.
        """
        set_public_id_later = False
        if (not self.public_id) and self.airtable_id:
            self.public_id = self.airtable_id
        elif not self.public_id:
            set_public_id_later = True
            self.public_id = "tmp:{}".format(uuid.uuid4())
        super().save(*args, **kwargs)
        if set_public_id_later:
            self.public_id = self.pid
            Provider.objects.filter(pk=self.pk).update(public_id=self.pid)
class State(models.Model):
    """
    Information about a US state or territory
    """
    abbreviation = models.CharField(max_length=2, unique=True)
    name = CharTextField(unique=True)
    fips_code = models.CharField(unique=True, blank=True, null=True, max_length=2)
    def __str__(self):
        return self.name
    class Meta:
        db_table = "state"
    @classmethod
    def __get_validators__(cls):
        # Hook used by pydantic: lets State-typed fields in pydantic models
        # validate/convert via pydantic_convert.
        yield cls.pydantic_convert
    @classmethod
    def pydantic_convert(cls, abbreviation: str) -> State:
        """Look up a State by two-letter abbreviation; ValueError if unknown."""
        try:
            return cls.objects.get(abbreviation=abbreviation)
        except cls.DoesNotExist:
            raise ValueError("State '{}' does not exist".format(abbreviation))
class County(models.Model):
    """
    Every part of California is in one of the state's 58 counties, which are also the primary unit that coordinates
    vaccinations and sets vaccination policies. A county's policies may not apply to every location in the county if the
    locations vaccines are sourced directly from the state or federal government.
    """
    # 5-digit FIPS code (2-digit state prefix + 3-digit county code).
    fips_code = models.CharField(unique=True, max_length=5)
    name = CharTextField()
    state = models.ForeignKey(State, related_name="counties", on_delete=models.PROTECT)
    hotline_phone_number = CharTextField(null=True, blank=True)
    vaccine_info_url = CharTextField(null=True, blank=True)
    vaccine_locations_url = CharTextField(null=True, blank=True)
    official_volunteering_url = CharTextField(null=True, blank=True)
    public_notes = models.TextField(null=True, blank=True)
    internal_notes = models.TextField(null=True, blank=True)
    facebook_page = CharTextField(null=True, blank=True)
    twitter_page = CharTextField(null=True, blank=True)
    vaccine_reservations_url = CharTextField(null=True, blank=True)
    population = models.IntegerField(null=True, blank=True)
    vaccine_dashboard_url = CharTextField(null=True, blank=True)
    vaccine_data_url = CharTextField(null=True, blank=True)
    vaccine_arcgis_url = CharTextField(null=True, blank=True)
    age_floor_without_restrictions = models.IntegerField(
        null=True, blank=True, verbose_name="Age Floor"
    )
    airtable_id = models.CharField(
        max_length=20,
        null=True,
        unique=True,
        help_text="Airtable record ID, if this has one",
    )
    def __str__(self):
        return self.name
    class Meta:
        verbose_name_plural = "counties"
        db_table = "county"
class ImportRun(models.Model):
    """A single bulk-import run; Location rows record which run created them
    via their import_run foreign key."""
    created_at = models.DateTimeField(default=timezone.now)
    # API key that initiated the run, if it came through the API.
    api_key = models.ForeignKey(
        "api.ApiKey", blank=True, null=True, on_delete=models.SET_NULL
    )
    def __str__(self):
        return str(self.created_at)
    class Meta:
        db_table = "import_run"
class DeriveAvailabilityAndInventoryResults(NamedTuple):
    """Result bundle returned by Location.derive_availability_and_inventory():
    the derived field values, their provenance, and debugging extras."""
    vaccines_offered: Optional[list[str]]
    vaccines_offered_provenance_report: Optional[Report]
    vaccines_offered_provenance_source_location: Optional[SourceLocation]
    vaccines_offered_last_updated_at: Optional[datetime]
    accepts_appointments: Optional[bool]
    accepts_walkins: Optional[bool]
    appointments_walkins_provenance_report: Optional[Report]
    appointments_walkins_provenance_source_location: Optional[SourceLocation]
    appointments_walkins_last_updated_at: Optional[datetime]
    # Additional debugging info:
    most_recent_report_on_vaccines_offered: Optional[Report]
    most_recent_source_location_on_vaccines_offered: Optional[SourceLocation]
    most_recent_report_on_availability: Optional[Report]
    most_recent_source_location_on_availability: Optional[SourceLocation]
class Location(gis_models.Model):
    "A location is a distinct place where one can receive a COVID vaccine."
    name = CharTextField()
    phone_number = CharTextField(null=True, blank=True)
    full_address = models.TextField(
        null=True,
        blank=True,
        help_text="the entire address, including city and zip code",
    )
    street_address = CharTextField(
        null=True, blank=True, help_text="the first line of the address"
    )
    city = CharTextField(null=True, blank=True)
    state = models.ForeignKey(State, related_name="locations", on_delete=models.PROTECT)
    zip_code = models.CharField(
        max_length=10,
        blank=True,
        null=True,
        help_text="can accomodate ZIP+4 in standard formatting if needed",
    )
    hours = models.TextField(
        blank=True,
        null=True,
        help_text="Do not enter hours here for mobile clinics! File a report and put mobile clinic hours in the public notes.",
    )
    website = CharTextField(blank=True, null=True)
    location_type = models.ForeignKey(
        LocationType, related_name="locations", on_delete=models.PROTECT
    )
    # The next three field groups are maintained by derive_availability_and_inventory():
    # the value itself, which report/source-location it came from, and when.
    vaccines_offered = models.JSONField(
        null=True,
        blank=True,
        help_text="JSON array of strings representing vaccines on offer here - enter 'null' if we do not know",
    )
    vaccines_offered_provenance_report = models.ForeignKey(
        "Report",
        null=True,
        blank=True,
        related_name="+",
        help_text="The report that last populated vaccines_offered",
        on_delete=models.PROTECT,
    )
    vaccines_offered_provenance_source_location = models.ForeignKey(
        "SourceLocation",
        null=True,
        blank=True,
        related_name="+",
        help_text="The source location that last populated vaccines_offered",
        on_delete=models.PROTECT,
    )
    vaccines_offered_last_updated_at = models.DateTimeField(
        help_text="When vaccines_offered was last updated",
        blank=True,
        null=True,
    )
    accepts_appointments = models.BooleanField(
        null=True, blank=True, help_text="Does this location accept appointments"
    )
    accepts_walkins = models.BooleanField(
        null=True, blank=True, help_text="Does this location accept walkins"
    )
    appointments_walkins_provenance_report = models.ForeignKey(
        "Report",
        null=True,
        blank=True,
        related_name="+",
        help_text="The report that last populated accepts_walkins and accepts_appointments",
        on_delete=models.PROTECT,
    )
    appointments_walkins_provenance_source_location = models.ForeignKey(
        "SourceLocation",
        null=True,
        blank=True,
        related_name="+",
        help_text="The source location that last populated accepts_walkins and accepts_appointments",
        on_delete=models.PROTECT,
    )
    appointments_walkins_last_updated_at = models.DateTimeField(
        help_text="When accepts_walkins and accepts_appointments were last updated",
        blank=True,
        null=True,
    )
    public_notes = models.TextField(blank=True, null=True)
    google_places_id = CharTextField(
        null=True,
        blank=True,
        help_text="an ID that associates a location with a unique entry in the Google Places ontology",
    )
    vaccinespotter_location_id = CharTextField(
        null=True,
        blank=True,
        help_text="This location's ID on vaccinespotter.org",
    )
    vaccinefinder_location_id = CharTextField(
        null=True,
        blank=True,
        help_text="This location's ID on vaccinefinder.org",
    )
    provider = models.ForeignKey(
        Provider,
        null=True,
        blank=True,
        on_delete=models.SET_NULL,
        related_name="locations",
        help_text="If you're certain that this location is part of a chain or network of providers -- like CVS, Costco, or Kroger -- add the right provider network.",
    )
    county = models.ForeignKey(
        County,
        null=True,
        blank=True,
        related_name="locations",
        on_delete=models.PROTECT,
        help_text="Use the 🔍 lookup tool or enter the county number.",
    )
    # This was originally specified as a 'coordinate point' but Django doesn't easily
    # expose the 'point' type - we could adopt GeoDjango later though but it's a heavy dependency
    latitude = models.DecimalField(max_digits=9, decimal_places=5)
    longitude = models.DecimalField(
        max_digits=9,
        decimal_places=5,
        help_text="Enter coordinates up to 5 decimal places, or use search box and pin below to ‘pin’ the location",
    )
    # Derived from latitude/longitude in save(); never set directly.
    point = gis_models.PointField(
        geography=True, blank=True, null=True, spatial_index=True
    )
    soft_deleted = models.BooleanField(
        default=False,
        help_text="we never delete rows from this table; all deletes are soft",
    )
    soft_deleted_because = CharTextField(null=True, blank=True)
    duplicate_of = models.ForeignKey(
        "self",
        null=True,
        blank=True,
        related_name="duplicate_locations",
        on_delete=models.PROTECT,
        help_text="duplicate locations are associated with a canonical location",
    )
    import_run = models.ForeignKey(
        ImportRun,
        null=True,
        blank=True,
        related_name="created_locations",
        on_delete=models.PROTECT,
        help_text="the import run that created this location, if any",
    )
    provenance = CharTextField(null=True, blank=True)
    internal_notes = models.TextField(null=True, blank=True)
    do_not_call = models.BooleanField(default=False)
    do_not_call_reason = models.TextField(null=True, blank=True)
    created_at = models.DateTimeField(default=timezone.now)
    created_by = models.ForeignKey(
        "auth.User",
        blank=True,
        null=True,
        related_name="created_locations",
        on_delete=models.PROTECT,
    )
    airtable_id = models.CharField(
        max_length=20,
        null=True,
        unique=True,
        help_text="Airtable record ID, if this has one",
    )
    public_id = models.SlugField(
        unique=True, help_text="ID that we expose outside of the application"
    )
    import_json = models.JSONField(
        null=True,
        blank=True,
        help_text="Original JSON if this record was imported from elsewhere",
    )
    import_ref = models.CharField(
        max_length=100,
        db_index=True,
        null=True,
        blank=True,
        help_text="If imported, unique identifier in the system it was imported from",
    )
    preferred_contact_method = models.CharField(
        max_length=32,
        choices=(
            ("online_only", "online_only"),
            ("online_preferred", "online_preferred"),
            ("call_preferred", "call_preferred"),
            ("call_only", "call_only"),
        ),
        blank=True,
        null=True,
        help_text="Preferred method of collecting status about this location",
    )
    # Denormalized foreign keys for efficient "latest yes report" style queries
    # https://github.com/CAVaccineInventory/vial/issues/193
    # Latest report, NOT including is_pending_review reports:
    dn_latest_report = models.ForeignKey(
        "Report", related_name="+", on_delete=models.SET_NULL, null=True, blank=True
    )
    # Latest report including is_pending_review reports:
    dn_latest_report_including_pending = models.ForeignKey(
        "Report", related_name="+", on_delete=models.SET_NULL, null=True, blank=True
    )
    # Latest with at least one YES availability tag, NOT including is_pending_review:
    dn_latest_yes_report = models.ForeignKey(
        "Report", related_name="+", on_delete=models.SET_NULL, null=True, blank=True
    )
    # Latest with at least one SKIP availability tag, NOT including is_pending_review:
    dn_latest_skip_report = models.ForeignKey(
        "Report", related_name="+", on_delete=models.SET_NULL, null=True, blank=True
    )
    # Latest report that is NOT is_pending_review and does NOT have a skip tag:
    dn_latest_non_skip_report = models.ForeignKey(
        "Report", related_name="+", on_delete=models.SET_NULL, null=True, blank=True
    )
    # Denormalized counts for non is_pending_review reports:
    dn_skip_report_count = models.IntegerField(default=0)
    dn_yes_report_count = models.IntegerField(default=0)
    is_pending_review = models.BooleanField(
        default=False, help_text="Locations that are pending review by our QA team"
    )
    claimed_by = models.ForeignKey(
        "auth.User",
        related_name="claimed_locations",
        on_delete=models.PROTECT,
        blank=True,
        null=True,
        help_text="QA reviewer who has claimed this location",
    )
    claimed_at = models.DateTimeField(
        help_text="When the QA reviewer claimed this location",
        blank=True,
        null=True,
    )
    def __str__(self):
        return self.name
    @classmethod
    def __get_validators__(cls):
        # Hook used by pydantic: lets Location-typed fields in pydantic models
        # validate/convert via pydantic_convert.
        yield cls.pydantic_convert
    @classmethod
    def pydantic_convert(cls, id: str) -> Location:
        """Resolve a Location from either a numeric pk or a public_id slug;
        raises ValueError if no such row exists."""
        if str(id).isdigit():
            kwargs = {"pk": id}
        else:
            kwargs = {"public_id": id}
        try:
            obj = cls.objects.get(**kwargs)
        except cls.DoesNotExist:
            raise ValueError("Location '{}' does not exist".format(id))
        return obj
    class Meta:
        db_table = "location"
        permissions = [
            ("merge_locations", "Can merge two locations"),
        ]
    @property
    def pid(self):
        # Public-facing ID: "l" plus the base-converted primary key.
        return "l" + pid.from_int(self.pk)
    @classmethod
    def valid_for_call(cls) -> QuerySet[Location]:
        """Locations eligible to be phoned: not soft-deleted, not marked
        do-not-call, and with a non-empty phone number.

        NOTE(review): "research_online" is not among preferred_contact_method's
        declared choices — possibly legacy data; confirm the exclude is still
        meaningful.
        """
        return (
            cls.objects.filter(soft_deleted=False, do_not_call=False)
            .exclude(phone_number__isnull=True)
            .exclude(phone_number="")
            .exclude(
                preferred_contact_method="research_online",
            )
        )
    def derive_availability_and_inventory(
        self, save=False
    ) -> DeriveAvailabilityAndInventoryResults:
        """
        Use recent reports and matched source_locations to derive inventory/availability
        This populates self.vaccines_offered, .accepts_appointments and .accepts_walkins
        plus the columns that track when and why they were updated based on finding the
        reports or source locations with the most recent opinions on these.
        Returns namedtuple of changes it would make. save=True to save those changes.
        """
        # Only these source feeds are trusted for inventory/availability data.
        SOURCE_NAMES_TO_CONSIDER = (
            "vaccinefinder_org",
            "vaccinespotter_org",
            "getmyvax_org",
        )
        vaccines_offered = None
        vaccines_offered_provenance_report = None
        vaccines_offered_provenance_source_location = None
        vaccines_offered_last_updated_at = None
        accepts_appointments = None
        accepts_walkins = None
        appointments_walkins_provenance_report = None
        appointments_walkins_provenance_source_location = None
        appointments_walkins_last_updated_at = None
        most_recent_report_on_vaccines_offered = None
        most_recent_source_location_on_vaccines_offered = None
        most_recent_report_on_availability = None
        most_recent_source_location_on_availability = None
        # Newest non-deleted, non-skip report that has an opinion on vaccines_offered.
        most_recent_report_on_vaccines_offered = (
            self.reports.all()
            .exclude(soft_deleted=True)
            .prefetch_related("availability_tags")
            .exclude(availability_tags__group="skip")
            .exclude(vaccines_offered__isnull=True)
            .order_by("-created_at")
            .first()
        )
        # Newest trusted source location with inventory data in its import_json.
        most_recent_source_location_on_vaccines_offered = (
            self.matched_source_locations.all()
            .filter(source_name__in=SOURCE_NAMES_TO_CONSIDER)
            .exclude(import_json__inventory=None)
            .order_by("-last_imported_at")
            .first()
        )
        report_to_use_for_vaccines_offered = most_recent_report_on_vaccines_offered
        source_location_to_use_for_vaccines_offered = (
            most_recent_source_location_on_vaccines_offered
        )
        if (
            report_to_use_for_vaccines_offered
            and source_location_to_use_for_vaccines_offered
        ):
            # Should we go with the report or the source location? Depends which is most recent
            if (
                source_location_to_use_for_vaccines_offered.last_imported_at
                and source_location_to_use_for_vaccines_offered.last_imported_at
                > report_to_use_for_vaccines_offered.created_at
            ):
                # Use the source_location, ignore the report
                report_to_use_for_vaccines_offered = None
            else:
                # Use the report, ignore the source location
                source_location_to_use_for_vaccines_offered = None
        if source_location_to_use_for_vaccines_offered:
            vaccines_offered = (
                source_location_to_use_for_vaccines_offered.vaccines_offered
            )
            vaccines_offered_provenance_source_location = (
                source_location_to_use_for_vaccines_offered
            )
            vaccines_offered_last_updated_at = (
                source_location_to_use_for_vaccines_offered.last_imported_at
            )
        elif report_to_use_for_vaccines_offered:
            vaccines_offered = report_to_use_for_vaccines_offered.vaccines_offered
            vaccines_offered_provenance_report = report_to_use_for_vaccines_offered
            vaccines_offered_last_updated_at = (
                report_to_use_for_vaccines_offered.created_at
            )
        # Now do accepts_appointments and accepts_walkins based on most recent report
        # or source_location that provides useful data on those
        most_recent_report_on_availability = (
            self.reports.all()
            .exclude(soft_deleted=True)
            .prefetch_related("availability_tags")
            .exclude(availability_tags__group="skip")
            .order_by("-created_at")
            .first()
        )
        most_recent_source_location_on_availability = (
            self.matched_source_locations.all()
            .filter(source_name__in=SOURCE_NAMES_TO_CONSIDER)
            .exclude(import_json__availability=None)
            .order_by("-last_imported_at")
            .first()
        )
        report_to_use_for_availability = most_recent_report_on_availability
        source_location_to_use_for_availability = (
            most_recent_source_location_on_availability
        )
        if report_to_use_for_availability and source_location_to_use_for_availability:
            # Should we go with the report or the source location? Depends which is most recent
            if source_location_to_use_for_availability.last_imported_at and (
                source_location_to_use_for_availability.last_imported_at
                > report_to_use_for_availability.created_at
            ):
                # Use the source_location, ignore the report
                report_to_use_for_availability = None
            else:
                # Use the report, ignore the source location
                source_location_to_use_for_availability = None
        if source_location_to_use_for_availability:
            availability = source_location_to_use_for_availability.import_json[
                "availability"
            ]
            accepts_appointments = bool(availability.get("appointments"))
            accepts_walkins = bool(availability.get("drop_in"))
            appointments_walkins_provenance_source_location = (
                source_location_to_use_for_availability
            )
            appointments_walkins_last_updated_at = (
                source_location_to_use_for_availability.last_imported_at
            )
        elif report_to_use_for_availability:
            # Use the availability tags
            tags = {
                t.slug for t in report_to_use_for_availability.availability_tags.all()
            }
            accepts_appointments = any(
                tag in tags
                for tag in (
                    "appointment_calendar_currently_full",
                    "appointment_required",
                    "appointments_available",
                    "appointments_or_walkins",
                )
            )
            accepts_walkins = any(
                tag in tags for tag in ("walk_ins_only", "appointments_or_walkins")
            )
            appointments_walkins_provenance_report = report_to_use_for_availability
            appointments_walkins_last_updated_at = (
                report_to_use_for_availability.created_at
            )
        derived = DeriveAvailabilityAndInventoryResults(
            vaccines_offered=vaccines_offered,
            vaccines_offered_provenance_report=vaccines_offered_provenance_report,
            vaccines_offered_provenance_source_location=vaccines_offered_provenance_source_location,
            vaccines_offered_last_updated_at=vaccines_offered_last_updated_at,
            accepts_appointments=accepts_appointments,
            accepts_walkins=accepts_walkins,
            appointments_walkins_provenance_report=appointments_walkins_provenance_report,
            appointments_walkins_provenance_source_location=appointments_walkins_provenance_source_location,
            appointments_walkins_last_updated_at=appointments_walkins_last_updated_at,
            most_recent_report_on_vaccines_offered=most_recent_report_on_vaccines_offered,
            most_recent_source_location_on_vaccines_offered=most_recent_source_location_on_vaccines_offered,
            most_recent_report_on_availability=most_recent_report_on_availability,
            most_recent_source_location_on_availability=most_recent_source_location_on_availability,
        )
        if save:
            # Copy the derived values onto the instance and persist only those
            # columns, leaving everything else untouched.
            self.vaccines_offered = derived.vaccines_offered
            self.vaccines_offered_provenance_report = (
                derived.vaccines_offered_provenance_report
            )
            self.vaccines_offered_provenance_source_location = (
                derived.vaccines_offered_provenance_source_location
            )
            self.vaccines_offered_last_updated_at = (
                derived.vaccines_offered_last_updated_at
            )
            self.accepts_appointments = derived.accepts_appointments
            self.accepts_walkins = derived.accepts_walkins
            self.appointments_walkins_provenance_report = (
                derived.appointments_walkins_provenance_report
            )
            self.appointments_walkins_provenance_source_location = (
                derived.appointments_walkins_provenance_source_location
            )
            self.appointments_walkins_last_updated_at = (
                derived.appointments_walkins_last_updated_at
            )
            self.save(
                update_fields=[
                    "vaccines_offered",
                    "vaccines_offered_provenance_report",
                    "vaccines_offered_provenance_source_location",
                    "vaccines_offered_last_updated_at",
                    "accepts_appointments",
                    "accepts_walkins",
                    "appointments_walkins_provenance_report",
                    "appointments_walkins_provenance_source_location",
                    "appointments_walkins_last_updated_at",
                ]
            )
        return derived
    @beeline.traced("update_denormalizations")
    def update_denormalizations(self):
        """Recompute the dn_* denormalized report pointers/counts from this
        location's reports and save them only if something changed."""
        reports = (
            self.reports.all()
            .exclude(soft_deleted=True)
            .prefetch_related("availability_tags")
            .order_by("-created_at")
        )
        try:
            dn_latest_report = [r for r in reports if not r.is_pending_review][0]
        except IndexError:
            dn_latest_report = None
        try:
            dn_latest_report_including_pending = reports[0]
        except IndexError:
            dn_latest_report_including_pending = None
        dn_latest_yes_reports = [
            r
            for r in reports
            if not r.is_pending_review
            and any(t for t in r.availability_tags.all() if t.group == "yes")
        ]
        dn_yes_report_count = len(dn_latest_yes_reports)
        if dn_latest_yes_reports:
            dn_latest_yes_report = dn_latest_yes_reports[0]
        else:
            dn_latest_yes_report = None
        dn_latest_skip_reports = [
            r
            for r in reports
            if not r.is_pending_review
            and any(t for t in r.availability_tags.all() if t.group == "skip")
        ]
        dn_skip_report_count = len(dn_latest_skip_reports)
        if dn_latest_skip_reports:
            dn_latest_skip_report = dn_latest_skip_reports[0]
        else:
            dn_latest_skip_report = None
        dn_latest_non_skip_reports = [
            r
            for r in reports
            if not r.is_pending_review
            and not any(t for t in r.availability_tags.all() if t.group == "skip")
        ]
        if dn_latest_non_skip_reports:
            dn_latest_non_skip_report = dn_latest_non_skip_reports[0]
        else:
            dn_latest_non_skip_report = None
        # Has anything changed?
        def pk_or_none(record):
            # Compare against the stored *_id columns without loading rows.
            if record is None:
                return None
            return record.pk
        if (
            self.dn_latest_report_id != pk_or_none(dn_latest_report)
            or self.dn_latest_report_including_pending_id
            != pk_or_none(dn_latest_report_including_pending)
            or self.dn_latest_yes_report_id != pk_or_none(dn_latest_yes_report)
            or self.dn_latest_skip_report_id != pk_or_none(dn_latest_skip_report)
            or self.dn_latest_non_skip_report_id
            != pk_or_none(dn_latest_non_skip_report)
            or self.dn_skip_report_count != dn_skip_report_count
            or self.dn_yes_report_count != dn_yes_report_count
        ):
            beeline.add_context({"updates": True})
            self.dn_latest_report = dn_latest_report
            self.dn_latest_report_including_pending = dn_latest_report_including_pending
            self.dn_latest_yes_report = dn_latest_yes_report
            self.dn_latest_skip_report = dn_latest_skip_report
            self.dn_latest_non_skip_report = dn_latest_non_skip_report
            self.dn_skip_report_count = dn_skip_report_count
            self.dn_yes_report_count = dn_yes_report_count
            self.save(
                update_fields=(
                    "dn_latest_report",
                    "dn_latest_report_including_pending",
                    "dn_latest_yes_report",
                    "dn_latest_skip_report",
                    "dn_latest_non_skip_report",
                    "dn_skip_report_count",
                    "dn_yes_report_count",
                )
            )
        else:
            beeline.add_context({"updates": False})
    def save(self, *args, **kwargs):
        """Save, deriving point from lat/lon, ensuring public_id is populated
        (same two-phase scheme as Provider.save), and dropping any incomplete
        call requests if the location is no longer callable."""
        # Point is derived from latitude/longitude
        if self.longitude and self.latitude:
            self.point = Point(float(self.longitude), float(self.latitude), srid=4326)
        else:
            self.point = None
        set_public_id_later = False
        if (not self.public_id) and self.airtable_id:
            self.public_id = self.airtable_id
        elif not self.public_id:
            set_public_id_later = True
            self.public_id = "tmp:{}".format(uuid.uuid4())
        super().save(*args, **kwargs)
        if set_public_id_later:
            self.public_id = self.pid
            Location.objects.filter(pk=self.pk).update(public_id=self.pid)
        # If we don't belong in the callable locations anymore, remove
        # from the call request queue
        if Location.valid_for_call().filter(pk=self.pk).count() == 0:
            CallRequest.objects.filter(location_id=self.id, completed=False).delete()
class LocationReviewTag(models.Model):
    """A reusable tag that can be attached to location review notes."""
    tag = models.CharField(unique=True, max_length=64)
    description = models.TextField(blank=True)
    def __str__(self):
        return self.tag
class LocationReviewNote(models.Model):
    """A free-text QA review note about a location, optionally tagged."""
    location = models.ForeignKey(
        Location, related_name="location_review_notes", on_delete=models.PROTECT
    )
    author = models.ForeignKey(
        "auth.User", related_name="location_review_notes", on_delete=models.PROTECT
    )
    created_at = models.DateTimeField(default=timezone.now)
    note = models.TextField(blank=True)
    tags = models.ManyToManyField(
        LocationReviewTag,
        related_name="location_review_notes",
        blank=True,
    )
    def __str__(self):
        return f"{self.author} review note on {self.location}"
class Reporter(models.Model):
    """
    A reporter is a user.
    There are two types of reporters:
    - Auth0 users: these include reports made through our reporting apps, and SQL users who are authenticated through Auth0
    - Airtable users: these are users who are authenticated through Airtable rather than Auth0.
    """
    # Provider-prefixed identifier, e.g. "auth0:<uid>" (see get_user below).
    external_id = models.SlugField(unique=True, max_length=400)
    name = CharTextField(null=True, blank=True)
    display_name = CharTextField(
        null=True,
        blank=True,
        help_text="If set this is displayed within VIAL in place of the Auth0 name",
    )
    email = CharTextField(null=True, blank=True)
    auth0_role_names = CharTextField(null=True, blank=True)
    user = models.ForeignKey(
        "auth.User",
        blank=True,
        null=True,
        related_name="reporters",
        help_text="Corresponding user record for this reporter",
        on_delete=models.PROTECT,
    )
    def __str__(self):
        return self.display_name or self.name or self.external_id
    class Meta:
        db_table = "reporter"
    def get_user(self):
        """Return the Django User for this reporter, creating and linking one
        (plus a UserSocialAuth row) if necessary. Only valid for reporters
        whose external_id starts with "auth0:"."""
        # Populates self.user if it does not yet have a value, then returns it
        if self.user:
            return self.user
        # A user may exist based on a `UserSocialAuth` record
        assert self.external_id.startswith(
            "auth0:"
        ), "Only auth0 reporters can be associated with Django users, not {}".format(
            self.external_id
        )
        identifier = self.external_id[len("auth0:") :]
        user_social_auth = UserSocialAuth.objects.filter(uid=identifier).first()
        if not user_social_auth:
            # Create user, associate it and return
            username = "r{}".format(self.pk)
            # Some users have their email address as their name
            email = self.email
            if not email and self.name and "@" in self.name:
                email = self.name
            if email and "@" in email:
                username += "-" + email.split("@")[0]
            user = User.objects.create(
                username=username,
                email=email or "",
                first_name=self.name or "",
            )
            UserSocialAuth.objects.create(uid=identifier, provider="auth0", user=user)
            self.user = user
        else:
            self.user = user_social_auth.user
        self.save()
        return self.user
    @classmethod
    def for_user(cls, user):
        # Inverse lookup: first Reporter row linked to this Django user.
        return user.reporters.first()
class AvailabilityTag(models.Model):
    """
    A tag describing the nature of vaccine availability at a vaccination site.
    A tag may represent:
    - a restriction on general availability (no inventory available)
    - a restriction on who may be vaccinated (65+ only)
    - an expansion of availability (vaccinating essential workers)
    Free-form tagging keeps it cheap to grow our ontology as we (frequently)
    encounter new rules. A dedicated table lets metadata hang off each tag;
    for example, the 'disabled' flag marks tags that should no longer be
    applied even though they remain in historical data.
    """

    name = CharTextField(unique=True)
    slug = models.SlugField(null=True)
    group = models.CharField(
        null=True,
        max_length=10,
        choices=(("yes", "yes"), ("no", "no"), ("skip", "skip"), ("other", "other")),
    )
    notes = CharTextField(blank=True, null=True)
    disabled = models.BooleanField(default=False)
    previous_names = models.JSONField(
        blank=True,
        default=list,
        help_text="Any previous names used for this tag, used for keeping import scripts working",
    )

    class Meta:
        db_table = "availability_tag"
        ordering = ["-group", "name"]

    def __str__(self):
        return self.name
class AppointmentTag(models.Model):
    """
    A tag describing whether an appointment is needed and, if so, the
    scheduling channel (e.g., by phone, online, other).
    Modelled as its own table so tags can carry metadata: has_details says
    whether a report using this tag should also carry appointment_details,
    such as a URL.
    """

    slug = models.SlugField(unique=True)
    name = models.CharField(unique=True, max_length=30)
    has_details = models.BooleanField(
        default=False,
        help_text="should the report refer to the appointment details. Unfortunately we can't enforce constraints across joins.",
    )

    class Meta:
        db_table = "appointment_tag"

    def __str__(self):
        return self.name
class Report(models.Model):
    """
    A report on the availability of the vaccine. Could be from a phone call, or a site visit, or reading a website.
    """
    class ReportSource(models.TextChoices):
        # Two-letter codes stored in report_source.
        CALLER_APP = "ca", "Caller app"
        DATA_CORRECTIONS = "dc", "Data corrections"
        WEB_BANK = "wb", "Web banking"
    location = models.ForeignKey(
        Location,
        related_name="reports",
        on_delete=models.PROTECT,
        help_text="a report must have a location",
    )
    is_pending_review = models.BooleanField(
        default=False, help_text="Reports that are pending review by our QA team"
    )
    originally_pending_review = models.BooleanField(
        null=True,
        help_text="Reports that were originally flagged as pending review",
    )
    pending_review_because = CharTextField(
        null=True, blank=True, help_text="Reason this was originally flagged for review"
    )
    claimed_by = models.ForeignKey(
        "auth.User",
        related_name="claimed_reports",
        on_delete=models.PROTECT,
        blank=True,
        null=True,
        help_text="QA reviewer who has claimed this report",
    )
    claimed_at = models.DateTimeField(
        help_text="When the QA reviewer claimed this report",
        blank=True,
        null=True,
    )
    soft_deleted = models.BooleanField(
        default=False,
        help_text="we never delete rows from this table; all deletes are soft",
    )
    soft_deleted_because = CharTextField(null=True, blank=True)
    report_source = models.CharField(
        max_length=2,
        choices=ReportSource.choices,
        default=ReportSource.CALLER_APP,
    )
    appointment_tag = models.ForeignKey(
        AppointmentTag,
        related_name="reports",
        on_delete=models.PROTECT,
        help_text="a single appointment tag, indicating how appointments are made",
    )
    appointment_details = CharTextField(
        null=True,
        blank=True,
        help_text="appointment details (e.g., a URL). Should not be used if the appointment_tag's has_details is false.",
    )
    public_notes = models.TextField(null=True, blank=True)
    internal_notes = models.TextField(
        null=True, blank=True, verbose_name="Private notes"
    )
    restriction_notes = models.TextField(null=True, blank=True)
    vaccines_offered = models.JSONField(
        null=True,
        blank=True,
        help_text="JSON array of strings representing vaccines on offer here",
    )
    website = CharTextField(
        null=True, blank=True, help_text="Update for website information"
    )
    full_address = models.TextField(
        null=True,
        blank=True,
        help_text="Update for the entire address, including city and zip code",
    )
    hours = models.TextField(
        blank=True,
        null=True,
        help_text="Update for hours information",
    )
    planned_closure = models.DateField(
        blank=True,
        null=True,
        help_text='Date this site a site plans to stop operating, "planned_closure" in our API',
        verbose_name="Last known event date",
    )
    reported_by = models.ForeignKey(
        Reporter, related_name="reports", on_delete=models.PROTECT
    )
    created_at = models.DateTimeField(
        default=timezone.now,
        help_text="the time when the report was submitted. We will interpret this as a validity time",
    )
    call_request = models.ForeignKey(
        "CallRequest",
        null=True,
        blank=True,
        related_name="reports",
        on_delete=models.SET_NULL,
        help_text="the call request that this report was based on, if any.",
    )
    availability_tags = models.ManyToManyField(
        AvailabilityTag,
        related_name="reports",
        db_table="call_report_availability_tag",
    )
    airtable_id = models.CharField(
        max_length=20,
        null=True,
        unique=True,
        help_text="Airtable record ID, if this has one",
    )
    airtable_json = models.JSONField(null=True, blank=True)
    public_id = models.SlugField(
        unique=True, help_text="ID that we expose outside of the application"
    )
    def created_at_utc(self):
        """Return created_at converted to UTC, rendered with Django's
        date-format string "jS M Y fA e"."""
        tz = pytz.UTC
        created_at_utc = timezone.localtime(self.created_at, tz)
        return dateformat.format(created_at_utc, "jS M Y fA e")
    def availability(self):
        # Used by the admin list view
        return ", ".join(t.name for t in self.availability_tags.all())
    def based_on_call_request(self):
        # True if this report was filed against a queued call request.
        return self.call_request is not None
    def full_appointment_details(self, location: Optional[Location] = None):
        """Best-available appointment details for this report, falling back
        through county / MyTurn / location website / provider data when the
        report itself has none. Returns None if nothing applies."""
        # We often call this from contexts where the report was
        # prefetched off of a location, and fetching self.location
        # would be another DB query within a tight loop; support
        # passing it in as an extra arg.
        if location is not None:
            assert location.id == self.location_id
        else:
            location = self.location
        # Do not access self.location below; use location instead.
        if self.appointment_details:
            return self.appointment_details
        elif location.county and self.appointment_tag.slug == "county_website":
            return location.county.vaccine_reservations_url
        elif self.appointment_tag.slug == "myturn_ca_gov":
            return "https://myturn.ca.gov/"
        elif location.website:
            return location.website
        elif location.provider and location.provider.appointments_url:
            return location.provider.appointments_url
        return None
    class Meta:
        db_table = "report"
    def __str__(self):
        return "Call to {} by {} at {}".format(
            self.location, self.reported_by, self.created_at
        )
    @property
    def pid(self):
        # Public identifier: "r" plus the pk encoded by the module-level
        # pid helper (imported elsewhere in this file).
        return "r" + pid.from_int(self.pk)
    def save(self, *args, **kwargs):
        """Save, assigning public_id when missing.

        Preference order: airtable_id if present, otherwise the pk-derived
        pid. The pid needs the pk, which only exists after the first INSERT,
        so we first save with a temporary unique placeholder and then write
        the real value with a second UPDATE.
        """
        set_public_id_later = False
        if (not self.public_id) and self.airtable_id:
            self.public_id = self.airtable_id
        elif not self.public_id:
            set_public_id_later = True
            self.public_id = "tmp:{}".format(uuid.uuid4())
        super().save(*args, **kwargs)
        if set_public_id_later:
            self.public_id = self.pid
            Report.objects.filter(pk=self.pk).update(public_id=self.pid)
        location = self.location
        location.update_denormalizations()
        # location.derive_availability_and_inventory(save=True)
        # will not work here because the availability tags have not yet been saved
    def delete(self, *args, **kwargs):
        """Hard-delete the row, then refresh the location's denormalized
        data (normally rows are only soft-deleted; see soft_deleted)."""
        location = self.location
        super().delete(*args, **kwargs)
        location.update_denormalizations()
        location.derive_availability_and_inventory(save=True)
class ReportReviewTag(models.Model):
    # A short, unique label that QA reviewers can attach to report review notes.
    tag = models.CharField(max_length=64, unique=True)
    description = models.TextField(blank=True)

    def __str__(self):
        return f"{self.tag}"
class ReportReviewNote(models.Model):
    """A free-form QA review note attached to a Report, optionally tagged."""

    report = models.ForeignKey(
        Report, on_delete=models.PROTECT, related_name="review_notes"
    )
    author = models.ForeignKey(
        "auth.User", on_delete=models.PROTECT, related_name="review_notes"
    )
    created_at = models.DateTimeField(default=timezone.now)
    note = models.TextField(blank=True)
    tags = models.ManyToManyField(
        ReportReviewTag,
        blank=True,
        related_name="review_notes",
    )

    def __str__(self):
        return f"{self.author} review note on {self.report}"
class EvaReport(models.Model):
    """
    A report gathered by our robotic assistant Eva. Eva collects only a
    subset of the data that we would normally gather.
    """

    location = models.ForeignKey(
        Location, on_delete=models.PROTECT, related_name="eva_reports"
    )
    name_from_import = CharTextField(blank=True, null=True)
    phone_number_from_import = CharTextField(blank=True, null=True)
    has_vaccines = models.BooleanField()
    hung_up = models.BooleanField()
    valid_at = models.DateTimeField(
        help_text="the time when Eva's report was made (or our best estimate"
    )
    uploaded_at = models.DateTimeField(
        help_text="this is the time when we uploaded Eva's report. It might not even be on the same day that the report was filed"
    )
    airtable_id = models.CharField(
        null=True,
        unique=True,
        max_length=20,
        help_text="Airtable record ID, if this has one",
    )

    class Meta:
        db_table = "eva_report"

    def __str__(self):
        return f"Eva call to {self.location} at {self.valid_at}"
class CallRequestReason(models.Model):
    # Controlled vocabulary for why a call request was added to the queue.
    short_reason = CharTextField(unique=True)
    long_reason = models.TextField(blank=True, null=True)

    class Meta:
        db_table = "call_request_reason"

    def __str__(self):
        return self.short_reason
class CallRequest(models.Model):
    """
    A request to make a phone call (i.e., an entry in the call queue).
    This reifies the notion of "requesting a call" so that all of the call attempts can be tracked with full history.
    For example, if a bug in an app has us call a location repeatedly, we have the full record of why those calls were made.
    """
    class PriorityGroup(models.IntegerChoices):
        # Lower numbers are more urgent groups; see Meta.ordering below.
        CRITICAL_1 = 1, "1-critical"
        IMPORTANT_2 = 2, "2-important"
        NORMAL_3 = 3, "3-normal"
        LOW_4 = 4, "4-low"
        NOT_PRIORITIZED_99 = 99, "99-not_prioritized"
    class TipType(models.TextChoices):
        # The kind of report that prompted this call request (see tip_type).
        EVA = "eva_report", "Eva report"
        SCOOBY = "scooby_report", "Scooby report"
        DATA_CORRECTIONS = "data_corrections_report", "Data corrections report"
    location = models.ForeignKey(
        Location, related_name="call_requests", on_delete=models.PROTECT
    )
    created_at = models.DateTimeField(
        help_text="the time the call request entered the queue.",
        null=True,
        blank=True,
        default=timezone.now,
    )
    vesting_at = models.DateTimeField(
        help_text="the time at which this call request is considered 'active'. For example, a call request made by a skip will have a future vesting time."
    )
    claimed_by = models.ForeignKey(
        Reporter,
        blank=True,
        null=True,
        related_name="call_requests_claimed",
        on_delete=models.PROTECT,
        help_text="if non-null, the reporter who has currently 'claimed' this request",
    )
    claimed_until = models.DateTimeField(
        blank=True,
        null=True,
        help_text="if non-null, the time until which the report is considered claimed",
    )
    call_request_reason = models.ForeignKey(
        CallRequestReason,
        related_name="call_requests",
        on_delete=models.PROTECT,
        help_text="a tag indicating why the call was added to the queue",
    )
    completed = models.BooleanField(
        default=False, help_text="Has this call been completed"
    )
    completed_at = models.DateTimeField(
        blank=True, null=True, help_text="When this call was marked as completed"
    )
    priority_group = models.IntegerField(
        choices=PriorityGroup.choices,
        default=PriorityGroup.NOT_PRIORITIZED_99,
    )
    priority = models.IntegerField(
        default=0,
        db_index=True,
        help_text="Priority within this priority group - higher number means higher priority",
    )
    tip_type = CharTextField(
        choices=TipType.choices,
        blank=True,
        null=True,
        help_text=" the type of tip that prompted this call request, if any",
    )
    tip_report = models.ForeignKey(
        Report,
        blank=True,
        null=True,
        related_name="prompted_call_requests",
        on_delete=models.PROTECT,
        help_text="the id of the report, if any that prompted this call request",
    )
    def __str__(self):
        return "Call request to {} vesting at {}".format(self.location, self.vesting_at)
    class Meta:
        db_table = "call_request"
        # Group 1 comes before group 2 comes before group 3
        # Within those groups, higher priority scores come before lower
        # (see the priority field's help_text: higher number = higher priority)
        # Finally we tie-break on ID optimizing for most recently created first
        ordering = ("priority_group", "-priority", "-id")
        # At most one incomplete call request per location.
        constraints = [
            models.UniqueConstraint(
                name="unique_locations_in_queue",
                fields=["location"],
                condition=Q(completed=False),
            )
        ]
    @classmethod
    def available_requests(
        cls, qs: Optional[QuerySet[CallRequest]] = None
    ) -> QuerySet[CallRequest]:
        """Queryset of call requests that are unclaimed (or whose claim has
        expired), not completed, and already vested."""
        if qs is None:
            qs = cls.objects
        now = timezone.now()
        return qs.filter(
            # Unclaimed
            Q(claimed_until__isnull=True)
            | Q(claimed_until__lte=now)
        ).filter(completed=False, vesting_at__lte=now)
    @classmethod
    @beeline.traced("insert")
    def insert(
        cls,
        locations: QuerySet[Location],
        reason: str,
        limit: Optional[int] = 0,
        **kwargs: Any,
    ) -> List[CallRequest]:
        """Queue call requests for `locations` (capped at `limit` when
        truthy), skipping locations that are already queued. Extra kwargs
        (e.g. vesting_at, priority_group) override the defaults. Returns the
        advisory list of created CallRequest objects (see comment below)."""
        now = timezone.now()
        reason_obj = CallRequestReason.objects.get_or_create(short_reason=reason)[0]
        with transaction.atomic():
            # Lock the locations we want to insert, so they don't
            # change if they're valid to be in the queue, while we
            # insert them.
            locations = (locations & Location.valid_for_call()).select_for_update(
                of=["self"]
            )
            # Now that we have a lock on them, we know any other
            # inserts of them (though not others) will block behind
            # that. Estimate out how many duplicates we possibly
            # have. We lock them so our estimate is more accurate.
            existing_call_requests = CallRequest.objects.filter(
                location__in=locations, completed=False
            ).select_for_update()
            # Filter the duplicates out of the insert. Note that
            # this is mostly advisory, so we get the right-ish objects
            # from the bulk_create -- the `ignore_conflicts` on it
            # will enforce the uniqueness.
            locations = locations.exclude(
                id__in=existing_call_requests.values("location_id")
            )
            if limit:
                locations = locations[0:limit]
            args = {
                "vesting_at": now,
                "call_request_reason": reason_obj,
            }
            args.update(**kwargs)
            # Do the insert, ignoring duplicates. bulk_create returns
            # all rows, even ones whose insert failed because of
            # conflicts; this _may_, on races, contain too many rows
            # in the return value, so the returned list of "new"
            # values is advisory.
            return cls.objects.bulk_create(
                [cls(location=location, **args) for location in locations],
                ignore_conflicts=True,
            )
    @classmethod
    @beeline.traced("get_call_request")
    def get_call_request(
        cls,
        claim_for: Optional[Reporter] = None,
        state: Optional[str] = None,
    ) -> Optional[CallRequest]:
        """Return the next available call request (optionally limited to a
        state), claiming it for `claim_for` when given. Returns None when the
        queue is empty even after backfilling."""
        # First, backfill so there are some items in the queue, in case
        # it has run dry. We backfill according to the state we're
        # looking for, which may affect which locations are in the
        # queue for people who are _not_ asking for a specific state.
        cls.backfill_queue(state=state)
        now = timezone.now()
        available_requests = cls.available_requests()
        if state is not None:
            available_requests = available_requests.filter(
                location__state__abbreviation=state
            )
        # We need to lock the record we select so we can update
        # it marking that we have claimed it
        with transaction.atomic():
            call_requests = available_requests.select_for_update()[:1]
            try:
                call_request: Optional[CallRequest] = call_requests[0]
            except IndexError:
                call_request = None
            if call_request is not None and claim_for:
                call_request.claimed_by = claim_for
                call_request.claimed_until = now + timedelta(
                    minutes=settings.CLAIM_LOCK_MINUTES
                )
                call_request.save()
        return call_request
    @classmethod
    @beeline.traced("mark_completed_by")
    def mark_completed_by(
        cls, report: Report, enqueue_again_at: Optional[datetime] = None
    ) -> None:
        """Mark the location's outstanding call request (if any) as completed
        by `report`, then, when `enqueue_again_at` is given, re-queue the
        location at the back of its original priority group."""
        # Make sure the call request doesn't go away (e.g. from a bulk
        # load) while we update it
        with transaction.atomic():
            # There can only be _one_ incomplete report for a
            # location; find it and lock it.
            existing_call_request = (
                report.location.call_requests.filter(completed=False)
                .select_for_update()
                .first()
            )
            # The call request may no longer exist -- either it never
            # did, because this was web-banked, or someone else also
            # fulfilled it, or a queue update happened between when we
            # took it and completed it, removing it.
            if existing_call_request is not None:
                # If this was based on a call request, mark it as
                # completed and associate it with the report
                existing_call_request.completed = True
                existing_call_request.completed_at = timezone.now()
                existing_call_request.save()
                report.call_request = existing_call_request
                report.save()
        # If we're not re-scheduling, we're done!
        if enqueue_again_at is None:
            return
        priority_in_group = 0
        priority_group = 99
        if existing_call_request:
            # Priority group should match that of the original call
            # request, BUT we use the separate priority integer to
            # drop them to the very end of the queue within that
            # priority group. "end" here means one less than the
            # _smallest_ priority within the group, since we take from
            # high to low priority within a group.
            priority_group = existing_call_request.priority_group
            priority_in_group = (
                cls.objects.filter(
                    priority_group=existing_call_request.priority_group
                ).aggregate(min=Min("priority"))["min"]
                - 1
            )
        cls.insert(
            locations=Location.objects.filter(id=report.location.id),
            reason="Previously skipped",
            vesting_at=enqueue_again_at,
            tip_type=cls.TipType.SCOOBY,
            tip_report=report,
            priority_group=priority_group,
            priority=priority_in_group,
        )
    @classmethod
    @beeline.traced("backfill_queue")
    def backfill_queue(
        cls, minimum: Optional[int] = None, state: Optional[str] = None
    ) -> None:
        """This is a last-resort refill of the queue.
        It should only happen when we have exhausted all things
        explicitly placed in the queue.
        """
        if minimum is None:
            minimum = settings.MIN_CALL_REQUEST_QUEUE_ITEMS
        num_to_create = max(0, minimum - cls.available_requests().count())
        beeline.add_context({"count": num_to_create})
        if num_to_create == 0:
            return
        # num_to_create may be stale by now, but worst case if we race
        # we'll insert more locations than necessary.
        try:
            with transaction.atomic():
                # Only consider existing locations that are valid for
                # calling that are not currently queued in _any_ form
                # (even if that's claimed or not-yet-vested)
                location_options = Location.valid_for_call().exclude(
                    id__in=cls.objects.filter(completed=False).values("location_id")
                )
                if state is not None:
                    location_options = location_options.filter(
                        state__abbreviation=state
                    )
                # Add any locations that have never been called
                created_call_requests = cls.insert(
                    location_options.filter(
                        dn_latest_report_including_pending__isnull=True
                    ),
                    reason="Automatic backfill",
                    limit=num_to_create,
                )
                num_to_create -= len(created_call_requests)
                if num_to_create <= 0:
                    return
                # Then add locations by longest-ago
                cls.insert(
                    location_options.order_by(
                        "dn_latest_report_including_pending__created_at"
                    ),
                    reason="Automatic backfill",
                    limit=num_to_create,
                )
        except IntegrityError:
            # We tried to add a location that was already in the
            # queue, probably via a race condition! Just log, and
            # carry on.
            sentry_sdk.capture_exception()
class PublishedReport(models.Model):
    """
    NOT CURRENTLY USED
    See https://github.com/CAVaccineInventory/vial/issues/179#issuecomment-815353624
    A report intended for publication to our website and API feed.
    Usually derived - automatically or by hand - from one or more other
    report types. If a report is edited for publication, the edit should
    land here so the source records keep their integrity; this captures the
    (possibly implicit) editorial step of our data pipeline.
    Published reports relate to the various report types many-to-many: a
    single report may trigger several published reports, and one published
    report may draw on several data sources.
    """

    location = models.ForeignKey(
        Location, on_delete=models.PROTECT, related_name="published_reports"
    )
    appointment_tag = models.ForeignKey(
        AppointmentTag,
        on_delete=models.PROTECT,
        related_name="published_reports",
        help_text="a single appointment tag, indicating how appointments are made",
    )
    appointment_details = models.TextField(
        null=True,
        blank=True,
        help_text="appointment details (e.g., a URL). Should not be used if the appointment_tag's has_details is false.",
    )
    public_notes = models.TextField(null=True, blank=True)
    reported_by = models.ForeignKey(
        Reporter, on_delete=models.PROTECT, related_name="published_reports"
    )
    valid_at = models.DateTimeField(
        help_text='the time that determines this report\'s time priority. Generally, only the latest report is displayed. This determines the "freshness" of the published report.'
    )
    created_at = models.DateTimeField(
        help_text="the time at which this report is created (which may be different from the time at which it is valid)"
    )
    availability_tags = models.ManyToManyField(
        AvailabilityTag,
        db_table="published_report_availability_tag",
        related_name="published_reports",
    )
    reports = models.ManyToManyField(
        Report,
        db_table="published_report_reports",
        related_name="published_reports",
    )
    eva_reports = models.ManyToManyField(
        EvaReport,
        db_table="published_report_eva_report",
        related_name="published_reports",
    )

    class Meta:
        db_table = "published_report"

    def __str__(self):
        return f"Published report for {self.location} valid at {self.valid_at}"
class SourceLocation(gis_models.Model):
    "Source locations are unmodified records imported from other sources"
    import_run = models.ForeignKey(
        ImportRun,
        blank=True,
        null=True,
        related_name="imported_source_locations",
        on_delete=models.SET_NULL,
    )
    source_uid = CharTextField(
        unique=True,
        help_text="The ID within that other source, UUID etc or whatever they have - globally unique because it includes a prefix which is a copy of the source_name",
    )
    source_name = CharTextField(help_text="e.g. vaccinespotter")
    content_hash = CharTextField(
        blank=True,
        null=True,
        help_text="Hash of the content JSON, to allow our importer clients to avoid sending data we already have",
    )
    name = CharTextField(null=True, blank=True)
    latitude = models.DecimalField(
        max_digits=9, decimal_places=5, null=True, blank=True
    )
    longitude = models.DecimalField(
        max_digits=9, decimal_places=5, null=True, blank=True
    )
    point = gis_models.GeometryField(
        geography=True, blank=True, null=True, spatial_index=True
    )
    import_json = models.JSONField(
        null=True,
        blank=True,
        help_text="Big bag of JSON with original data",
    )
    matched_location = models.ForeignKey(
        Location,
        blank=True,
        null=True,
        related_name="matched_source_locations",
        on_delete=models.SET_NULL,
    )
    created_at = models.DateTimeField(default=timezone.now)
    last_imported_at = models.DateTimeField(
        blank=True, null=True, help_text="When this source location was last imported"
    )
    def save(self, *args, **kwargs):
        # Keep the PostGIS point in sync with the latitude/longitude columns.
        if self.longitude and self.latitude:
            self.point = Point(float(self.longitude), float(self.latitude), srid=4326)
        else:
            self.point = None
        super().save(*args, **kwargs)
    def __str__(self):
        # "uid" or "uid - name" when a name is present.
        bits = [self.source_uid]
        if self.name:
            bits.extend((" - ", self.name))
        return "".join(bits)
    @classmethod
    def __get_validators__(cls):
        # Pydantic hook: accept either a pk or a source_uid.
        yield cls.pydantic_convert
    @classmethod
    def pydantic_convert(cls, id: str) -> SourceLocation:
        """Resolve a numeric pk or a source_uid string to a SourceLocation.
        Raises ValueError (pydantic's expected error type) when no row matches."""
        if str(id).isdigit():
            kwargs = {"pk": id}
        else:
            kwargs = {"source_uid": id}
        try:
            obj = cls.objects.get(**kwargs)
        except cls.DoesNotExist:
            raise ValueError("SourceLocation '{}' does not exist".format(id))
        return obj
    @property
    def vaccines_offered(self):
        """Sorted, human-readable names of the vaccines in stock according to
        import_json's "inventory" list, or None when the record carries no
        usable inventory data."""
        try:
            inventory = self.import_json["inventory"]
        except (KeyError, TypeError):
            # KeyError: no "inventory" key. TypeError: import_json is NULL
            # (the field is nullable) - previously an uncaught crash here.
            return None
        if inventory is None:
            # An explicit JSON null inventory also means "no data".
            return None
        inventory_mapping = {
            "moderna": "Moderna",
            "pfizer_biontech": "Pfizer",
            "johnson_johnson_janssen": "Johnson & Johnson",
            "oxford_astrazeneca": "Astrazeneca",
        }
        in_stock = [
            stock["vaccine"]
            for stock in inventory
            if stock.get("supply_level") != "out_of_stock"
        ]
        # sorted() already returns a list, so no extra list() wrapper needed.
        # Unknown vaccine keys still raise KeyError so new vaccines get noticed.
        return sorted(inventory_mapping[v] for v in in_stock)
    class Meta:
        db_table = "source_location"
        indexes = [models.Index(fields=["matched_location"])]
class SourceLocationMatchHistory(models.Model):
    """Audit trail: every change to a SourceLocation's matched location."""

    created_at = models.DateTimeField(default=timezone.now)
    api_key = models.ForeignKey(
        "api.ApiKey",
        blank=True,
        null=True,
        on_delete=models.SET_NULL,
        related_name="source_location_match_history",
    )
    reporter = models.ForeignKey(
        Reporter,
        blank=True,
        null=True,
        on_delete=models.PROTECT,
        related_name="source_location_match_history",
    )
    source_location = models.ForeignKey(
        SourceLocation,
        on_delete=models.PROTECT,
        related_name="source_location_match_history",
    )
    old_match_location = models.ForeignKey(
        Location,
        null=True,
        blank=True,
        on_delete=models.PROTECT,
        related_name="+",
    )
    new_match_location = models.ForeignKey(
        Location,
        null=True,
        blank=True,
        on_delete=models.PROTECT,
        related_name="source_location_match_history",
    )

    class Meta:
        db_table = "source_location_match_history"
        verbose_name_plural = "Source location match history"

    def __str__(self):
        actor = self.reporter or self.api_key
        return "{} set source_location {} to match {} on {}".format(
            actor,
            self.source_location,
            self.new_match_location,
            self.created_at,
        )
class ConcordanceIdentifier(models.Model):
    """An (authority, identifier) pair - an "idref" such as "cvs:11344" -
    that can be attached to locations and source locations."""

    created_at = models.DateTimeField(default=timezone.now)
    authority = models.CharField(max_length=32)
    identifier = models.CharField(max_length=128)
    locations = models.ManyToManyField(
        Location,
        blank=True,
        related_name="concordances",
        db_table="concordance_location",
    )
    source_locations = models.ManyToManyField(
        SourceLocation,
        blank=True,
        related_name="concordances",
        db_table="concordance_source_location",
    )

    class Meta:
        db_table = "concordance_identifier"
        unique_together = ("authority", "identifier")

    def __str__(self):
        return f"{self.authority}:{self.identifier}"

    @classmethod
    def for_idref(cls, idref):
        # Fetch-or-create the row for an "authority:identifier" string.
        authority, identifier = idref.split(":", 1)
        obj, _ = cls.objects.get_or_create(authority=authority, identifier=identifier)
        return obj

    @classmethod
    def filter_for_idrefs(cls, idrefs):
        # Build a Q() expression matching any of the given idrefs, e.g.
        # Q(authority='cvs', identifier='11344') | Q(authority='cvs', identifier='11345')
        q_objects = (
            Q(authority=authority, identifier=identifier)
            for authority, identifier in (idref.split(":", 1) for idref in idrefs)
        )
        return reduce(or_, q_objects)
# Give the auto-generated M2M "through" models readable labels (used e.g. by
# the Django admin). They are created implicitly by ManyToManyField, so we
# patch __str__ onto them here rather than subclassing.
ConcordanceIdentifier.locations.through.__str__ = lambda self: "{} on {}".format(  # type: ignore[assignment]
    self.concordanceidentifier, self.location.public_id  # type: ignore[attr-defined]
)
ConcordanceIdentifier.source_locations.through.__str__ = lambda self: "{} on source location {}".format(  # type: ignore[assignment]
    self.concordanceidentifier, self.sourcelocation_id  # type: ignore[attr-defined]
)
class TaskType(models.Model):
    "Types of task that we present to our volunteers"

    name = CharTextField(unique=True)

    class Meta:
        db_table = "task_type"

    def __str__(self):
        return self.name

    @classmethod
    def __get_validators__(cls):
        # Pydantic hook: accept a task-type name and resolve it to a row.
        yield cls.pydantic_convert

    @classmethod
    def pydantic_convert(cls, name: str) -> TaskType:
        try:
            return cls.objects.get(name=name)
        except cls.DoesNotExist:
            raise ValueError(f"TaskType '{name}' does not exist")
class Task(models.Model):
    "A task for our volunteers"

    created_at = models.DateTimeField(default=timezone.now)
    created_by = models.ForeignKey(
        "auth.User",
        blank=True,
        null=True,
        on_delete=models.PROTECT,
        related_name="created_tasks",
    )
    location = models.ForeignKey(
        Location, on_delete=models.PROTECT, related_name="tasks"
    )
    other_location = models.ForeignKey(
        Location, blank=True, null=True, on_delete=models.SET_NULL, related_name="+"
    )
    task_type = models.ForeignKey(
        TaskType, on_delete=models.PROTECT, related_name="tasks"
    )
    details = models.JSONField(
        blank=True,
        null=True,
        help_text="Task details",
    )
    resolved_at = models.DateTimeField(blank=True, null=True)
    resolved_by = models.ForeignKey(
        "auth.User",
        blank=True,
        null=True,
        on_delete=models.PROTECT,
        related_name="resolved_tasks",
    )
    resolution = models.JSONField(
        blank=True,
        null=True,
        help_text="Details from when this task was resolved",
    )

    class Meta:
        db_table = "task"

    def __str__(self):
        suffix = " - resolved" if self.resolved_at else ""
        return "{} task against {}{}".format(self.task_type, self.location, suffix)

    @classmethod
    def __get_validators__(cls):
        # Pydantic hook: accept a primary key and resolve it to a row.
        yield cls.pydantic_convert

    @classmethod
    def pydantic_convert(cls, id: str) -> Task:
        try:
            return cls.objects.get(pk=id)
        except cls.DoesNotExist:
            raise ValueError(f"Task {id} does not exist")
class CompletedLocationMerge(models.Model):
    """Record of a finished merge of two Locations (the winner absorbs the loser)."""

    winner_location = models.ForeignKey(
        Location, on_delete=models.PROTECT, related_name="+"
    )
    loser_location = models.ForeignKey(
        Location, on_delete=models.PROTECT, related_name="+"
    )
    created_at = models.DateTimeField(default=timezone.now)
    created_by = models.ForeignKey(
        "auth.User", on_delete=models.PROTECT, related_name="completed_location_merges"
    )
    task = models.ForeignKey(
        Task,
        blank=True,
        null=True,
        on_delete=models.PROTECT,
        related_name="completed_location_merges",
    )
    details = models.JSONField(
        blank=True,
        null=True,
        help_text="Detailed information about the merge",
    )

    class Meta:
        db_table = "completed_location_merge"

    def __str__(self):
        return "winner={}, loser={}, merged by {} at {}".format(
            self.winner_location,
            self.loser_location,
            self.created_by,
            self.created_at,
        )
# Signals
@receiver(m2m_changed, sender=Report.availability_tags.through)
def denormalize_location(sender, instance, action, **kwargs):
    """After a report's availability tags change, refresh the denormalized
    columns on the report's location."""
    if action not in ("post_add", "post_remove", "post_clear"):
        return
    instance.location.update_denormalizations()
@receiver(m2m_changed, sender=ReportReviewNote.tags.through)
def approval_review_report_denormalize_location(sender, instance, action, **kwargs):
    """Clear a report's pending-review flag once an "Approved" tag lands on
    one of its review notes."""
    # .exists() issues a cheap EXISTS query instead of materializing every
    # matching tag row just to call len() on the queryset.
    if action == "post_add" and instance.tags.filter(tag="Approved").exists():
        instance.report.is_pending_review = False
        instance.report.save()
    # We don't _un-approve_ if the tag is removed because the flag can
    # _also_ be just generally unset manually. Imagine:
    # - report is flagged on creation
    # - is_pending_review unset by unchecking the box
    # - approval is made
    # - approval is deleted
@receiver(m2m_changed, sender=LocationReviewNote.tags.through)
def approval_review_location_denormalize_location(sender, instance, action, **kwargs):
    """Clear a location's pending-review flag once an "Approved" tag lands on
    one of its review notes."""
    # .exists() issues a cheap EXISTS query instead of materializing every
    # matching tag row just to call len() on the queryset.
    if action == "post_add" and instance.tags.filter(tag="Approved").exists():
        instance.location.is_pending_review = False
        instance.location.save()
    # We don't _un-approve_ if the tag is removed because the flag can
    # _also_ be just generally unset manually. Imagine:
    # - location is flagged on creation
    # - is_pending_review unset by unchecking the box
    # - approval is made
    # - approval is deleted
| 72,247 | 21,134 |
#!/usr/bin/env python3
# -*- coding: UTF-8 -*-
import os

# Application root: two directories above this file (symlinks resolved).
_THIS_FILE = os.path.realpath(__file__)
APP_PATH = os.path.dirname(os.path.dirname(_THIS_FILE))

# Where the opentfd session file is persisted.
SESSION_FILE_PATH = os.path.join(APP_PATH, 'settings', 'opentfd_session')
| 205 | 82 |
import pandas as pd
import torch
from sklearn.model_selection import train_test_split
from torch import nn
from transformers import AdamW, BertTokenizer, get_linear_schedule_with_warmup

from dataset import create_dataloader
from engine import train_fn
from model import SunBERT
def run():
    """Fine-tune the SunBERT classifier on dataset.csv.

    Loads the CSV, maps the three category labels to integers, splits the
    data into train/val/test sets, and trains with a linear-warmup
    schedule.  The constants below are hard-coded; moving them into a
    config file is still a TODO.
    """
    # Imported locally because the module header never imports them; the
    # original code raised NameError on `torch`, `BertTokenizer` and
    # `get_linear_schedule_with_warmup` at the first use.
    import torch
    from transformers import BertTokenizer, get_linear_schedule_with_warmup

    def _train_test_split(frame, test_size, seed):
        # Minimal shuffled train/test split; the original called sklearn's
        # train_test_split, which this file never imports.
        test = frame.sample(frac=test_size, random_state=seed)
        train = frame.drop(test.index)
        return train, test

    MAX_LEN = 66
    BATCH_SIZE = 32
    EPOCHS = 5
    # BUG FIX: torch.device() raises on arbitrary strings; fall back to the
    # CPU instead of the original "Couldn't find GPU device" pseudo-device.
    device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
    # BERT Specific Pre-processing
    bert_cased = 'bert-base-cased'
    tokenizer = BertTokenizer.from_pretrained(bert_cased)
    cls_model = SunBERT(3)  # TODO: replace hard-coded number of categories
    cls_model = cls_model.to(device)
    # TODO: put the constants above in a config file
    dfx = pd.read_csv("dataset.csv")
    # TODO: replace hard-coded category names
    dfx.category = dfx.category.apply(lambda x: 0 if x == "Organic" else 1 if x == "Editorial" else 2)
    random_seed = 42
    df_train, df_test = _train_test_split(dfx, 0.15, random_seed)
    df_val, df_test = _train_test_split(df_test, 0.5, random_seed)
    dataloader_train = create_dataloader(df_train, tokenizer, MAX_LEN, BATCH_SIZE)
    dataloader_test = create_dataloader(df_test, tokenizer, MAX_LEN, BATCH_SIZE)
    dataloader_val = create_dataloader(df_val, tokenizer, MAX_LEN, BATCH_SIZE)
    optimizer = AdamW(cls_model.parameters(), lr=2e-5, correct_bias=False)
    total_steps = len(dataloader_train) * EPOCHS
    ln_scheduler = get_linear_schedule_with_warmup(optimizer, num_warmup_steps=0, num_training_steps=total_steps)
    loss_func = nn.CrossEntropyLoss().to(device)
    train_fn(cls_model, dataloader_train, loss_func, optimizer, device, ln_scheduler, len(df_train))


if __name__ == '__main__':
    run()
| 1,650 | 617 |
# Importing all essential libraries
import os
import random
import sys
import pandas as pd
import numpy as np
from keras.models import Sequential
from keras.layers import Dense
from keras.layers import LSTM
from keras.layers import Dropout
from keras.optimizers import RMSprop
from keras.utils.data_utils import get_file
from keras.callbacks import LambdaCallback
from keras.layers import Flatten
#-------------------------------------------------------------------#
# Change working directory
# os.chdir(desired_working_directory)
# print(os.getcwd())
#-------------------------------------------------------------------#
# Reading dataset
dataset = pd.read_csv('dataset_for_training.csv', sep=',')
data = open('dataset.csv', 'r').read()
chars = list(set(data))
data_size, vocab_size = len(data), len(chars)
print('data has %d, %d unique' % (data_size, vocab_size)) # Identifying all unique characters
#-------------------------------------------------------------------#
# Giving a key for all the unique characters, plus the reverse lookup.
char_indices = {ch: i for i, ch in enumerate(chars)}
# BUG FIX: the original wrote {i: ch for ch, i in enumerate(chars)}; since
# enumerate yields (index, char), that produced a char->index mapping, so
# indexing it with an integer later raised KeyError.  indices_char must map
# index -> char.
indices_char = {i: ch for i, ch in enumerate(chars)}
print(char_indices)
#-------------------------------------------------------------------#
# Vectorizing inputs to pass to the model
import numpy as np
maxlen = 60
step = 3
smiles = []
corona_smiles = []
for i in range(0, len(data) - maxlen, step):
    smiles.append(data[i: i + maxlen])
    corona_smiles.append(data[i + maxlen])
print('nb sequences:', len(smiles))
print('Vectorization...')
# BUG FIX: np.bool was deprecated in NumPy 1.20 and removed in 1.24; the
# builtin bool is the supported spelling.
x = np.zeros((len(smiles), maxlen, len(chars)), dtype=bool)
y = np.zeros((len(smiles), len(chars)), dtype=bool)
# Loop variable renamed: the original reused `smiles`, shadowing the list.
for i, window in enumerate(smiles):
    for t, char in enumerate(window):
        x[i, t, char_indices[char]] = 1
    y[i, char_indices[corona_smiles[i]]] = 1
#-------------------------------------------------------------------#
# BUILDING THE MODEL
# Three stacked LSTM layers (each returning full sequences), flattened into
# a dense softmax over the character vocabulary.
model = Sequential()
model.add(LSTM(units = 128, return_sequences = True, input_shape=(maxlen, len(chars))))
model.add(Dropout(0.3))
# NOTE(review): input_shape on the second and third LSTMs appears redundant
# (Keras infers it from the previous layer) — confirm, but it is harmless.
model.add(LSTM(units = 128, return_sequences = True, input_shape=(maxlen, len(chars))))
model.add(Dropout(0.3))
model.add(LSTM(units = 128, return_sequences = True, input_shape=(maxlen, len(chars))))
model.add(Dropout(0.3))
# Flatten the per-timestep LSTM outputs before the softmax head.
model.add(Flatten())
model.add(Dense(len(chars), activation='softmax'))
optimizer = RMSprop(learning_rate=0.01)
model.compile(loss='categorical_crossentropy', optimizer=optimizer)
model.summary()
#-------------------------------------------------------------------#
def sample(preds, temperature=1.0):
    """Draw one index from a probability array, reweighted by temperature."""
    weights = np.asarray(preds, dtype='float64')
    # Lower temperature sharpens the distribution, higher flattens it.
    scaled = np.log(weights) / temperature
    expd = np.exp(scaled)
    dist = expd / np.sum(expd)
    draw = np.random.multinomial(1, dist, 1)
    return np.argmax(draw)
def on_epoch_end(epoch, _):
    # Function invoked at end of each epoch: prints text generated by the
    # model at several sampling temperatures, seeded with a random window
    # of the training corpus.
    print()
    print('----- Generating text after Epoch: %d' % epoch)
    # BUG FIX: the original indexed an undefined name `text`; the training
    # corpus read earlier in this script lives in `data`.
    start_index = random.randint(0, len(data) - maxlen - 1)
    for diversity in [0.2, 0.5, 1.0, 1.2]:
        print('----- diversity:', diversity)
        generated = ''
        sentence = data[start_index: start_index + maxlen]
        generated += sentence
        print('----- Generating with seed: "' + sentence + '"')
        sys.stdout.write(generated)
        for i in range(400):
            # One-hot encode the current window and predict the next char.
            x_pred = np.zeros((1, maxlen, len(chars)))
            for t, char in enumerate(sentence):
                x_pred[0, t, char_indices[char]] = 1.
            preds = model.predict(x_pred, verbose=0)[0]
            next_index = sample(preds, diversity)
            next_char = indices_char[next_index]
            # Slide the window forward by one character.
            sentence = sentence[1:] + next_char
            sys.stdout.write(next_char)
            sys.stdout.flush()
        print()
print_callback = LambdaCallback(on_epoch_end=on_epoch_end)
#-------------------------------------------------------------------#
model.fit(x, y,
          batch_size=256,
          epochs=100,
          callbacks=[print_callback])
#-------------------------------------------------------------------#
# BUG FIX: `desired_working_directory` was never defined (it only appears in
# a comment above); save to the weights file the loading snippet below expects.
model.save_weights("COVID_19.hdf5")
print('Saved model')
#-------------------------------------------------------------------#
# load weights into new model
# loaded_model.load_weights("COVID_19.hdf5")
# print("Loaded model")
| 4,430 | 1,428 |
"""
Service API Tests
"""
# pylint: disable=missing-class-docstring
# stdlib
from typing import Tuple
import unittest
# module
from avwx import service
BASE_ATTRS = ("url", "report_type", "_valid_types")
class BaseTestService(unittest.IsolatedAsyncioTestCase):
    """Shared harness for service tests.

    Subclasses override service_class, report_type, stations, and
    required_attrs; the inherited tests then validate initialization and
    synchronous/asynchronous report fetching.
    """
    serv: service.Service
    service_class = service.Service
    report_type: str = ""
    # Variable-length tuples: annotated Tuple[str, ...] (the original
    # Tuple[str] means a one-element tuple).
    stations: Tuple[str, ...] = tuple()
    required_attrs: Tuple[str, ...] = tuple()
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # One service instance per test-case instance.
        self.serv = self.service_class(self.report_type)
        self.required_attrs = BASE_ATTRS + self.required_attrs
    def test_init(self):
        """Tests that the Service class is initialized properly"""
        for attr in self.required_attrs:
            self.assertTrue(hasattr(self.serv, attr))
        self.assertEqual(self.serv.report_type, self.report_type)
    def test_fetch(self):
        """Tests that reports are fetched from service"""
        # Only the first station is exercised; when `stations` is empty the
        # test is a deliberate no-op.
        try:
            station = self.stations[0]
        except IndexError:
            return
        report = self.serv.fetch(station)
        self.assertIsInstance(report, str)
        self.assertTrue(station in report)
    async def test_async_fetch(self):
        """Tests that reports are fetched from async service"""
        for station in self.stations:
            report = await self.serv.async_fetch(station)
            self.assertIsInstance(report, str)
            self.assertTrue(station in report)
| 1,494 | 442 |
"""
This is a nifty little tool to create really cool looking Readme files.
It's essentially a templating tool for that purpose. The initial steps to create a template are the following:
- create a super cool ANSI Art design for your readme.
- create a template that uses this image and defines fields where text can be inserted.
After that you can fill this template with life using another file that defines the actions, e.g. inserting text.
Parameters:
templatedir:
desc: this parameter defines where to load the templates from
type: str
is_required: True
actions:
desc: path to the file containing the actions
type: str
is_required: True
template:
desc: name of the template to use
type: str
is_required: True
"""
from src.business.FancyReadmeBuilder import FancyReadmeBuilder
from src.ui.TerminalParser import TerminalParser
__author__ = 'DWI'
def main():
    """Parse CLI arguments and render the selected readme template."""
    terminal = TerminalParser()
    options = terminal.get_values(__doc__)
    builder = FancyReadmeBuilder.get_instance()
    print(options.templatedir)
    builder.load_templates(options.templatedir)
    print(options.actions)
    print(options.template)
    result = builder.apply_actions_and_render(options.actions, options.template)
    print(result)


if __name__ == "__main__":
    main()
| 1,267 | 364 |
# Rewrite of the previous program: print the first 10 multiples of 3.
for multiple in range(3, 31, 3):
    print(multiple)
| 147 | 60 |
def min_wait_time(cl_l):
    """Return the smallest min_wait_time() among the given clients."""
    waits = (client.min_wait_time() for client in cl_l)
    return min(waits)
| 73 | 35 |
'''
robofont-extensions-and-scripts
TFSGlyph.py
https://github.com/charlesmchen/robofont-extensions-and-scripts
Copyright (c) 2012 Charles Matthew Chen
charlesmchen@gmail.com
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files.
"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions:
You must give any other recipients of the Work or Derivative Works a copy of this License; and
You must cause any modified files to carry prominent notices stating that You changed the files; and
You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and
If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
'''
from TFSPoint import *
from TFSPath import *
class TFSGlyph(object):
    '''
    Wraps a RoboFont glyph and converts between RoboFont contour data and
    TFSPath/TFSSegment geometry (Python 2 RoboFont scripting environment).
    '''
    def __init__(self, rfglyph):
        # rfglyph: the underlying RoboFont glyph object being wrapped.
        self.rfglyph = rfglyph
    # Read-only passthrough to the glyph's Unicode code point.
    unicode = property(lambda self: self.rfglyph.unicode)
    def getContours(self, setSelected=False):
        '''
        Return the glyph's contours as a list of closed TFSPath objects.
        When setSelected is True, each point's selection flag is copied
        onto the corresponding TFSPoint.
        '''
        def rfPointToTFSPoint(rfpoint):
            # Convert one RoboFont point to a TFSPoint.
            fiPoint = TFSPoint(rfpoint.x, rfpoint.y)
            if setSelected:
                fiPoint.selected = rfpoint.selected
            return fiPoint
        paths = []
        for rfcontour in self.rfglyph:
            # print 'rfcontour', rfcontour
            segments = []
            # Seed with the contour's final point so the first segment closes
            # back from the end of the contour.
            lastPoint = rfPointToTFSPoint(rfcontour[-1].points[-1])
            for rfsegment in rfcontour:
                # print 'rfsegment', rfsegment.points
                fiPoints = [lastPoint,] + [rfPointToTFSPoint(rfpoint) for rfpoint in rfsegment.points]
                lastPoint = fiPoints[-1]
                # Skip degenerate two-point segments whose endpoints coincide.
                if (len(fiPoints) == 2) and fiPoints[0] == fiPoints[-1]:
                    print 'ignoring empty contour segment in: ' + hex(self.rfglyph.unicode)
                    continue
                rfendpoint = rfsegment.points[-1]
                if rfendpoint.type == 'qcurve':
                    # TrueType "implied on-curve points"
                    # Expand a quadratic segment with several off-curve points:
                    # insert the implied on-curve midpoint between each pair of
                    # consecutive off-curve points, emitting one quadratic
                    # TFSSegment per off-curve point.
                    lastOnPoint = fiPoints[0]
                    offPoints = fiPoints[1:-1]
                    for index in xrange(len(offPoints) - 1):
                        offPoint0 = offPoints[index + 0]
                        offPoint1 = offPoints[index + 1]
                        impliedPoint = offPoint0.midpoint(offPoint1)
                        segments.append(TFSSegment(lastOnPoint, offPoint0, impliedPoint))
                        lastOnPoint = impliedPoint
                    segments.append(TFSSegment(lastOnPoint, offPoints[-1], fiPoints[-1]))
                else:
                    segments.append(TFSSegment(*fiPoints))
#                segments.append(TFSSegment(*fiPoints))
#                for rfpoint in rfsegment.points:
#                    fiPoint = rfPointToTFSPoint(rfpoint)
#                    print '\t', 'fiPoint', fiPoint
#            print '\t', 'fiPoints', fiPoints
            # True marks the path as closed.
            paths.append(TFSPath(True, *segments))
        return paths
#    def glyphNames(self):
#        return self.rffont.keys()
#
#    def glyphCodePoints(self):
#        result = [glyph.unicode for glyph in self.rffont]
#        return result
#
#    def getGlyphByName(self, key):
#        rfglyph = self.rffont.getGlyph(key)
#        return PAGlyph(rfglyph)
    def setContours(self, paths):
        '''
        Replace the glyph's contours with the given TFSPath objects, drawing
        them through the glyph's pen.  Coordinates are rounded to integers.
        '''
        self.rfglyph.clearContours()
        glyphPen = self.rfglyph.getPen()
        def formatScalar(value):
            # Round coordinates to integer font units.
            return int(round(value))
        def formatPoint(value):
            result = ( formatScalar(value.x),
                       formatScalar(value.y), )
            # print 'result', result
            return result
        for path in paths:
            firstPoint = path.segments[0].points[0]
            # print 'writing move to'
            glyphPen.moveTo(formatPoint(firstPoint))
            for segment in path.segments:
                if len(segment.points) == 4:
                    # print 'writing 4-point segment'
                    # Cubic curve: two off-curve control points plus endpoint.
                    p1 = segment.points[1]
                    p2 = segment.points[2]
                    p3 = segment.points[3]
                    glyphPen.curveTo(formatPoint(p1),
                                     formatPoint(p2),
                                     formatPoint(p3))
#                    glyphPen.lineTo(formatPoint(p3))
                elif len(segment.points) == 3:
                    # print 'writing 3-point segment'
                    # Quadratic curve: one control point plus endpoint.
                    p1 = segment.points[1]
                    p2 = segment.points[2]
                    glyphPen.curveTo(formatPoint(p1),
                                     formatPoint(p2))
#                    glyphPen.moveTo(formatPoint(p))
#                    glyphPen.lineTo(formatPoint(p2))
#                    raise Exception(
#                                    )
                elif len(segment.points) == 2:
                    # print 'writing 2-point segment'
                    p1 = segment.points[1]
                    glyphPen.lineTo(formatPoint(p1))
                else:
                    raise Exception('Invalid contour segment point count: ' + str(len(segment.points)))
            glyphPen.closePath()
        self.rfglyph.update()
#        self.rfglyph.correctDirection()
#        self.rfglyph.update()
    def updateDerivedFromGlyph(self, codePoint, contours, srcGlyph):
        '''
        Rebuild this glyph from srcGlyph: assign the code point, copy the
        (scaled) advance width, and install the supplied contours.
        '''
        # NOTE(review): multiplier of 1 makes the scaling a no-op today;
        # presumably a hook for unit conversion — confirm before changing.
        opentype_multiplier = 1
        def formatOpentypeScalar(value):
            return int(round(opentype_multiplier * value))
        self.rfglyph.unicode = codePoint
#        # glyph.leftMargin = 0
#        # glyph.rightMargin = 900
        # TODO:
        self.rfglyph.width = formatOpentypeScalar(srcGlyph.rfglyph.width)
        #glyph.advance = formatOpentypeScalar(glyphAdvance)
#        self.rfglyph.rightMargin = formatOpentypeScalar(srcGlyph.rfglyph.rightMargin)
#        print 'self.rfglyph.rightMargin', self.rfglyph.rightMargin, type(self.rfglyph.rightMargin)
#        print 'srcGlyph.rfglyph.rightMargin', srcGlyph.rfglyph.rightMargin, type(srcGlyph.rfglyph.rightMargin)
#        print 'self.rfglyph.width', self.rfglyph.width, type(self.rfglyph.width)
#        print 'srcGlyph.rfglyph.width', srcGlyph.rfglyph.width, type(srcGlyph.rfglyph.width)
        #print 'glyphAdvance', glyphAdvance, formatOpentypeScalar(glyphAdvance)
        #print 'glyphWidth', glyphWidth, formatOpentypeScalar(glyphWidth)
        #print 'glyphWidth', sideBearing, formatOpentypeScalar(sideBearing)
        #print 'glyph.width', glyph.width
        #print 'glyph.advance', glyph.advance
        #print 'glyph.rightMargin', glyph.rightMargin
        self.setContours(contours)
| 15,165 | 3,967 |
#!/usr/bin/env python3
import json
# If you change this be sure to change it everywhere in the main ahk file too.
bank_size = 8
input_file = "soundboard_data.json"
output_file = "soundboard_data.ahk"
def indent(indent_level):
    """Return the whitespace prefix for a depth: four spaces per level."""
    return " " * (4 * indent_level)
def main():
    """Read the soundboard JSON description and write an AutoHotkey
    PlaySound(idx) switch function to output_file."""
    with open(input_file) as f:
        data = json.load(f)
    default = data["default"]
    # v1.0.0 stored a single flat list; later versions nest slots per bank.
    if str(data["version"]) == "1.0.0":
        flat_banks = data["sounds"]
        flat_descriptions = data["bank_descriptions"]
    # 2.0.0+
    else:
        # split multi-list data into single list for easier processing.
        flat_banks = []
        flat_descriptions = []
        for bank in data["banks"]:
            # fix slot ids as well.
            fixed_slots = []
            bi = bank["bank_id"]
            for slot in bank["slots"]:
                # Convert the bank-relative slot id to a global slot id.
                rel_id = slot["slot"]
                slot["slot"] = (bank_size * (bi-1)) + rel_id
                fixed_slots.append(slot)
            flat_banks.extend(fixed_slots)
            flat_descriptions.append(bank["description"])
    # format for function injection into autohotkey.
    output = []
    indent_level = 0
    output.append("PlaySound(idx) {")
    indent_level += 1
    # NOTE(review): the space after {indent(indent_level)} adds one extra
    # leading blank before "Switch" — looks cosmetic only, confirm.
    output.append(f"{indent(indent_level)} Switch (idx) " + "{")
    indent_level += 1
    # do bank 1 description first and then do others on mod rollover.
    old_bank_idx = 0
    bank_idx = 0
    output.append("")
    output.append(f"{indent(indent_level)}; {flat_descriptions[bank_idx]}")
    # I want to permit out-of-order definitions in the JSON,
    # as well as leaving gaps in the soundboard for later ideas.
    # No promises if you add multiple items with the same slot key.
    for sound in sorted(flat_banks, key=lambda x: x["slot"]):
        slot = sound["slot"]
        old_bank_idx = bank_idx
        # NOTE(review): slot ids appear to be 1-based (rel_id above), so
        # slot // bank_size rolls the bank over one slot early (slot 8 maps
        # to bank index 1); (slot - 1) // bank_size may be intended —
        # confirm against the JSON data.
        bank_idx = slot // bank_size
        if (old_bank_idx != bank_idx) and (bank_idx < len(flat_descriptions)):
            output.append("")
            output.append(f"{indent(indent_level)}; {flat_descriptions[bank_idx]}")
        output.append("")
        output.append(f"{indent(indent_level)}; {sound['description']}")
        output.append(f"{indent(indent_level)}case {slot}:")
        indent_level += 1
        output.append(f"{indent(indent_level)}SoundPlay, {sound['filename']}")
        output.append(f"{indent(indent_level)}return")
        indent_level -= 1
        output.append("")
    output.append(f"{indent(indent_level)}; Default!")
    output.append(f"{indent(indent_level)}default:")
    indent_level += 1
    output.append(f"{indent(indent_level)}SoundPlay, {default['filename']}")
    output.append(f"{indent(indent_level)}return")
    indent_level -= 1
    output.append("")
    output.append(f"{indent(indent_level)}" + "}")
    output.append(f"{indent(indent_level)}return")
    indent_level -= 1
    output.append("")
    output.append(f"{indent(indent_level)}" + "}")
    out = "\n".join(output) + "\n"
    with open(output_file, "w") as f:
        f.write(out)
if __name__ == "__main__":
    main()
| 3,080 | 1,017 |
# coding=utf-8
#
# @lc app=leetcode id=1047 lang=python
#
# [1047] Remove All Adjacent Duplicates In String
#
# https://leetcode.com/problems/remove-all-adjacent-duplicates-in-string/description/
#
# algorithms
# Easy (63.77%)
# Likes: 184
# Dislikes: 18
# Total Accepted: 21.7K
# Total Submissions: 34K
# Testcase Example: '"abbaca"'
#
# Given a string S of lowercase letters, a duplicate removal consists of
# choosing two adjacent and equal letters, and removing them.
#
# We repeatedly make duplicate removals on S until we no longer can.
#
# Return the final string after all such duplicate removals have been made. It
# is guaranteed the answer is unique.
#
#
#
# Example 1:
#
#
# Input: "abbaca"
# Output: "ca"
# Explanation:
# For example, in "abbaca" we could remove "bb" since the letters are adjacent
# and equal, and this is the only possible move. The result of this move is
# that the string is "aaca", of which only "aa" is possible, so the final
# string is "ca".
#
#
#
#
# Note:
#
#
# 1 <= S.length <= 20000
# S consists only of English lowercase letters.
#
#
class Solution(object):
    def removeDuplicates(self, S):
        """
        :type S: str
        :rtype: str

        Repeatedly remove adjacent equal letter pairs until none remain.
        A stack makes this a single O(n) pass: push each character unless it
        equals the top of the stack, in which case pop (the pair cancels).
        The original re-scanned the string until a fixed point, which is
        O(n^2) in the worst case (e.g. nested pairs like "abccba").
        """
        stack = []
        for ch in S:
            if stack and stack[-1] == ch:
                stack.pop()
            else:
                stack.append(ch)
        return "".join(stack)
# if __name__ == "__main__":
# s = Solution()
# print s.removeDuplicates("abbaca")
# print s.removeDuplicates("azxxzy")
| 1,822 | 592 |
""" Module for the JsonFormatter """
import json
import logging
import sys
from sap.cf_logging.record.simple_log_record import SimpleLogRecord
def _default_serializer(obj):
    # json.dumps fallback: stringify values json cannot serialize natively.
    return str(obj)
# Pick an _encode implementation once at import time: on Python 2 the JSON
# string is coerced to unicode for consistent text output.
if sys.version_info[0] == 3:
    def _encode(obj):
        return json.dumps(obj, default=_default_serializer)
else:
    def _encode(obj):
        return unicode(json.dumps(obj, default=_default_serializer)) # pylint: disable=undefined-variable
class JsonFormatter(logging.Formatter):
    """Formatter that serializes application log records as JSON."""
    def format(self, record):
        """Encode SimpleLogRecord instances as JSON; defer any other record
        type to the standard Formatter behaviour."""
        if not isinstance(record, SimpleLogRecord):
            return super(JsonFormatter, self).format(record)
        return _encode(record.format())
| 781 | 221 |
"""
********************************************************************************
geometry
********************************************************************************
.. currentmodule:: compas_rhino.geometry
.. rst-class:: lead
Wrappers for Rhino objects that can be used to convert Rhino geometry and data to COMPAS objects.
.. code-block:: python
import compas_rhino
from compas_rhino.geometry import RhinoMesh
guid = compas_rhino.select_mesh()
mesh = RhinoMesh.from_guid(guid).to_compas()
----
BaseRhinoGeometry
=================
.. autoclass:: BaseRhinoGeometry
:members: from_geometry, from_selection, to_compas, from_guid, from_object, transform
----
RhinoPoint
==========
.. autoclass:: RhinoPoint
:members: from_geometry, from_selection, to_compas
:no-show-inheritance:
----
RhinoVector
===========
.. autoclass:: RhinoVector
:members: from_geometry, from_selection, to_compas
:no-show-inheritance:
----
RhinoLine
=========
.. autoclass:: RhinoLine
:members: from_geometry, from_selection, to_compas
:no-show-inheritance:
----
RhinoPlane
==========
.. autoclass:: RhinoPlane
:members: from_geometry, from_selection, to_compas
:no-show-inheritance:
----
RhinoMesh
=========
.. autoclass:: RhinoMesh
:members: from_geometry, from_selection, to_compas
:no-show-inheritance:
----
RhinoCurve
==========
.. autoclass:: RhinoCurve
:members: from_geometry, from_selection, to_compas
:no-show-inheritance:
----
RhinoSurface
============
.. autoclass:: RhinoSurface
:members: from_geometry, from_selection, to_compas
:no-show-inheritance:
"""
from __future__ import absolute_import
from .base import *  # noqa: F401 F403
from .curve import *  # noqa: F401 F403
from .line import *  # noqa: F401 F403
from .mesh import *  # noqa: F401 F403
from .plane import *  # noqa: F401 F403
from .point import *  # noqa: F401 F403
from .surface import *  # noqa: F401 F403
from .vector import *  # noqa: F401 F403
# Re-export every public name brought in by the wildcard imports above.
__all__ = [name for name in dir() if not name.startswith('_')]
| 2,081 | 742 |
import os
import unittest
import protgraph
class FunctionalTest(unittest.TestCase):
@classmethod
def setUpClass(cls):
""" Set base Example Folder """
main_file_path = os.path.dirname(os.path.abspath(protgraph.__file__))
cls.examples_path = os.path.join(main_file_path, "..", "examples")
cls.example_files = [
os.path.abspath(os.path.join(cls.examples_path, "e_coli.dat")),
os.path.abspath(os.path.join(cls.examples_path, "p53_human.txt"))
]
cls.procs_num = ["-n", "1"]
def test_minimal(self):
args = protgraph.parse_args([] + self.procs_num + self.example_files)
protgraph.prot_graph(**args)
def test_none(self):
args = protgraph.parse_args(["-ft", "NoNE"] + self.procs_num + self.example_files)
protgraph.prot_graph(**args)
def test_all(self):
args = protgraph.parse_args(["-ft", "ALl"] + self.procs_num + self.example_files)
protgraph.prot_graph(**args)
def test_isoforms(self):
args = protgraph.parse_args(["-ft", "VAR_SeQ"] + self.procs_num + self.example_files)
protgraph.prot_graph(**args)
def test_variants(self):
args = protgraph.parse_args(["-ft", "VARIAnT"] + self.procs_num + self.example_files)
protgraph.prot_graph(**args)
def test_met(self):
args = protgraph.parse_args(["-ft", "IniT_MET"] + self.procs_num + self.example_files)
protgraph.prot_graph(**args)
def test_signal(self):
args = protgraph.parse_args(["-ft", "SIGnaL"] + self.procs_num + self.example_files)
protgraph.prot_graph(**args)
def test_digestion_skip(self):
args = protgraph.parse_args(["-d", "skip"] + self.procs_num + self.example_files)
protgraph.prot_graph(**args)
def test_digestion_trypsin(self):
args = protgraph.parse_args(["-d", "trypsin"] + self.procs_num + self.example_files)
protgraph.prot_graph(**args)
def test_digestion_full(self):
args = protgraph.parse_args(["-d", "full"] + self.procs_num + self.example_files)
protgraph.prot_graph(**args)
def test_no_merge(self):
args = protgraph.parse_args(["-nm"] + self.procs_num + self.example_files)
protgraph.prot_graph(**args)
def test_annotate_weights(self):
args = protgraph.parse_args(["-aawe", "-amwe"] + self.procs_num + self.example_files)
protgraph.prot_graph(**args)
def test_replacement(self):
args = protgraph.parse_args(["-raa", "A->b,C,d"] + self.procs_num + self.example_files)
protgraph.prot_graph(**args)
def test_statistics_possibilites(self):
args = protgraph.parse_args(["-cnp"] + self.procs_num + self.example_files)
protgraph.prot_graph(**args)
def test_statistics_miscleavages(self):
args = protgraph.parse_args(["-cnpm"] + self.procs_num + self.example_files)
protgraph.prot_graph(**args)
def test_statistics_hops(self):
args = protgraph.parse_args(["-cnph"] + self.procs_num + self.example_files)
protgraph.prot_graph(**args)
def test_export_dot(self):
args = protgraph.parse_args(["-edot"] + self.procs_num + self.example_files)
protgraph.prot_graph(**args)
def test_export_csv(self):
args = protgraph.parse_args(["-ecsv"] + self.procs_num + self.example_files)
protgraph.prot_graph(**args)
def test_export_lcsv(self):
args = protgraph.parse_args(["-elcsv"] + self.procs_num + self.example_files)
protgraph.prot_graph(**args)
def test_export_graphml(self):
args = protgraph.parse_args(["-egraphml"] + self.procs_num + self.example_files)
protgraph.prot_graph(**args)
def test_export_gml(self):
    """Smoke test: ProtGraph runs with GML export enabled (-egml).

    BUGFIX: the flags in test_export_gml and test_export_pickle were
    swapped (this test passed -epickle and vice versa), so each test
    exercised the wrong exporter. Each now uses the flag its name states.
    """
    cli = ["-egml"] + self.procs_num + self.example_files
    protgraph.prot_graph(**protgraph.parse_args(cli))

def test_export_pickle(self):
    """Smoke test: ProtGraph runs with pickle export enabled (-epickle)."""
    cli = ["-epickle"] + self.procs_num + self.example_files
    protgraph.prot_graph(**protgraph.parse_args(cli))
def test_export_pcsr(self):
    """Smoke test: ProtGraph runs with the -epcsr export flag."""
    cli = ["-epcsr"] + self.procs_num + self.example_files
    protgraph.prot_graph(**protgraph.parse_args(cli))
def test_export_bpcsr(self):
    """Smoke test: ProtGraph runs with the -ebpcsr export flag."""
    cli = ["-ebpcsr"] + self.procs_num + self.example_files
    protgraph.prot_graph(**protgraph.parse_args(cli))
def test_export_lpcsr(self):
    """Smoke test: ProtGraph runs with the -elpcsr export flag."""
    cli = ["-elpcsr"] + self.procs_num + self.example_files
    protgraph.prot_graph(**protgraph.parse_args(cli))
def test_export_lbpcsr(self):
    """Smoke test: ProtGraph runs with the -elbpcsr export flag."""
    cli = ["-elbpcsr"] + self.procs_num + self.example_files
    protgraph.prot_graph(**protgraph.parse_args(cli))
def test_export_pep_fasta(self):
    """Smoke test: peptide-FASTA export (-epepfasta) with a hop limit of 2."""
    cli = ["-epepfasta", "--pep_fasta_hops", "2"] + self.procs_num + self.example_files
    protgraph.prot_graph(**protgraph.parse_args(cli))
def test_export_pep_trie(self):
    """Smoke test: peptide-trie export (-epeptrie) with a hop limit of 2."""
    cli = ["-epeptrie", "--pep_fasta_hops", "2"] + self.procs_num + self.example_files
    protgraph.prot_graph(**protgraph.parse_args(cli))
def test_export_pep_sqlite(self):
    """Smoke test: peptide-SQLite export (-epepsqlite) with a hop limit of 2."""
    cli = ["-epepsqlite", "--pep_sqlite_hops", "2"] + self.procs_num + self.example_files
    protgraph.prot_graph(**protgraph.parse_args(cli))
def test_issue8(self):
    """Regression test for issue 8 using the Q9QXS1 example entry."""
    entry = os.path.join(self.examples_path, "Q9QXS1.txt")
    protgraph.prot_graph(**protgraph.parse_args(["-n", "1", entry]))
def test_issue13(self):
    """Regression test for issue 13 using the F1SN05 example entry."""
    entry = os.path.join(self.examples_path, "F1SN05.txt")
    protgraph.prot_graph(**protgraph.parse_args(["-n", "1", entry]))
def test_issue41(self):
    """Regression test for issue 41: peptide-FASTA export of P49782."""
    entry = os.path.join(self.examples_path, "P49782.txt")
    protgraph.prot_graph(**protgraph.parse_args(["-n", "1", "-epepfasta", entry]))
| 5,757 | 2,124 |
# -*- coding: utf-8 -*-
"""
@author: iceland
"""
import bit
import time
import random
import os
from fastecdsa import curve
from fastecdsa.point import Point
# Cache file for the baby-step table so it can be reused across runs.
bs_file = 'baby_steps_table.txt'
def Pub2Point(public_key):
    """Convert a hex-encoded secp256k1 public key (compressed or
    uncompressed) into a fastecdsa Point."""
    x_coord = int(public_key[2:66], 16)
    if len(public_key) < 70:
        # Compressed key: recover y from x and the parity bit in the prefix byte.
        parity = int(public_key[:2], 16) % 2
        y_coord = bit.format.x_to_y(x_coord, parity)
    else:
        # Uncompressed key: y is given explicitly after the x coordinate.
        y_coord = int(public_key[66:], 16)
    return Point(x_coord, y_coord, curve=curve.secp256k1)
###############################################################################
# Puzzle 46 : Privatekey Key 0x2ec18388d544
# Target public key (hex) whose discrete log we search for.
public_key = '04fd5487722d2576cb6d7081426b66a3e2986c1ce8358d479063fb5f2bb6dd5849a004626dffa0fb7b934118ea84bacc3b030332eee083010efa60025e4fde7297'
# Target point Q = k*G; k is the unknown private key.
Q = Pub2Point(public_key)
# secp256k1 base point.
G = curve.secp256k1.G
###############################################################################
def create_table(m):
    """Build the baby-step table: x-coordinates of 1*G, 2*G, ..., m*G.

    Entry k of the returned list is the x-coordinate of (k+1)*G.
    """
    point = G
    x_coords = []
    for _ in range(m):
        x_coords.append(point.x)
        point = point + G
    return x_coords
###############################################################################
m = 10000000  # default number of baby steps; the search covers m*m keys

# Load the cached baby-step table if it exists, otherwise build and persist it.
# FIX: files are now opened via `with` so handles are closed deterministically
# (the old dict comprehension over `open(...)` leaked the read handle), and the
# `== True` / `== False` comparisons are replaced with plain truth tests.
valid = os.path.isfile(bs_file)
if valid:
    print('\nFound the Baby Steps Table file: '+bs_file+'. Will be used directly')
    # Map x-coordinate of (k+1)*G -> zero-based index k (one value per line).
    with open(bs_file, 'r') as table_file:
        baby_steps = {int(line.split()[0], 10): k for k, line in enumerate(table_file)}
    if m != len(baby_steps) and not len(baby_steps) == 0:
        m = len(baby_steps)
        print('Taken from table. m is adjusted to = ', m)
    if len(baby_steps) == 0:
        print('Size of the file was 0. It will be created and overwritten')
        valid = False
if not valid:
    print('\nNot Found '+bs_file+'. Will Create This File Now. \nIt will save to this file in the First Run. Next run will directly read from this file.')
    table = create_table(m)
    # Persist one x-coordinate per line so the next run can skip the build.
    with open(bs_file, 'w') as table_file:
        table_file.writelines(str(x) + '\n' for x in table)
    baby_steps = {x: k for k, x in enumerate(table)}
# We have to solve P = k.G, we know that k lies in the range ]k1,k2]
# k1 = random.randint(1, curve.secp256k1.q//2) # if you want to start from a random key
k1 = 1 # if you want to start from 1
k2 = k1 + m*m
print('Checking {0} keys from {1}'.format(m*m, hex(k1)))
# m = math.floor(math.sqrt(k2-k1))
# start time
st = time.time()
###############################################################################
k1G = k1 * G  # point at the lower end of the search interval
mG = m * G    # giant-step increment
def findkey(onePoint):
    """Baby-step giant-step search for k such that onePoint == k*G, k in ]k1, k2].

    Relies on module-level globals: k1, k2, m, k1G (= k1*G), mG (= m*G) and
    baby_steps, a dict mapping the x-coordinate of (b+1)*G to index b.
    Returns the recovered key, or -1 if no match is found in the range.

    NOTE(review): matching is done on the x-coordinate only, so a hit can
    also correspond to the negated point -(b+1)*G — the returned key may
    need verification against the target public key. TODO confirm.
    """
    # Shift the problem to S = (k - k1)*G so the search starts at zero offset.
    S = onePoint - k1G
    if S == Point.IDENTITY_ELEMENT: return k1 # Point at Infinity
    found = False
    step = 0
    while found is False and step<(1+k2-k1):
        if S.x in baby_steps:
            # b = baby_steps.index(S.x) # if using list
            b = baby_steps.get(S.x)  # b is the zero-based baby-step index
            found = True
            break
        else:
            # Giant step
            S = S - mG
            step = step + m
    if found == True:
        # k = k1 + (number of giant steps)*m + baby index + 1
        final_key = k1 + step + b + 1
    else:
        final_key = -1
    return final_key
###############################################################################
# Run the search against the target point and report the outcome.
final_key = findkey(Q)
if final_key > 0:
    print("BSGS FOUND PrivateKey : {0}".format(hex(final_key)))
else:
    print('PrivateKey Not Found')
elapsed = time.time() - st
print("Time Spent : {0:.2f} seconds".format(elapsed))
| 3,444 | 1,322 |