repo_name stringlengths 6 100 | path stringlengths 4 294 | copies stringlengths 1 5 | size stringlengths 4 6 | content stringlengths 606 896k | license stringclasses 15
values |
|---|---|---|---|---|---|
yestech/gae-django-template | django/core/xheaders.py | 518 | 1157 | """
Pages in Django can be served up with custom HTTP headers containing useful
information about those pages -- namely, the content type and object ID.
This module contains utility functions for retrieving and doing interesting
things with these special "X-Headers" (so called because the HTTP spec demands
that custom headers are prefixed with "X-").
Next time you're at slashdot.org, watch out for X-Fry and X-Bender. :)
"""
def populate_xheaders(request, response, model, object_id):
    """
    Adds the "X-Object-Type" and "X-Object-Id" headers to the given
    HttpResponse according to the given model and object_id -- but only if the
    given HttpRequest object has an IP address within the INTERNAL_IPS setting
    or if the request is from a logged in staff member.
    """
    from django.conf import settings
    if request.META.get('REMOTE_ADDR') not in settings.INTERNAL_IPS:
        # Not an internal IP: fall back to the staff-member check,
        # bailing out as soon as any condition fails.
        if not hasattr(request, 'user'):
            return
        if not request.user.is_active:
            return
        if not request.user.is_staff:
            return
    response['X-Object-Type'] = "%s.%s" % (model._meta.app_label, model._meta.object_name.lower())
    response['X-Object-Id'] = str(object_id)
| bsd-3-clause |
aequitas/CouchPotato | library/sqlalchemy/orm/strategies.py | 11 | 51396 | # strategies.py
# Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010 Michael Bayer mike_mp@zzzcomputing.com
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""sqlalchemy.orm.interfaces.LoaderStrategy
implementations, and related MapperOptions."""
from sqlalchemy import exc as sa_exc
from sqlalchemy import sql, util, log
from sqlalchemy.sql import util as sql_util
from sqlalchemy.sql import visitors, expression, operators
from sqlalchemy.orm import mapper, attributes, interfaces, exc as orm_exc
from sqlalchemy.orm.interfaces import (
LoaderStrategy, StrategizedOption, MapperOption, PropertyOption,
serialize_path, deserialize_path, StrategizedProperty
)
from sqlalchemy.orm import session as sessionlib
from sqlalchemy.orm import util as mapperutil
import itertools
def _register_attribute(strategy, mapper, useobject,
    compare_function=None,
    typecallable=None,
    copy_function=None,
    mutable_scalars=False,
    uselist=False,
    callable_=None,
    proxy_property=None,
    active_history=False,
    impl_class=None,
    **kw
):
    """Register an attribute implementation for the given strategy's
    property on the mapped class.

    Registration occurs on the strategy's mapper and on every descendant
    mapper that maps the same property, so subclasses share the same
    instrumentation.
    """
    prop = strategy.parent_property
    # start with any attribute extensions configured on the property itself
    attribute_ext = list(util.to_list(prop.extension, default=[]))

    # single_parent=True object relationships get a validator extension
    # (see _SingleParentValidator) prepended ahead of the others
    if useobject and prop.single_parent:
        attribute_ext.insert(0, _SingleParentValidator(prop))

    # validators declared on the parent mapper for this key run first of all
    if prop.key in prop.parent._validators:
        attribute_ext.insert(0,
            mapperutil.Validator(prop.key, prop.parent._validators[prop.key])
        )

    if useobject:
        # object-valued attributes participate in unit-of-work events
        attribute_ext.append(sessionlib.UOWEventHandler(prop.key))

    for m in mapper.self_and_descendants:
        # register only on mappers where this exact property object is
        # what's mapped under this key (a subclass may override the key
        # with its own property)
        if prop is m._props.get(prop.key):
            attributes.register_attribute_impl(
                m.class_,
                prop.key,
                parent_token=prop,
                mutable_scalars=mutable_scalars,
                uselist=uselist,
                copy_function=copy_function,
                compare_function=compare_function,
                useobject=useobject,
                extension=attribute_ext,
                trackparent=useobject,
                typecallable=typecallable,
                callable_=callable_,
                active_history=active_history,
                impl_class=impl_class,
                doc=prop.doc,
                **kw
                )
class UninstrumentedColumnLoader(LoaderStrategy):
    """Represent a non-instrumented MapperProperty.

    The polymorphic_on argument of mapper() often results in this,
    if the argument is against the with_polymorphic selectable.

    """
    def init(self):
        self.columns = self.parent_property.columns

    def setup_query(self, context, entity, path, adapter,
                            column_collection=None, **kwargs):
        # emit the (possibly adapted/aliased) columns into the SELECT's
        # column list; no instance-population is set up for them.
        for c in self.columns:
            if adapter:
                c = adapter.columns[c]
            column_collection.append(c)

    def create_row_processor(self, selectcontext, path, mapper, row, adapter):
        # not instrumented: nothing to populate onto instances
        return None, None, None
class ColumnLoader(LoaderStrategy):
    """Strategize the loading of a plain column-based MapperProperty."""

    def init(self):
        self.columns = self.parent_property.columns
        self.is_composite = hasattr(self.parent_property, 'composite_class')

    def setup_query(self, context, entity, path, adapter,
                            column_collection=None, **kwargs):
        """Add this property's (possibly adapted) columns to the SELECT."""
        for c in self.columns:
            if adapter:
                c = adapter.columns[c]
            column_collection.append(c)

    def init_class_attribute(self, mapper):
        self.is_class_level = True
        coltype = self.columns[0].type
        # TODO: check all columns ?  check for foreign key as well?
        active_history = self.columns[0].primary_key

        _register_attribute(self, mapper, useobject=False,
            compare_function=coltype.compare_values,
            copy_function=coltype.copy_value,
            mutable_scalars=self.columns[0].type.is_mutable(),
            active_history = active_history
        )

    def create_row_processor(self, selectcontext, path, mapper, row, adapter):
        key = self.key
        # look through list of columns represented here
        # to see which, if any, is present in the row.
        for col in self.columns:
            if adapter:
                col = adapter.columns[col]
            if col is not None and col in row:
                def new_execute(state, dict_, row):
                    dict_[key] = row[col]
                return new_execute, None, None
        else:
            # no matching column in the row: expire the attribute so a
            # subsequent access triggers a load.
            def new_execute(state, dict_, row):
                state.expire_attribute_pre_commit(dict_, key)
            return new_execute, None, None

log.class_logger(ColumnLoader)
class CompositeColumnLoader(ColumnLoader):
    """Strategize the loading of a composite column-based MapperProperty."""

    def init_class_attribute(self, mapper):
        self.is_class_level = True
        self.logger.info("%s register managed composite attribute", self)

        def copy(obj):
            # copy a composite value by round-tripping through
            # __composite_values__()
            if obj is None:
                return None
            return self.parent_property.\
                    composite_class(*obj.__composite_values__())

        def compare(a, b):
            # compare two composites column-by-column using each
            # column type's own comparison
            if a is None or b is None:
                return a is b

            for col, aprop, bprop in zip(self.columns,
                                         a.__composite_values__(),
                                         b.__composite_values__()):
                if not col.type.compare_values(aprop, bprop):
                    return False
            else:
                return True

        _register_attribute(self, mapper, useobject=False,
            compare_function=compare,
            copy_function=copy,
            mutable_scalars=True
            #active_history ?
        )

    def create_row_processor(self, selectcontext, path, mapper,
                                    row, adapter):
        key = self.key
        columns = self.columns
        composite_class = self.parent_property.composite_class
        if adapter:
            columns = [adapter.columns[c] for c in columns]

        # if any constituent column is missing from the row, expire;
        # otherwise construct the composite from all of them (for/else).
        for c in columns:
            if c not in row:
                def new_execute(state, dict_, row):
                    state.expire_attribute_pre_commit(dict_, key)
                break
        else:
            def new_execute(state, dict_, row):
                dict_[key] = composite_class(*[row[c] for c in columns])

        return new_execute, None, None

log.class_logger(CompositeColumnLoader)
class DeferredColumnLoader(LoaderStrategy):
    """Strategize the loading of a deferred column-based MapperProperty."""

    def create_row_processor(self, selectcontext, path, mapper, row, adapter):
        col = self.columns[0]
        if adapter:
            col = adapter.columns[col]

        key = self.key
        if col in row:
            # the column is present in the row anyway (e.g. undeferred);
            # delegate to the plain column loader.
            return self.parent_property._get_strategy(ColumnLoader).\
                        create_row_processor(
                                selectcontext, path, mapper, row, adapter)

        elif not self.is_class_level:
            def new_execute(state, dict_, row):
                # install a per-instance deferred-load callable
                state.set_callable(dict_, key, LoadDeferredColumns(state, key))
        else:
            def new_execute(state, dict_, row):
                # reset state on the key so that deferred callables
                # fire off on next access.
                state.reset(dict_, key)

        return new_execute, None, None

    def init(self):
        if hasattr(self.parent_property, 'composite_class'):
            raise NotImplementedError("Deferred loading for composite "
                                    "types not implemented yet")
        self.columns = self.parent_property.columns
        self.group = self.parent_property.group

    def init_class_attribute(self, mapper):
        self.is_class_level = True

        _register_attribute(self, mapper, useobject=False,
             compare_function=self.columns[0].type.compare_values,
             copy_function=self.columns[0].type.copy_value,
             mutable_scalars=self.columns[0].type.is_mutable(),
             callable_=self._class_level_loader,
             expire_missing=False
        )

    def setup_query(self, context, entity, path, adapter,
                                only_load_props=None, **kwargs):
        # emit the column only when its group was undeferred on the query,
        # or the attribute was explicitly requested via only_load_props.
        if (
            self.group is not None and
            context.attributes.get(('undefer', self.group), False)
        ) or (only_load_props and self.key in only_load_props):
            self.parent_property._get_strategy(ColumnLoader).\
                            setup_query(context, entity,
                                        path, adapter, **kwargs)

    def _class_level_loader(self, state):
        # objects with no identity have nothing in the DB to load from
        if not state.has_identity:
            return None

        return LoadDeferredColumns(state, self.key)

log.class_logger(DeferredColumnLoader)
class LoadDeferredColumns(object):
    """serializable loader object used by DeferredColumnLoader"""

    def __init__(self, state, key):
        self.state, self.key = state, key

    def __call__(self, passive=False):
        # a "no fetch" access returns the NO_RESULT token instead of
        # emitting SQL
        if passive is attributes.PASSIVE_NO_FETCH:
            return attributes.PASSIVE_NO_RESULT

        state = self.state

        localparent = mapper._state_mapper(state)

        prop = localparent.get_property(self.key)
        strategy = prop._get_strategy(DeferredColumnLoader)

        if strategy.group:
            # load every deferred attribute in the same group in
            # one round trip
            toload = [
                    p.key for p in
                    localparent.iterate_properties
                    if isinstance(p, StrategizedProperty) and
                      isinstance(p.strategy, DeferredColumnLoader) and
                      p.group==strategy.group
                    ]
        else:
            toload = [self.key]

        # narrow the keys down to just those which have no history
        group = [k for k in toload if k in state.unmodified]

        if strategy._should_log_debug():
            strategy.logger.debug(
                    "deferred load %s group %s",
                    (mapperutil.state_attribute_str(state, self.key),
                    group and ','.join(group) or 'None')
            )

        session = sessionlib._state_session(state)
        if session is None:
            raise orm_exc.DetachedInstanceError(
                "Parent instance %s is not bound to a Session; "
                "deferred load operation of attribute '%s' cannot proceed" %
                (mapperutil.state_str(state), self.key)
                )

        # re-fetch just the requested properties onto the existing state
        query = session.query(localparent)
        ident = state.key[1]
        query._get(None, ident=ident,
                only_load_props=group, refresh_state=state)
        return attributes.ATTR_WAS_SET
class DeferredOption(StrategizedOption):
    """Query option which marks a column attribute as deferred
    (``defer=True``) or undeferred (``defer=False``)."""

    propagate_to_loaders = True

    def __init__(self, key, defer=False):
        super(DeferredOption, self).__init__(key)
        self.defer = defer

    def get_strategy_class(self):
        """Select the loader strategy matching the defer flag."""
        return DeferredColumnLoader if self.defer else ColumnLoader
class UndeferGroupOption(MapperOption):
    """Query option which undefers an entire named group of deferred
    columns."""

    propagate_to_loaders = True

    def __init__(self, group):
        self.group = group

    def process_query(self, query):
        """Flag this option's group as undeferred on the query."""
        undefer_key = ('undefer', self.group)
        query._attributes[undefer_key] = True
class AbstractRelationshipLoader(LoaderStrategy):
    """Base for LoaderStrategies which deal with related objects."""

    def init(self):
        # cache frequently-used aspects of the relationship property
        prop = self.parent_property
        self.mapper = prop.mapper
        self.target = prop.target
        self.table = prop.table
        self.uselist = prop.uselist
class NoLoader(AbstractRelationshipLoader):
    """Strategize a relationship() that doesn't load data automatically."""

    def init_class_attribute(self, mapper):
        self.is_class_level = True

        _register_attribute(self, mapper,
            useobject=True,
            uselist=self.parent_property.uselist,
            typecallable = self.parent_property.collection_class,
        )

    def create_row_processor(self, selectcontext, path, mapper, row, adapter):
        def new_execute(state, dict_, row):
            # initialize the attribute to its empty value rather
            # than loading anything
            state.initialize(self.key)
        return new_execute, None, None

log.class_logger(NoLoader)
class LazyLoader(AbstractRelationshipLoader):
    """Strategize a relationship() that loads when first accessed."""

    def init(self):
        super(LazyLoader, self).init()
        # the "lazy clause": primaryjoin/secondaryjoin criterion with the
        # parent-side columns converted to bind parameters
        self.__lazywhere, \
        self.__bind_to_col, \
        self._equated_columns = self._create_lazy_clause(self.parent_property)

        self.logger.info("%s lazy loading clause %s", self, self.__lazywhere)

        # determine if our "lazywhere" clause is the same as the mapper's
        # get() clause.  then we can just use mapper.get()
        #from sqlalchemy.orm import query
        self.use_get = not self.uselist and \
                        self.mapper._get_clause[0].compare(
                            self.__lazywhere,
                            use_proxies=True,
                            equivalents=self.mapper._equivalent_columns
                        )

        if self.use_get:
            # also map equivalent columns so _equated_columns covers
            # every column variant the mapper knows about
            for col in self._equated_columns.keys():
                if col in self.mapper._equivalent_columns:
                    for c in self.mapper._equivalent_columns[col]:
                        self._equated_columns[c] = self._equated_columns[col]

            self.logger.info("%s will use query.get() to "
                                    "optimize instance loads" % self)

    def init_class_attribute(self, mapper):
        self.is_class_level = True

        # MANYTOONE currently only needs the
        # "old" value for delete-orphan
        # cascades.  the required _SingleParentValidator
        # will enable active_history
        # in that case.  otherwise we don't need the
        # "old" value during backref operations.
        _register_attribute(self,
                mapper,
                useobject=True,
                callable_=self._class_level_loader,
                uselist = self.parent_property.uselist,
                typecallable = self.parent_property.collection_class,
                active_history = \
                    self.parent_property.direction is not \
                        interfaces.MANYTOONE or \
                    not self.use_get,
                )

    def lazy_clause(self, state, reverse_direction=False,
                                alias_secondary=False,
                                adapt_source=None):
        """Generate the WHERE criterion for this relationship against
        the given instance state, binding the state's attribute values
        into the clause's bind parameters."""
        if state is None:
            return self._lazy_none_clause(
                                        reverse_direction,
                                        adapt_source=adapt_source)

        if not reverse_direction:
            criterion, bind_to_col, rev = \
                                            self.__lazywhere, \
                                            self.__bind_to_col, \
                                            self._equated_columns
        else:
            criterion, bind_to_col, rev = \
                                LazyLoader._create_lazy_clause(
                                        self.parent_property,
                                        reverse_direction=reverse_direction)

        if reverse_direction:
            mapper = self.parent_property.mapper
        else:
            mapper = self.parent_property.parent

        o = state.obj() # strong ref
        dict_ = attributes.instance_dict(o)

        # use the "committed state" only if we're in a flush
        # for this state.

        sess = sessionlib._state_session(state)
        if sess is not None and sess._flushing:
            def visit_bindparam(bindparam):
                if bindparam.key in bind_to_col:
                    # deferred (callable) bind value: resolved at
                    # execution time from the committed state
                    bindparam.value = \
                            lambda: mapper._get_committed_state_attr_by_column(
                                    state, dict_, bind_to_col[bindparam.key])
        else:
            def visit_bindparam(bindparam):
                if bindparam.key in bind_to_col:
                    bindparam.value = lambda: mapper._get_state_attr_by_column(
                                        state, dict_, bind_to_col[bindparam.key])

        if self.parent_property.secondary is not None and alias_secondary:
            criterion = sql_util.ClauseAdapter(
                                self.parent_property.secondary.alias()).\
                                traverse(criterion)

        criterion = visitors.cloned_traverse(
                                criterion, {}, {'bindparam':visit_bindparam})

        if adapt_source:
            criterion = adapt_source(criterion)
        return criterion

    def _lazy_none_clause(self, reverse_direction=False, adapt_source=None):
        # criterion used when the given state is None: compare the
        # bound columns against NULL instead.
        if not reverse_direction:
            criterion, bind_to_col, rev = \
                                        self.__lazywhere, \
                                        self.__bind_to_col,\
                                        self._equated_columns
        else:
            criterion, bind_to_col, rev = \
                                LazyLoader._create_lazy_clause(
                                    self.parent_property,
                                    reverse_direction=reverse_direction)

        criterion = sql_util.adapt_criterion_to_null(criterion, bind_to_col)

        if adapt_source:
            criterion = adapt_source(criterion)
        return criterion

    def _class_level_loader(self, state):
        if not state.has_identity and \
            (not self.parent_property.load_on_pending or not state.session_id):
            return None

        return LoadLazyAttribute(state, self.key)

    def create_row_processor(self, selectcontext, path, mapper, row, adapter):
        key = self.key
        if not self.is_class_level:
            def new_execute(state, dict_, row):
                # we are not the primary manager for this attribute
                # on this class - set up a
                # per-instance lazyloader, which will override the
                # class-level behavior.
                # this currently only happens when using a
                # "lazyload" option on a "no load"
                # attribute - "eager" attributes always have a
                # class-level lazyloader installed.
                state.set_callable(dict_, key, LoadLazyAttribute(state, key))
        else:
            def new_execute(state, dict_, row):
                # we are the primary manager for this attribute on
                # this class - reset its
                # per-instance attribute state, so that the class-level
                # lazy loader is
                # executed when next referenced on this instance.
                # this is needed in
                # populate_existing() types of scenarios to reset
                # any existing state.
                state.reset(dict_, key)

        return new_execute, None, None

    @classmethod
    def _create_lazy_clause(cls, prop, reverse_direction=False):
        """Build the lazy-load criterion for *prop*, returning
        (lazywhere, bind_to_col, equated_columns)."""
        binds = util.column_dict()
        lookup = util.column_dict()
        equated_columns = util.column_dict()

        if reverse_direction and prop.secondaryjoin is None:
            for l, r in prop.local_remote_pairs:
                _list = lookup.setdefault(r, [])
                _list.append((r, l))
                equated_columns[l] = r
        else:
            for l, r in prop.local_remote_pairs:
                _list = lookup.setdefault(l, [])
                _list.append((l, r))
                equated_columns[r] = l

        def col_to_bind(col):
            # replace a "local" column with an anonymous bind parameter,
            # creating it on first sight
            if col in lookup:
                for tobind, equated in lookup[col]:
                    if equated in binds:
                        return None
                if col not in binds:
                    binds[col] = sql.bindparam(None, None, type_=col.type)
                return binds[col]
            return None

        lazywhere = prop.primaryjoin

        if prop.secondaryjoin is None or not reverse_direction:
            lazywhere = visitors.replacement_traverse(
                                            lazywhere, {}, col_to_bind)

        if prop.secondaryjoin is not None:
            secondaryjoin = prop.secondaryjoin
            if reverse_direction:
                secondaryjoin = visitors.replacement_traverse(
                                            secondaryjoin, {}, col_to_bind)
            lazywhere = sql.and_(lazywhere, secondaryjoin)

        bind_to_col = dict((binds[col].key, col) for col in binds)

        return lazywhere, bind_to_col, equated_columns

log.class_logger(LazyLoader)
class LoadLazyAttribute(object):
    """serializable loader object used by LazyLoader"""

    def __init__(self, state, key):
        self.state, self.key = state, key

    def __getstate__(self):
        return (self.state, self.key)

    def __setstate__(self, state):
        self.state, self.key = state

    def __call__(self, passive=False):
        state = self.state
        instance_mapper = mapper._state_mapper(state)
        prop = instance_mapper.get_property(self.key)
        strategy = prop._get_strategy(LazyLoader)
        # "pending": the state has no identity key yet
        pending = not state.key

        if (
                passive is attributes.PASSIVE_NO_FETCH and
                not strategy.use_get
            ) or (
                passive is attributes.PASSIVE_ONLY_PERSISTENT and
                pending
            ):
            return attributes.PASSIVE_NO_RESULT

        if strategy._should_log_debug():
            strategy.logger.debug("loading %s",
                                    mapperutil.state_attribute_str(
                                            state, self.key))

        session = sessionlib._state_session(state)
        if session is None:
            raise orm_exc.DetachedInstanceError(
                "Parent instance %s is not bound to a Session; "
                "lazy load operation of attribute '%s' cannot proceed" %
                (mapperutil.state_str(state), self.key)
            )

        q = session.query(prop.mapper)._adapt_all_clauses()

        # don't autoflush on pending
        # this would be something that's prominent in the
        # docs and such
        if pending:
            q = q.autoflush(False)

        if state.load_path:
            q = q._with_current_path(state.load_path + (self.key,))

        # if we have a simple primary key load, use mapper.get()
        # to possibly save a DB round trip
        if strategy.use_get:
            ident = []
            allnulls = True
            if session._flushing:
                get_attr = instance_mapper._get_committed_state_attr_by_column
            else:
                get_attr = instance_mapper._get_state_attr_by_column

            # The many-to-one get is intended to be very fast.  Note
            # that we don't want to autoflush() if the get() doesn't
            # actually have to hit the DB.  It is now not necessary
            # now that we use the pending attribute state.
            for primary_key in prop.mapper.primary_key:
                val = get_attr(
                        state,
                        state.dict,
                        strategy._equated_columns[primary_key],
                        passive=passive)
                if val is attributes.PASSIVE_NO_RESULT:
                    return val
                allnulls = allnulls and val is None
                ident.append(val)

            if allnulls:
                # all-NULL foreign key: there is no related row
                return None

            if state.load_options:
                q = q._conditional_options(*state.load_options)

            key = prop.mapper.identity_key_from_primary_key(ident)
            return q._get(key, ident, passive=passive)

        if prop.order_by:
            q = q.order_by(*util.to_list(prop.order_by))

        for rev in prop._reverse_property:
            # reverse props that are MANYTOONE are loading *this*
            # object from get(), so don't need to eager out to those.
            if rev.direction is interfaces.MANYTOONE and \
                        rev._use_get and \
                        not isinstance(rev.strategy, LazyLoader):
                q = q.options(EagerLazyOption(rev.key, lazy='select'))

        if state.load_options:
            q = q._conditional_options(*state.load_options)

        lazy_clause = strategy.lazy_clause(state)

        if pending:
            # a pending object whose join criterion contains NULL bind
            # values cannot match anything
            bind_values = sql_util.bind_values(lazy_clause)
            if None in bind_values:
                return None

        q = q.filter(lazy_clause)

        result = q.all()
        if strategy.uselist:
            return result
        else:
            l = len(result)
            if l:
                if l > 1:
                    util.warn(
                        "Multiple rows returned with "
                        "uselist=False for lazily-loaded attribute '%s' "
                        % prop)

                return result[0]
            else:
                return None
class ImmediateLoader(AbstractRelationshipLoader):
    """Relationship loader which emits a load for the attribute
    immediately, as each instance row is processed."""

    def init_class_attribute(self, mapper):
        # class-level attribute behavior delegates to the lazy loader
        self.parent_property.\
                _get_strategy(LazyLoader).\
                init_class_attribute(mapper)

    def setup_query(self, context, entity,
                        path, adapter, column_collection=None,
                        parentmapper=None, **kwargs):
        # nothing added to the SELECT; the load occurs post-row
        pass

    def create_row_processor(self, context, path, mapper, row, adapter):
        def execute(state, dict_, row):
            # trigger the attribute's loader right away
            state.get_impl(self.key).get(state, dict_)

        # returned in the third ("post-processing") slot
        return None, None, execute
class SubqueryLoader(AbstractRelationshipLoader):
    """Relationship loader which loads related rows via a second query
    that joins to a subquery derived from the original query."""

    def init(self):
        super(SubqueryLoader, self).init()
        self.join_depth = self.parent_property.join_depth

    def init_class_attribute(self, mapper):
        # class-level attribute behavior delegates to the lazy loader
        self.parent_property.\
                _get_strategy(LazyLoader).\
                init_class_attribute(mapper)

    def setup_query(self, context, entity,
                        path, adapter, column_collection=None,
                        parentmapper=None, **kwargs):

        if not context.query._enable_eagerloads:
            return

        path = path + (self.key, )

        # build up a path indicating the path from the leftmost
        # entity to the thing we're subquery loading.
        subq_path = context.attributes.get(('subquery_path', None), ())

        subq_path = subq_path + path

        reduced_path = interfaces._reduce_path(path)

        # join-depth / recursion check
        if ("loaderstrategy", reduced_path) not in context.attributes:
            if self.join_depth:
                if len(path) / 2 > self.join_depth:
                    return
            else:
                if self.mapper.base_mapper in interfaces._reduce_path(subq_path):
                    return

        orig_query = context.attributes.get(
                                ("orig_query", SubqueryLoader),
                                context.query)

        # determine attributes of the leftmost mapper
        if self.parent.isa(subq_path[0]) and self.key==subq_path[1]:
            leftmost_mapper, leftmost_prop = \
                                    self.parent, self.parent_property
        else:
            leftmost_mapper, leftmost_prop = \
                                    subq_path[0], \
                                    subq_path[0].get_property(subq_path[1])
        leftmost_cols, remote_cols = self._local_remote_columns(leftmost_prop)

        leftmost_attr = [
            leftmost_mapper._columntoproperty[c].class_attribute
            for c in leftmost_cols
        ]

        # reformat the original query
        # to look only for significant columns
        q = orig_query._clone()

        # TODO: why does polymporphic etc. require hardcoding
        # into _adapt_col_list ?  Does query.add_columns(...) work
        # with polymorphic loading ?
        q._set_entities(q._adapt_col_list(leftmost_attr))

        # don't need ORDER BY if no limit/offset
        if q._limit is None and q._offset is None:
            q._order_by = None

        # the original query now becomes a subquery
        # which we'll join onto.
        embed_q = q.with_labels().subquery()
        left_alias = mapperutil.AliasedClass(leftmost_mapper, embed_q)

        # q becomes a new query.  basically doing a longhand
        # "from_self()".  (from_self() itself not quite industrial
        # strength enough for all contingencies...but very close)

        q = q.session.query(self.mapper)
        q._attributes = {
            ("orig_query", SubqueryLoader): orig_query,
            ('subquery_path', None) : subq_path
        }
        q = q._enable_single_crit(False)

        # figure out what's being joined.  a.k.a. the fun part
        to_join = [
                    (subq_path[i], subq_path[i+1])
                    for i in xrange(0, len(subq_path), 2)
                ]

        if len(to_join) < 2:
            parent_alias = left_alias
        else:
            parent_alias = mapperutil.AliasedClass(self.parent)

        local_cols, remote_cols = \
                        self._local_remote_columns(self.parent_property)

        local_attr = [
            getattr(parent_alias, self.parent._columntoproperty[c].key)
            for c in local_cols
        ]
        q = q.order_by(*local_attr)
        q = q.add_columns(*local_attr)

        for i, (mapper, key) in enumerate(to_join):

            # we need to use query.join() as opposed to
            # orm.join() here because of the
            # rich behavior it brings when dealing with
            # "with_polymorphic" mappers.  "aliased"
            # and "from_joinpoint" take care of most of
            # the chaining and aliasing for us.

            first = i == 0
            middle = i < len(to_join) - 1
            second_to_last = i == len(to_join) - 2

            if first:
                attr = getattr(left_alias, key)
            else:
                attr = key

            if second_to_last:
                q = q.join((parent_alias, attr), from_joinpoint=True)
            else:
                q = q.join(attr, aliased=middle, from_joinpoint=True)

        # propagate loader options etc. to the new query.
        # these will fire relative to subq_path.
        q = q._with_current_path(subq_path)
        q = q._conditional_options(*orig_query._with_options)

        if self.parent_property.order_by:
            # if there's an ORDER BY, alias it the same
            # way joinedloader does, but we have to pull out
            # the "eagerjoin" from the query.
            # this really only picks up the "secondary" table
            # right now.
            eagerjoin = q._from_obj[0]
            eager_order_by = \
                            eagerjoin._target_adapter.\
                                copy_and_process(
                                    util.to_list(
                                        self.parent_property.order_by
                                    )
                                )
            q = q.order_by(*eager_order_by)

        # add new query to attributes to be picked up
        # by create_row_processor
        context.attributes[('subquery', reduced_path)] = q

    def _local_remote_columns(self, prop):
        # return ([local columns], [remote columns]) for the property,
        # accounting for an association ("secondary") table if present
        if prop.secondary is None:
            return zip(*prop.local_remote_pairs)
        else:
            return \
                [p[0] for p in prop.synchronize_pairs],\
                [
                    p[0] for p in prop.
                                    secondary_synchronize_pairs
                ]

    def create_row_processor(self, context, path, mapper, row, adapter):
        if not self.parent.class_manager[self.key].impl.supports_population:
            raise sa_exc.InvalidRequestError(
                        "'%s' does not support object "
                        "population - eager loading cannot be applied." %
                        self)

        path = path + (self.key,)

        path = interfaces._reduce_path(path)

        if ('subquery', path) not in context.attributes:
            return None, None, None

        local_cols, remote_cols = self._local_remote_columns(self.parent_property)

        remote_attr = [
                        self.mapper._columntoproperty[c].key
                        for c in remote_cols]

        q = context.attributes[('subquery', path)]

        # execute the subquery and group its rows by the trailing
        # "local" column values appended by setup_query
        collections = dict(
                    (k, [v[0] for v in v])
                    for k, v in itertools.groupby(
                        q,
                        lambda x:x[1:]
                    ))

        if adapter:
            local_cols = [adapter.columns[c] for c in local_cols]

        if self.uselist:
            def execute(state, dict_, row):
                collection = collections.get(
                    tuple([row[col] for col in local_cols]),
                    ()
                )
                state.get_impl(self.key).\
                        set_committed_value(state, dict_, collection)
        else:
            def execute(state, dict_, row):
                collection = collections.get(
                    tuple([row[col] for col in local_cols]),
                    (None,)
                )
                if len(collection) > 1:
                    util.warn(
                        "Multiple rows returned with "
                        "uselist=False for eagerly-loaded attribute '%s' "
                        % self)

                scalar = collection[0]
                state.get_impl(self.key).\
                        set_committed_value(state, dict_, scalar)

        return execute, None, None

log.class_logger(SubqueryLoader)
class EagerLoader(AbstractRelationshipLoader):
    """Strategize a relationship() that loads within the process
    of the parent object being selected."""

    def init(self):
        super(EagerLoader, self).init()
        self.join_depth = self.parent_property.join_depth

    def init_class_attribute(self, mapper):
        # class-level attribute behavior delegates to the lazy loader
        self.parent_property.\
                _get_strategy(LazyLoader).init_class_attribute(mapper)

    def setup_query(self, context, entity, path, adapter, \
                                column_collection=None, parentmapper=None,
                                **kwargs):
        """Add a left outer join to the statement thats being constructed."""

        if not context.query._enable_eagerloads:
            return

        path = path + (self.key,)

        reduced_path = interfaces._reduce_path(path)

        # check for user-defined eager alias
        if ("user_defined_eager_row_processor", reduced_path) in\
                context.attributes:
            clauses = context.attributes[
                        ("user_defined_eager_row_processor",
                        reduced_path)]

            adapter = entity._get_entity_clauses(context.query, context)
            if adapter and clauses:
                context.attributes[
                            ("user_defined_eager_row_processor",
                            reduced_path)] = clauses = clauses.wrap(adapter)
            elif adapter:
                context.attributes[
                            ("user_defined_eager_row_processor",
                            reduced_path)] = clauses = adapter

            add_to_collection = context.primary_columns

        else:
            # check for join_depth or basic recursion,
            # if the current path was not explicitly stated as
            # a desired "loaderstrategy" (i.e. via query.options())
            if ("loaderstrategy", reduced_path) not in context.attributes:
                if self.join_depth:
                    if len(path) / 2 > self.join_depth:
                        return
                else:
                    if self.mapper.base_mapper in reduced_path:
                        return

            clauses = mapperutil.ORMAdapter(
                        mapperutil.AliasedClass(self.mapper),
                        equivalents=self.mapper._equivalent_columns,
                        adapt_required=True)

            if self.parent_property.direction != interfaces.MANYTOONE:
                context.multi_row_eager_loaders = True

            # defer the actual JOIN construction until the Query
            # assembles its FROM clause
            context.create_eager_joins.append(
                (self._create_eager_join, context,
                entity, path, adapter,
                parentmapper, clauses)
            )

            add_to_collection = context.secondary_columns
            context.attributes[
                                ("eager_row_processor", reduced_path)
                              ] = clauses

        # descend into the target mapper's properties so their columns
        # are added to the appropriate collection as well
        for value in self.mapper._polymorphic_properties:
            value.setup(
                context,
                entity,
                path + (self.mapper,),
                clauses,
                parentmapper=self.mapper,
                column_collection=add_to_collection)

    def _create_eager_join(self, context, entity,
                                    path, adapter, parentmapper, clauses):
        if parentmapper is None:
            localparent = entity.mapper
        else:
            localparent = parentmapper

        # whether or not the Query will wrap the selectable in a subquery,
        # and then attach eager load joins to that (i.e., in the case of
        # LIMIT/OFFSET etc.)
        should_nest_selectable = context.multi_row_eager_loaders and \
            context.query._should_nest_selectable

        entity_key = None
        if entity not in context.eager_joins and \
            not should_nest_selectable and \
            context.from_clause:
            index, clause = \
                sql_util.find_join_source(
                                context.from_clause, entity.selectable)
            if clause is not None:
                # join to an existing FROM clause on the query.
                # key it to its list index in the eager_joins dict.
                # Query._compile_context will adapt as needed and
                # append to the FROM clause of the select().
                entity_key, default_towrap = index, clause

        if entity_key is None:
            entity_key, default_towrap = entity, entity.selectable

        towrap = context.eager_joins.setdefault(entity_key, default_towrap)

        join_to_left = False
        if adapter:
            if getattr(adapter, 'aliased_class', None):
                onclause = getattr(
                                adapter.aliased_class, self.key,
                                self.parent_property)
            else:
                onclause = getattr(
                                mapperutil.AliasedClass(
                                        self.parent,
                                        adapter.selectable
                                ),
                                self.key, self.parent_property
                            )

            if onclause is self.parent_property:
                # TODO: this is a temporary hack to
                # account for polymorphic eager loads where
                # the eagerload is referencing via of_type().
                join_to_left = True
        else:
            onclause = self.parent_property

        innerjoin = context.attributes.get(
                            ("eager_join_type", path),
                            self.parent_property.innerjoin)

        context.eager_joins[entity_key] = eagerjoin = \
                                mapperutil.join(
                                            towrap,
                                            clauses.aliased_class,
                                            onclause,
                                            join_to_left=join_to_left,
                                            isouter=not innerjoin
                                            )

        # send a hint to the Query as to where it may "splice" this join
        eagerjoin.stop_on = entity.selectable

        if self.parent_property.secondary is None and \
                not parentmapper:
            # for parentclause that is the non-eager end of the join,
            # ensure all the parent cols in the primaryjoin are actually
            # in the
            # columns clause (i.e. are not deferred), so that aliasing applied
            # by the Query propagates those columns outward.
            # This has the effect
            # of "undefering" those columns.
            for col in sql_util.find_columns(
                                self.parent_property.primaryjoin):
                if localparent.mapped_table.c.contains_column(col):
                    if adapter:
                        col = adapter.columns[col]
                    context.primary_columns.append(col)

        if self.parent_property.order_by:
            context.eager_order_by += \
                            eagerjoin._target_adapter.\
                                copy_and_process(
                                    util.to_list(
                                        self.parent_property.order_by
                                    )
                                )

    def _create_eager_adapter(self, context, row, adapter, path):
        # locate the row-adapter for this path, or return False to
        # signal a degrade to lazy loading
        reduced_path = interfaces._reduce_path(path)
        if ("user_defined_eager_row_processor", reduced_path) in \
                context.attributes:
            decorator = context.attributes[
                            ("user_defined_eager_row_processor",
                            reduced_path)]
            # user defined eagerloads are part of the "primary"
            # portion of the load.
            # the adapters applied to the Query should be honored.
            if context.adapter and decorator:
                decorator = decorator.wrap(context.adapter)
            elif context.adapter:
                decorator = context.adapter
        elif ("eager_row_processor", reduced_path) in context.attributes:
            decorator = context.attributes[
                            ("eager_row_processor", reduced_path)]
        else:
            return False

        try:
            identity_key = self.mapper.identity_key_from_row(row, decorator)
            return decorator
        except KeyError, k:
            # no identity key - dont return a row
            # processor, will cause a degrade to lazy
            return False

    def create_row_processor(self, context, path, mapper, row, adapter):
        if not self.parent.class_manager[self.key].impl.supports_population:
            raise sa_exc.InvalidRequestError(
                        "'%s' does not support object "
                        "population - eager loading cannot be applied." %
                        self)

        path = path + (self.key,)

        eager_adapter = self._create_eager_adapter(
                                                context,
                                                row,
                                                adapter, path)

        if eager_adapter is not False:
            key = self.key
            _instance = self.mapper._instance_processor(
                                context,
                                path + (self.mapper,),
                                eager_adapter)

            if not self.uselist:
                def new_execute(state, dict_, row):
                    # set a scalar object instance directly on the parent
                    # object, bypassing InstrumentedAttribute event handlers.
                    dict_[key] = _instance(row, None)

                def existing_execute(state, dict_, row):
                    # call _instance on the row, even though the object has
                    # been created, so that we further descend into properties
                    existing = _instance(row, None)
                    if existing is not None \
                        and key in dict_ \
                        and existing is not dict_[key]:
                        util.warn(
                            "Multiple rows returned with "
                            "uselist=False for eagerly-loaded attribute '%s' "
                            % self)
                return new_execute, existing_execute, None
            else:
                def new_execute(state, dict_, row):
                    collection = attributes.init_state_collection(
                                                    state, dict_, key)
                    result_list = util.UniqueAppender(collection,
                                                      'append_without_event')
                    context.attributes[(state, key)] = result_list
                    _instance(row, result_list)

                def existing_execute(state, dict_, row):
                    if (state, key) in context.attributes:
                        result_list = context.attributes[(state, key)]
                    else:
                        # appender_key can be absent from context.attributes
                        # with isnew=False when self-referential eager loading
                        # is used; the same instance may be present in two
                        # distinct sets of result columns
                        collection = attributes.init_state_collection(state,
                                        dict_, key)
                        result_list = util.UniqueAppender(
                                                collection,
                                                'append_without_event')
                        context.attributes[(state, key)] = result_list
                    _instance(row, result_list)
                return new_execute, existing_execute, None
        else:
            # no eager adapter available: degrade to the lazy loader's
            # row processor
            return self.parent_property.\
                            _get_strategy(LazyLoader).\
                            create_row_processor(
                                            context, path,
                                            mapper, row, adapter)

log.class_logger(EagerLoader)
class EagerLazyOption(StrategizedOption):
    """Query option that selects the loader strategy ("lazy" setting)
    used for a relationship attribute."""

    def __init__(self, key, lazy=True, chained=False,
                    propagate_to_loaders=True
                    ):
        super(EagerLazyOption, self).__init__(key)
        # resolve the concrete strategy class up front from the identifier
        self.strategy_cls = factory(lazy)
        self.lazy = lazy
        self.propagate_to_loaders = propagate_to_loaders
        self.chained = chained

    @property
    def is_eager(self):
        """True when one of the eager strategies was selected."""
        eager_identifiers = (False, 'joined', 'subquery')
        return self.lazy in eager_identifiers

    @property
    def is_chained(self):
        """True when eager loading applies across the whole option chain."""
        if not self.is_eager:
            return False
        return self.chained

    def get_strategy_class(self):
        return self.strategy_cls
def factory(identifier):
    """Map a relationship "lazy" identifier to its LoaderStrategy class.

    ``False``/``'joined'`` select joined eager loading, ``None``/``'noload'``
    select no loading, ``True``/``'select'`` select lazy (SELECT-on-access)
    loading; unknown identifiers fall back to lazy loading.
    """
    if identifier is False or identifier == 'joined':
        return EagerLoader
    elif identifier is None or identifier == 'noload':
        return NoLoader
    elif identifier is True or identifier == 'select':
        # bugfix: this branch previously tested "identifier is False", which
        # is unreachable after the first branch; True is the identifier for
        # lazy loading (it previously only worked via the else fallback).
        return LazyLoader
    elif identifier == 'subquery':
        return SubqueryLoader
    elif identifier == 'immediate':
        return ImmediateLoader
    else:
        return LazyLoader
class EagerJoinOption(PropertyOption):
    """Query option that sets the join type (inner vs. outer) used by a
    joined eager load."""

    def __init__(self, key, innerjoin, chained=False):
        super(EagerJoinOption, self).__init__(key)
        self.chained = chained
        self.innerjoin = innerjoin

    def is_chained(self):
        return self.chained

    def process_query_property(self, query, paths, mappers):
        if not self.is_chained():
            # only the terminal path receives the join type
            query._attributes[("eager_join_type", paths[-1])] = self.innerjoin
        else:
            # propagate the join type along every path in the chain
            for path in paths:
                query._attributes[("eager_join_type", path)] = self.innerjoin
class LoadEagerFromAliasOption(PropertyOption):
    """Query option (contains_eager) which directs an eager load to pull
    its columns from a user-supplied alias instead of the default one."""

    def __init__(self, key, alias=None):
        super(LoadEagerFromAliasOption, self).__init__(key)
        if alias is not None:
            if not isinstance(alias, basestring):
                # resolve a mapped entity / AliasedClass down to its
                # selectable ("m" is unused here by design)
                m, alias, is_aliased_class = mapperutil._entity_info(alias)
        self.alias = alias

    def process_query_property(self, query, paths, mappers):
        # Install a "user_defined_eager_row_processor" adapter for the
        # terminal path so the eager loader reads from self.alias.
        if self.alias is not None:
            if isinstance(self.alias, basestring):
                # a string alias name: build the alias from the target table
                mapper = mappers[-1]
                (root_mapper, propname) = paths[-1][-2:]
                prop = mapper.get_property(propname, resolve_synonyms=True)
                self.alias = prop.target.alias(self.alias)
            query._attributes[
                    ("user_defined_eager_row_processor",
                    interfaces._reduce_path(paths[-1]))
                    ] = sql_util.ColumnAdapter(self.alias)
        else:
            # no alias given: fall back to the query's polymorphic adapter
            # for the target mapper (may be None)
            (root_mapper, propname) = paths[-1][-2:]
            mapper = mappers[-1]
            prop = mapper.get_property(propname, resolve_synonyms=True)
            adapter = query._polymorphic_adapters.get(prop.mapper, None)
            query._attributes[
                    ("user_defined_eager_row_processor",
                    interfaces._reduce_path(paths[-1]))] = adapter
class _SingleParentValidator(interfaces.AttributeExtension):
    """Attribute extension enforcing the "single_parent" relationship
    flag: a child instance may not be attached to more than one parent."""

    def __init__(self, prop):
        self.prop = prop

    def _do_check(self, state, value, oldvalue, initiator):
        # None is always acceptable; nothing to validate
        if value is None:
            return value
        child_state = attributes.instance_state(value)
        # reject only when the child already has a parent and is being
        # attached to a different one
        if initiator.hasparent(child_state) and oldvalue is not value:
            raise sa_exc.InvalidRequestError(
                "Instance %s is already associated with an instance "
                "of %s via its %s attribute, and is only allowed a "
                "single parent." %
                (mapperutil.instance_str(value), state.class_, self.prop)
            )
        return value

    def append(self, state, value, initiator):
        return self._do_check(state, value, None, initiator)

    def set(self, state, value, oldvalue, initiator):
        return self._do_check(state, value, oldvalue, initiator)
| gpl-3.0 |
tmxdyf/CouchPotatoServer | couchpotato/core/media/_base/search/main.py | 14 | 1791 | from couchpotato.api import addApiView
from couchpotato.core.event import fireEvent, addEvent
from couchpotato.core.helpers.variable import mergeDicts
from couchpotato.core.logger import CPLog
from couchpotato.core.plugins.base import Plugin
log = CPLog(__name__)
class Search(Plugin):
    """Registers a combined "search" API endpoint plus one endpoint per
    media type (movie.search, show.search, ...)."""

    def __init__(self):

        addApiView('search', self.search, docs = {
            'desc': 'Search the info in providers for a movie',
            'params': {
                'q': {'desc': 'The (partial) movie name you want to search for'},
                'type': {'desc': 'Search for a specific media type. Leave empty to search all.'},
            },
            'return': {'type': 'object', 'example': """{
    'success': True,
    'movies': array,
    'show': array,
    etc
}"""}
        })

        addEvent('app.load', self.addSingleSearches)

    def search(self, q = '', types = None, **kwargs):
        """Search all providers for *q*, optionally restricted to the
        given media type(s)."""

        # Make sure types is the correct instance
        if isinstance(types, (str, unicode)):
            types = [types]
        elif isinstance(types, (list, tuple, set)):
            types = list(types)

        if not types:
            # no filter: query every info provider at once
            result = fireEvent('info.search', q = q, merge = True)
        else:
            result = {}
            for media_type in types:
                # bugfix: the query string was previously not forwarded,
                # so per-type searches ignored "q" entirely
                result[media_type] = fireEvent('%s.search' % media_type, q = q)

        return mergeDicts({
            'success': True,
        }, result)

    def createSingleSearch(self, media_type):
        """Build a search handler bound to a single media type."""

        def singleSearch(q, **kwargs):
            # bugfix: search() declares "types", not "type"; the old
            # keyword landed in **kwargs and was ignored, so single-type
            # endpoints searched every media type
            return self.search(q, types = media_type, **kwargs)

        return singleSearch

    def addSingleSearches(self):
        # register one API endpoint per known media type
        for media_type in fireEvent('media.types', merge = True):
            addApiView('%s.search' % media_type, self.createSingleSearch(media_type))
| gpl-3.0 |
goblincoding/ninja-ide | ninja_ide/core/plugin_manager.py | 9 | 19407 | # -*- coding: utf-8 -*-
#
# This file is part of NINJA-IDE (http://ninja-ide.org).
#
# NINJA-IDE is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# any later version.
#
# NINJA-IDE is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with NINJA-IDE; If not, see <http://www.gnu.org/licenses/>.
from __future__ import absolute_import
from __future__ import unicode_literals
import os
import sys
import shutil
import copy
import zipfile
import traceback
#lint:disable
try:
from urllib.request import urlopen
from urllib.error import URLError
except ImportError:
from urllib2 import urlopen
from urllib2 import URLError
#lint:enable
from ninja_ide import resources
from ninja_ide.tools.logger import NinjaLogger
from ninja_ide.tools import json_manager
logger = NinjaLogger('ninja_ide.core.plugin_manager')

# Name of the pip requirements file a plugin may ship with, and the
# shell command template used to install those requirements.
REQUIREMENTS = 'requirements.txt'
COMMAND_FOR_PIP_INSTALL = 'pip install -r %s'

try:
    # For Python2
    str = unicode  # lint:ok
except NameError:
    # We are in Python3
    pass
class ServiceLocator(object):
    """Registry that exposes NINJA-IDE services to plugins by name."""

    def __init__(self, services=None):
        # an empty/None mapping falls back to a fresh dict
        if not services:
            services = {}
        self.__services = services

    def get_service(self, name):
        """Return the service registered under *name*, or None."""
        return self.__services.get(name)

    def get_availables_services(self):
        """Return the list of registered service names."""
        return list(self.__services.keys())
'''
NINJA-IDE Plugin
my_plugin.plugin
{
"module": "my_plugin",
"class": "MyPluginExample",
"authors": "Martin Alderete <malderete@gmail.com>",
"version": "0.1",
"description": "Este plugin es de prueba"
}
class MyPluginExample(Plugin):
def initialize(self):
#Configure the plugin using the NINJA-IDE API!!!
self.editor_s = self.service_locator.get_service('editor')
self.toolbar_s = self.service_locator.get_service('toolbar')
self.toolbar_s.add_action(QAction(...........))
self.appmenu_s = self.service_locator.get_service('appmenu')
self.appmenu_s.add_menu(QMenu(......))
#connect events!
self.editor_s.editorKeyPressEvent.connect(self.my_plugin_key_pressed)
def my_plugin_key_pressed(self, ...):
print 'se apreto alguna tecla en el ide...'
'''
###############################################################################
# NINJA-IDE Plugin Manager
###############################################################################
class PluginManagerException(Exception):
    """Raised when a plugin module cannot be imported or instantiated."""
#Singleton
__pluginManagerInstance = None


def PluginManager(*args, **kw):
    """Return the process-wide plugin manager singleton.

    The constructor arguments are only used on the very first call;
    subsequent calls ignore them and return the existing instance.
    """
    global __pluginManagerInstance
    if __pluginManagerInstance is None:
        __pluginManagerInstance = __PluginManager(*args, **kw)
    return __pluginManagerInstance
#Extension of the NINJA-IDE plugin
PLUGIN_EXTENSION = '.plugin'
class __PluginManager(object):
    '''
    Plugin manager allows to load, unload, initialize plugins.
    '''

    def __init__(self, plugins_dir, service_locator):
        '''
        @param plugins_dir: Path (or list of paths) to search plugins.
        @param service_locator: ServiceLocator object.
        '''
        self._service_locator = service_locator
        #new!
        self._plugins_by_dir = {}
        #add all the plugins paths
        for path in self.__create_list(plugins_dir):
            self.add_plugin_dir(path)
        #end new!
        #self._plugins_dir = plugins_dir
        self._errors = []
        #found plugins
        #example: ["logger", "my_plugin"]
        self._found_plugins = []
        #active plugins
        #example: {"logger": (LoggerIntance, metadata),
        #          "my_plugin": (MyPluginInstance, metadata)}
        self._active_plugins = {}

    def __create_list(self, obj):
        """Normalize *obj* into a list (a single path becomes [path])."""
        if isinstance(obj, (list, tuple)):
            return obj
        #string then returns a list of one item!
        return [obj]

    def add_plugin_dir(self, plugin_dir):
        '''
        Add a new directory to search plugins.

        @param plugin_dir: absolute path.
        '''
        if not plugin_dir in self._plugins_by_dir:
            self._plugins_by_dir[plugin_dir] = []

    def get_actives_plugins(self):
        """Deprecated (misspelled) alias kept for backward compatibility."""
        import warnings
        warnings.warn("Deprecated in behalf of a TYPO free method name")
        return self.get_active_plugins()

    def get_active_plugins(self):
        '''
        Returns the list of active plugin instances.
        '''
        return [plugin[0] for plugin in list(self._active_plugins.values())]

    def _get_dir_from_plugin_name(self, plugin_name):
        '''
        Returns the directory which contains plugin_name (or None).
        '''
        for dir_, plug_names in list(self._plugins_by_dir.items()):
            if plugin_name in plug_names:
                return dir_

    def __getitem__(self, plugin_name):
        '''
        Magic method to get a plugin instance
        from a given name.
        @Note: This method has the logic below.
        Check if the plugin is known,
        if it is active return it,
        otherwise, active it and return it.
        If the plugin name does not exist
        raise KeyError exception.

        @param plugin_name: plugin name.
        @return: Plugin instance or None
        '''
        global PLUGIN_EXTENSION
        ext = PLUGIN_EXTENSION
        if not plugin_name.endswith(ext):
            plugin_name += ext
        if plugin_name in self._found_plugins:
            if not plugin_name in self._active_plugins:
                # lazily activate the plugin on first access
                dir_ = self._get_dir_from_plugin_name(plugin_name)
                self.load(plugin_name, dir_)
            return self._active_plugins[plugin_name][0]
        raise KeyError(plugin_name)

    def __contains__(self, plugin_name):
        '''
        Magic method to know whether the
        PluginManager contains
        a plugin with a given name.

        @param plugin_name: plugin name.
        @return: True or False.
        '''
        return plugin_name in self._found_plugins

    def __iter__(self):
        '''
        Magic method to iterate over all
        the plugin's names.

        @return: iterator.
        '''
        return iter(self._found_plugins)

    def __len__(self):
        '''
        Magic method to know the plugins
        quantity.

        @return: length.
        '''
        return len(self._found_plugins)

    def __bool__(self):
        '''
        Magic method to indicate that any
        instance must pass the if conditional
        if x:
        '''
        return True

    def get_plugin_name(self, file_name):
        '''
        Get the plugin's name from a file name.

        @param file_name: A file object name.
        @return: A plugin name from a file.
        '''
        plugin_file_name, file_ext = os.path.splitext(file_name)
        return plugin_file_name

    def list_plugins(self, dir_name):
        '''
        Crawl a directory and collect plugins.

        @return: List with plugin names.
        '''
        global PLUGIN_EXTENSION
        ext = PLUGIN_EXTENSION
        try:
            listdir = os.listdir(dir_name)
            return [plug for plug in listdir if plug.endswith(ext)]
        except OSError:
            # missing/unreadable directory simply has no plugins
            return ()

    def is_plugin_active(self, plugin_name):
        '''
        Check if a plugin is or not active

        @param plugin_name: Plugin name to check.
        @return: True or False
        '''
        return plugin_name in self._active_plugins

    def discover(self):
        '''
        Search all files for directory
        and get the valid plugin's names.
        '''
        for dir_name in self._plugins_by_dir:
            for file_name in self.list_plugins(dir_name):
                plugin_name = file_name
                if not plugin_name in self._found_plugins:
                    self._found_plugins.append(plugin_name)
                    self._plugins_by_dir[dir_name].append(plugin_name)

    def _load_module(self, module, klassname, metadata, dir_name):
        """Import *module* from *dir_name* and return an instance of
        *klassname*; raises PluginManagerException on failure."""
        old_syspath = copy.copy(sys.path)
        try:
            sys.path.insert(1, dir_name)
            module = __import__(module, globals(), locals(), [])
            klass = getattr(module, klassname)
            #Instanciate the plugin
            plugin_instance = klass(self._service_locator, metadata=metadata)
            #return the plugin instance
            return plugin_instance
        except(ImportError, AttributeError) as reason:
            raise PluginManagerException('Error loading "%s": %s' %
                (module, reason))
        finally:
            #always restore sys.path, whether we returned or raised
            sys.path = old_syspath
        # NOTE: a trailing "return None" after the finally block was removed;
        # it was unreachable because every path above returns or raises.

    def load(self, plugin_name, dir_name):
        """Read the plugin descriptor, instantiate and initialize it.

        NOTE(review): the *dir_name* parameter is immediately shadowed by
        the loop variable below and is effectively ignored -- confirm with
        callers before relying on it.
        """
        global PLUGIN_EXTENSION
        if plugin_name in self._active_plugins:
            return
        for dir_name, plugin_list in list(self._plugins_by_dir.items()):
            if plugin_name in plugin_list:
                ext = PLUGIN_EXTENSION
                plugin_filename = os.path.join(dir_name, plugin_name)
                plugin_structure = json_manager.read_json(plugin_filename)
                plugin_structure['name'] = plugin_name.replace(ext, '')
                module = plugin_structure.get('module', None)
                klassname = plugin_structure.get('class', None)
                if module is not None and klassname is not None:
                    try:
                        plugin_instance = self._load_module(module,
                            klassname, plugin_structure, dir_name)
                        #set a get_plugin method to get the reference to other
                        #setattr(plugin_instance,'get_plugin',self.__getitem__)
                        #call a special method *initialize* in the plugin!
                        plugin_instance.metadata = plugin_structure
                        logger.info("Calling initialize (%s)", plugin_name)
                        plugin_instance.initialize()
                        #tuple (instance, metadata)
                        plugin_metadata = (plugin_instance, plugin_structure)
                        self._active_plugins[plugin_name] = plugin_metadata
                    except (PluginManagerException, Exception) as reason:
                        logger.error("Not instanciated (%s): %s", plugin_name,
                            reason)
                        #remove the plugin because has errors
                        self._found_plugins.remove(plugin_name)
                        traceback_msg = traceback.format_exc()
                        plugin_name = plugin_name.replace(ext, '')
                        #add the traceback to errors
                        self._add_error(plugin_name, traceback_msg)
                    else:
                        logger.info("Successfuly initialized (%s)",
                            plugin_name)

    def load_all(self):
        """Load every discovered plugin from every registered directory."""
        for dir, pl in list(self._plugins_by_dir.items()):
            #Copy the list because may be we REMOVE item while iterate!
            found_plugins_aux = copy.copy(pl)
            for plugin_name in found_plugins_aux:
                self.load(plugin_name, dir)

    def load_all_external(self, plugin_path):
        """Load every discovered plugin from an external *plugin_path*."""
        #Copy the list because may be we REMOVE item while iterate!
        found_plugins_aux = copy.copy(self._found_plugins)
        for plugin_name in found_plugins_aux:
            self.load(plugin_name, plugin_path)

    def unload(self, plugin_name):
        """Finish the given plugin and drop it from the active set."""
        try:
            plugin_object = self._active_plugins[plugin_name][0]
            #call a special method *finish* in the plugin!
            plugin_object.finish()
            del self._active_plugins[plugin_name]
        except Exception as reason:
            logger.error("Finishing plugin (%s): %s", plugin_name, reason)
        else:
            logger.info("Successfuly finished (%s)", plugin_name)

    def unload_all(self):
        """Finish every active plugin."""
        #Copy the list because may be we REMOVE item while iterate!
        active_plugins_aux = copy.copy(self._active_plugins)
        for plugin_name in active_plugins_aux:
            self.unload(plugin_name)

    def shutdown(self):
        """Teardown hook: unload every active plugin."""
        self.unload_all()

    def get_availables_services(self):
        """
        Returns all services availables
        """
        #bugfix: the result was previously computed but never returned,
        #so this method always yielded None despite its docstring
        return self._service_locator.get_availables_services()

    def _add_error(self, plugin_name, traceback_msg):
        """Record a (plugin_name, traceback) pair for later reporting."""
        self._errors.append((plugin_name, traceback_msg))

    @property
    def errors(self):
        """
        Returns the list of (plugin_name, traceback) error tuples
        """
        return self._errors
def _availables_plugins(url):
    """Fetch and deserialize the plugin listing published at *url*.

    Returns an empty dict when the URL cannot be reached.
    """
    try:
        stream = urlopen(url)
        return json_manager.read_json_from_stream(stream)
    except URLError:
        return {}
def available_oficial_plugins():
    """Return the dict of OFFICIAL plugins published on the NINJA-IDE site."""
    return _availables_plugins(resources.PLUGINS_WEB)
def available_community_plugins():
    """Return the dict of COMMUNITY plugins published on the NINJA-IDE site."""
    return _availables_plugins(resources.PLUGINS_COMMUNITY)
def local_plugins():
    """Return the locally installed plugin descriptors (empty list when
    the descriptor file does not exist yet)."""
    descriptor = resources.PLUGINS_DESCRIPTOR
    if os.path.isfile(descriptor):
        return json_manager.read_json(descriptor)
    return []
def __get_all_plugin_descriptors():
    '''
    Returns the file names of every installed .plugin descriptor
    '''
    global PLUGIN_EXTENSION
    descriptors = []
    for file_name in os.listdir(resources.PLUGINS):
        if file_name.endswith(PLUGIN_EXTENSION):
            descriptors.append(file_name)
    return descriptors
def download_plugin(file_):
    '''
    Download the plugin zip at *file_* and unpack it into the plugins
    directory; returns the file name of the new .plugin descriptor.
    '''
    global PLUGIN_EXTENSION
    #get all the .plugin files in local filesystem
    plugins_installed_before = set(__get_all_plugin_descriptors())
    #download the plugin
    fileName = os.path.join(resources.PLUGINS, os.path.basename(file_))
    content = urlopen(file_)
    #use a context manager so the handle is closed even on a write error
    with open(fileName, 'wb') as f:
        f.write(content.read())
    #open and extract the zip (close guaranteed via finally)
    zipFile = zipfile.ZipFile(fileName, 'r')
    try:
        zipFile.extractall(resources.PLUGINS)
    finally:
        zipFile.close()
    #clean up the enviroment
    os.remove(fileName)
    #get the name of the last installed plugin
    plugins_installed_after = set(__get_all_plugin_descriptors())
    #using set operations get the difference that is the new plugin
    new_plugin = (plugins_installed_after - plugins_installed_before).pop()
    return new_plugin
def manual_install(file_):
"""Copy zip file and install."""
global PLUGIN_EXTENSION
#get all the .plugin files in local filesystem
plugins_installed_before = set(__get_all_plugin_descriptors())
#copy the plugin
fileName = os.path.join(resources.PLUGINS, os.path.basename(file_))
shutil.copyfile(file_, fileName)
#extract the zip
zipFile = zipfile.ZipFile(fileName, 'r')
zipFile.extractall(resources.PLUGINS)
zipFile.close()
#clean up the enviroment
os.remove(fileName)
#get the name of the last installed plugin
plugins_installed_after = set(__get_all_plugin_descriptors())
#using set operations get the difference that is the new plugin
new_plugin = (plugins_installed_after - plugins_installed_before).pop()
return new_plugin
def has_dependencies(plug):
    """Check whether the installed plugin ships a pip requirements file.

    Returns ``(True, pip_command)`` when the plugin module contains a
    requirements.txt, ``(False, None)`` otherwise.
    """
    global REQUIREMENTS, COMMAND_FOR_PIP_INSTALL
    plugin_name = plug[0]
    structure = []
    if os.path.isfile(resources.PLUGINS_DESCRIPTOR):
        structure = json_manager.read_json(resources.PLUGINS_DESCRIPTOR)
    PLUGINS = resources.PLUGINS
    for p in structure:
        if p['name'] != plugin_name:
            continue
        pd_file = os.path.join(PLUGINS, p['plugin-descriptor'])
        p_json = json_manager.read_json(pd_file)
        module = p_json.get('module')
        #plugin_module/requirements.txt
        req_file = os.path.join(os.path.join(PLUGINS, module), REQUIREMENTS)
        if os.path.isfile(req_file):
            return (True, COMMAND_FOR_PIP_INSTALL % req_file)
        #the plugin was found but has no requirements: stop searching
        break
    return (False, None)
def update_local_plugin_descriptor(plugins):
    '''
    updates the local plugin description

    The description.json file holds the information about the plugins
    downloaded with NINJA-IDE
    This is a way to track the versions of the plugins
    '''
    structure = []
    if os.path.isfile(resources.PLUGINS_DESCRIPTOR):
        structure = json_manager.read_json(resources.PLUGINS_DESCRIPTOR)
    for plug_list in plugins:
        #map the positional fields onto their descriptor keys
        structure.append({
            'name': plug_list[0],
            'version': plug_list[1],
            'description': plug_list[2],
            'authors': plug_list[3],
            'home': plug_list[4],
            'download': plug_list[5],
            'plugin-descriptor': plug_list[6],
        })
    json_manager.write_json(structure, resources.PLUGINS_DESCRIPTOR)
def uninstall_plugin(plug):
    """
    Uninstall the given plugin: remove its entry from the local
    descriptor file, delete its module directory tree and its
    .plugin file.
    """
    plugin_name = plug[0]
    structure = []
    if os.path.isfile(resources.PLUGINS_DESCRIPTOR):
        structure = json_manager.read_json(resources.PLUGINS_DESCRIPTOR)
    #copy the strcuture we iterate and remove at the same time
    structure_aux = copy.copy(structure)
    for plugin in structure_aux:
        if plugin["name"] == plugin_name:
            fileName = plugin["plugin-descriptor"]
            structure.remove(plugin)
            break
    # NOTE(review): if plugin_name is not found above, "fileName" is
    # never bound and the next line raises NameError -- confirm callers
    # only pass installed plugins.
    #open <plugin>.plugin file and get the module to remove
    fileName = os.path.join(resources.PLUGINS, fileName)
    plugin = json_manager.read_json(fileName)
    module = plugin.get('module')
    if module:
        pluginDir = os.path.join(resources.PLUGINS, module)
        folders = [pluginDir]
        for root, dirs, files in os.walk(pluginDir):
            pluginFiles = [os.path.join(root, f) for f in files]
            #remove all files
            list(map(os.remove, pluginFiles))
            #collect subfolders
            folders += [os.path.join(root, d) for d in dirs]
        #delete deepest directories first so removedirs succeeds
        folders.reverse()
        for f in folders:
            if os.path.isdir(f):
                os.removedirs(f)
    #remove ths plugin_name.plugin file
    os.remove(fileName)
    #write the new info
    json_manager.write_json(structure, resources.PLUGINS_DESCRIPTOR)
###############################################################################
# Module Test
###############################################################################


if __name__ == '__main__':
    # manual smoke test: discover and activate every installed plugin
    folders = resources.PLUGINS
    services = {}
    sl = ServiceLocator(services)
    pm = PluginManager(folders, sl)
    #There are not plugins yet...lets discover
    pm.discover()
    logger.info("listing plugins names...")
    for p in pm:
        print(p)
    logger.info("Activating plugins...")
    pm.load_all()
    logger.info("Plugins already actives...")
    logger.info(pm.get_active_plugins())
| gpl-3.0 |
Rona111/sale_commission | __unported__/stock_block_prodlots/block_prodlot_case.py | 5 | 10632 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2011 Pexego (<www.pexego.es>). All Rights Reserved
# $Omar Castiñeira Saavedra$
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
"""reasons to block production lots"""
from osv import osv, fields
import netsvc
import time
from tools.translate import _
class block_prodlot_cases(osv.osv):
    """reasons to block production lots"""
    _name = "block.prodlot.cases"
    _description = "Blockade cases"

    # All fields become read-only once the case reaches the 'confirm' state.
    _columns = {
        'name': fields.char('Name', size=64, required=True, states={'confirm':[('readonly', True)]}),
        'description': fields.text('Description', required=True),
        'blocked_prodlots_ids': fields.many2many('stock.production.lot', 'blocked_prodlots_cases_ids', 'case_id', 'blocked_prodlot', 'Blocked Prodlots', states={'confirm':[('readonly', True)]}),
        'parent_block_prodlot': fields.many2one('stock.production.lot', 'Blockade Root', required=True, ondelete="set null", states={'confirm':[('readonly', True)]}),
        'state': fields.selection([('in_review', 'In Review'), ('confirm', 'Confirm'), ('cancelled', 'Cancelled')], 'State', required=True, readonly=True)
    }
    _defaults = {
        'state': 'in_review'
    }

    def send_blockade_case_notification(self, cr, uid, case_id, state = 'in_review'):
        """send a notification to Production Lots / Blockade Notifications users for blockade cases"""
        group_id = self.pool.get('res.groups').search(cr, uid, [('name', '=', 'Production Lots / Blockade Notifications')])
        if group_id:
            group_id = self.pool.get('res.groups').browse(cr, uid, group_id[0])
            obj_case_id = self.browse(cr, uid, case_id)
            #get a string comma list from object case prodlots collection
            # pylint: disable-msg=W0141
            lots_affected_names = u','.join(map(str, map(lambda x:x.name, obj_case_id.blocked_prodlots_ids)))
            if state == 'in_review':
                message = _("New production lots in review, will raise a warning meanwhile be in this state.\n\nLots names: %s\n\nBlockade Description: %s\n\n \
                Blockade was raised from production_lot: %s.") % (lots_affected_names, obj_case_id.description, obj_case_id.parent_block_prodlot.name)
            else:
                message = _("New production lots blocked. Now not can you use this prodlots definitely.\n\nLots names: %s\n\nBlockade Description: %s\n\n \
                Blockade was raised from production_lot: %s.") % (lots_affected_names, obj_case_id.description, obj_case_id.parent_block_prodlot.name)
            # one internal request per user of the notification group
            for user in group_id.user_ids:
                self.pool.get('res.request').create(cr, uid, {
                    'name': _("Blockade Case %s: %s") % (obj_case_id.id, obj_case_id.name),
                    'body': message,
                    'state': 'waiting',
                    'act_from': uid,
                    'act_to': user.id,
                    'ref_doc1': 'block.prodlot.cases,%d' % (obj_case_id.id,),
                    'priority': '2'
                })
            return True
        return False

    def confirm_blockade_case(self, cr, uid, ids, context = None):
        """confirm blockade case and block definitely prodlots in alert affected by case"""
        if context is None: context = {}
        if isinstance(ids, (int, long)):
            ids = [ids]
        for obj_block_prodlot_case_id in self.browse(cr, uid, ids):
            created_move_ids = []
            for obj_blocked_prodlot_id in obj_block_prodlot_case_id.blocked_prodlots_ids:
                #searches if prodlots have other blockade cases that interrumpt his blockade
                #(parameterized SQL: ids are passed as query parameters)
                cr.execute("select * from blocked_prodlots_cases_ids inner join block_prodlot_cases on id = case_id \
                    where blocked_prodlot = %s and case_id != %s and state not in ('confirm','cancelled')", (obj_blocked_prodlot_id.id, obj_block_prodlot_case_id.id))
                #if prodlot have another blockade cases in review it cannot block
                if cr.rowcount:
                    continue
                # find every stock location still holding quantity of the lot
                obj_real_report_prodlots_ids = self.pool.get('stock.report.prodlots').search(cr, uid, [('prodlot_id', '=', obj_blocked_prodlot_id.id),('qty','>',0)])
                for obj_real_report_prodlots_id in self.pool.get('stock.report.prodlots').browse(cr, uid, obj_real_report_prodlots_ids):
                    # NOTE(review): ('internal') is a plain string, so this is
                    # a substring membership test; a one-element tuple
                    # ('internal',) was probably intended -- confirm.
                    if obj_real_report_prodlots_id.location_id.usage not in ('internal'):
                        continue
                    # move the blocked quantity to the product's waste location
                    move_id = self.pool.get('stock.move').create(cr, uid, {
                        'product_uom': obj_real_report_prodlots_id.product_id.uom_id.id,
                        'date' : time.strftime("%Y-%m-%d"),
                        'date_expected' : time.strftime("%Y-%m-%d"),
                        'prodlot_id': obj_blocked_prodlot_id.id,
                        'product_qty': obj_real_report_prodlots_id.qty,
                        'location_id': obj_real_report_prodlots_id.location_id.id,
                        'product_id': obj_real_report_prodlots_id.product_id.id,
                        'name': _("BLOCK: ") + obj_real_report_prodlots_id.prodlot_id.name + obj_real_report_prodlots_id.location_id.name,
                        'state': 'draft',
                        'location_dest_id': obj_real_report_prodlots_id.product_id.product_tmpl_id.property_waste.id
                    })
                    created_move_ids.append(move_id)
                #for update block and in_alert store attribute
                self.pool.get('stock.production.lot').write(cr, uid, [obj_blocked_prodlot_id.id], {'date': time.strftime("%Y-%m-%d %H:%M:%S")})
            if created_move_ids:
                # group all generated moves into one internal picking and
                # push it through the workflow
                picking_id = self.pool.get('stock.picking').create(cr, uid, {
                    'origin': _("BLOCKCASE:") + str(obj_block_prodlot_case_id.id),
                    'state': 'draft',
                    'type': 'internal',
                    'move_type': 'direct',
                })
                self.pool.get('stock.move').write(cr, uid, created_move_ids, {'picking_id': picking_id})
                wf_service = netsvc.LocalService("workflow")
                wf_service.trg_validate(uid, 'stock.picking', picking_id, 'button_confirm', cr)
            self.write(cr, uid, [obj_block_prodlot_case_id.id], {
                'state': 'confirm'
            })
            #send block notification to users
            self.send_blockade_case_notification(cr, uid, obj_block_prodlot_case_id.id, 'confirm')
            #for update block and in_alert store attribute
            self.pool.get('stock.production.lot').write(cr, uid, [x.id for x in obj_block_prodlot_case_id.blocked_prodlots_ids], {})
        return True

    def cancel_blockade_case(self, cr, uid, ids, context = None):
        """cancelled blockade cases"""
        if context is None: context = {}
        self.write(cr, uid, ids, {'state': 'cancelled'})
        for obj_block_prodlot_case_id in self.browse(cr, uid, ids):
            #touch the lots so their stored/function fields recompute
            self.pool.get('stock.production.lot').write(cr, uid, [x.id for x in obj_block_prodlot_case_id.blocked_prodlots_ids], {})
        return True

    def write(self, cr, uid, ids, vals, context = None):
        """overwrites write method for update production lots when case updating"""
        if context is None: context = {}
        moves_to_update = []
        if isinstance(ids, (int, long)):
            ids = [ids]
        for obj_case_id in self.browse(cr, uid, ids):
            # accumulate the distinct lot ids across all edited cases
            moves_to_update = list(set(moves_to_update + [x.id for x in obj_case_id.blocked_prodlots_ids]))
        res = super(block_prodlot_cases, self).write(cr, uid, ids, vals, context)
        self.pool.get('stock.production.lot').write(cr, uid, moves_to_update, {})
        return res

    def create(self, cr, uid, vals, context = None):
        """overwrites this method to send notification informative with context of in alert prodlots and case"""
        if context is None: context = {}
        case_id = super(block_prodlot_cases, self).create(cr, uid, vals, context=context)
        #send in_review notification
        self.send_blockade_case_notification(cr, uid, case_id, 'in_review')
        return case_id

    def unlink(self, cr, uid, ids, context = None):
        """overwrites unlink function to update prodlots_state"""
        if context is None: context = {}
        affected_lots = []
        for blockade_case_id in self.browse(cr, uid, ids):
            # confirmed cases are immutable: deleting one is forbidden
            if blockade_case_id.state == 'confirm':
                raise osv.except_osv(_("Warning!"), _("Can't delete confirmed blockade case."))
            affected_lots.extend([x.id for x in blockade_case_id.blocked_prodlots_ids])
        res = super(block_prodlot_cases, self).unlink(cr, uid, ids, context = context)
        if affected_lots:
            self.pool.get('stock.production.lot').write(cr, uid, affected_lots, {})
        return res
block_prodlot_cases()
class stock_production_lot(osv.osv):
    """inherit object to add many2many relationship with block.prodlot.cases"""
    _inherit = "stock.production.lot"

    # reverse side of block.prodlot.cases.blocked_prodlots_ids, sharing
    # the same relation table
    _columns = {
        'blocked_prodlots_cases_ids': fields.many2many('block.prodlot.cases', 'blocked_prodlots_cases_ids', 'blocked_prodlot', 'case_id', "Blockade Cases"),
    }
stock_production_lot()
| agpl-3.0 |
wyc/django | django/core/files/locks.py | 725 | 3516 | """
Portable file locking utilities.
Based partially on an example by Jonathan Feignberg in the Python
Cookbook [1] (licensed under the Python Software License) and a ctypes port by
Anatoly Techtonik for Roundup [2] (license [3]).
[1] http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/65203
[2] http://sourceforge.net/p/roundup/code/ci/default/tree/roundup/backends/portalocker.py
[3] http://sourceforge.net/p/roundup/code/ci/default/tree/COPYING.txt
Example Usage::
>>> from django.core.files import locks
>>> with open('./file', 'wb') as f:
... locks.lock(f, locks.LOCK_EX)
... f.write('Django')
"""
import os
__all__ = ('LOCK_EX', 'LOCK_SH', 'LOCK_NB', 'lock', 'unlock')
def _fd(f):
"""Get a filedescriptor from something which could be a file or an fd."""
return f.fileno() if hasattr(f, 'fileno') else f
if os.name == 'nt':
    # Windows: implement lock/unlock on top of LockFileEx/UnlockFileEx
    # via ctypes, translating Python file objects to OS handles.
    import msvcrt
    from ctypes import (sizeof, c_ulong, c_void_p, c_int64,
                        Structure, Union, POINTER, windll, byref)
    from ctypes.wintypes import BOOL, DWORD, HANDLE

    LOCK_SH = 0  # the default
    LOCK_NB = 0x1  # LOCKFILE_FAIL_IMMEDIATELY
    LOCK_EX = 0x2  # LOCKFILE_EXCLUSIVE_LOCK

    # --- Adapted from the pyserial project ---
    # detect size of ULONG_PTR
    if sizeof(c_ulong) != sizeof(c_void_p):
        ULONG_PTR = c_int64
    else:
        ULONG_PTR = c_ulong
    PVOID = c_void_p

    # --- Union inside Structure by stackoverflow:3480240 ---
    class _OFFSET(Structure):
        _fields_ = [
            ('Offset', DWORD),
            ('OffsetHigh', DWORD)]

    class _OFFSET_UNION(Union):
        _anonymous_ = ['_offset']
        _fields_ = [
            ('_offset', _OFFSET),
            ('Pointer', PVOID)]

    class OVERLAPPED(Structure):
        _anonymous_ = ['_offset_union']
        _fields_ = [
            ('Internal', ULONG_PTR),
            ('InternalHigh', ULONG_PTR),
            ('_offset_union', _OFFSET_UNION),
            ('hEvent', HANDLE)]

    LPOVERLAPPED = POINTER(OVERLAPPED)

    # --- Define function prototypes for extra safety ---
    LockFileEx = windll.kernel32.LockFileEx
    LockFileEx.restype = BOOL
    LockFileEx.argtypes = [HANDLE, DWORD, DWORD, DWORD, DWORD, LPOVERLAPPED]
    UnlockFileEx = windll.kernel32.UnlockFileEx
    UnlockFileEx.restype = BOOL
    UnlockFileEx.argtypes = [HANDLE, DWORD, DWORD, DWORD, LPOVERLAPPED]

    def lock(f, flags):
        # lock the first 0xFFFF0000 bytes of the file; returns True on success
        hfile = msvcrt.get_osfhandle(_fd(f))
        overlapped = OVERLAPPED()
        ret = LockFileEx(hfile, flags, 0, 0, 0xFFFF0000, byref(overlapped))
        return bool(ret)

    def unlock(f):
        hfile = msvcrt.get_osfhandle(_fd(f))
        overlapped = OVERLAPPED()
        ret = UnlockFileEx(hfile, 0, 0, 0xFFFF0000, byref(overlapped))
        return bool(ret)
else:
    try:
        import fcntl
        LOCK_SH = fcntl.LOCK_SH  # shared lock
        LOCK_NB = fcntl.LOCK_NB  # non-blocking
        LOCK_EX = fcntl.LOCK_EX
    except (ImportError, AttributeError):
        # File locking is not supported.
        LOCK_EX = LOCK_SH = LOCK_NB = 0

        # Dummy functions that don't do anything.
        def lock(f, flags):
            # File is not locked
            return False

        def unlock(f):
            # File is unlocked
            return True
    else:
        # POSIX platforms with working fcntl
        def lock(f, flags):
            # NOTE(review): fcntl.flock() returns None on success (and raises
            # on failure), so "ret == 0" is always False -- callers must not
            # rely on the return value; confirm before changing.
            ret = fcntl.flock(_fd(f), flags)
            return (ret == 0)

        def unlock(f):
            ret = fcntl.flock(_fd(f), fcntl.LOCK_UN)
            return (ret == 0)
| bsd-3-clause |
40423135/2016fallcadp_hw | plugin/sitemap/sitemap.py | 292 | 8774 | # -*- coding: utf-8 -*-
'''
Sitemap
-------
The sitemap plugin generates plain-text or XML sitemaps.
'''
from __future__ import unicode_literals
import re
import collections
import os.path
from datetime import datetime
from logging import warning, info
from codecs import open
from pytz import timezone
from pelican import signals, contents
from pelican.utils import get_date
TXT_HEADER = """{0}/index.html
{0}/archives.html
{0}/tags.html
{0}/categories.html
"""
XML_HEADER = """<?xml version="1.0" encoding="utf-8"?>
<urlset xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://www.sitemaps.org/schemas/sitemap/0.9 http://www.sitemaps.org/schemas/sitemap/0.9/sitemap.xsd"
xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
"""
XML_URL = """
<url>
<loc>{0}/{1}</loc>
<lastmod>{2}</lastmod>
<changefreq>{3}</changefreq>
<priority>{4}</priority>
</url>
"""
XML_FOOTER = """
</urlset>
"""
def format_date(date):
    """Render *date* as a W3C datetime string suitable for <lastmod>.

    Aware datetimes get their real UTC offset formatted as ``±HH:MM``;
    naive ones are emitted with the 'unknown offset' marker ``-00:00``.
    """
    if date.tzinfo:
        raw_offset = date.strftime('%z')  # e.g. '+0530'
        suffix = '%s:%s' % (raw_offset[:-2], raw_offset[-2:])
    else:
        suffix = "-00:00"
    return date.strftime("%Y-%m-%dT%H:%M:%S") + suffix
class SitemapGenerator(object):
    """Pelican generator that writes a sitemap.xml or sitemap.txt file.

    Configured via the SITEMAP dict in the Pelican settings, with keys
    'format' ('xml' or 'txt'), 'priorities', 'changefreqs' and 'exclude'.
    """
    def __init__(self, context, settings, path, theme, output_path, *null):
        self.output_path = output_path
        self.context = context
        self.now = datetime.now()
        self.siteurl = settings.get('SITEURL')
        self.default_timezone = settings.get('TIMEZONE', 'UTC')
        self.timezone = getattr(self, 'timezone', self.default_timezone)
        self.timezone = timezone(self.timezone)
        # Defaults; possibly overridden by the SITEMAP setting below.
        self.format = 'xml'
        self.changefreqs = {
            'articles': 'monthly',
            'indexes': 'daily',
            'pages': 'monthly'
        }
        self.priorities = {
            'articles': 0.5,
            'indexes': 0.5,
            'pages': 0.5
        }
        self.sitemapExclude = []
        config = settings.get('SITEMAP', {})
        if not isinstance(config, dict):
            warning("sitemap plugin: the SITEMAP setting must be a dict")
        else:
            fmt = config.get('format')
            pris = config.get('priorities')
            chfreqs = config.get('changefreqs')
            self.sitemapExclude = config.get('exclude', [])
            if fmt not in ('xml', 'txt'):
                warning("sitemap plugin: SITEMAP['format'] must be `txt' or `xml'")
                warning("sitemap plugin: Setting SITEMAP['format'] on `xml'")
            elif fmt == 'txt':
                # txt sitemaps carry no priority/changefreq info, so the
                # remaining validation is irrelevant: bail out early.
                self.format = fmt
                return
            valid_keys = ('articles', 'indexes', 'pages')
            valid_chfreqs = ('always', 'hourly', 'daily', 'weekly', 'monthly',
                             'yearly', 'never')
            if isinstance(pris, dict):
                # We use items for Py3k compat. .iteritems() otherwise
                for k, v in pris.items():
                    # Invalid values are replaced by the defaults, with a warning.
                    if k in valid_keys and not isinstance(v, (int, float)):
                        default = self.priorities[k]
                        warning("sitemap plugin: priorities must be numbers")
                        warning("sitemap plugin: setting SITEMAP['priorities']"
                                "['{0}'] on {1}".format(k, default))
                        pris[k] = default
                self.priorities.update(pris)
            elif pris is not None:
                warning("sitemap plugin: SITEMAP['priorities'] must be a dict")
                warning("sitemap plugin: using the default values")
            if isinstance(chfreqs, dict):
                # .items() for py3k compat.
                for k, v in chfreqs.items():
                    if k in valid_keys and v not in valid_chfreqs:
                        default = self.changefreqs[k]
                        warning("sitemap plugin: invalid changefreq `{0}'".format(v))
                        warning("sitemap plugin: setting SITEMAP['changefreqs']"
                                "['{0}'] on '{1}'".format(k, default))
                        chfreqs[k] = default
                self.changefreqs.update(chfreqs)
            elif chfreqs is not None:
                warning("sitemap plugin: SITEMAP['changefreqs'] must be a dict")
                warning("sitemap plugin: using the default values")
    def write_url(self, page, fd):
        """Write one sitemap entry for *page* to the open file object *fd*.

        Skips unpublished pages, pages with no save_as target, and pages
        whose output file does not exist on disk.
        """
        if getattr(page, 'status', 'published') != 'published':
            return
        # We can disable categories/authors/etc by using False instead of ''
        if not page.save_as:
            return
        page_path = os.path.join(self.output_path, page.save_as)
        if not os.path.exists(page_path):
            return
        lastdate = getattr(page, 'date', self.now)
        try:
            lastdate = self.get_date_modified(page, lastdate)
        except ValueError:
            warning("sitemap plugin: " + page.save_as + " has invalid modification date,")
            warning("sitemap plugin: using date value as lastmod.")
        lastmod = format_date(lastdate)
        # Articles, pages and everything else (indexes) get their own
        # priority/changefreq buckets.
        if isinstance(page, contents.Article):
            pri = self.priorities['articles']
            chfreq = self.changefreqs['articles']
        elif isinstance(page, contents.Page):
            pri = self.priorities['pages']
            chfreq = self.changefreqs['pages']
        else:
            pri = self.priorities['indexes']
            chfreq = self.changefreqs['indexes']
        pageurl = '' if page.url == 'index.html' else page.url
        # Exclude URLs from the sitemap:
        # NOTE(review): the exclude patterns are only applied in the xml
        # branch; the txt branch writes every URL unconditionally -- confirm
        # whether that is intended.
        if self.format == 'xml':
            flag = False
            for regstr in self.sitemapExclude:
                if re.match(regstr, pageurl):
                    flag = True
                    break
            if not flag:
                fd.write(XML_URL.format(self.siteurl, pageurl, lastmod, chfreq, pri))
        else:
            fd.write(self.siteurl + '/' + pageurl + '\n')
    def get_date_modified(self, page, default):
        """Return page.modified parsed to a datetime, or *default* if absent.

        May raise ValueError (propagated from get_date) for unparsable values.
        """
        if hasattr(page, 'modified'):
            if isinstance(page.modified, datetime):
                return page.modified
            return get_date(page.modified)
        else:
            return default
    def set_url_wrappers_modification_date(self, wrappers):
        # Stamp each wrapper (category/tag/author) with the newest of its
        # articles' publication and modification dates.
        for (wrapper, articles) in wrappers:
            lastmod = datetime.min.replace(tzinfo=self.timezone)
            for article in articles:
                lastmod = max(lastmod, article.date.replace(tzinfo=self.timezone))
                try:
                    modified = self.get_date_modified(article, datetime.min).replace(tzinfo=self.timezone)
                    lastmod = max(lastmod, modified)
                except ValueError:
                    # Supressed: user will be notified.
                    pass
            setattr(wrapper, 'modified', str(lastmod))
    def generate_output(self, writer):
        """Collect all URLs from the Pelican context and write the sitemap."""
        path = os.path.join(self.output_path, 'sitemap.{0}'.format(self.format))
        pages = self.context['pages'] + self.context['articles'] \
            + [ c for (c, a) in self.context['categories']] \
            + [ t for (t, a) in self.context['tags']] \
            + [ a for (a, b) in self.context['authors']]
        self.set_url_wrappers_modification_date(self.context['categories'])
        self.set_url_wrappers_modification_date(self.context['tags'])
        self.set_url_wrappers_modification_date(self.context['authors'])
        for article in self.context['articles']:
            pages += article.translations
        info('writing {0}'.format(path))
        with open(path, 'w', encoding='utf-8') as fd:
            if self.format == 'xml':
                fd.write(XML_HEADER)
            else:
                fd.write(TXT_HEADER.format(self.siteurl))
            # Synthesize entries for the standard index pages, which are not
            # represented by content objects in the context.
            FakePage = collections.namedtuple('FakePage',
                                              ['status',
                                               'date',
                                               'url',
                                               'save_as'])
            for standard_page_url in ['index.html',
                                      'archives.html',
                                      'tags.html',
                                      'categories.html']:
                fake = FakePage(status='published',
                                date=self.now,
                                url=standard_page_url,
                                save_as=standard_page_url)
                self.write_url(fake, fd)
            for page in pages:
                self.write_url(page, fd)
            if self.format == 'xml':
                fd.write(XML_FOOTER)
def get_generators(generators):
    """Signal handler: hand Pelican the generator class (argument unused)."""
    return SitemapGenerator
def register():
    """Plugin entry point: hook into Pelican's get_generators signal."""
    signals.get_generators.connect(get_generators)
| gpl-3.0 |
kool79/intellij-community | python/lib/Lib/site-packages/django/template/context.py | 78 | 5383 | from django.core.exceptions import ImproperlyConfigured
from django.utils.importlib import import_module
# Cache of actual callables.
_standard_context_processors = None
# We need the CSRF processor no matter what the user has in their settings,
# because otherwise it is a security vulnerability, and we can't afford to leave
# this to human error or failure to read migration instructions.
_builtin_context_processors = ('django.core.context_processors.csrf',)
class ContextPopException(Exception):
    "pop() has been called more times than push()"
    # Raised by BaseContext.pop() when only the root dictionary remains on
    # the stack; popping it would leave the context unusable.
    pass
class BaseContext(object):
    """A stack of dictionaries used for scoped template-variable lookup.

    Lookups walk the stack from the most recently pushed dictionary down to
    the root, so inner scopes shadow outer ones.
    """
    def __init__(self, dict_=None):
        # The root layer is the supplied mapping, or a fresh empty dict.
        self.dicts = [dict_ or {}]
    def __repr__(self):
        return repr(self.dicts)
    def __iter__(self):
        # Innermost (most recently pushed) layers come first.
        return iter(reversed(self.dicts))
    def push(self):
        """Open a new (empty) scope and return it."""
        layer = {}
        self.dicts.append(layer)
        return layer
    def pop(self):
        """Discard and return the innermost scope; the root cannot be popped."""
        if len(self.dicts) <= 1:
            raise ContextPopException
        return self.dicts.pop()
    def __setitem__(self, key, value):
        "Set a variable in the current context"
        self.dicts[-1][key] = value
    def __getitem__(self, key):
        "Get a variable's value, starting at the current context and going upward"
        for layer in reversed(self.dicts):
            try:
                return layer[key]
            except KeyError:
                continue
        raise KeyError(key)
    def __delitem__(self, key):
        "Delete a variable from the current context"
        del self.dicts[-1][key]
    def has_key(self, key):
        return any(key in layer for layer in self.dicts)
    def __contains__(self, key):
        return self.has_key(key)
    def get(self, key, otherwise=None):
        """Like __getitem__, but return *otherwise* instead of raising."""
        for layer in reversed(self.dicts):
            if key in layer:
                return layer[key]
        return otherwise
class Context(BaseContext):
    "A stack container for variable context"
    def __init__(self, dict_=None, autoescape=True, current_app=None, use_l10n=None):
        # Rendering flags live on the Context itself; the variable stack is
        # managed entirely by BaseContext.
        self.autoescape = autoescape
        self.use_l10n = use_l10n
        self.current_app = current_app
        self.render_context = RenderContext()
        super(Context, self).__init__(dict_)
    def update(self, other_dict):
        "Pushes other_dict to the stack of dictionaries in the Context"
        # Unlike push(), this reuses the caller's mapping as the new scope.
        is_mapping = hasattr(other_dict, '__getitem__')
        if not is_mapping:
            raise TypeError('other_dict must be a mapping (dictionary-like) object.')
        self.dicts.append(other_dict)
        return other_dict
class RenderContext(BaseContext):
    """
    A stack container for storing Template state.
    RenderContext simplifies the implementation of template Nodes by providing a
    safe place to store state between invocations of a node's `render` method.
    The RenderContext also provides scoping rules that are more sensible for
    'template local' variables. The render context stack is pushed before each
    template is rendered, creating a fresh scope with nothing in it. Name
    resolution fails if a variable is not found at the top of the RequestContext
    stack. Thus, variables are local to a specific template and don't affect the
    rendering of other templates as they would if they were stored in the normal
    template context.
    """
    def __iter__(self):
        # Only the innermost scope is visible -- no fall-through to outer
        # layers as in BaseContext.
        return iter(self.dicts[-1])
    def has_key(self, key):
        return key in self.dicts[-1]
    def get(self, key, otherwise=None):
        # Plain dict lookup restricted to the innermost scope.
        return self.dicts[-1].get(key, otherwise)
# This is a function rather than module-level procedural code because we only
# want it to execute if somebody uses RequestContext.
def get_standard_processors():
    """Import, cache and return the configured context-processor callables.

    The result is memoized in the module-level _standard_context_processors
    tuple; misconfigured paths raise ImproperlyConfigured.
    """
    from django.conf import settings
    global _standard_context_processors
    if _standard_context_processors is None:
        processors = []
        collect = []
        # The CSRF processor is always prepended, regardless of settings
        # (see the _builtin_context_processors comment above).
        collect.extend(_builtin_context_processors)
        collect.extend(settings.TEMPLATE_CONTEXT_PROCESSORS)
        for path in collect:
            # Split 'pkg.module.func' into module path and attribute name.
            i = path.rfind('.')
            module, attr = path[:i], path[i+1:]
            try:
                mod = import_module(module)
            except ImportError, e:
                raise ImproperlyConfigured('Error importing request processor module %s: "%s"' % (module, e))
            try:
                func = getattr(mod, attr)
            except AttributeError:
                raise ImproperlyConfigured('Module "%s" does not define a "%s" callable request processor' % (module, attr))
            processors.append(func)
        _standard_context_processors = tuple(processors)
    return _standard_context_processors
class RequestContext(Context):
    """
    This subclass of template.Context automatically populates itself using
    the processors defined in TEMPLATE_CONTEXT_PROCESSORS.
    Additional processors can be specified as a list of callables
    using the "processors" keyword argument.
    """
    def __init__(self, request, dict=None, processors=None, current_app=None, use_l10n=None):
        Context.__init__(self, dict, current_app=current_app, use_l10n=use_l10n)
        if processors is None:
            processors = ()
        else:
            processors = tuple(processors)
        # Each processor maps the request to a dict that is pushed onto the
        # context stack; later processors can shadow earlier keys.
        for processor in get_standard_processors() + processors:
            self.update(processor(request))
| apache-2.0 |
pasqualguerrero/django | django/forms/widgets.py | 184 | 37166 | """
HTML Widget classes
"""
from __future__ import unicode_literals
import copy
import datetime
import re
from itertools import chain
from django.conf import settings
from django.forms.utils import flatatt, to_current_timezone
from django.utils import datetime_safe, formats, six
from django.utils.datastructures import MultiValueDict
from django.utils.dates import MONTHS
from django.utils.encoding import (
force_str, force_text, python_2_unicode_compatible,
)
from django.utils.formats import get_format
from django.utils.html import conditional_escape, format_html, html_safe
from django.utils.safestring import mark_safe
from django.utils.six.moves import range
from django.utils.six.moves.urllib.parse import urljoin
from django.utils.translation import ugettext_lazy
__all__ = (
'Media', 'MediaDefiningClass', 'Widget', 'TextInput', 'NumberInput',
'EmailInput', 'URLInput', 'PasswordInput', 'HiddenInput',
'MultipleHiddenInput', 'FileInput', 'ClearableFileInput', 'Textarea',
'DateInput', 'DateTimeInput', 'TimeInput', 'CheckboxInput', 'Select',
'NullBooleanSelect', 'SelectMultiple', 'RadioSelect',
'CheckboxSelectMultiple', 'MultiWidget', 'SplitDateTimeWidget',
'SplitHiddenDateTimeWidget', 'SelectDateWidget',
)
MEDIA_TYPES = ('css', 'js')
@html_safe
@python_2_unicode_compatible
class Media(object):
    """Holds CSS/JS asset definitions for a form/widget and renders them
    as <link>/<script> tags. Instances can be combined with ``+``."""
    def __init__(self, media=None, **kwargs):
        if media:
            media_attrs = media.__dict__
        else:
            media_attrs = kwargs
        self._css = {}
        self._js = []
        # Dispatch each media type ('css', 'js') to its add_* method.
        for name in MEDIA_TYPES:
            getattr(self, 'add_' + name)(media_attrs.get(name))
    def __str__(self):
        return self.render()
    def render(self):
        return mark_safe('\n'.join(chain(*[getattr(self, 'render_' + name)() for name in MEDIA_TYPES])))
    def render_js(self):
        return [
            format_html(
                '<script type="text/javascript" src="{}"></script>',
                self.absolute_path(path)
            ) for path in self._js
        ]
    def render_css(self):
        # To keep rendering order consistent, we can't just iterate over items().
        # We need to sort the keys, and iterate over the sorted list.
        media = sorted(self._css.keys())
        return chain(*[[
            format_html(
                '<link href="{}" type="text/css" media="{}" rel="stylesheet" />',
                self.absolute_path(path), medium
            ) for path in self._css[medium]
        ] for medium in media])
    def absolute_path(self, path, prefix=None):
        """Return *path* unchanged if absolute; otherwise prefix it with
        STATIC_URL (falling back to MEDIA_URL)."""
        if path.startswith(('http://', 'https://', '/')):
            return path
        if prefix is None:
            if settings.STATIC_URL is None:
                # backwards compatibility
                prefix = settings.MEDIA_URL
            else:
                prefix = settings.STATIC_URL
        return urljoin(prefix, path)
    def __getitem__(self, name):
        "Returns a Media object that only contains media of the given type"
        if name in MEDIA_TYPES:
            return Media(**{str(name): getattr(self, '_' + name)})
        raise KeyError('Unknown media type "%s"' % name)
    def add_js(self, data):
        # Append while preserving order and de-duplicating.
        if data:
            for path in data:
                if path not in self._js:
                    self._js.append(path)
    def add_css(self, data):
        # data maps medium name ('all', 'screen', ...) to a list of paths.
        if data:
            for medium, paths in data.items():
                for path in paths:
                    if not self._css.get(medium) or path not in self._css[medium]:
                        self._css.setdefault(medium, []).append(path)
    def __add__(self, other):
        combined = Media()
        for name in MEDIA_TYPES:
            getattr(combined, 'add_' + name)(getattr(self, '_' + name, None))
            getattr(combined, 'add_' + name)(getattr(other, '_' + name, None))
        return combined
def media_property(cls):
    """Build a lazy 'media' property that merges *cls*'s inner Media
    definition with its ancestors', honoring Media.extend."""
    def _media(self):
        # Get the media property of the superclass, if it exists
        sup_cls = super(cls, self)
        try:
            base = sup_cls.media
        except AttributeError:
            base = Media()
        # Get the media definition for this class
        definition = getattr(cls, 'Media', None)
        if definition:
            extend = getattr(definition, 'extend', True)
            if extend:
                if extend is True:
                    # Inherit everything from the superclass.
                    m = base
                else:
                    # Inherit only the media types listed in 'extend'.
                    m = Media()
                    for medium in extend:
                        m = m + base[medium]
                return m + Media(definition)
            else:
                # extend is falsy: this class's definition replaces the base.
                return Media(definition)
        else:
            return base
    return property(_media)
class MediaDefiningClass(type):
    """
    Metaclass for classes that can have media definitions.
    """
    def __new__(mcs, name, bases, attrs):
        new_class = (super(MediaDefiningClass, mcs)
                     .__new__(mcs, name, bases, attrs))
        # Only synthesize the 'media' property if the class did not define
        # one explicitly.
        if 'media' not in attrs:
            new_class.media = media_property(new_class)
        return new_class
@html_safe
@python_2_unicode_compatible
class SubWidget(object):
    """
    Some widgets are made of multiple HTML elements -- namely, RadioSelect.
    This is a class that represents the "inner" HTML element of a widget.
    """
    def __init__(self, parent_widget, name, value, attrs, choices):
        self.parent_widget = parent_widget
        self.name, self.value = name, value
        self.attrs, self.choices = attrs, choices
    def __str__(self):
        # Rendering is delegated to the parent widget; choices are only
        # forwarded when non-empty.
        args = [self.name, self.value, self.attrs]
        if self.choices:
            args.append(self.choices)
        return self.parent_widget.render(*args)
class Widget(six.with_metaclass(MediaDefiningClass)):
    """Base class for all form widgets: holds HTML attributes and defines
    the render()/value_from_datadict() contract subclasses implement."""
    needs_multipart_form = False # Determines does this widget need multipart form
    is_localized = False
    is_required = False
    supports_microseconds = True
    def __init__(self, attrs=None):
        # Copy so callers' dicts are never mutated by build_attrs/render.
        if attrs is not None:
            self.attrs = attrs.copy()
        else:
            self.attrs = {}
    def __deepcopy__(self, memo):
        # A shallow copy plus a fresh attrs dict is sufficient; widgets hold
        # no other mutable per-instance state here.
        obj = copy.copy(self)
        obj.attrs = self.attrs.copy()
        memo[id(self)] = obj
        return obj
    @property
    def is_hidden(self):
        return self.input_type == 'hidden' if hasattr(self, 'input_type') else False
    def subwidgets(self, name, value, attrs=None, choices=()):
        """
        Yields all "subwidgets" of this widget. Used only by RadioSelect to
        allow template access to individual <input type="radio"> buttons.
        Arguments are the same as for render().
        """
        yield SubWidget(self, name, value, attrs, choices)
    def render(self, name, value, attrs=None):
        """
        Returns this Widget rendered as HTML, as a Unicode string.
        The 'value' given is not guaranteed to be valid input, so subclass
        implementations should program defensively.
        """
        raise NotImplementedError('subclasses of Widget must provide a render() method')
    def build_attrs(self, extra_attrs=None, **kwargs):
        "Helper function for building an attribute dictionary."
        attrs = dict(self.attrs, **kwargs)
        if extra_attrs:
            attrs.update(extra_attrs)
        return attrs
    def value_from_datadict(self, data, files, name):
        """
        Given a dictionary of data and this widget's name, returns the value
        of this widget. Returns None if it's not provided.
        """
        return data.get(name)
    def id_for_label(self, id_):
        """
        Returns the HTML ID attribute of this Widget for use by a <label>,
        given the ID of the field. Returns None if no ID is available.
        This hook is necessary because some widgets have multiple HTML
        elements and, thus, multiple IDs. In that case, this method should
        return an ID value that corresponds to the first ID in the widget's
        tags.
        """
        return id_
class Input(Widget):
    """
    Base class for all <input> widgets (except type='checkbox' and
    type='radio', which are special).
    """
    input_type = None # Subclasses must define this.
    def _format_value(self, value):
        # Hook for subclasses; here it only applies localization when enabled.
        if self.is_localized:
            return formats.localize_input(value)
        return value
    def render(self, name, value, attrs=None):
        if value is None:
            value = ''
        final_attrs = self.build_attrs(attrs, type=self.input_type, name=name)
        if value != '':
            # Only add the 'value' attribute if a value is non-empty.
            final_attrs['value'] = force_text(self._format_value(value))
        return format_html('<input{} />', flatatt(final_attrs))
class TextInput(Input):
    input_type = 'text'
    def __init__(self, attrs=None):
        # A 'type' key in attrs overrides input_type, e.g. to emit HTML5
        # input types without a dedicated widget subclass.
        if attrs is not None:
            self.input_type = attrs.pop('type', self.input_type)
        super(TextInput, self).__init__(attrs)
# Thin TextInput subclasses that only change the HTML input type.
class NumberInput(TextInput):
    input_type = 'number'
class EmailInput(TextInput):
    input_type = 'email'
class URLInput(TextInput):
    input_type = 'url'
class PasswordInput(TextInput):
    input_type = 'password'
    def __init__(self, attrs=None, render_value=False):
        super(PasswordInput, self).__init__(attrs)
        # When False (the default), the submitted password is never echoed
        # back into the re-rendered form.
        self.render_value = render_value
    def render(self, name, value, attrs=None):
        if not self.render_value:
            value = None
        return super(PasswordInput, self).render(name, value, attrs)
class HiddenInput(Input):
    # Plain <input type="hidden">.
    input_type = 'hidden'
class MultipleHiddenInput(HiddenInput):
    """
    A widget that handles <input type="hidden"> for fields that have a list
    of values.
    """
    def __init__(self, attrs=None, choices=()):
        super(MultipleHiddenInput, self).__init__(attrs)
        # choices can be any iterable
        self.choices = choices
    def render(self, name, value, attrs=None, choices=()):
        if value is None:
            value = []
        final_attrs = self.build_attrs(attrs, type=self.input_type, name=name)
        id_ = final_attrs.get('id')
        inputs = []
        for i, v in enumerate(value):
            input_attrs = dict(value=force_text(v), **final_attrs)
            if id_:
                # An ID attribute was given. Add a numeric index as a suffix
                # so that the inputs don't all have the same ID attribute.
                input_attrs['id'] = '%s_%s' % (id_, i)
            inputs.append(format_html('<input{} />', flatatt(input_attrs)))
        return mark_safe('\n'.join(inputs))
    def value_from_datadict(self, data, files, name):
        # MultiValueDict can hold several values per key; plain dicts can't.
        if isinstance(data, MultiValueDict):
            return data.getlist(name)
        return data.get(name)
class FileInput(Input):
    input_type = 'file'
    needs_multipart_form = True
    def render(self, name, value, attrs=None):
        # File inputs can never be pre-populated, so the value is dropped.
        return super(FileInput, self).render(name, None, attrs=attrs)
    def value_from_datadict(self, data, files, name):
        "File widgets take data from FILES, not POST"
        return files.get(name)
# Sentinel returned by ClearableFileInput when the user both uploads a new
# file and checks the "clear" checkbox; FileField maps it to a ValidationError.
FILE_INPUT_CONTRADICTION = object()
class ClearableFileInput(FileInput):
    """FileInput that also shows the current file and, for non-required
    fields, a checkbox allowing the user to clear it."""
    initial_text = ugettext_lazy('Currently')
    input_text = ugettext_lazy('Change')
    clear_checkbox_label = ugettext_lazy('Clear')
    template_with_initial = (
        '%(initial_text)s: <a href="%(initial_url)s">%(initial)s</a> '
        '%(clear_template)s<br />%(input_text)s: %(input)s'
    )
    template_with_clear = '%(clear)s <label for="%(clear_checkbox_id)s">%(clear_checkbox_label)s</label>'
    def clear_checkbox_name(self, name):
        """
        Given the name of the file input, return the name of the clear checkbox
        input.
        """
        return name + '-clear'
    def clear_checkbox_id(self, name):
        """
        Given the name of the clear checkbox input, return the HTML id for it.
        """
        return name + '_id'
    def is_initial(self, value):
        """
        Return whether value is considered to be initial value.
        """
        # hasattr() masks exceptions on Python 2.
        if six.PY2:
            try:
                getattr(value, 'url')
            except AttributeError:
                return False
            else:
                return bool(value)
        return bool(value and hasattr(value, 'url'))
    def get_template_substitution_values(self, value):
        """
        Return value-related substitutions.
        """
        return {
            'initial': conditional_escape(value),
            'initial_url': conditional_escape(value.url),
        }
    def render(self, name, value, attrs=None):
        substitutions = {
            'initial_text': self.initial_text,
            'input_text': self.input_text,
            'clear_template': '',
            'clear_checkbox_label': self.clear_checkbox_label,
        }
        # Default template: just the bare file input.
        template = '%(input)s'
        substitutions['input'] = super(ClearableFileInput, self).render(name, value, attrs)
        if self.is_initial(value):
            # There is an existing file: show it, and for optional fields
            # also render the clear checkbox.
            template = self.template_with_initial
            substitutions.update(self.get_template_substitution_values(value))
            if not self.is_required:
                checkbox_name = self.clear_checkbox_name(name)
                checkbox_id = self.clear_checkbox_id(checkbox_name)
                substitutions['clear_checkbox_name'] = conditional_escape(checkbox_name)
                substitutions['clear_checkbox_id'] = conditional_escape(checkbox_id)
                substitutions['clear'] = CheckboxInput().render(checkbox_name, False, attrs={'id': checkbox_id})
                substitutions['clear_template'] = self.template_with_clear % substitutions
        return mark_safe(template % substitutions)
    def value_from_datadict(self, data, files, name):
        upload = super(ClearableFileInput, self).value_from_datadict(data, files, name)
        if not self.is_required and CheckboxInput().value_from_datadict(
                data, files, self.clear_checkbox_name(name)):
            if upload:
                # If the user contradicts themselves (uploads a new file AND
                # checks the "clear" checkbox), we return a unique marker
                # object that FileField will turn into a ValidationError.
                return FILE_INPUT_CONTRADICTION
            # False signals to clear any existing value, as opposed to just None
            return False
        return upload
class Textarea(Widget):
    """Renders a <textarea> element."""
    def __init__(self, attrs=None):
        # Use slightly better defaults than HTML's 20x2 box
        default_attrs = {'cols': '40', 'rows': '10'}
        if attrs:
            default_attrs.update(attrs)
        super(Textarea, self).__init__(default_attrs)
    def render(self, name, value, attrs=None):
        if value is None:
            value = ''
        final_attrs = self.build_attrs(attrs, name=name)
        # NOTE(review): the leading \r\n appears intended to keep a leading
        # newline in `value` from being swallowed by HTML parsing -- confirm.
        return format_html('<textarea{}>\r\n{}</textarea>',
                           flatatt(final_attrs),
                           force_text(value))
class DateTimeBaseInput(TextInput):
    # Subclasses set format_key to the formats.get_format() key to consult.
    format_key = ''
    supports_microseconds = False
    def __init__(self, attrs=None, format=None):
        super(DateTimeBaseInput, self).__init__(attrs)
        # An explicit format overrides the localized default.
        self.format = format if format else None
    def _format_value(self, value):
        # Fall back to the first configured input format for this widget type.
        return formats.localize_input(value,
            self.format or formats.get_format(self.format_key)[0])
class DateInput(DateTimeBaseInput):
    format_key = 'DATE_INPUT_FORMATS'
class DateTimeInput(DateTimeBaseInput):
    format_key = 'DATETIME_INPUT_FORMATS'
class TimeInput(DateTimeBaseInput):
    format_key = 'TIME_INPUT_FORMATS'
# Defined at module level so that CheckboxInput is picklable (#17976)
def boolean_check(v):
    # Unchecked states are exactly False, None and the empty string; note
    # that 0 counts as *checked* because `0 is False` and `0 == ''` are both
    # false, so it survives the identity/equality filters below.
    return bool(v is not False and v is not None and v != '')
class CheckboxInput(Widget):
    """Renders an <input type="checkbox">."""
    def __init__(self, attrs=None, check_test=None):
        super(CheckboxInput, self).__init__(attrs)
        # check_test is a callable that takes a value and returns True
        # if the checkbox should be checked for that value.
        self.check_test = boolean_check if check_test is None else check_test
    def render(self, name, value, attrs=None):
        final_attrs = self.build_attrs(attrs, type='checkbox', name=name)
        if self.check_test(value):
            final_attrs['checked'] = 'checked'
        if not (value is True or value is False or value is None or value == ''):
            # Only add the 'value' attribute if a value is non-empty.
            final_attrs['value'] = force_text(value)
        return format_html('<input{} />', flatatt(final_attrs))
    def value_from_datadict(self, data, files, name):
        if name not in data:
            # A missing value means False because HTML form submission does not
            # send results for unselected checkboxes.
            return False
        value = data.get(name)
        # Translate true and false strings to boolean values.
        values = {'true': True, 'false': False}
        if isinstance(value, six.string_types):
            value = values.get(value.lower(), value)
        return bool(value)
class Select(Widget):
    """Renders a <select> element with <option>/<optgroup> children."""
    allow_multiple_selected = False
    def __init__(self, attrs=None, choices=()):
        super(Select, self).__init__(attrs)
        # choices can be any iterable, but we may need to render this widget
        # multiple times. Thus, collapse it into a list so it can be consumed
        # more than once.
        self.choices = list(choices)
    def __deepcopy__(self, memo):
        obj = copy.copy(self)
        obj.attrs = self.attrs.copy()
        obj.choices = copy.copy(self.choices)
        memo[id(self)] = obj
        return obj
    def render(self, name, value, attrs=None, choices=()):
        if value is None:
            value = ''
        final_attrs = self.build_attrs(attrs, name=name)
        output = [format_html('<select{}>', flatatt(final_attrs))]
        options = self.render_options(choices, [value])
        if options:
            output.append(options)
        output.append('</select>')
        return mark_safe('\n'.join(output))
    def render_option(self, selected_choices, option_value, option_label):
        if option_value is None:
            option_value = ''
        option_value = force_text(option_value)
        if option_value in selected_choices:
            selected_html = mark_safe(' selected="selected"')
            if not self.allow_multiple_selected:
                # Only allow for a single selection.
                selected_choices.remove(option_value)
        else:
            selected_html = ''
        return format_html('<option value="{}"{}>{}</option>',
                           option_value,
                           selected_html,
                           force_text(option_label))
    def render_options(self, choices, selected_choices):
        # Normalize to strings.
        selected_choices = set(force_text(v) for v in selected_choices)
        output = []
        for option_value, option_label in chain(self.choices, choices):
            # A (group_name, [(value, label), ...]) pair renders an <optgroup>.
            if isinstance(option_label, (list, tuple)):
                output.append(format_html('<optgroup label="{}">', force_text(option_value)))
                for option in option_label:
                    output.append(self.render_option(selected_choices, *option))
                output.append('</optgroup>')
            else:
                output.append(self.render_option(selected_choices, option_value, option_label))
        return '\n'.join(output)
class NullBooleanSelect(Select):
    """
    A Select Widget intended to be used with NullBooleanField.
    """
    def __init__(self, attrs=None):
        # The option values map '1' -> unknown/None, '2' -> True, '3' -> False.
        choices = (('1', ugettext_lazy('Unknown')),
                   ('2', ugettext_lazy('Yes')),
                   ('3', ugettext_lazy('No')))
        super(NullBooleanSelect, self).__init__(attrs, choices)
    def render(self, name, value, attrs=None, choices=()):
        try:
            value = {True: '2', False: '3', '2': '2', '3': '3'}[value]
        except KeyError:
            # Anything unrecognized (including None) renders as 'Unknown'.
            value = '1'
        return super(NullBooleanSelect, self).render(name, value, attrs, choices)
    def value_from_datadict(self, data, files, name):
        value = data.get(name)
        # Unmapped submissions fall through to None (unknown).
        return {'2': True,
                True: True,
                'True': True,
                '3': False,
                'False': False,
                False: False}.get(value)
class SelectMultiple(Select):
    """<select multiple> variant of Select; value is a list of selections."""
    allow_multiple_selected = True
    def render(self, name, value, attrs=None, choices=()):
        if value is None:
            value = []
        final_attrs = self.build_attrs(attrs, name=name)
        output = [format_html('<select multiple="multiple"{}>', flatatt(final_attrs))]
        options = self.render_options(choices, value)
        if options:
            output.append(options)
        output.append('</select>')
        return mark_safe('\n'.join(output))
    def value_from_datadict(self, data, files, name):
        # MultiValueDict can hold several values per key; plain dicts can't.
        if isinstance(data, MultiValueDict):
            return data.getlist(name)
        return data.get(name)
@html_safe
@python_2_unicode_compatible
class ChoiceInput(SubWidget):
    """
    An object used by ChoiceFieldRenderer that represents a single
    <input type='$input_type'>.
    """
    input_type = None # Subclasses must define this
    def __init__(self, name, value, attrs, choice, index):
        self.name = name
        self.value = value
        self.attrs = attrs
        self.choice_value = force_text(choice[0])
        self.choice_label = force_text(choice[1])
        self.index = index
        # Give each input in the group a distinct id by suffixing the index.
        if 'id' in self.attrs:
            self.attrs['id'] += "_%d" % self.index
    def __str__(self):
        return self.render()
    def render(self, name=None, value=None, attrs=None, choices=()):
        # Renders the <input> wrapped in its <label>.
        if self.id_for_label:
            label_for = format_html(' for="{}"', self.id_for_label)
        else:
            label_for = ''
        attrs = dict(self.attrs, **attrs) if attrs else self.attrs
        return format_html(
            '<label{}>{} {}</label>', label_for, self.tag(attrs), self.choice_label
        )
    def is_checked(self):
        return self.value == self.choice_value
    def tag(self, attrs=None):
        attrs = attrs or self.attrs
        final_attrs = dict(attrs, type=self.input_type, name=self.name, value=self.choice_value)
        if self.is_checked():
            final_attrs['checked'] = 'checked'
        return format_html('<input{} />', flatatt(final_attrs))
    @property
    def id_for_label(self):
        return self.attrs.get('id', '')
class RadioChoiceInput(ChoiceInput):
    input_type = 'radio'
    def __init__(self, *args, **kwargs):
        super(RadioChoiceInput, self).__init__(*args, **kwargs)
        # A radio group carries a single selected value; compare as text.
        self.value = force_text(self.value)
    class_placeholder = None  # (no-op attribute intentionally NOT added)
class CheckboxChoiceInput(ChoiceInput):
    input_type = 'checkbox'
    def __init__(self, *args, **kwargs):
        super(CheckboxChoiceInput, self).__init__(*args, **kwargs)
        # Checkboxes carry a *set* of selected values.
        self.value = set(force_text(v) for v in self.value)
    def is_checked(self):
        return self.choice_value in self.value
@html_safe
@python_2_unicode_compatible
class ChoiceFieldRenderer(object):
    """
    An object used by RadioSelect to enable customization of radio widgets.
    """
    choice_input_class = None
    outer_html = '<ul{id_attr}>{content}</ul>'
    inner_html = '<li>{choice_value}{sub_widgets}</li>'
    def __init__(self, name, value, attrs, choices):
        self.name = name
        self.value = value
        self.attrs = attrs
        self.choices = choices
    def __getitem__(self, idx):
        # Indexing the renderer yields a fully-built ChoiceInput subwidget.
        choice = self.choices[idx]  # Let the IndexError propagate
        return self.choice_input_class(self.name, self.value, self.attrs.copy(), choice, idx)
    def __str__(self):
        return self.render()
    def render(self):
        """
        Outputs a <ul> for this set of choice fields.
        If an id was given to the field, it is applied to the <ul> (each
        item in the list will get an id of `$id_$i`).
        """
        id_ = self.attrs.get('id')
        output = []
        for i, choice in enumerate(self.choices):
            choice_value, choice_label = choice
            if isinstance(choice_label, (tuple, list)):
                # Grouped choices (an "optgroup"): render a nested <ul> using
                # a renderer of the same class with a distinct id suffix.
                attrs_plus = self.attrs.copy()
                if id_:
                    attrs_plus['id'] += '_{}'.format(i)
                sub_ul_renderer = self.__class__(
                    name=self.name,
                    value=self.value,
                    attrs=attrs_plus,
                    choices=choice_label,
                )
                sub_ul_renderer.choice_input_class = self.choice_input_class
                output.append(format_html(self.inner_html, choice_value=choice_value,
                                          sub_widgets=sub_ul_renderer.render()))
            else:
                w = self.choice_input_class(self.name, self.value,
                                            self.attrs.copy(), choice, i)
                output.append(format_html(self.inner_html,
                                          choice_value=force_text(w), sub_widgets=''))
        return format_html(self.outer_html,
                           id_attr=format_html(' id="{}"', id_) if id_ else '',
                           content=mark_safe('\n'.join(output)))
class RadioFieldRenderer(ChoiceFieldRenderer):
    # Renders each choice as a radio button inside an <li>.
    choice_input_class = RadioChoiceInput
class CheckboxFieldRenderer(ChoiceFieldRenderer):
    # Renders each choice as a checkbox inside an <li>.
    choice_input_class = CheckboxChoiceInput
class RendererMixin(object):
    """Mixin that delegates rendering of a choice widget to a renderer class."""
    renderer = None  # subclasses must define this
    _empty_value = None  # value used when None is passed to get_renderer()
    def __init__(self, *args, **kwargs):
        # Override the default renderer if we were passed one.
        renderer = kwargs.pop('renderer', None)
        if renderer:
            self.renderer = renderer
        super(RendererMixin, self).__init__(*args, **kwargs)
    def subwidgets(self, name, value, attrs=None, choices=()):
        # Yield one ChoiceInput per choice so templates can iterate them.
        for widget in self.get_renderer(name, value, attrs, choices):
            yield widget
    def get_renderer(self, name, value, attrs=None, choices=()):
        """Returns an instance of the renderer."""
        if value is None:
            value = self._empty_value
        final_attrs = self.build_attrs(attrs)
        choices = list(chain(self.choices, choices))
        return self.renderer(name, value, final_attrs, choices)
    def render(self, name, value, attrs=None, choices=()):
        return self.get_renderer(name, value, attrs, choices).render()
    def id_for_label(self, id_):
        # Widgets using this RendererMixin are made of a collection of
        # subwidgets, each with their own <label>, and distinct ID.
        # The IDs are made distinct by a "_X" suffix, where X is the zero-based
        # index of the choice field. Thus, the label for the main widget should
        # reference the first subwidget, hence the "_0" suffix.
        if id_:
            id_ += '_0'
        return id_
class RadioSelect(RendererMixin, Select):
    # Single-select: the "no selection" value is the empty string.
    renderer = RadioFieldRenderer
    _empty_value = ''
class CheckboxSelectMultiple(RendererMixin, SelectMultiple):
    # Multi-select: the "no selection" value is an empty list.
    renderer = CheckboxFieldRenderer
    _empty_value = []
class MultiWidget(Widget):
    """
    A widget that is composed of multiple widgets.

    Its render() method is different than other widgets', because it has to
    figure out how to split a single value for display in multiple widgets.

    The ``value`` argument can be one of two things:

        * A list.
        * A normal value (e.g., a string) that has been "compressed" from
          a list of values.

    In the second case -- i.e., if the value is NOT a list -- render() will
    first "decompress" the value into a list before rendering it. It does so by
    calling the decompress() method, which MultiWidget subclasses must
    implement. This method takes a single "compressed" value and returns a
    list.

    When render() does its HTML rendering, each value in the list is rendered
    with the corresponding widget -- the first value is rendered in the first
    widget, the second value is rendered in the second widget, etc.

    Subclasses may implement format_output(), which takes the list of rendered
    widgets and returns a string of HTML that formats them any way you'd like.

    You'll probably want to use this class with MultiValueField.
    """
    def __init__(self, widgets, attrs=None):
        # Accept widget classes or instances; instantiate classes on the fly.
        self.widgets = [w() if isinstance(w, type) else w for w in widgets]
        super(MultiWidget, self).__init__(attrs)
    @property
    def is_hidden(self):
        # Hidden only when every subwidget is hidden.
        return all(w.is_hidden for w in self.widgets)
    def render(self, name, value, attrs=None):
        if self.is_localized:
            for widget in self.widgets:
                widget.is_localized = self.is_localized
        # value is a list of values, each corresponding to a widget
        # in self.widgets.
        if not isinstance(value, list):
            value = self.decompress(value)
        output = []
        final_attrs = self.build_attrs(attrs)
        id_ = final_attrs.get('id')
        for i, widget in enumerate(self.widgets):
            # Missing trailing values render as empty subwidgets.
            try:
                widget_value = value[i]
            except IndexError:
                widget_value = None
            if id_:
                final_attrs = dict(final_attrs, id='%s_%s' % (id_, i))
            output.append(widget.render(name + '_%s' % i, widget_value, final_attrs))
        return mark_safe(self.format_output(output))
    def id_for_label(self, id_):
        # See the comment for RadioSelect.id_for_label()
        if id_:
            id_ += '_0'
        return id_
    def value_from_datadict(self, data, files, name):
        # Each subwidget posted its data under "name_<i>".
        return [widget.value_from_datadict(data, files, name + '_%s' % i) for i, widget in enumerate(self.widgets)]
    def format_output(self, rendered_widgets):
        """
        Given a list of rendered widgets (as strings), returns a Unicode string
        representing the HTML for the whole lot.

        This hook allows you to format the HTML design of the widgets, if
        needed.
        """
        return ''.join(rendered_widgets)
    def decompress(self, value):
        """
        Returns a list of decompressed values for the given compressed value.
        The given value can be assumed to be valid, but not necessarily
        non-empty.
        """
        raise NotImplementedError('Subclasses must implement this method.')
    def _get_media(self):
        "Media for a multiwidget is the combination of all media of the subwidgets"
        media = Media()
        for w in self.widgets:
            media = media + w.media
        return media
    media = property(_get_media)
    def __deepcopy__(self, memo):
        # Deep-copy the subwidgets too so copies don't share mutable state.
        obj = super(MultiWidget, self).__deepcopy__(memo)
        obj.widgets = copy.deepcopy(self.widgets)
        return obj
    @property
    def needs_multipart_form(self):
        return any(w.needs_multipart_form for w in self.widgets)
class SplitDateTimeWidget(MultiWidget):
    """
    A Widget that splits datetime input into two <input type="text"> boxes.
    """
    supports_microseconds = False
    def __init__(self, attrs=None, date_format=None, time_format=None):
        widgets = (DateInput(attrs=attrs, format=date_format),
                   TimeInput(attrs=attrs, format=time_format))
        super(SplitDateTimeWidget, self).__init__(widgets, attrs)
    def decompress(self, value):
        # Split a datetime into [date, time] for the two subwidgets, after
        # converting to the current timezone.  Microseconds are dropped,
        # matching supports_microseconds = False above.
        if value:
            value = to_current_timezone(value)
            return [value.date(), value.time().replace(microsecond=0)]
        return [None, None]
class SplitHiddenDateTimeWidget(SplitDateTimeWidget):
    """
    A Widget that splits datetime input into two <input type="hidden"> inputs.
    """
    def __init__(self, attrs=None, date_format=None, time_format=None):
        super(SplitHiddenDateTimeWidget, self).__init__(attrs, date_format, time_format)
        # Reuse the date/time subwidgets but render them as hidden inputs.
        for widget in self.widgets:
            widget.input_type = 'hidden'
class SelectDateWidget(Widget):
    """
    A Widget that splits date input into three <select> boxes.

    This also serves as an example of a Widget that has more than one HTML
    element and hence implements value_from_datadict.
    """
    none_value = (0, '---')  # the "no selection" option: (value, label)
    month_field = '%s_month'
    day_field = '%s_day'
    year_field = '%s_year'
    select_widget = Select
    date_re = re.compile(r'(\d{4})-(\d\d?)-(\d\d?)$')
    def __init__(self, attrs=None, years=None, months=None, empty_label=None):
        self.attrs = attrs or {}
        # Optional list or tuple of years to use in the "year" select box.
        if years:
            self.years = years
        else:
            this_year = datetime.date.today().year
            self.years = range(this_year, this_year + 10)
        # Optional dict of months to use in the "month" select box.
        if months:
            self.months = months
        else:
            self.months = MONTHS
        # Optional string, list, or tuple to use as empty_label.
        # A 3-tuple/list gives a distinct empty label per (year, month, day).
        if isinstance(empty_label, (list, tuple)):
            if not len(empty_label) == 3:
                raise ValueError('empty_label list/tuple must have 3 elements.')
            self.year_none_value = (0, empty_label[0])
            self.month_none_value = (0, empty_label[1])
            self.day_none_value = (0, empty_label[2])
        else:
            if empty_label is not None:
                self.none_value = (0, empty_label)
            self.year_none_value = self.none_value
            self.month_none_value = self.none_value
            self.day_none_value = self.none_value
    @staticmethod
    def _parse_date_fmt():
        # Yield 'year'/'month'/'day' in the order dictated by the active
        # DATE_FORMAT, skipping backslash-escaped format characters.
        fmt = get_format('DATE_FORMAT')
        escaped = False
        for char in fmt:
            if escaped:
                escaped = False
            elif char == '\\':
                escaped = True
            elif char in 'Yy':
                yield 'year'
            elif char in 'bEFMmNn':
                yield 'month'
            elif char in 'dj':
                yield 'day'
    def render(self, name, value, attrs=None):
        try:
            year_val, month_val, day_val = value.year, value.month, value.day
        except AttributeError:
            year_val = month_val = day_val = None
            if isinstance(value, six.string_types):
                # Not a date object: try the localized input format first,
                # then fall back to ISO-ish YYYY-M-D.
                if settings.USE_L10N:
                    try:
                        input_format = get_format('DATE_INPUT_FORMATS')[0]
                        v = datetime.datetime.strptime(force_str(value), input_format)
                        year_val, month_val, day_val = v.year, v.month, v.day
                    except ValueError:
                        pass
                if year_val is None:
                    match = self.date_re.match(value)
                    if match:
                        year_val, month_val, day_val = [int(val) for val in match.groups()]
        html = {}
        choices = [(i, i) for i in self.years]
        html['year'] = self.create_select(name, self.year_field, value, year_val, choices, self.year_none_value)
        choices = list(self.months.items())
        html['month'] = self.create_select(name, self.month_field, value, month_val, choices, self.month_none_value)
        choices = [(i, i) for i in range(1, 32)]
        html['day'] = self.create_select(name, self.day_field, value, day_val, choices, self.day_none_value)
        # Emit the three selects in locale order (see _parse_date_fmt).
        output = []
        for field in self._parse_date_fmt():
            output.append(html[field])
        return mark_safe('\n'.join(output))
    def id_for_label(self, id_):
        # The label points at the first select in locale order; the for/else
        # fallback only runs if DATE_FORMAT yields no recognized fields.
        for first_select in self._parse_date_fmt():
            return '%s_%s' % (id_, first_select)
        else:
            return '%s_month' % id_
    def value_from_datadict(self, data, files, name):
        y = data.get(self.year_field % name)
        m = data.get(self.month_field % name)
        d = data.get(self.day_field % name)
        # All three set to the none_value sentinel "0" means "no date".
        if y == m == d == "0":
            return None
        if y and m and d:
            if settings.USE_L10N:
                input_format = get_format('DATE_INPUT_FORMATS')[0]
                try:
                    date_value = datetime.date(int(y), int(m), int(d))
                except ValueError:
                    # Invalid combination: return the raw string so the field
                    # can report a validation error with the user's input.
                    return '%s-%s-%s' % (y, m, d)
                else:
                    date_value = datetime_safe.new_date(date_value)
                    return date_value.strftime(input_format)
            else:
                return '%s-%s-%s' % (y, m, d)
        return data.get(name)
    def create_select(self, name, field, value, val, choices, none_value):
        if 'id' in self.attrs:
            id_ = self.attrs['id']
        else:
            id_ = 'id_%s' % name
        # Safe to mutate: render() builds a fresh choices list per call.
        if not self.is_required:
            choices.insert(0, none_value)
        local_attrs = self.build_attrs(id=field % id_)
        s = self.select_widget(choices=choices)
        select_html = s.render(field % name, val, local_attrs)
        return select_html
| bsd-3-clause |
listamilton/supermilton.repository | script.module.youtube.dl/lib/youtube_dl/extractor/youporn.py | 9 | 6873 | from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..utils import (
int_or_none,
sanitized_Request,
str_to_int,
unescapeHTML,
unified_strdate,
)
from ..aes import aes_decrypt_text
class YouPornIE(InfoExtractor):
    """Extractor for youporn.com watch pages."""
    _VALID_URL = r'https?://(?:www\.)?youporn\.com/watch/(?P<id>\d+)/(?P<display_id>[^/?#&]+)'
    _TESTS = [{
        'url': 'http://www.youporn.com/watch/505835/sex-ed-is-it-safe-to-masturbate-daily/',
        'md5': '3744d24c50438cf5b6f6d59feb5055c2',
        'info_dict': {
            'id': '505835',
            'display_id': 'sex-ed-is-it-safe-to-masturbate-daily',
            'ext': 'mp4',
            'title': 'Sex Ed: Is It Safe To Masturbate Daily?',
            'description': 'Love & Sex Answers: http://bit.ly/DanAndJenn -- Is It Unhealthy To Masturbate Daily?',
            'thumbnail': 're:^https?://.*\.jpg$',
            'uploader': 'Ask Dan And Jennifer',
            'upload_date': '20101221',
            'average_rating': int,
            'view_count': int,
            'comment_count': int,
            'categories': list,
            'tags': list,
            'age_limit': 18,
        },
    }, {
        # Anonymous User uploader
        'url': 'http://www.youporn.com/watch/561726/big-tits-awesome-brunette-on-amazing-webcam-show/?from=related3&al=2&from_id=561726&pos=4',
        'info_dict': {
            'id': '561726',
            'display_id': 'big-tits-awesome-brunette-on-amazing-webcam-show',
            'ext': 'mp4',
            'title': 'Big Tits Awesome Brunette On amazing webcam show',
            'description': 'http://sweetlivegirls.com Big Tits Awesome Brunette On amazing webcam show.mp4',
            'thumbnail': 're:^https?://.*\.jpg$',
            'uploader': 'Anonymous User',
            'upload_date': '20111125',
            'average_rating': int,
            'view_count': int,
            'comment_count': int,
            'categories': list,
            'tags': list,
            'age_limit': 18,
        },
        'params': {
            'skip_download': True,
        },
    }]
    def _real_extract(self, url):
        mobj = re.match(self._VALID_URL, url)
        video_id = mobj.group('id')
        display_id = mobj.group('display_id')
        # Bypass the age gate with a pre-set cookie.
        request = sanitized_Request(url)
        request.add_header('Cookie', 'age_verified=1')
        webpage = self._download_webpage(request, display_id)
        # NOTE(review): the <h1> fallback pattern captures a single character
        # ([^<]) -- looks like it should be [^<]+; confirm against live pages
        # before changing, since the JS-variable pattern usually matches first.
        title = self._search_regex(
            [r'(?:video_titles|videoTitle)\s*[:=]\s*(["\'])(?P<title>.+?)\1',
             r'<h1[^>]+class=["\']heading\d?["\'][^>]*>([^<])<'],
            webpage, 'title', group='title')
        links = []
        sources = self._search_regex(
            r'(?s)sources\s*:\s*({.+?})', webpage, 'sources', default=None)
        if sources:
            for _, link in re.findall(r'[^:]+\s*:\s*(["\'])(http.+?)\1', sources):
                links.append(link)
        # Fallback #1
        for _, link in re.findall(
                r'(?:videoUrl|videoSrc|videoIpadUrl|html5PlayerSrc)\s*[:=]\s*(["\'])(http.+?)\1', webpage):
            links.append(link)
        # Fallback #2, this also contains extra low quality 180p format
        for _, link in re.findall(r'<a[^>]+href=(["\'])(http.+?)\1[^>]+title=["\']Download [Vv]ideo', webpage):
            links.append(link)
        # Fallback #3, encrypted links
        for _, encrypted_link in re.findall(
                r'encryptedQuality\d{3,4}URL\s*=\s*(["\'])([\da-zA-Z+/=]+)\1', webpage):
            links.append(aes_decrypt_text(encrypted_link, title, 32).decode('utf-8'))
        formats = []
        for video_url in set(unescapeHTML(link) for link in links):
            f = {
                'url': video_url,
            }
            # Video URL's path looks like this:
            #  /201012/17/505835/720p_1500k_505835/YouPorn%20-%20Sex%20Ed%20Is%20It%20Safe%20To%20Masturbate%20Daily.mp4
            #  /201012/17/505835/vl_240p_240k_505835/YouPorn%20-%20Sex%20Ed%20Is%20It%20Safe%20To%20Masturbate%20Daily.mp4
            # We will benefit from it by extracting some metadata
            mobj = re.search(r'(?P<height>\d{3,4})[pP]_(?P<bitrate>\d+)[kK]_\d+/', video_url)
            if mobj:
                height = int(mobj.group('height'))
                bitrate = int(mobj.group('bitrate'))
                f.update({
                    'format_id': '%dp-%dk' % (height, bitrate),
                    'height': height,
                    'tbr': bitrate,
                })
            formats.append(f)
        self._sort_formats(formats)
        description = self._og_search_description(webpage, default=None)
        thumbnail = self._search_regex(
            r'(?:imageurl\s*=|poster\s*:)\s*(["\'])(?P<thumbnail>.+?)\1',
            webpage, 'thumbnail', fatal=False, group='thumbnail')
        uploader = self._html_search_regex(
            r'(?s)<div[^>]+class=["\']submitByLink["\'][^>]*>(.+?)</div>',
            webpage, 'uploader', fatal=False)
        upload_date = unified_strdate(self._html_search_regex(
            r'(?s)<div[^>]+class=["\']videoInfo(?:Date|Time)["\'][^>]*>(.+?)</div>',
            webpage, 'upload date', fatal=False))
        age_limit = self._rta_search(webpage)
        average_rating = int_or_none(self._search_regex(
            r'<div[^>]+class=["\']videoRatingPercentage["\'][^>]*>(\d+)%</div>',
            webpage, 'average rating', fatal=False))
        view_count = str_to_int(self._search_regex(
            r'(?s)<div[^>]+class=(["\']).*?\bvideoInfoViews\b.*?\1[^>]*>.*?(?P<count>[\d,.]+)<',
            webpage, 'view count', fatal=False, group='count'))
        comment_count = str_to_int(self._search_regex(
            r'>All [Cc]omments? \(([\d,.]+)\)',
            webpage, 'comment count', fatal=False))
        def extract_tag_box(title):
            # Pull the <a> texts out of a "tag box" section labelled *title*.
            tag_box = self._search_regex(
                (r'<div[^>]+class=["\']tagBoxTitle["\'][^>]*>\s*%s\b.*?</div>\s*'
                 '<div[^>]+class=["\']tagBoxContent["\']>(.+?)</div>') % re.escape(title),
                webpage, '%s tag box' % title, default=None)
            if not tag_box:
                return []
            return re.findall(r'<a[^>]+href=[^>]+>([^<]+)', tag_box)
        categories = extract_tag_box('Category')
        tags = extract_tag_box('Tags')
        return {
            'id': video_id,
            'display_id': display_id,
            'title': title,
            'description': description,
            'thumbnail': thumbnail,
            'uploader': uploader,
            'upload_date': upload_date,
            'average_rating': average_rating,
            'view_count': view_count,
            'comment_count': comment_count,
            'categories': categories,
            'tags': tags,
            'age_limit': age_limit,
            'formats': formats,
        }
| gpl-2.0 |
HyperloopTeam/FullOpenMDAO | lib/python2.7/site-packages/traits-4.3.0-py2.7-macosx-10.10-x86_64.egg/traits/tests/test_event_order.py | 1 | 3111 | #-------------------------------------------------------------------------------
#
# Copyright (c) 2007, Enthought, Inc.
# All rights reserved.
#
# This Software is provided without warranty under the terms of the
# BSD
# license included in /LICENSE.txt and may be redistributed only
# under the conditions described in the aforementioned license. The
# license
# is also available online at http://www.enthought.com/licenses/BSD.txt
#
# Thanks for using Enthought open source!
#
#-------------------------------------------------------------------------------
from __future__ import absolute_import
from traits.testing.unittest_tools import unittest
from ..api import HasTraits, Str, Instance, Any
class TestEventOrder( unittest.TestCase ):
    """ Tests that demonstrate that trait events are delivered in LIFO
    order rather than FIFO order.

    Baz receives the "effect" event before it receives the "cause" event.
    """
    def setUp(self):
        # Wire up Foo -> Bar -> Baz, then trigger one 'cause' change so the
        # handlers record the delivery order in events_delivered.
        foo = Foo( cause='ORIGINAL')
        bar = Bar( foo=foo, test=self )
        baz = Baz( bar=bar, test=self )
        self.events_delivered = []
        foo.cause = 'CHANGE'
    def test_lifo_order(self):
        lifo = ['Bar._caused_changed',
                'Baz._effect_changed',
                'Baz._caused_changed']
        # assertEqual/assertNotEqual replace the long-deprecated
        # failUnlessEqual/failIfEqual aliases (removed in Python 3.12).
        self.assertEqual( self.events_delivered, lifo)
    def test_not_fifo_order(self):
        fifo = ['Bar._caused_changed',
                'Baz._caused_changed',
                'Baz._effect_changed']
        self.assertNotEqual( self.events_delivered, fifo)
class Foo(HasTraits):
    # The "cause": changing this trait starts the notification chain.
    cause = Str
class Bar(HasTraits):
    """Listens for Foo.cause changes and mirrors them into .effect."""
    foo = Instance(Foo)
    effect = Str
    test = Any  # the TestCase; handlers append to test.events_delivered
    def _foo_changed(self, obj, old, new):
        # Re-target the 'cause' listener whenever .foo is replaced.
        if old is not None and old is not new:
            old.on_trait_change( self._cause_changed, name='cause', remove=True)
        if new is not None:
            new.on_trait_change( self._cause_changed, name='cause')
        return
    def _cause_changed(self, obj, name, old, new):
        # NOTE(review): the recorded label says "_caused_changed" while the
        # method is _cause_changed; the tests assert this exact string, so
        # it must stay as-is.
        self.test.events_delivered.append( 'Bar._caused_changed' )
        self.effect = new.lower()
        return
class Baz(HasTraits):
    """Listens to both Bar.effect and Bar.foo.cause to expose delivery order."""
    bar = Instance(Bar)
    test = Any  # the TestCase; handlers append to test.events_delivered
    def _bar_changed(self, obj, old, new):
        # Re-target both listeners whenever .bar is replaced.
        if old is not None and old is not new:
            old.on_trait_change( self._effect_changed, name='effect',
                                 remove=True)
            old.foo.on_trait_change( self._cause_changed, name='cause',
                                     remove=True)
        if new is not None:
            new.foo.on_trait_change( self._cause_changed, name='cause')
            new.on_trait_change( self._effect_changed, name='effect')
        return
    def _cause_changed(self, obj, name, old, new):
        # See the note in Bar: the "_caused_changed" spelling is asserted.
        self.test.events_delivered.append( 'Baz._caused_changed' )
        return
    def _effect_changed(self, obj, name, old, new):
        self.test.events_delivered.append( 'Baz._effect_changed' )
        return
### EOF #######################################################################
| gpl-2.0 |
lenstr/rethinkdb | external/v8_3.30.33.16/build/gyp/test/ninja/solibs_avoid_relinking/gyptest-solibs-avoid-relinking.py | 216 | 1427 | #!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verify that relinking a solib doesn't relink a dependent executable if the
solib's public API hasn't changed.
"""
import os
import sys
import TestCommon
import TestGyp
# NOTE(fischman): This test will not work with other generators because the
# API-hash-based-mtime-preservation optimization is only implemented in
# ninja.py. It could be extended to the make.py generator as well pretty
# easily, probably.
# (also, it tests ninja-specific out paths, which would have to be generalized
# if this was extended to other generators).
test = TestGyp.TestGyp(formats=['ninja'])
test.run_gyp('solibs_avoid_relinking.gyp')
# Build the executable, grab its timestamp, touch the solib's source, rebuild
# executable, ensure timestamp hasn't changed.
test.build('solibs_avoid_relinking.gyp', 'b')
test.built_file_must_exist('b' + TestCommon.exe_suffix)
pre_stat = os.stat(test.built_file_path('b' + TestCommon.exe_suffix))
# Bump the solib source's mtime well past the build so ninja must relink the
# solib on the next build.
os.utime(os.path.join(test.workdir, 'solib.cc'),
         (pre_stat.st_atime, pre_stat.st_mtime + 100))
test.sleep()
test.build('solibs_avoid_relinking.gyp', 'b')
post_stat = os.stat(test.built_file_path('b' + TestCommon.exe_suffix))
# The executable must NOT have been relinked: the solib's public API was
# unchanged, so the executable's mtime should be preserved.
if pre_stat.st_mtime != post_stat.st_mtime:
  test.fail_test()
else:
  test.pass_test()
| agpl-3.0 |
anryko/ansible | lib/ansible/modules/network/fortios/fortios_dnsfilter_domain_filter.py | 7 | 12762 | #!/usr/bin/python
from __future__ import (absolute_import, division, print_function)
# Copyright 2019 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
__metaclass__ = type
# Module maturity/support metadata consumed by Ansible's documentation tooling.
ANSIBLE_METADATA = {'status': ['preview'],
                    'supported_by': 'community',
                    'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: fortios_dnsfilter_domain_filter
short_description: Configure DNS domain filters in Fortinet's FortiOS and FortiGate.
description:
- This module is able to configure a FortiGate or FortiOS (FOS) device by allowing the
user to set and modify dnsfilter feature and domain_filter category.
Examples include all parameters and values need to be adjusted to datasources before usage.
Tested with FOS v6.0.5
version_added: "2.8"
author:
- Miguel Angel Munoz (@mamunozgonzalez)
- Nicolas Thomas (@thomnico)
notes:
- Requires fortiosapi library developed by Fortinet
- Run as a local_action in your playbook
requirements:
- fortiosapi>=0.9.8
options:
host:
description:
- FortiOS or FortiGate IP address.
type: str
required: false
username:
description:
- FortiOS or FortiGate username.
type: str
required: false
password:
description:
- FortiOS or FortiGate password.
type: str
default: ""
vdom:
description:
- Virtual domain, among those defined previously. A vdom is a
virtual instance of the FortiGate that can be configured and
used as a different unit.
type: str
default: root
https:
description:
- Indicates if the requests towards FortiGate must use HTTPS protocol.
type: bool
default: true
ssl_verify:
description:
- Ensures FortiGate certificate must be verified by a proper CA.
type: bool
default: true
version_added: 2.9
state:
description:
- Indicates whether to create or remove the object.
This attribute was present already in previous version in a deeper level.
It has been moved out to this outer level.
type: str
required: false
choices:
- present
- absent
version_added: 2.9
dnsfilter_domain_filter:
description:
- Configure DNS domain filters.
default: null
type: dict
suboptions:
state:
description:
- B(Deprecated)
- Starting with Ansible 2.9 we recommend using the top-level 'state' parameter.
- HORIZONTALLINE
- Indicates whether to create or remove the object.
type: str
required: false
choices:
- present
- absent
comment:
description:
- Optional comments.
type: str
entries:
description:
- DNS domain filter entries.
type: list
suboptions:
action:
description:
- Action to take for domain filter matches.
type: str
choices:
- block
- allow
- monitor
domain:
description:
- Domain entries to be filtered.
type: str
id:
description:
- Id.
required: true
type: int
status:
description:
- Enable/disable this domain filter.
type: str
choices:
- enable
- disable
type:
description:
- DNS domain filter type.
type: str
choices:
- simple
- regex
- wildcard
id:
description:
- ID.
required: true
type: int
name:
description:
- Name of table.
type: str
'''
EXAMPLES = '''
- hosts: localhost
vars:
host: "192.168.122.40"
username: "admin"
password: ""
vdom: "root"
ssl_verify: "False"
tasks:
- name: Configure DNS domain filters.
fortios_dnsfilter_domain_filter:
host: "{{ host }}"
username: "{{ username }}"
password: "{{ password }}"
vdom: "{{ vdom }}"
https: "False"
state: "present"
dnsfilter_domain_filter:
comment: "Optional comments."
entries:
-
action: "block"
domain: "<your_own_value>"
id: "7"
status: "enable"
type: "simple"
id: "10"
name: "default_name_11"
'''
RETURN = '''
build:
description: Build number of the fortigate image
returned: always
type: str
sample: '1547'
http_method:
description: Last method used to provision the content into FortiGate
returned: always
type: str
sample: 'PUT'
http_status:
description: Last result given by FortiGate on last operation applied
returned: always
type: str
sample: "200"
mkey:
description: Master key (id) used in the last call to FortiGate
returned: success
type: str
sample: "id"
name:
description: Name of the table used to fulfill the request
returned: always
type: str
sample: "urlfilter"
path:
description: Path of the table used to fulfill the request
returned: always
type: str
sample: "webfilter"
revision:
description: Internal revision number
returned: always
type: str
sample: "17.0.2.10658"
serial:
description: Serial number of the unit
returned: always
type: str
sample: "FGVMEVYYQT3AB5352"
status:
description: Indication of the operation's result
returned: always
type: str
sample: "success"
vdom:
description: Virtual domain used
returned: always
type: str
sample: "root"
version:
description: Version of the FortiGate
returned: always
type: str
sample: "v5.6.3"
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.connection import Connection
from ansible.module_utils.network.fortios.fortios import FortiOSHandler
from ansible.module_utils.network.fortimanager.common import FAIL_SOCKET_MSG
def login(data, fos):
    """Open a fortiosapi session using the module's legacy credential params.

    ``data`` is the AnsibleModule params dict; ``fos`` is a FortiOSAPI
    instance that will hold the authenticated session.
    """
    host = data['host']
    username = data['username']
    password = data['password']
    ssl_verify = data['ssl_verify']

    fos.debug('on')
    # HTTPS stays enabled unless the user explicitly turned it off.
    if 'https' in data and not data['https']:
        fos.https('off')
    else:
        fos.https('on')

    fos.login(host, username, password, verify=ssl_verify)
def filter_dnsfilter_domain_filter_data(json):
    """Keep only the keys the dnsfilter/domain-filter endpoint understands.

    Drops any option whose value is None so unset module parameters are not
    sent to the FortiOS API.  Returns a new dict; *json* is not modified.
    """
    option_list = ['comment', 'entries', 'id', 'name']
    # Dict comprehension replaces the manual build-up loop (same semantics).
    return {attribute: json[attribute]
            for attribute in option_list
            if attribute in json and json[attribute] is not None}
def underscore_to_hyphen(data):
    """Recursively replace '_' with '-' in dict keys (FortiOS field naming).

    Lists are rewritten in place; dicts are rebuilt; scalars pass through.
    """
    if isinstance(data, list):
        for index in range(len(data)):
            data[index] = underscore_to_hyphen(data[index])
    elif isinstance(data, dict):
        data = {key.replace('_', '-'): underscore_to_hyphen(value)
                for key, value in data.items()}
    return data
def dnsfilter_domain_filter(data, fos):
    """Create/update ("present") or delete ("absent") a domain-filter table."""
    vdom = data['vdom']
    # Prefer the top-level 'state'; fall back to the legacy per-object state.
    if 'state' in data and data['state']:
        state = data['state']
    elif 'state' in data['dnsfilter_domain_filter'] and data['dnsfilter_domain_filter']:
        state = data['dnsfilter_domain_filter']['state']
    else:
        # NOTE(review): this default is the boolean True, which matches
        # neither branch below, so the function would fall through and
        # return None.  In practice one of the two cases above always
        # applies for this module's argument spec.
        state = True
    dnsfilter_domain_filter_data = data['dnsfilter_domain_filter']
    # Strip unset options and convert key naming to the API's hyphen style.
    filtered_data = underscore_to_hyphen(filter_dnsfilter_domain_filter_data(dnsfilter_domain_filter_data))
    if state == "present":
        return fos.set('dnsfilter',
                       'domain-filter',
                       data=filtered_data,
                       vdom=vdom)
    elif state == "absent":
        return fos.delete('dnsfilter',
                          'domain-filter',
                          mkey=filtered_data['id'],
                          vdom=vdom)
def is_successful_status(status):
    """Treat an explicit success, or a DELETE that got HTTP 404, as success."""
    if status['status'] == "success":
        return True
    # Deleting an object that is already gone is not an error.
    return status['http_method'] == "DELETE" and status['http_status'] == 404
def fortios_dnsfilter(data, fos):
    """Dispatch to the domain-filter handler and summarize the API response.

    Returns (is_error, has_changed, response).
    """
    if data['dnsfilter_domain_filter']:
        resp = dnsfilter_domain_filter(data, fos)

    failed = not is_successful_status(resp)
    changed = resp['status'] == "success"
    return failed, changed, resp
def main():
    """Module entry point: parse arguments, connect to FortiOS, apply config."""
    # Argument spec mirrors the DOCUMENTATION block above.
    fields = {
        "host": {"required": False, "type": "str"},
        "username": {"required": False, "type": "str"},
        "password": {"required": False, "type": "str", "default": "", "no_log": True},
        "vdom": {"required": False, "type": "str", "default": "root"},
        "https": {"required": False, "type": "bool", "default": True},
        "ssl_verify": {"required": False, "type": "bool", "default": True},
        "state": {"required": False, "type": "str",
                  "choices": ["present", "absent"]},
        "dnsfilter_domain_filter": {
            "required": False, "type": "dict", "default": None,
            "options": {
                "state": {"required": False, "type": "str",
                          "choices": ["present", "absent"]},
                "comment": {"required": False, "type": "str"},
                "entries": {"required": False, "type": "list",
                            "options": {
                                "action": {"required": False, "type": "str",
                                           "choices": ["block", "allow", "monitor"]},
                                "domain": {"required": False, "type": "str"},
                                "id": {"required": True, "type": "int"},
                                "status": {"required": False, "type": "str",
                                           "choices": ["enable", "disable"]},
                                "type": {"required": False, "type": "str",
                                         "choices": ["simple", "regex", "wildcard"]}
                            }},
                "id": {"required": True, "type": "int"},
                "name": {"required": False, "type": "str"}
            }
        }
    }
    module = AnsibleModule(argument_spec=fields,
                           supports_check_mode=False)
    # legacy_mode refers to using fortiosapi instead of HTTPAPI
    legacy_mode = 'host' in module.params and module.params['host'] is not None and \
                  'username' in module.params and module.params['username'] is not None and \
                  'password' in module.params and module.params['password'] is not None
    if not legacy_mode:
        # HTTPAPI transport: talk to the device through Ansible's connection.
        if module._socket_path:
            connection = Connection(module._socket_path)
            fos = FortiOSHandler(connection)
            is_error, has_changed, result = fortios_dnsfilter(module.params, fos)
        else:
            module.fail_json(**FAIL_SOCKET_MSG)
    else:
        # Legacy transport: direct fortiosapi session using host/credentials.
        try:
            from fortiosapi import FortiOSAPI
        except ImportError:
            module.fail_json(msg="fortiosapi module is required")
        fos = FortiOSAPI()
        login(module.params, fos)
        is_error, has_changed, result = fortios_dnsfilter(module.params, fos)
        fos.logout()
    if not is_error:
        module.exit_json(changed=has_changed, meta=result)
    else:
        module.fail_json(msg="Error in repo", meta=result)
# Ansible executes modules as standalone scripts.
if __name__ == '__main__':
    main()
| gpl-3.0 |
BackupGGCode/python-for-android | python-modules/twisted/twisted/lore/process.py | 57 | 3831 | # -*- test-case-name: twisted.lore.test.test_lore -*-
# Copyright (c) 2001-2004 Twisted Matrix Laboratories.
# See LICENSE for details.
#
import sys, os
import tree #todo: get rid of this later
import indexer
class NoProcessorError(Exception):
    """Raised when no generator exists for the requested output format."""
    pass
class ProcessingFailure(Exception):
    """Raised by a processing function; collected per-file by Walker."""
    pass
# Terminal width used to pad the progress line in Walker.percentdone().
cols = 79
def dircount(d):
    """Count the '/'-separated components of *d*, ignoring '.' segments.

    Empty segments (from leading/trailing/double slashes) are counted,
    matching the original list-comprehension behaviour; sum() over a
    generator avoids building a throwaway list.
    """
    return sum(1 for el in d.split("/") if el != '.')
class Walker:
    """Walk a documentation tree and run a processing function on each file.

    NOTE(review): Python 2 only -- relies on os.path.walk (removed in
    Python 3), "except E, e" syntax and the print statement below.
    """
    def __init__(self, df, fext, linkrel):
        # df: callable(fullpath, linkrel) processing one file; may raise
        # ProcessingFailure, which is collected rather than propagated.
        self.df = df
        self.linkrel = linkrel
        self.fext = fext        # file extension to select, e.g. '.xhtml'
        self.walked = []        # (linkrel, fullpath) pairs found by walkdir()
        self.failures = []      # (fullpath, ProcessingFailure) pairs
    def walkdir(self, topdir, prefix=''):
        self.basecount = dircount(topdir)
        os.path.walk(topdir, self.walk, prefix)
    def walk(self, prefix, d, names):
        # Relative link prefix: one '../' per directory level below topdir.
        linkrel = prefix + '../' * (dircount(d) - self.basecount)
        for name in names:
            fullpath = os.path.join(d, name)
            fext = os.path.splitext(name)[1]
            if fext == self.fext:
                self.walked.append((linkrel, fullpath))
    def generate(self):
        # Process every collected file, updating the index and the
        # progress display as we go.
        i = 0
        indexer.clearEntries()
        tree.filenum = 0
        for linkrel, fullpath in self.walked:
            linkrel = self.linkrel + linkrel
            i += 1
            fname = os.path.splitext(fullpath)[0]
            self.percentdone((float(i) / len(self.walked)), fname)
            try:
                self.df(fullpath, linkrel)
            except ProcessingFailure, e:
                self.failures.append((fullpath, e))
        indexer.generateIndex()
        self.percentdone(1., None)
    def percentdone(self, percent, fname):
        # override for neater progress bars
        # Draws "[####    ] (filename)" padded to the terminal width and
        # rewritten in place via '\r'; fname=None means "finished".
        proglen = 40
        hashes = int(percent * proglen)
        spaces = proglen - hashes
        progstat = "[%s%s] (%s)" %('#' * hashes, ' ' * spaces,fname or "*Done*")
        progstat += (cols - len(progstat)) * ' '
        progstat += '\r'
        sys.stdout.write(progstat)
        sys.stdout.flush()
        if fname is None:
            print
class PlainReportingWalker(Walker):
    """Walker that prints one filename per line instead of a progress bar."""
    def percentdone(self, percent, fname):
        if fname:
            print fname
class NullReportingWalker(Walker):
    """Walker with progress reporting suppressed entirely."""
    def percentdone(self, percent, fname):
        pass
def parallelGenerator(originalFileName, outputExtension):
    """Name the output file next to the input: same base, new extension."""
    base, _ = os.path.splitext(originalFileName)
    return base + outputExtension
def fooAddingGenerator(originalFileName, outputExtension):
    """Like parallelGenerator, but inserts "foo" before the new extension."""
    base, _ = os.path.splitext(originalFileName)
    return "%sfoo%s" % (base, outputExtension)
def outputdirGenerator(originalFileName, outputExtension, inputdir, outputdir):
    """Map *originalFileName* (which must live under *inputdir*) to an output
    file name in *outputdir* with *outputExtension*.

    Raises ValueError if the file is not under the input directory.
    """
    originalFileName = os.path.abspath(originalFileName)
    abs_inputdir = os.path.abspath(inputdir)
    # Containment must be checked at a path-component boundary.
    # os.path.commonprefix() works character-by-character, so it would
    # wrongly accept /aaa/bbbb/f for inputdir /aaa/bb.
    if (originalFileName != abs_inputdir
            and not originalFileName.startswith(abs_inputdir + os.sep)):
        raise ValueError("Original file name '" + originalFileName +
                         "' not under input directory '" + abs_inputdir + "'")
    # NOTE(review): only the basename is kept, so subdirectory structure under
    # inputdir is flattened into outputdir — preserved from the original code.
    adjustedPath = os.path.join(outputdir, os.path.basename(originalFileName))
    return tree.getOutputFileName(adjustedPath, outputExtension)
def getFilenameGenerator(config, outputExt):
    """Return a filename-generator callable chosen from *config*.

    With an 'outputdir' configured, output files go into that directory;
    otherwise the default tree.getOutputFileName behaviour is used.
    """
    if not config.get('outputdir'):
        return tree.getOutputFileName
    def generator(originalFileName, outputExtension):
        # abspath is evaluated per call, matching the original lambda's
        # late binding (the working directory may change between calls).
        return outputdirGenerator(originalFileName, outputExtension,
                                  os.path.abspath(config.get('inputdir')),
                                  os.path.abspath(config.get('outputdir')))
    return generator
def getProcessor(module, output, config):
    """Look up the 'generate_<output>' factory on *module* and invoke it.

    Raises NoProcessorError when the module's factory cannot produce the
    requested output format.
    """
    try:
        make = getattr(module.factory, 'generate_' + output)
    except AttributeError:
        raise NoProcessorError("cannot generate " + output + " output")
    ext = config.get('ext')
    if not ext:
        from default import htmlDefault
        ext = htmlDefault['ext']
    return make(config, getFilenameGenerator(config, ext))
| apache-2.0 |
Team02-TeamGuinness/BIOE421_RoboHand | Printrun/printrun/projectlayer.py | 20 | 38361 | # This file is part of the Printrun suite.
#
# Printrun is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Printrun is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Printrun. If not, see <http://www.gnu.org/licenses/>.
import xml.etree.ElementTree
import wx
import wx.lib.agw.floatspin as floatspin
import os
import time
import zipfile
import tempfile
import shutil
from cairosvg.surface import PNGSurface
import cStringIO
import imghdr
import copy
import re
from collections import OrderedDict
import itertools
import math
class DisplayFrame(wx.Frame):
    """Full-screen-capable projector window that shows one slice at a time
    and drives the printer's Z axis between exposures.

    The present()/show_img_delay()/hide_pic_and_rise()/rise()/next_img()
    chain forms a timer-driven state machine: expose for ``interval``
    seconds, blank, move the Z axis, wait ``pause`` seconds, repeat.
    """
    def __init__(self, parent, title, res = (1024, 768), printer = None, scale = 1.0, offset = (0, 0)):
        wx.Frame.__init__(self, parent = parent, title = title, size = res)
        self.printer = printer
        self.control_frame = parent
        self.pic = wx.StaticBitmap(self)
        self.bitmap = wx.EmptyBitmap(*res)
        # bbitmap: a pre-cleared all-black bitmap kept as the blank backdrop.
        self.bbitmap = wx.EmptyBitmap(*res)
        self.slicer = 'bitmap'
        self.dpi = 96
        dc = wx.MemoryDC()
        dc.SelectObject(self.bbitmap)
        dc.SetBackground(wx.Brush("black"))
        dc.Clear()
        dc.SelectObject(wx.NullBitmap)
        self.SetBackgroundColour("black")
        self.pic.Hide()
        self.SetDoubleBuffered(True)
        # Place the display window to the right of the control window.
        self.SetPosition((self.control_frame.GetSize().x, 0))
        self.Show()
        self.scale = scale
        self.index = 0
        self.size = res
        self.offset = offset
        self.running = False
        self.layer_red = False
    def clear_layer(self):
        """Blank the display by painting the backing bitmap black."""
        try:
            dc = wx.MemoryDC()
            dc.SelectObject(self.bitmap)
            dc.SetBackground(wx.Brush("black"))
            dc.Clear()
            self.pic.SetBitmap(self.bitmap)
            self.pic.Show()
            self.Refresh()
        except:
            raise
            pass
    def resize(self, res = (1024, 768)):
        """Re-create the backing bitmaps at the new projector resolution."""
        self.bitmap = wx.EmptyBitmap(*res)
        self.bbitmap = wx.EmptyBitmap(*res)
        dc = wx.MemoryDC()
        dc.SelectObject(self.bbitmap)
        dc.SetBackground(wx.Brush("black"))
        dc.Clear()
        dc.SelectObject(wx.NullBitmap)
    def draw_layer(self, image):
        """Render one slice onto the display.

        *image* is an SVG element (Slic3r/Skeinforge mode), a file path, or
        a wx.Image (bitmap mode), selected by self.slicer.
        """
        try:
            dc = wx.MemoryDC()
            dc.SelectObject(self.bitmap)
            dc.SetBackground(wx.Brush("black"))
            dc.Clear()
            if self.slicer == 'Slic3r' or self.slicer == 'Skeinforge':
                if self.scale != 1.0:
                    # Deep-copy so repeated draws don't accumulate scaling
                    # on the caller's SVG tree.
                    layercopy = copy.deepcopy(image)
                    height = float(layercopy.get('height').replace('m', ''))
                    width = float(layercopy.get('width').replace('m', ''))
                    layercopy.set('height', str(height * self.scale) + 'mm')
                    layercopy.set('width', str(width * self.scale) + 'mm')
                    layercopy.set('viewBox', '0 0 ' + str(width * self.scale) + ' ' + str(height * self.scale))
                    g = layercopy.find("{http://www.w3.org/2000/svg}g")
                    g.set('transform', 'scale(' + str(self.scale) + ')')
                    stream = cStringIO.StringIO(PNGSurface.convert(dpi = self.dpi, bytestring = xml.etree.ElementTree.tostring(layercopy)))
                else:
                    stream = cStringIO.StringIO(PNGSurface.convert(dpi = self.dpi, bytestring = xml.etree.ElementTree.tostring(image)))
                pngImage = wx.ImageFromStream(stream)
                # print "w:", pngImage.Width, ", dpi:", self.dpi, ", w (mm): ",(pngImage.Width / self.dpi) * 25.4
                if self.layer_red:
                    # Zero green/blue channels: red light should not cure resin.
                    pngImage = pngImage.AdjustChannels(1, 0, 0, 1)
                dc.DrawBitmap(wx.BitmapFromImage(pngImage), self.offset[0], self.offset[1], True)
            elif self.slicer == 'bitmap':
                if isinstance(image, str):
                    image = wx.Image(image)
                if self.layer_red:
                    image = image.AdjustChannels(1, 0, 0, 1)
                # NOTE(review): Y offset is negated here but not in the SVG
                # branch above — confirm intended coordinate convention.
                dc.DrawBitmap(wx.BitmapFromImage(image.Scale(image.Width * self.scale, image.Height * self.scale)), self.offset[0], -self.offset[1], True)
            else:
                raise Exception(self.slicer + " is an unknown method.")
            self.pic.SetBitmap(self.bitmap)
            self.pic.Show()
            self.Refresh()
        except:
            raise
            pass
    def show_img_delay(self, image):
        """Expose *image* for self.interval seconds, then blank and rise."""
        print "Showing", str(time.clock())
        self.control_frame.set_current_layer(self.index)
        self.draw_layer(image)
        wx.FutureCall(1000 * self.interval, self.hide_pic_and_rise)
    def rise(self):
        """Move the Z axis one layer (with overshoot) and schedule the next slice."""
        if (self.direction == "Top Down"):
            print "Lowering", str(time.clock())
        else:
            print "Rising", str(time.clock())
        if self.printer is not None and self.printer.online:
            # Relative positioning for the lift moves, restored to absolute below.
            self.printer.send_now("G91")
            if (self.prelift_gcode):
                for line in self.prelift_gcode.split('\n'):
                    if line:
                        self.printer.send_now(line)
            if (self.direction == "Top Down"):
                # Dunk past the target then come back up to the next layer.
                self.printer.send_now("G1 Z-%f F%g" % (self.overshoot, self.z_axis_rate,))
                self.printer.send_now("G1 Z%f F%g" % (self.overshoot - self.thickness, self.z_axis_rate,))
            else:  # self.direction == "Bottom Up"
                self.printer.send_now("G1 Z%f F%g" % (self.overshoot, self.z_axis_rate,))
                self.printer.send_now("G1 Z-%f F%g" % (self.overshoot - self.thickness, self.z_axis_rate,))
            if (self.postlift_gcode):
                for line in self.postlift_gcode.split('\n'):
                    if line:
                        self.printer.send_now(line)
            self.printer.send_now("G90")
        else:
            time.sleep(self.pause)
        wx.FutureCall(1000 * self.pause, self.next_img)
    def hide_pic(self):
        """Blank the projected image."""
        print "Hiding", str(time.clock())
        self.pic.Hide()
    def hide_pic_and_rise(self):
        # Hide on the GUI thread, then start the Z move half a second later.
        wx.CallAfter(self.hide_pic)
        wx.FutureCall(500, self.rise)
    def next_img(self):
        """Show the next slice, or finish when all layers are done."""
        if not self.running:
            return
        if self.index < len(self.layers):
            print self.index
            wx.CallAfter(self.show_img_delay, self.layers[self.index])
            self.index += 1
        else:
            print "end"
            wx.CallAfter(self.pic.Hide)
            wx.CallAfter(self.Refresh)
    def present(self,
                layers,
                interval = 0.5,
                pause = 0.2,
                overshoot = 0.0,
                z_axis_rate = 200,
                prelift_gcode = "",
                postlift_gcode = "",
                direction = "Top Down",
                thickness = 0.4,
                scale = 1,
                size = (1024, 768),
                offset = (0, 0),
                layer_red = False):
        """Start presenting *layers* from the beginning with the given timing,
        motion, and display parameters."""
        wx.CallAfter(self.pic.Hide)
        wx.CallAfter(self.Refresh)
        self.layers = layers
        self.scale = scale
        self.thickness = thickness
        self.size = size
        self.interval = interval
        self.pause = pause
        self.overshoot = overshoot
        self.z_axis_rate = z_axis_rate
        self.prelift_gcode = prelift_gcode
        self.postlift_gcode = postlift_gcode
        self.direction = direction
        self.layer_red = layer_red
        self.offset = offset
        self.index = 0
        self.running = True
        self.next_img()
class SettingsFrame(wx.Frame):
def _set_setting(self, name, value):
if self.pronterface:
self.pronterface.set(name, value)
def _get_setting(self, name, val):
if self.pronterface:
try:
return getattr(self.pronterface.settings, name)
except AttributeError:
return val
else:
return val
def __init__(self, parent, printer = None):
wx.Frame.__init__(self, parent, title = "ProjectLayer Control", style = (wx.DEFAULT_FRAME_STYLE | wx.WS_EX_CONTEXTHELP))
self.SetExtraStyle(wx.FRAME_EX_CONTEXTHELP)
self.pronterface = parent
self.display_frame = DisplayFrame(self, title = "ProjectLayer Display", printer = printer)
self.panel = wx.Panel(self)
vbox = wx.BoxSizer(wx.VERTICAL)
buttonbox = wx.StaticBoxSizer(wx.StaticBox(self.panel, label = "Controls"), wx.HORIZONTAL)
load_button = wx.Button(self.panel, -1, "Load")
load_button.Bind(wx.EVT_BUTTON, self.load_file)
load_button.SetHelpText("Choose an SVG file created from Slic3r or Skeinforge, or a zip file of bitmap images (with extension: .3dlp.zip).")
buttonbox.Add(load_button, flag = wx.LEFT | wx.RIGHT | wx.BOTTOM, border = 5)
present_button = wx.Button(self.panel, -1, "Present")
present_button.Bind(wx.EVT_BUTTON, self.start_present)
present_button.SetHelpText("Starts the presentation of the slices.")
buttonbox.Add(present_button, flag = wx.LEFT | wx.RIGHT | wx.BOTTOM, border = 5)
self.pause_button = wx.Button(self.panel, -1, "Pause")
self.pause_button.Bind(wx.EVT_BUTTON, self.pause_present)
self.pause_button.SetHelpText("Pauses the presentation. Can be resumed afterwards by clicking this button, or restarted by clicking present again.")
buttonbox.Add(self.pause_button, flag = wx.LEFT | wx.RIGHT | wx.BOTTOM, border = 5)
stop_button = wx.Button(self.panel, -1, "Stop")
stop_button.Bind(wx.EVT_BUTTON, self.stop_present)
stop_button.SetHelpText("Stops presenting the slices.")
buttonbox.Add(stop_button, flag = wx.LEFT | wx.RIGHT | wx.BOTTOM, border = 5)
self.help_button = wx.ContextHelpButton(self.panel)
buttonbox.Add(self.help_button, flag = wx.LEFT | wx.RIGHT | wx.BOTTOM, border = 5)
fieldboxsizer = wx.StaticBoxSizer(wx.StaticBox(self.panel, label = "Settings"), wx.VERTICAL)
fieldsizer = wx.GridBagSizer(10, 10)
# Left Column
fieldsizer.Add(wx.StaticText(self.panel, -1, "Layer (mm):"), pos = (0, 0), flag = wx.ALIGN_CENTER_VERTICAL)
self.thickness = wx.TextCtrl(self.panel, -1, str(self._get_setting("project_layer", "0.1")), size = (80, -1))
self.thickness.Bind(wx.EVT_TEXT, self.update_thickness)
self.thickness.SetHelpText("The thickness of each slice. Should match the value used to slice the model. SVG files update this value automatically, 3dlp.zip files have to be manually entered.")
fieldsizer.Add(self.thickness, pos = (0, 1))
fieldsizer.Add(wx.StaticText(self.panel, -1, "Exposure (s):"), pos = (1, 0), flag = wx.ALIGN_CENTER_VERTICAL)
self.interval = wx.TextCtrl(self.panel, -1, str(self._get_setting("project_interval", "0.5")), size = (80, -1))
self.interval.Bind(wx.EVT_TEXT, self.update_interval)
self.interval.SetHelpText("How long each slice should be displayed.")
fieldsizer.Add(self.interval, pos = (1, 1))
fieldsizer.Add(wx.StaticText(self.panel, -1, "Blank (s):"), pos = (2, 0), flag = wx.ALIGN_CENTER_VERTICAL)
self.pause = wx.TextCtrl(self.panel, -1, str(self._get_setting("project_pause", "0.5")), size = (80, -1))
self.pause.Bind(wx.EVT_TEXT, self.update_pause)
self.pause.SetHelpText("The pause length between slices. This should take into account any movement of the Z axis, plus time to prepare the resin surface (sliding, tilting, sweeping, etc).")
fieldsizer.Add(self.pause, pos = (2, 1))
fieldsizer.Add(wx.StaticText(self.panel, -1, "Scale:"), pos = (3, 0), flag = wx.ALIGN_CENTER_VERTICAL)
self.scale = floatspin.FloatSpin(self.panel, -1, value = self._get_setting('project_scale', 1.0), increment = 0.1, digits = 3, size = (80, -1))
self.scale.Bind(floatspin.EVT_FLOATSPIN, self.update_scale)
self.scale.SetHelpText("The additional scaling of each slice.")
fieldsizer.Add(self.scale, pos = (3, 1))
fieldsizer.Add(wx.StaticText(self.panel, -1, "Direction:"), pos = (4, 0), flag = wx.ALIGN_CENTER_VERTICAL)
self.direction = wx.ComboBox(self.panel, -1, choices = ["Top Down", "Bottom Up"], value = self._get_setting('project_direction', "Top Down"), size = (80, -1))
self.direction.Bind(wx.EVT_COMBOBOX, self.update_direction)
self.direction.SetHelpText("The direction the Z axis should move. Top Down is where the projector is above the model, Bottom up is where the projector is below the model.")
fieldsizer.Add(self.direction, pos = (4, 1), flag = wx.ALIGN_CENTER_VERTICAL)
fieldsizer.Add(wx.StaticText(self.panel, -1, "Overshoot (mm):"), pos = (5, 0), flag = wx.ALIGN_CENTER_VERTICAL)
self.overshoot = floatspin.FloatSpin(self.panel, -1, value = self._get_setting('project_overshoot', 3.0), increment = 0.1, digits = 1, min_val = 0, size = (80, -1))
self.overshoot.Bind(floatspin.EVT_FLOATSPIN, self.update_overshoot)
self.overshoot.SetHelpText("How far the axis should move beyond the next slice position for each slice. For Top Down printers this would dunk the model under the resi and then return. For Bottom Up printers this would raise the base away from the vat and then return.")
fieldsizer.Add(self.overshoot, pos = (5, 1))
fieldsizer.Add(wx.StaticText(self.panel, -1, "Pre-lift Gcode:"), pos = (6, 0), flag = wx.ALIGN_CENTER_VERTICAL)
self.prelift_gcode = wx.TextCtrl(self.panel, -1, str(self._get_setting("project_prelift_gcode", "").replace("\\n", '\n')), size = (-1, 35), style = wx.TE_MULTILINE)
self.prelift_gcode.SetHelpText("Additional gcode to run before raising the Z axis. Be sure to take into account any additional time needed in the pause value, and be careful what gcode is added!")
self.prelift_gcode.Bind(wx.EVT_TEXT, self.update_prelift_gcode)
fieldsizer.Add(self.prelift_gcode, pos = (6, 1), span = (2, 1))
fieldsizer.Add(wx.StaticText(self.panel, -1, "Post-lift Gcode:"), pos = (6, 2), flag = wx.ALIGN_CENTER_VERTICAL)
self.postlift_gcode = wx.TextCtrl(self.panel, -1, str(self._get_setting("project_postlift_gcode", "").replace("\\n", '\n')), size = (-1, 35), style = wx.TE_MULTILINE)
self.postlift_gcode.SetHelpText("Additional gcode to run after raising the Z axis. Be sure to take into account any additional time needed in the pause value, and be careful what gcode is added!")
self.postlift_gcode.Bind(wx.EVT_TEXT, self.update_postlift_gcode)
fieldsizer.Add(self.postlift_gcode, pos = (6, 3), span = (2, 1))
# Right Column
fieldsizer.Add(wx.StaticText(self.panel, -1, "X (px):"), pos = (0, 2), flag = wx.ALIGN_CENTER_VERTICAL)
projectX = int(math.floor(float(self._get_setting("project_x", 1920))))
self.X = wx.SpinCtrl(self.panel, -1, str(projectX), max = 999999, size = (80, -1))
self.X.Bind(wx.EVT_SPINCTRL, self.update_resolution)
self.X.SetHelpText("The projector resolution in the X axis.")
fieldsizer.Add(self.X, pos = (0, 3))
fieldsizer.Add(wx.StaticText(self.panel, -1, "Y (px):"), pos = (1, 2), flag = wx.ALIGN_CENTER_VERTICAL)
projectY = int(math.floor(float(self._get_setting("project_y", 1200))))
self.Y = wx.SpinCtrl(self.panel, -1, str(projectY), max = 999999, size = (80, -1))
self.Y.Bind(wx.EVT_SPINCTRL, self.update_resolution)
self.Y.SetHelpText("The projector resolution in the Y axis.")
fieldsizer.Add(self.Y, pos = (1, 3))
fieldsizer.Add(wx.StaticText(self.panel, -1, "OffsetX (mm):"), pos = (2, 2), flag = wx.ALIGN_CENTER_VERTICAL)
self.offset_X = floatspin.FloatSpin(self.panel, -1, value = self._get_setting("project_offset_x", 0.0), increment = 1, digits = 1, size = (80, -1))
self.offset_X.Bind(floatspin.EVT_FLOATSPIN, self.update_offset)
self.offset_X.SetHelpText("How far the slice should be offset from the edge in the X axis.")
fieldsizer.Add(self.offset_X, pos = (2, 3))
fieldsizer.Add(wx.StaticText(self.panel, -1, "OffsetY (mm):"), pos = (3, 2), flag = wx.ALIGN_CENTER_VERTICAL)
self.offset_Y = floatspin.FloatSpin(self.panel, -1, value = self._get_setting("project_offset_y", 0.0), increment = 1, digits = 1, size = (80, -1))
self.offset_Y.Bind(floatspin.EVT_FLOATSPIN, self.update_offset)
self.offset_Y.SetHelpText("How far the slice should be offset from the edge in the Y axis.")
fieldsizer.Add(self.offset_Y, pos = (3, 3))
fieldsizer.Add(wx.StaticText(self.panel, -1, "ProjectedX (mm):"), pos = (4, 2), flag = wx.ALIGN_CENTER_VERTICAL)
self.projected_X_mm = floatspin.FloatSpin(self.panel, -1, value = self._get_setting("project_projected_x", 505.0), increment = 1, digits = 1, size = (80, -1))
self.projected_X_mm.Bind(floatspin.EVT_FLOATSPIN, self.update_projected_Xmm)
self.projected_X_mm.SetHelpText("The actual width of the entire projected image. Use the Calibrate grid to show the full size of the projected image, and measure the width at the same level where the slice will be projected onto the resin.")
fieldsizer.Add(self.projected_X_mm, pos = (4, 3))
fieldsizer.Add(wx.StaticText(self.panel, -1, "Z Axis Speed (mm/min):"), pos = (5, 2), flag = wx.ALIGN_CENTER_VERTICAL)
self.z_axis_rate = wx.SpinCtrl(self.panel, -1, str(self._get_setting("project_z_axis_rate", 200)), max = 9999, size = (80, -1))
self.z_axis_rate.Bind(wx.EVT_SPINCTRL, self.update_z_axis_rate)
self.z_axis_rate.SetHelpText("Speed of the Z axis in mm/minute. Take into account that slower rates may require a longer pause value.")
fieldsizer.Add(self.z_axis_rate, pos = (5, 3))
fieldboxsizer.Add(fieldsizer)
# Display
displayboxsizer = wx.StaticBoxSizer(wx.StaticBox(self.panel, label = "Display"), wx.VERTICAL)
displaysizer = wx.GridBagSizer(10, 10)
displaysizer.Add(wx.StaticText(self.panel, -1, "Fullscreen:"), pos = (0, 0), flag = wx.ALIGN_CENTER_VERTICAL)
self.fullscreen = wx.CheckBox(self.panel, -1)
self.fullscreen.Bind(wx.EVT_CHECKBOX, self.update_fullscreen)
self.fullscreen.SetHelpText("Toggles the project screen to full size.")
displaysizer.Add(self.fullscreen, pos = (0, 1), flag = wx.ALIGN_CENTER_VERTICAL)
displaysizer.Add(wx.StaticText(self.panel, -1, "Calibrate:"), pos = (0, 2), flag = wx.ALIGN_CENTER_VERTICAL)
self.calibrate = wx.CheckBox(self.panel, -1)
self.calibrate.Bind(wx.EVT_CHECKBOX, self.show_calibrate)
self.calibrate.SetHelpText("Toggles the calibration grid. Each grid should be 10mmx10mm in size. Use the grid to ensure the projected size is correct. See also the help for the ProjectedX field.")
displaysizer.Add(self.calibrate, pos = (0, 3), flag = wx.ALIGN_CENTER_VERTICAL)
displaysizer.Add(wx.StaticText(self.panel, -1, "1st Layer:"), pos = (0, 4), flag = wx.ALIGN_CENTER_VERTICAL)
first_layer_boxer = wx.BoxSizer(wx.HORIZONTAL)
self.first_layer = wx.CheckBox(self.panel, -1)
self.first_layer.Bind(wx.EVT_CHECKBOX, self.show_first_layer)
self.first_layer.SetHelpText("Displays the first layer of the model. Use this to project the first layer for longer so it holds to the base. Note: this value does not affect the first layer when the \"Present\" run is started, it should be used manually.")
first_layer_boxer.Add(self.first_layer, flag = wx.ALIGN_CENTER_VERTICAL)
first_layer_boxer.Add(wx.StaticText(self.panel, -1, " (s):"), flag = wx.ALIGN_CENTER_VERTICAL)
self.show_first_layer_timer = floatspin.FloatSpin(self.panel, -1, value=-1, increment = 1, digits = 1, size = (55, -1))
self.show_first_layer_timer.SetHelpText("How long to display the first layer for. -1 = unlimited.")
first_layer_boxer.Add(self.show_first_layer_timer, flag = wx.ALIGN_CENTER_VERTICAL)
displaysizer.Add(first_layer_boxer, pos = (0, 6), flag = wx.ALIGN_CENTER_VERTICAL)
displaysizer.Add(wx.StaticText(self.panel, -1, "Red:"), pos = (0, 7), flag = wx.ALIGN_CENTER_VERTICAL)
self.layer_red = wx.CheckBox(self.panel, -1)
self.layer_red.Bind(wx.EVT_CHECKBOX, self.show_layer_red)
self.layer_red.SetHelpText("Toggles whether the image should be red. Useful for positioning whilst resin is in the printer as it should not cause a reaction.")
displaysizer.Add(self.layer_red, pos = (0, 8), flag = wx.ALIGN_CENTER_VERTICAL)
displayboxsizer.Add(displaysizer)
# Info
infosizer = wx.StaticBoxSizer(wx.StaticBox(self.panel, label = "Info"), wx.VERTICAL)
infofieldsizer = wx.GridBagSizer(10, 10)
filelabel = wx.StaticText(self.panel, -1, "File:")
filelabel.SetHelpText("The name of the model currently loaded.")
infofieldsizer.Add(filelabel, pos = (0, 0))
self.filename = wx.StaticText(self.panel, -1, "")
infofieldsizer.Add(self.filename, pos = (0, 1))
totallayerslabel = wx.StaticText(self.panel, -1, "Total Layers:")
totallayerslabel.SetHelpText("The total number of layers found in the model.")
infofieldsizer.Add(totallayerslabel, pos = (1, 0))
self.total_layers = wx.StaticText(self.panel, -1)
infofieldsizer.Add(self.total_layers, pos = (1, 1))
currentlayerlabel = wx.StaticText(self.panel, -1, "Current Layer:")
currentlayerlabel.SetHelpText("The current layer being displayed.")
infofieldsizer.Add(currentlayerlabel, pos = (2, 0))
self.current_layer = wx.StaticText(self.panel, -1, "0")
infofieldsizer.Add(self.current_layer, pos = (2, 1))
estimatedtimelabel = wx.StaticText(self.panel, -1, "Estimated Time:")
estimatedtimelabel.SetHelpText("An estimate of the remaining time until print completion.")
infofieldsizer.Add(estimatedtimelabel, pos = (3, 0))
self.estimated_time = wx.StaticText(self.panel, -1, "")
infofieldsizer.Add(self.estimated_time, pos = (3, 1))
infosizer.Add(infofieldsizer)
#
vbox.Add(buttonbox, flag = wx.EXPAND | wx.LEFT | wx.RIGHT | wx.TOP | wx.BOTTOM, border = 10)
vbox.Add(fieldboxsizer, flag = wx.EXPAND | wx.LEFT | wx.RIGHT | wx.BOTTOM, border = 10)
vbox.Add(displayboxsizer, flag = wx.EXPAND | wx.LEFT | wx.RIGHT | wx.BOTTOM, border = 10)
vbox.Add(infosizer, flag = wx.EXPAND | wx.LEFT | wx.RIGHT | wx.BOTTOM, border = 10)
self.panel.SetSizer(vbox)
self.panel.Fit()
self.Fit()
self.SetPosition((0, 0))
self.Show()
def __del__(self):
if hasattr(self, 'image_dir') and self.image_dir != '':
shutil.rmtree(self.image_dir)
if self.display_frame:
self.display_frame.Destroy()
def set_total_layers(self, total):
self.total_layers.SetLabel(str(total))
self.set_estimated_time()
def set_current_layer(self, index):
self.current_layer.SetLabel(str(index))
self.set_estimated_time()
def display_filename(self, name):
self.filename.SetLabel(name)
def set_estimated_time(self):
if not hasattr(self, 'layers'):
return
current_layer = int(self.current_layer.GetLabel())
remaining_layers = len(self.layers[0]) - current_layer
# 0.5 for delay between hide and rise
estimated_time = remaining_layers * (float(self.interval.GetValue()) + float(self.pause.GetValue()) + 0.5)
self.estimated_time.SetLabel(time.strftime("%H:%M:%S", time.gmtime(estimated_time)))
def parse_svg(self, name):
et = xml.etree.ElementTree.ElementTree(file = name)
# xml.etree.ElementTree.dump(et)
slicer = 'Slic3r' if et.getroot().find('{http://www.w3.org/2000/svg}metadata') is None else 'Skeinforge'
zlast = 0
zdiff = 0
ol = []
if (slicer == 'Slic3r'):
height = et.getroot().get('height').replace('m', '')
width = et.getroot().get('width').replace('m', '')
for i in et.findall("{http://www.w3.org/2000/svg}g"):
z = float(i.get('{http://slic3r.org/namespaces/slic3r}z'))
zdiff = z - zlast
zlast = z
svgSnippet = xml.etree.ElementTree.Element('{http://www.w3.org/2000/svg}svg')
svgSnippet.set('height', height + 'mm')
svgSnippet.set('width', width + 'mm')
svgSnippet.set('viewBox', '0 0 ' + width + ' ' + height)
svgSnippet.set('style', 'background-color:black;fill:white;')
svgSnippet.append(i)
ol += [svgSnippet]
else:
slice_layers = et.findall("{http://www.w3.org/2000/svg}metadata")[0].findall("{http://www.reprap.org/slice}layers")[0]
minX = slice_layers.get('minX')
maxX = slice_layers.get('maxX')
minY = slice_layers.get('minY')
maxY = slice_layers.get('maxY')
height = str(abs(float(minY)) + abs(float(maxY)))
width = str(abs(float(minX)) + abs(float(maxX)))
for g in et.findall("{http://www.w3.org/2000/svg}g")[0].findall("{http://www.w3.org/2000/svg}g"):
g.set('transform', '')
text_element = g.findall("{http://www.w3.org/2000/svg}text")[0]
g.remove(text_element)
path_elements = g.findall("{http://www.w3.org/2000/svg}path")
for p in path_elements:
p.set('transform', 'translate(' + maxX + ',' + maxY + ')')
p.set('fill', 'white')
z = float(g.get('id').split("z:")[-1])
zdiff = z - zlast
zlast = z
svgSnippet = xml.etree.ElementTree.Element('{http://www.w3.org/2000/svg}svg')
svgSnippet.set('height', height + 'mm')
svgSnippet.set('width', width + 'mm')
svgSnippet.set('viewBox', '0 0 ' + width + ' ' + height)
svgSnippet.set('style', 'background-color:black;fill:white;')
svgSnippet.append(g)
ol += [svgSnippet]
return ol, zdiff, slicer
def parse_3DLP_zip(self, name):
if not zipfile.is_zipfile(name):
raise Exception(name + " is not a zip file!")
accepted_image_types = ['gif', 'tiff', 'jpg', 'jpeg', 'bmp', 'png']
zipFile = zipfile.ZipFile(name, 'r')
self.image_dir = tempfile.mkdtemp()
zipFile.extractall(self.image_dir)
ol = []
# Note: the following funky code extracts any numbers from the filenames, matches
# them with the original then sorts them. It allows for filenames of the
# format: abc_1.png, which would be followed by abc_10.png alphabetically.
os.chdir(self.image_dir)
vals = filter(os.path.isfile, os.listdir('.'))
keys = map(lambda p: int(re.search('\d+', p).group()), vals)
imagefilesDict = dict(itertools.izip(keys, vals))
imagefilesOrderedDict = OrderedDict(sorted(imagefilesDict.items(), key = lambda t: t[0]))
for f in imagefilesOrderedDict.values():
path = os.path.join(self.image_dir, f)
if os.path.isfile(path) and imghdr.what(path) in accepted_image_types:
ol.append(path)
return ol, -1, "bitmap"
def load_file(self, event):
dlg = wx.FileDialog(self, ("Open file to print"), style = wx.FD_OPEN | wx.FD_FILE_MUST_EXIST)
dlg.SetWildcard(("Slic3r or Skeinforge svg files (;*.svg;*.SVG;);3DLP Zip (;*.3dlp.zip;)"))
if dlg.ShowModal() == wx.ID_OK:
name = dlg.GetPath()
if not(os.path.exists(name)):
self.status.SetStatusText(("File not found!"))
return
if name.endswith(".3dlp.zip"):
layers = self.parse_3DLP_zip(name)
layerHeight = float(self.thickness.GetValue())
else:
layers = self.parse_svg(name)
layerHeight = layers[1]
self.thickness.SetValue(str(layers[1]))
print "Layer thickness detected:", layerHeight, "mm"
print len(layers[0]), "layers found, total height", layerHeight * len(layers[0]), "mm"
self.layers = layers
self.set_total_layers(len(layers[0]))
self.set_current_layer(0)
self.current_filename = os.path.basename(name)
self.display_filename(self.current_filename)
self.slicer = layers[2]
self.display_frame.slicer = self.slicer
dlg.Destroy()
def show_calibrate(self, event):
if self.calibrate.IsChecked():
self.present_calibrate(event)
else:
if hasattr(self, 'layers'):
self.display_frame.slicer = self.layers[2]
self.display_frame.scale = float(self.scale.GetValue())
self.display_frame.clear_layer()
def show_first_layer(self, event):
if self.first_layer.IsChecked():
self.present_first_layer(event)
else:
if hasattr(self, 'layers'):
self.display_frame.slicer = self.layers[2]
self.display_frame.scale = float(self.scale.GetValue())
self.display_frame.clear_layer()
def show_layer_red(self, event):
self.display_frame.layer_red = self.layer_red.IsChecked()
def present_calibrate(self, event):
if self.calibrate.IsChecked():
self.display_frame.Raise()
self.display_frame.offset = (float(self.offset_X.GetValue()), -float(self.offset_Y.GetValue()))
self.display_frame.scale = 1.0
resolution_x_pixels = int(self.X.GetValue())
resolution_y_pixels = int(self.Y.GetValue())
gridBitmap = wx.EmptyBitmap(resolution_x_pixels, resolution_y_pixels)
dc = wx.MemoryDC()
dc.SelectObject(gridBitmap)
dc.SetBackground(wx.Brush("black"))
dc.Clear()
dc.SetPen(wx.Pen("red", 7))
dc.DrawLine(0, 0, resolution_x_pixels, 0)
dc.DrawLine(0, 0, 0, resolution_y_pixels)
dc.DrawLine(resolution_x_pixels, 0, resolution_x_pixels, resolution_y_pixels)
dc.DrawLine(0, resolution_y_pixels, resolution_x_pixels, resolution_y_pixels)
dc.SetPen(wx.Pen("red", 2))
aspectRatio = float(resolution_x_pixels) / float(resolution_y_pixels)
projectedXmm = float(self.projected_X_mm.GetValue())
projectedYmm = round(projectedXmm / aspectRatio)
pixelsXPerMM = resolution_x_pixels / projectedXmm
pixelsYPerMM = resolution_y_pixels / projectedYmm
gridCountX = int(projectedXmm / 10)
gridCountY = int(projectedYmm / 10)
for y in xrange(0, gridCountY + 1):
for x in xrange(0, gridCountX + 1):
dc.DrawLine(0, y * (pixelsYPerMM * 10), resolution_x_pixels, y * (pixelsYPerMM * 10))
dc.DrawLine(x * (pixelsXPerMM * 10), 0, x * (pixelsXPerMM * 10), resolution_y_pixels)
self.first_layer.SetValue(False)
self.display_frame.slicer = 'bitmap'
self.display_frame.draw_layer(gridBitmap.ConvertToImage())
def present_first_layer(self, event):
if (self.first_layer.GetValue()):
if not hasattr(self, "layers"):
print "No model loaded!"
self.first_layer.SetValue(False)
return
self.display_frame.offset = (float(self.offset_X.GetValue()), float(self.offset_Y.GetValue()))
self.display_frame.scale = float(self.scale.GetValue())
self.display_frame.slicer = self.layers[2]
self.display_frame.dpi = self.get_dpi()
self.display_frame.draw_layer(copy.deepcopy(self.layers[0][0]))
self.calibrate.SetValue(False)
if self.show_first_layer_timer != -1.0:
def unpresent_first_layer():
self.display_frame.clear_layer()
self.first_layer.SetValue(False)
wx.CallLater(self.show_first_layer_timer.GetValue() * 1000, unpresent_first_layer)
def update_offset(self, event):
offset_x = float(self.offset_X.GetValue())
offset_y = float(self.offset_Y.GetValue())
self.display_frame.offset = (offset_x, offset_y)
self._set_setting('project_offset_x', offset_x)
self._set_setting('project_offset_y', offset_y)
self.refresh_display(event)
def refresh_display(self, event):
self.present_calibrate(event)
self.present_first_layer(event)
def update_thickness(self, event):
self._set_setting('project_layer', self.thickness.GetValue())
self.refresh_display(event)
def update_projected_Xmm(self, event):
self._set_setting('project_projected_x', self.projected_X_mm.GetValue())
self.refresh_display(event)
def update_scale(self, event):
scale = float(self.scale.GetValue())
self.display_frame.scale = scale
self._set_setting('project_scale', scale)
self.refresh_display(event)
def update_interval(self, event):
interval = float(self.interval.GetValue())
self.display_frame.interval = interval
self._set_setting('project_interval', interval)
self.set_estimated_time()
self.refresh_display(event)
def update_pause(self, event):
pause = float(self.pause.GetValue())
self.display_frame.pause = pause
self._set_setting('project_pause', pause)
self.set_estimated_time()
self.refresh_display(event)
def update_overshoot(self, event):
overshoot = float(self.overshoot.GetValue())
self.display_frame.pause = overshoot
self._set_setting('project_overshoot', overshoot)
def update_prelift_gcode(self, event):
prelift_gcode = self.prelift_gcode.GetValue().replace('\n', "\\n")
self.display_frame.prelift_gcode = prelift_gcode
self._set_setting('project_prelift_gcode', prelift_gcode)
def update_postlift_gcode(self, event):
postlift_gcode = self.postlift_gcode.GetValue().replace('\n', "\\n")
self.display_frame.postlift_gcode = postlift_gcode
self._set_setting('project_postlift_gcode', postlift_gcode)
def update_z_axis_rate(self, event):
z_axis_rate = int(self.z_axis_rate.GetValue())
self.display_frame.z_axis_rate = z_axis_rate
self._set_setting('project_z_axis_rate', z_axis_rate)
def update_direction(self, event):
direction = self.direction.GetValue()
self.display_frame.direction = direction
self._set_setting('project_direction', direction)
def update_fullscreen(self, event):
if (self.fullscreen.GetValue()):
self.display_frame.ShowFullScreen(1)
else:
self.display_frame.ShowFullScreen(0)
self.refresh_display(event)
def update_resolution(self, event):
x = int(self.X.GetValue())
y = int(self.Y.GetValue())
self.display_frame.resize((x, y))
self._set_setting('project_x', x)
self._set_setting('project_y', y)
self.refresh_display(event)
def get_dpi(self):
resolution_x_pixels = int(self.X.GetValue())
projected_x_mm = float(self.projected_X_mm.GetValue())
projected_x_inches = projected_x_mm / 25.4
return resolution_x_pixels / projected_x_inches
def start_present(self, event):
if not hasattr(self, "layers"):
print "No model loaded!"
return
self.pause_button.SetLabel("Pause")
self.set_current_layer(0)
self.display_frame.Raise()
if (self.fullscreen.GetValue()):
self.display_frame.ShowFullScreen(1)
self.display_frame.slicer = self.layers[2]
self.display_frame.dpi = self.get_dpi()
self.display_frame.present(self.layers[0][:],
thickness = float(self.thickness.GetValue()),
interval = float(self.interval.GetValue()),
scale = float(self.scale.GetValue()),
pause = float(self.pause.GetValue()),
overshoot = float(self.overshoot.GetValue()),
z_axis_rate = int(self.z_axis_rate.GetValue()),
prelift_gcode = self.prelift_gcode.GetValue(),
postlift_gcode = self.postlift_gcode.GetValue(),
direction = self.direction.GetValue(),
size = (float(self.X.GetValue()), float(self.Y.GetValue())),
offset = (float(self.offset_X.GetValue()), float(self.offset_Y.GetValue())),
layer_red = self.layer_red.IsChecked())
def stop_present(self, event):
    """Abort the running presentation and reset to the first layer."""
    print("Stop")
    self.pause_button.SetLabel("Pause")
    self.set_current_layer(0)
    self.display_frame.running = False
def pause_present(self, event):
    """Toggle between paused and running; the pause button's label
    doubles as the current-state flag."""
    if self.pause_button.GetLabel() == 'Pause':
        print("Pause")
        self.pause_button.SetLabel("Continue")
        self.display_frame.running = False
    else:
        print("Continue")
        self.pause_button.SetLabel("Pause")
        self.display_frame.running = True
        self.display_frame.next_img()
if __name__ == "__main__":
    # A help provider must be installed before any widget with
    # contextual help is created.
    provider = wx.SimpleHelpProvider()
    wx.HelpProvider_Set(provider)
    app = wx.App()
    SettingsFrame(None).Show()
    app.MainLoop()
| gpl-2.0 |
EdDev/vdsm | lib/vdsm/network/netlink/addr.py | 1 | 2889 | # Copyright 2014-2017 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Refer to the README and COPYING files for full details of the license
#
from __future__ import absolute_import
from functools import partial
import errno
from . import _cache_manager
from . import _pool
from . import libnl
from .link import _nl_link_cache, _link_index_to_name
def iter_addrs():
    """Yield an information dictionary for every network address known
    to the kernel."""
    with _pool.socket() as sock, \
            _nl_addr_cache(sock) as addr_cache, \
            _nl_link_cache(sock) as link_cache:  # link cache: index -> label
        entry = libnl.nl_cache_get_first(addr_cache)
        while entry:
            yield _addr_info(entry, link_cache=link_cache)
            entry = libnl.nl_cache_get_next(entry)
def _addr_info(addr, link_cache=None):
    """Return a dictionary describing a single netlink address object."""
    index = libnl.rtnl_addr_get_ifindex(addr)
    local = libnl.rtnl_addr_get_local(addr)
    info = {
        'index': index,
        'family': libnl.nl_af2str(libnl.rtnl_addr_get_family(addr)),
        'prefixlen': libnl.rtnl_addr_get_prefixlen(addr),
        'scope': libnl.rtnl_scope2str(libnl.rtnl_addr_get_scope(addr)),
        'flags': _addr_flags(addr),
        'address': libnl.nl_addr2str(local) if local else None
    }
    try:
        info['label'] = _link_index_to_name(index, cache=link_cache)
    except IOError as err:
        # A device may disappear between the cache dump and this lookup;
        # in that case simply omit the label.
        if err.errno != errno.ENODEV:
            raise
    return info
def split(addr):
    """Return (bare_address, prefixlen) for an addr dict from iter_addrs.

    The address field may or may not carry a '/prefix' suffix (32-bit
    addresses are reported without one), so anything after the first
    slash is discarded.
    """
    bare = addr['address'].partition('/')[0]
    return bare, addr['prefixlen']
def cidr_form(addr):
    """Render an addr dict in CIDR notation, e.g. '192.168.0.1/24'."""
    address, prefixlen = split(addr)
    return '{}/{}'.format(address, prefixlen)
def is_primary(addr):
    """True unless the kernel marked this address as a secondary one."""
    flags = addr['flags']
    return 'secondary' not in flags
def is_permanent(addr):
    """True when the address carries the 'permanent' flag."""
    flags = addr['flags']
    return 'permanent' in flags
def _addr_flags(addr):
    """Returns the textual representation of the address flags"""
    # rtnl_addr_flags2str renders the flag bitmask as a comma separated
    # string (e.g. 'permanent,secondary'); expose it as a frozenset so
    # callers get cheap membership tests (see is_primary/is_permanent).
    return frozenset(
        libnl.rtnl_addr_flags2str(libnl.rtnl_addr_get_flags(addr)).split(','))


# Context-manager factory: yields an rtnl address cache allocated on the
# given netlink socket and releases it on exit (see _cache_manager).
_nl_addr_cache = partial(_cache_manager, libnl.rtnl_addr_alloc_cache)
| gpl-2.0 |
rvianello/rdkit | rdkit/Chem/Features/ShowFeats.py | 5 | 12247 | # $Id: ShowFeats.py 537 2007-08-20 14:54:35Z landrgr1 $
#
# Created by Greg Landrum Aug 2006
#
#
from __future__ import print_function
_version = "0.3.2"
_usage = """
ShowFeats [optional args] <filenames>
if "-" is provided as a filename, data will be read from stdin (the console)
"""
_welcomeMessage = "This is ShowFeats version %s" % (_version)
import math
#set up the logger:
from rdkit import RDLogger as logging
logger = logging.logger()
logger.setLevel(logging.INFO)
from rdkit import Geometry
from rdkit.Chem.Features import FeatDirUtilsRD as FeatDirUtils
_featColors = {
'Donor': (0, 1, 1),
'Acceptor': (1, 0, 1),
'NegIonizable': (1, 0, 0),
'PosIonizable': (0, 0, 1),
'ZnBinder': (1, .5, .5),
'Aromatic': (1, .8, .2),
'LumpedHydrophobe': (.5, .25, 0),
'Hydrophobe': (.5, .25, 0),
}
def _getVectNormal(v, tol=1e-4):
  """Return a unit vector perpendicular to *v*.

  Raises ValueError when *v* is (numerically) the null vector.
  """
  if math.fabs(v.x) > tol:
    perp = Geometry.Point3D(v.y, -v.x, 0)
  elif math.fabs(v.y) > tol:
    perp = Geometry.Point3D(-v.y, v.x, 0)
  elif math.fabs(v.z) > tol:
    # v is (numerically) parallel to the z axis; any vector in the
    # xy plane is normal to it
    perp = Geometry.Point3D(1, 0, 0)
  else:
    raise ValueError('cannot find normal to the null vector')
  perp.Normalize()
  return perp
_canonArrowhead = None


def _buildCanonArrowhead(headFrac, nSteps, aspect):
  """Build the canonical arrowhead geometry into the module-level
  _canonArrowhead list: the tip at (0, 0, headFrac) followed by nSteps
  points on the rim circle of radius headFrac*aspect in the z=0 plane.

  Bugs fixed: the geometry module was referenced as ``RDGeometry``
  (undefined; this file imports ``from rdkit import Geometry``) and the
  rim angle was missing the division by nSteps, so every rim point
  collapsed onto (scale, 0, 0).
  """
  global _canonArrowhead
  tipP = Geometry.Point3D(0, 0, headFrac)
  _canonArrowhead = [tipP]
  scale = headFrac * aspect
  _canonArrowhead.append(Geometry.Point3D(scale, 0, 0))
  twopi = 2 * math.pi
  for i in range(1, nSteps):
    theta = i * twopi / nSteps
    _canonArrowhead.append(
      Geometry.Point3D(scale * math.cos(theta), scale * math.sin(theta), 0))
_globalArrowCGO = []   # accumulated CGO primitives for feature direction arrows
_globalSphereCGO = []  # accumulated CGO primitives for feature spheres

# CGO opcodes, taken from pymol's cgo.py
# (COLOR and TRIANGLE_FAN share the value 6 in pymol: TRIANGLE_FAN is a
# BEGIN-mode token while COLOR is a drawing op, so they never collide)
BEGIN = 2
END = 3
TRIANGLE_FAN = 6
COLOR = 6
VERTEX = 4
NORMAL = 5
SPHERE = 7
CYLINDER = 9
ALPHA = 25
def _cgoArrowhead(viewer, tail, head, radius, color, label, headFrac=0.3, nSteps=10, aspect=.5):
  """Append CGO primitives for an arrowhead cone at *head* to the
  module-level _globalArrowCGO list.

  The cone is built as two triangle fans: `cgo` fans out from the cone
  apex (head + dv), `base` fans out from *head* to close the bottom.
  headFrac scales the cone length along head-tail; aspect controls the
  rim radius relative to that length.

  NOTE(review): *viewer*, *radius* and *label* are currently unused here
  (the direct renderCGO call is commented out below) — presumably kept
  for interface symmetry with ShowArrow.
  """
  global _globalArrowCGO
  delta = head - tail
  # any vector perpendicular to the arrow axis, used as rotation seed
  normal = _getVectNormal(delta)
  delta.Normalize()
  dv = head - tail
  dv.Normalize()
  dv *= headFrac
  startP = head
  normal *= headFrac * aspect
  # apex fan: starts at the cone tip beyond *head*
  cgo = [BEGIN, TRIANGLE_FAN, COLOR, color[0], color[1], color[2], NORMAL, dv.x, dv.y, dv.z, VERTEX,
         head.x + dv.x, head.y + dv.y, head.z + dv.z]
  # base fan: starts at *head*, normal pointing back down the shaft
  base = [BEGIN, TRIANGLE_FAN, COLOR, color[0], color[1], color[2], NORMAL, -dv.x, -dv.y, -dv.z,
          VERTEX, head.x, head.y, head.z]
  v = startP + normal
  cgo.extend([NORMAL, normal.x, normal.y, normal.z])
  cgo.extend([VERTEX, v.x, v.y, v.z])
  base.extend([VERTEX, v.x, v.y, v.z])
  # sweep the rim: rotate the seed normal around the arrow axis
  for i in range(1, nSteps):
    v = FeatDirUtils.ArbAxisRotation(360. / nSteps * i, delta, normal)
    cgo.extend([NORMAL, v.x, v.y, v.z])
    v += startP
    cgo.extend([VERTEX, v.x, v.y, v.z])
    base.extend([VERTEX, v.x, v.y, v.z])
  # close both fans at the starting rim point
  cgo.extend([NORMAL, normal.x, normal.y, normal.z])
  cgo.extend([VERTEX, startP.x + normal.x, startP.y + normal.y, startP.z + normal.z])
  base.extend([VERTEX, startP.x + normal.x, startP.y + normal.y, startP.z + normal.z])
  cgo.append(END)
  base.append(END)
  cgo.extend(base)
  #viewer.server.renderCGO(cgo,label)
  _globalArrowCGO.extend(cgo)
def ShowArrow(viewer, tail, head, radius, color, label, transparency=0, includeArrowhead=True):
  """Append CGO primitives for an arrow from *tail* to *head* to the
  module-level _globalArrowCGO buffer; optionally add an arrowhead cone
  at *head* via _cgoArrowhead."""
  global _globalArrowCGO
  alpha = 1 - transparency if transparency else 1
  _globalArrowCGO.extend([ALPHA, alpha])
  r, g, b = color[0], color[1], color[2]
  _globalArrowCGO.extend([CYLINDER,
                          tail.x, tail.y, tail.z,
                          head.x, head.y, head.z,
                          radius * .10,
                          r, g, b,
                          r, g, b, ])
  if includeArrowhead:
    _cgoArrowhead(viewer, tail, head, radius, color, label)
def ShowMolFeats(mol, factory, viewer, radius=0.5, confId=-1, showOnly=True, name='',
                 transparency=0.0, colors=None, excludeTypes=[], useFeatDirs=True, featLabel=None,
                 dirLabel=None, includeArrowheads=True, writeFeats=False, showMol=True,
                 featMapFile=False):
  """Display the chemical features of *mol* in PyMol.

  Feature positions are drawn as spheres (accumulated into the
  module-level _globalSphereCGO list); for Aromatic/Donor/Acceptor
  features, direction vectors are drawn as arrows via ShowArrow.
  When writeFeats is true each feature line is printed to stdout; when
  featMapFile is a file object, feature-map entries are written to it.
  NOTE(review): excludeTypes has a mutable default ([]); it is only
  iterated here, never mutated, so this is harmless as-is.
  """
  global _globalSphereCGO
  if not name:
    if mol.HasProp('_Name'):
      name = mol.GetProp('_Name')
    else:
      name = 'molecule'
  if not colors:
    colors = _featColors

  if showMol:
    viewer.ShowMol(mol, name=name, showOnly=showOnly, confId=confId)

  molFeats = factory.GetFeaturesForMol(mol)
  if not featLabel:
    featLabel = '%s-feats' % name
    viewer.server.resetCGO(featLabel)
  if not dirLabel:
    dirLabel = featLabel + "-dirs"
    viewer.server.resetCGO(dirLabel)

  for i, feat in enumerate(molFeats):
    family = feat.GetFamily()
    if family in excludeTypes:
      continue
    pos = feat.GetPos(confId)
    # unknown families are drawn grey
    color = colors.get(family, (.5, .5, .5))
    nm = '%s(%d)' % (family, i + 1)

    if transparency:
      _globalSphereCGO.extend([ALPHA, 1 - transparency])
    else:
      _globalSphereCGO.extend([ALPHA, 1])
    _globalSphereCGO.extend([COLOR, color[0], color[1], color[2], SPHERE, pos.x, pos.y, pos.z,
                             radius])
    if writeFeats:
      # atom ids are printed 1-based
      aidText = ' '.join([str(x + 1) for x in feat.GetAtomIds()])
      print('%s\t%.3f\t%.3f\t%.3f\t1.0\t# %s' % (family, pos.x, pos.y, pos.z, aidText))

    if featMapFile:
      print(" family=%s pos=(%.3f,%.3f,%.3f) weight=1.0" % (family, pos.x, pos.y, pos.z), end='',
            file=featMapFile)

    if useFeatDirs:
      ps = []
      if family == 'Aromatic':
        ps, fType = FeatDirUtils.GetAromaticFeatVects(
          mol.GetConformer(confId), feat.GetAtomIds(), pos, scale=1.0)
      elif family == 'Donor':
        aids = feat.GetAtomIds()
        if len(aids) == 1:
          featAtom = mol.GetAtomWithIdx(aids[0])
          # count heavy-atom neighbours to pick the direction routine
          hvyNbrs = [x for x in featAtom.GetNeighbors() if x.GetAtomicNum() != 1]
          if len(hvyNbrs) == 1:
            ps, fType = FeatDirUtils.GetDonor1FeatVects(mol.GetConformer(confId), aids, scale=1.0)
          elif len(hvyNbrs) == 2:
            ps, fType = FeatDirUtils.GetDonor2FeatVects(mol.GetConformer(confId), aids, scale=1.0)
          elif len(hvyNbrs) == 3:
            ps, fType = FeatDirUtils.GetDonor3FeatVects(mol.GetConformer(confId), aids, scale=1.0)
      elif family == 'Acceptor':
        aids = feat.GetAtomIds()
        if len(aids) == 1:
          featAtom = mol.GetAtomWithIdx(aids[0])
          hvyNbrs = [x for x in featAtom.GetNeighbors() if x.GetAtomicNum() != 1]
          if len(hvyNbrs) == 1:
            ps, fType = FeatDirUtils.GetAcceptor1FeatVects(
              mol.GetConformer(confId), aids, scale=1.0)
          elif len(hvyNbrs) == 2:
            ps, fType = FeatDirUtils.GetAcceptor2FeatVects(
              mol.GetConformer(confId), aids, scale=1.0)
          elif len(hvyNbrs) == 3:
            ps, fType = FeatDirUtils.GetAcceptor3FeatVects(
              mol.GetConformer(confId), aids, scale=1.0)

      for tail, head in ps:
        ShowArrow(viewer, tail, head, radius, color, dirLabel, transparency=transparency,
                  includeArrowhead=includeArrowheads)
        if featMapFile:
          vect = head - tail
          print('dir=(%.3f,%.3f,%.3f)' % (vect.x, vect.y, vect.z), end='', file=featMapFile)

    if featMapFile:
      aidText = ' '.join([str(x + 1) for x in feat.GetAtomIds()])
      print('# %s' % (aidText), file=featMapFile)
# --- ---- --- ---- --- ---- --- ---- --- ---- --- ----
import sys, os, getopt
from rdkit import RDConfig
from optparse import OptionParser
parser = OptionParser(_usage, version='%prog ' + _version)
parser.add_option('-x', '--exclude', default='',
help='provide a list of feature names that should be excluded')
parser.add_option('-f', '--fdef', default=os.path.join(RDConfig.RDDataDir, 'BaseFeatures.fdef'),
help='provide the name of the feature definition (fdef) file.')
parser.add_option('--noDirs', '--nodirs', dest='useDirs', default=True, action='store_false',
help='do not draw feature direction indicators')
parser.add_option('--noHeads', dest='includeArrowheads', default=True, action='store_false',
help='do not draw arrowheads on the feature direction indicators')
parser.add_option('--noClear', '--noClear', dest='clearAll', default=False, action='store_true',
help='do not clear PyMol on startup')
parser.add_option('--noMols', '--nomols', default=False, action='store_true',
help='do not draw the molecules')
parser.add_option('--writeFeats', '--write', default=False, action='store_true',
help='print the feature information to the console')
parser.add_option('--featMapFile', '--mapFile', default='',
help='save a feature map definition to the specified file')
parser.add_option('--verbose', default=False, action='store_true', help='be verbose')
if __name__ == '__main__':
  from rdkit import Chem
  from rdkit.Chem import AllChem
  from rdkit.Chem.PyMol import MolViewer

  options, args = parser.parse_args()
  if len(args) < 1:
    parser.error('please provide either at least one sd or mol file')

  try:
    v = MolViewer()
  except Exception:
    logger.error(
      'Unable to connect to PyMol server.\nPlease run ~landrgr1/extern/PyMol/launch.sh to start it.')
    sys.exit(1)
  if options.clearAll:
    v.DeleteAll()

  try:
    fdef = open(options.fdef, 'r').read()
  except IOError:
    logger.error('ERROR: Could not open fdef file %s' % options.fdef)
    sys.exit(1)

  factory = AllChem.BuildFeatureFactoryFromString(fdef)

  if options.writeFeats:
    print('# Family \tX \tY \tZ \tRadius\t # Atom_ids')

  if options.featMapFile:
    if options.featMapFile == '-':
      options.featMapFile = sys.stdout
    else:
      # BUGFIX: this used the Python 2-only builtin file(), which does
      # not exist on Python 3; open() is the direct equivalent on both.
      options.featMapFile = open(options.featMapFile, 'w+')
    print('# Feature map generated by ShowFeats v%s' % _version, file=options.featMapFile)
    print("ScoreMode=All", file=options.featMapFile)
    print("DirScoreMode=Ignore", file=options.featMapFile)
    print("BeginParams", file=options.featMapFile)
    for family in factory.GetFeatureFamilies():
      print(" family=%s width=1.0 radius=3.0" % family, file=options.featMapFile)
    print("EndParams", file=options.featMapFile)
    print("BeginPoints", file=options.featMapFile)

  i = 1
  for midx, molN in enumerate(args):
    if molN != '-':
      featLabel = '%s_Feats' % molN
    else:
      featLabel = 'Mol%d_Feats' % (midx + 1)
    v.server.resetCGO(featLabel)
    # this is a bit of kludgery to work around what seems to be a pymol cgo bug:
    v.server.sphere((0, 0, 0), .01, (1, 0, 1), featLabel)
    dirLabel = featLabel + "-dirs"

    v.server.resetCGO(dirLabel)
    # this is a bit of kludgery to work around what seems to be a pymol cgo bug:
    v.server.cylinder((0, 0, 0), (.01, .01, .01), .01, (1, 0, 1), dirLabel)

    if molN != '-':
      try:
        ms = Chem.SDMolSupplier(molN)
      except Exception:
        logger.error('Problems reading input file: %s' % molN)
        ms = []
    else:
      # read an SD stream from stdin
      ms = Chem.SDMolSupplier()
      ms.SetData(sys.stdin.read())

    for m in ms:
      nm = 'Mol_%d' % (i)
      if m.HasProp('_Name'):
        nm += '_' + m.GetProp('_Name')
      if options.verbose:
        if m.HasProp('_Name'):
          print("#Molecule: %s" % m.GetProp('_Name'))
        else:
          print("#Molecule: %s" % nm)
      ShowMolFeats(m, factory, v, transparency=0.25, excludeTypes=options.exclude, name=nm,
                   showOnly=False, useFeatDirs=options.useDirs, featLabel=featLabel,
                   dirLabel=dirLabel, includeArrowheads=options.includeArrowheads,
                   writeFeats=options.writeFeats, showMol=not options.noMols,
                   featMapFile=options.featMapFile)
      i += 1
      if not i % 100:
        logger.info("Done %d poses" % i)

    if ms:
      # render everything accumulated for this input file
      v.server.renderCGO(_globalSphereCGO, featLabel, 1)
      if options.useDirs:
        v.server.renderCGO(_globalArrowCGO, dirLabel, 1)

  if options.featMapFile:
    print("EndPoints", file=options.featMapFile)

  sys.exit(0)
| bsd-3-clause |
walterdejong/synctool | src/synctool/overlay.py | 1 | 12401 | #
# synctool.overlay.py WJ111
#
# synctool Copyright 2015 Walter de Jong <walter@heiho.net>
#
# synctool COMES WITH NO WARRANTY. synctool IS FREE SOFTWARE.
# synctool is distributed under terms described in the GNU General Public
# License.
#
'''synctool.overlay maps the repository onto the root directory.
Consider this tree:
$overlay/all/etc/ntp.conf._n1
$overlay/all/etc._n1/ntp.conf._all
$overlay/all/etc._n1/ntp.conf._n1
$overlay/n1/etc/ntp.conf._all
$overlay/n1/etc/ntp.conf._n1
$overlay/n1/etc._n1/ntp.conf._all
$overlay/n1/etc._n1/ntp.conf._n1
[Ideally] synctool selects the final entry. It accomplishes this with
the following procedure:
1. foreach direntry split the extension; get the 'importance'
2. sort by importance
3. first come, first served; first encountered entry is best choice
4. register destination as 'already handled' (duplicate)
5. if already handled, skip this entry
.post scripts are sorted in first so that a dictionary can be built
before it needs to be consulted. This dictionary only contains .post
scripts that are in the current directory. Additionally, if the current
directory itself has a .post script (which is in the parent directory),
then the .post script is passed in the dict as well.
'''
import os
import fnmatch
try:
from typing import List, Dict, Tuple, Set, Callable
except ImportError:
pass
import synctool.lib
from synctool.lib import verbose, warning, terse, prettypath
import synctool.object
from synctool.object import SyncObject
import synctool.param
# const enum object types
OV_REG = 0
OV_PRE = 1
OV_POST = 2
OV_TEMPLATE = 3
OV_TEMPLATE_POST = 4
OV_NO_EXT = 5
OV_IGNORE = 6
def _sort_by_importance(item1, item2):
    # type: (Tuple[str, int], Tuple[str, int]) -> int
    '''item is a tuple (x, importance)'''
    # Python 2 comparator: ascending by numeric importance, i.e. the
    # index in MY_GROUPS (lower index == more important group).
    return cmp(item1[1], item2[1])
def _toplevel(overlay):
    # type: (str) -> List[str]
    '''Returns sorted list of fullpath directories under overlay/'''
    candidates = []  # type: List[Tuple[str, int]]
    for entry in os.listdir(overlay):
        fullpath = os.path.join(overlay, entry)
        try:
            rank = synctool.param.MY_GROUPS.index(entry)
        except ValueError:
            # directory is named after a group this node is not in
            verbose('%s/ is not one of my groups, skipping' %
                    prettypath(fullpath))
            continue
        candidates.append((fullpath, rank))
    candidates.sort(_sort_by_importance)
    # keep only the directory names, most important group first
    return [path for path, _unused in candidates]
def _group_all():
    # type: () -> int
    '''Return the importance level of group 'all' '''
    # 'all' is by construction the final (least important) entry
    # in MY_GROUPS, so its importance equals the last index
    return len(synctool.param.MY_GROUPS) - 1
def _split_extension(filename, src_dir):
    # type: (str, str) -> Tuple[SyncObject, int]
    '''filename in the overlay tree, without leading path
    src_dir is passed for the purpose of printing error messages
    Returns tuple: SyncObject, importance
    Returns (None, -1) for entries that are invalid or not for this node.
    '''
    (name, ext) = os.path.splitext(filename)
    if not ext:
        # no extension at all: entry applies to group 'all'
        return SyncObject(filename, name, OV_NO_EXT), _group_all()

    if ext == '.pre':
        # it's a generic .pre script
        return SyncObject(filename, name, OV_PRE), _group_all()

    if ext == '.post':
        (name2, ext) = os.path.splitext(name)
        if ext == '._template':
            # it's a generic template generator
            return SyncObject(filename, name, OV_TEMPLATE_POST), _group_all()

        # it's a generic .post script
        return SyncObject(filename, name, OV_POST), _group_all()

    if ext[:2] != '._':
        # a dotted suffix that is not a group tag (e.g. '.conf'):
        # the whole filename is the destination name
        return SyncObject(filename, filename, OV_NO_EXT), _group_all()

    ext = ext[2:]
    if not ext:
        # filename ends in a bare '._'
        return SyncObject(filename, filename, OV_NO_EXT), _group_all()

    if ext == 'template':
        return SyncObject(filename, name, OV_TEMPLATE), _group_all()

    try:
        importance = synctool.param.MY_GROUPS.index(ext)
    except ValueError:
        if ext not in synctool.param.ALL_GROUPS:
            # the group tag is not declared anywhere: report it
            src_path = os.path.join(src_dir, filename)
            if synctool.param.TERSE:
                terse(synctool.lib.TERSE_ERROR, ('invalid group on %s' %
                                                 src_path))
            else:
                warning('unknown group on %s, skipped' % prettypath(src_path))
            return None, -1

        # it is not one of my groups
        verbose('skipping %s, it is not one of my groups' %
                prettypath(os.path.join(src_dir, filename)))
        return None, -1

    # strip a possible second extension to detect group-specific
    # .pre / .post / ._template entries
    (name2, ext) = os.path.splitext(name)

    if ext == '.pre':
        # register group-specific .pre script
        return SyncObject(filename, name2, OV_PRE), importance
    elif ext == '.post':
        _, ext = os.path.splitext(name2)
        if ext == '._template':
            # it's a group-specific template generator
            return (SyncObject(filename, name2, OV_TEMPLATE_POST), importance)

        # register group-specific .post script
        return SyncObject(filename, name2, OV_POST), importance
    elif ext == '._template':
        return SyncObject(filename, name2, OV_TEMPLATE), importance

    return SyncObject(filename, name), importance
def _sort_by_importance_post_first(item1, item2):
    # type: (Tuple[SyncObject, int], Tuple[SyncObject, int]) -> int
    '''sort by importance, but always put .pre/.post scripts first'''
    # The ordering is by object class first: .pre scripts, then .post
    # scripts, then ._template.post generators, then ._template files,
    # and finally regular entries. Within the same class the numeric
    # importance decides (ascending). This rank table reproduces the
    # original cascade of pairwise comparisons exactly.
    precedence = {OV_PRE: 0, OV_POST: 1, OV_TEMPLATE_POST: 2, OV_TEMPLATE: 3}
    obj1, importance1 = item1
    obj2, importance2 = item2
    rank1 = precedence.get(obj1.ov_type, 4)
    rank2 = precedence.get(obj2.ov_type, 4)
    if rank1 != rank2:
        return cmp(rank1, rank2)
    return cmp(importance1, importance2)
def _walk_subtree(src_dir, dest_dir, duplicates, callback):
    # type: (str, str, Set[str], Callable[[SyncObject, Dict[str, str], Dict[str, str]], Tuple[bool, bool]]) -> Tuple[bool, bool]
    '''walk subtree under overlay/group/

    duplicates is a set that keeps us from selecting any duplicate matches
    Returns pair of booleans: ok, dir was updated
    '''
    # verbose('_walk_subtree(%s)' % src_dir)

    # first pass: collect (SyncObject, importance) pairs for every
    # directory entry that is not ignored and applies to this node
    arr = []
    for entry in os.listdir(src_dir):
        if entry in synctool.param.IGNORE_FILES:
            verbose('ignoring %s' % prettypath(os.path.join(src_dir, entry)))
            continue

        # check any ignored files with wildcards
        # before any group extension is examined
        wildcard_match = False
        for wildcard_entry in synctool.param.IGNORE_FILES_WITH_WILDCARDS:
            if fnmatch.fnmatchcase(entry, wildcard_entry):
                wildcard_match = True
                verbose('ignoring %s (pattern match)' %
                        prettypath(os.path.join(src_dir, entry)))
                break
        if wildcard_match:
            continue

        obj, importance = _split_extension(entry, src_dir)
        if not obj:
            continue

        arr.append((obj, importance))

    # sort with .pre and .post scripts first
    # this ensures that post_dict will have the required script when needed
    arr.sort(_sort_by_importance_post_first)

    pre_dict = {}   # type: Dict[str, str]
    post_dict = {}  # type: Dict[str, str]
    dir_changed = False

    # second pass: first come, first served — the most important source
    # for each destination wins; later duplicates are skipped
    for obj, importance in arr:
        obj.make(src_dir, dest_dir)

        if obj.ov_type == OV_PRE:
            # register the .pre script and continue
            if obj.dest_path in pre_dict:
                continue

            pre_dict[obj.dest_path] = obj.src_path
            continue

        if obj.ov_type == OV_POST:
            # register the .post script and continue
            if obj.dest_path in post_dict:
                continue

            post_dict[obj.dest_path] = obj.src_path
            continue

        if obj.ov_type == OV_TEMPLATE_POST:
            # register the template generator and continue
            # put the dest for the template in the overlay (source) dir
            obj.dest_path = os.path.join(os.path.dirname(obj.src_path),
                                         os.path.basename(obj.dest_path))
            if obj.dest_path in post_dict:
                continue

            post_dict[obj.dest_path] = obj.src_path
            continue

        if obj.src_stat.is_dir():
            if synctool.param.IGNORE_DOTDIRS:
                name = os.path.basename(obj.src_path)
                if name[0] == '.':
                    verbose('ignoring dotdir %s' % obj.print_src())
                    continue

            updated = False
            if obj.dest_path not in duplicates:
                # this is the most important source for this dir
                duplicates.add(obj.dest_path)

                # run callback on the directory itself
                # this will create or fix directory entry if needed
                # a .pre script may be run
                # a .post script should not be run
                ok, updated = callback(obj, pre_dict, {})
                if not ok:
                    # quick exit
                    return False, dir_changed

            # recurse down into the directory
            # with empty pre_dict and post_dict parameters
            ok, updated2 = _walk_subtree(obj.src_path, obj.dest_path,
                                         duplicates, callback)
            if not ok:
                # quick exit
                return False, dir_changed

            # we still need to run the .post script on the dir (if any)
            if updated or updated2:
                obj.run_script(post_dict)

            # finished checking directory
            continue

        if synctool.param.IGNORE_DOTFILES:
            name = os.path.basename(obj.src_path)
            if name[0] == '.':
                verbose('ignoring dotfile %s' % obj.print_src())
                continue

        if synctool.param.REQUIRE_EXTENSION and obj.ov_type == OV_NO_EXT:
            if synctool.param.TERSE:
                terse(synctool.lib.TERSE_ERROR, ('no group on %s' %
                                                 obj.src_path))
            else:
                warning('no group extension on %s, skipped' % obj.print_src())
            continue

        if obj.dest_path in duplicates:
            # there already was a more important source for this destination
            continue

        duplicates.add(obj.dest_path)

        ok, updated = callback(obj, pre_dict, post_dict)
        if not ok:
            # quick exit
            return False, dir_changed

        if obj.ov_type == OV_IGNORE:
            # OV_IGNORE may be set by templates that didn't finish
            continue

        if obj.ov_type == OV_TEMPLATE:
            # a new file was generated
            # call callback on the generated file
            obj.ov_type = OV_REG
            obj.make(src_dir, dest_dir)

            ok, updated = callback(obj, pre_dict, post_dict)
            if not ok:
                # quick exit
                return False, dir_changed

        if updated:
            dir_changed = True

    return True, dir_changed
def visit(overlay, callback):
    # type: (str, Callable[[SyncObject, Dict[str, str], Dict[str, str]], Tuple[bool, bool]]) -> None
    '''visit all entries in the overlay tree

    overlay is either synctool.param.OVERLAY_DIR or synctool.param.DELETE_DIR
    callback will be called with arguments: (SyncObject, pre_dict, post_dict)
    callback must return two booleans: ok, updated
    '''
    handled = set()  # type: Set[str]
    # walk the per-group trees from most to least important; 'handled'
    # is shared so a less important tree cannot override an earlier pick
    for group_dir in _toplevel(overlay):
        ok, _ = _walk_subtree(group_dir, os.sep, handled, callback)
        if not ok:
            # the callback signalled an error; stop walking
            break
| gpl-2.0 |
hachreak/invenio-demosite | invenio_demosite/testsuite/regression/test_bibauthority.py | 7 | 4791 | # -*- coding: utf-8 -*-
#
# This file is part of Invenio Demosite.
# Copyright (C) 2006, 2007, 2008, 2010, 2011, 2013 CERN.
#
# Invenio Demosite is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio Demosite is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""BibAuthority Regression Test Suite."""
__revision__ = "$Id$"
from invenio.legacy.bibauthority.config import \
CFG_BIBAUTHORITY_RECORD_CONTROL_NUMBER_FIELD, \
CFG_BIBAUTHORITY_TYPE_NAMES, \
CFG_BIBAUTHORITY_PREFIX_SEP
from invenio.testsuite import make_test_suite, run_test_suite, \
InvenioTestCase
from invenio.base.wrappers import lazy_import
is_authority_record = lazy_import('invenio.legacy.bibauthority.engine:is_authority_record')
get_dependent_records_for_control_no = lazy_import('invenio.legacy.bibauthority.engine:get_dependent_records_for_control_no')
get_dependent_records_for_recID = lazy_import('invenio.legacy.bibauthority.engine:get_dependent_records_for_recID')
guess_authority_types = lazy_import('invenio.legacy.bibauthority.engine:guess_authority_types')
get_low_level_recIDs_from_control_no = lazy_import('invenio.legacy.bibauthority.engine:get_low_level_recIDs_from_control_no')
get_control_nos_from_recID = lazy_import('invenio.legacy.bibauthority.engine:get_control_nos_from_recID')
get_index_strings_by_control_no = lazy_import('invenio.legacy.bibauthority.engine:get_index_strings_by_control_no')
guess_main_name_from_authority_recID = lazy_import('invenio.legacy.bibauthority.engine:guess_main_name_from_authority_recID')
get_fieldvalues = lazy_import('invenio.legacy.bibrecord:get_fieldvalues')
class BibAuthorityEngineTest(InvenioTestCase):
    """Regression tests for the BibAuthority engine helper functions."""
    # NOTE(review): the original docstring ("Check BibEdit web pages
    # whether they are up or not.") was a copy-paste leftover from the
    # BibEdit test suite; these tests exercise bibauthority.engine.

    def test_bibauthority_is_authority_record(self):
        """bibauthority - test is_authority_record()"""
        self.assertFalse(is_authority_record(1))
        self.assertTrue(is_authority_record(118))

    def test_bibauthority_get_dependent_records_for_control_no(self):
        """bibauthority - test get_dependent_records_for_control_no()"""
        control_no_field = CFG_BIBAUTHORITY_RECORD_CONTROL_NUMBER_FIELD
        control_nos = get_fieldvalues(118, control_no_field)
        count = 0
        for control_no in control_nos:
            count += len(get_dependent_records_for_control_no(control_no))
        self.assertTrue(count)

    def test_bibauthority_get_dependent_records_for_recID(self):
        """bibauthority - test get_dependent_records_for_recID()"""
        self.assertTrue(len(get_dependent_records_for_recID(118)))

    def test_bibauthority_guess_authority_types(self):
        """bibauthority - test guess_authority_types()"""
        _type = CFG_BIBAUTHORITY_TYPE_NAMES['AUTHOR']
        self.assertEqual(guess_authority_types(118), [_type])

    def test_bibauthority_get_low_level_recIDs(self):
        """bibauthority - test get_low_level_recIDs_from_control_no()"""
        _type = CFG_BIBAUTHORITY_TYPE_NAMES['INSTITUTION']
        control_no = _type + CFG_BIBAUTHORITY_PREFIX_SEP + "(SzGeCERN)iii0002"
        recIDs = [121]
        self.assertEqual(get_low_level_recIDs_from_control_no(control_no),
                         recIDs)

    def test_bibauthority_get_control_nos_from_recID(self):
        """bibauthority - test get_control_nos_from_recID()"""
        self.assertTrue(len(get_control_nos_from_recID(118)))

    def test_bibauthority_guess_main_name(self):
        """bibauthority - test guess_main_name_from_authority_recID()"""
        recID = 118
        main_name = 'Ellis, John'
        self.assertEqual(guess_main_name_from_authority_recID(recID),
                         main_name)

    def test_authority_record_string_by_control_no(self):
        """bibauthority - simple test of get_index_strings_by_control_no()"""
        # vars
        _type = CFG_BIBAUTHORITY_TYPE_NAMES['AUTHOR']
        control_no = _type + CFG_BIBAUTHORITY_PREFIX_SEP + '(SzGeCERN)aaa0005'
        string = 'Ellis, Jonathan Richard'
        # run test
        self.assertTrue(string in get_index_strings_by_control_no(control_no))
# Collect the regression tests into the suite object expected by the
# Invenio test runner.
TEST_SUITE = make_test_suite(
    BibAuthorityEngineTest,
)

if __name__ == "__main__":
    run_test_suite(TEST_SUITE, warn_user=True)
| gpl-2.0 |
kamma/ace_kernel | scripts/rt-tester/rt-tester.py | 904 | 5366 | #!/usr/bin/env python
#
# rt-mutex tester
#
# (C) 2006 Thomas Gleixner <tglx@linutronix.de>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
import os
import sys
import getopt
import shutil
import string
# Globals
quiet = 0
test = 0
comments = 0
sysfsprefix = "/sys/devices/system/rttest/rttest"
statusfile = "/status"
commandfile = "/command"
# Command opcodes
cmd_opcodes = {
"schedother" : "1",
"schedfifo" : "2",
"lock" : "3",
"locknowait" : "4",
"lockint" : "5",
"lockintnowait" : "6",
"lockcont" : "7",
"unlock" : "8",
"lockbkl" : "9",
"unlockbkl" : "10",
"signal" : "11",
"resetevent" : "98",
"reset" : "99",
}
test_opcodes = {
"prioeq" : ["P" , "eq" , None],
"priolt" : ["P" , "lt" , None],
"priogt" : ["P" , "gt" , None],
"nprioeq" : ["N" , "eq" , None],
"npriolt" : ["N" , "lt" , None],
"npriogt" : ["N" , "gt" , None],
"unlocked" : ["M" , "eq" , 0],
"trylock" : ["M" , "eq" , 1],
"blocked" : ["M" , "eq" , 2],
"blockedwake" : ["M" , "eq" , 3],
"locked" : ["M" , "eq" , 4],
"opcodeeq" : ["O" , "eq" , None],
"opcodelt" : ["O" , "lt" , None],
"opcodegt" : ["O" , "gt" , None],
"eventeq" : ["E" , "eq" , None],
"eventlt" : ["E" , "lt" , None],
"eventgt" : ["E" , "gt" , None],
}
def usage():
    """Print command line usage information."""
    print("rt-tester.py <-c -h -q -t> <testfile>")
    print(" -c display comments after first command")
    print(" -h help")
    print(" -q quiet mode")
    print(" -t test mode (syntax check)")
    print(" testfile: read test specification from testfile")
    print(" otherwise from stdin")
def progress(msg):
    """Print *msg* unless quiet mode is enabled.

    The parameter was renamed from ``str`` to ``msg``: the old name
    shadowed the ``str`` builtin.
    """
    if not quiet:
        print(msg)
def analyse(val, top, arg):
    """Compare a status value against a test opcode.

    val -- raw status value string read from the sysfs status file
    top -- test opcode triple [field, relation, fixed-arg] (test_opcodes)
    arg -- argument column from the test line
    Returns 1 when the relation holds, 0 otherwise.
    """
    intval = int(val)
    if top[0] == "M":
        # mutex state: extract digit number `arg` (counted from the
        # right) of the value.  Floor division keeps the behaviour
        # identical under Python 3, where `/` would yield a float.
        intval = intval // (10 ** int(arg))
        intval = intval % 10
        argval = top[2]
    elif top[0] == "O":
        # opcode comparison: arg may be a symbolic command name
        argval = int(cmd_opcodes.get(arg, arg))
    else:
        argval = int(arg)
    if top[1] == "eq" and intval == argval:
        return 1
    if top[1] == "lt" and intval < argval:
        return 1
    if top[1] == "gt" and intval > argval:
        return 1
    return 0
# Parse the commandline
try:
(options, arguments) = getopt.getopt(sys.argv[1:],'chqt')
except getopt.GetoptError, ex:
usage()
sys.exit(1)
# Parse commandline options
for option, value in options:
if option == "-c":
comments = 1
elif option == "-q":
quiet = 1
elif option == "-t":
test = 1
elif option == '-h':
usage()
sys.exit(0)
# Select the input source
if arguments:
try:
fd = open(arguments[0])
except Exception,ex:
sys.stderr.write("File not found %s\n" %(arguments[0]))
sys.exit(1)
else:
fd = sys.stdin
linenr = 0
# Read the test patterns
while 1:
linenr = linenr + 1
line = fd.readline()
if not len(line):
break
line = line.strip()
parts = line.split(":")
if not parts or len(parts) < 1:
continue
if len(parts[0]) == 0:
continue
if parts[0].startswith("#"):
if comments > 1:
progress(line)
continue
if comments == 1:
comments = 2
progress(line)
cmd = parts[0].strip().lower()
opc = parts[1].strip().lower()
tid = parts[2].strip()
dat = parts[3].strip()
try:
# Test or wait for a status value
if cmd == "t" or cmd == "w":
testop = test_opcodes[opc]
fname = "%s%s%s" %(sysfsprefix, tid, statusfile)
if test:
print fname
continue
while 1:
query = 1
fsta = open(fname, 'r')
status = fsta.readline().strip()
fsta.close()
stat = status.split(",")
for s in stat:
s = s.strip()
if s.startswith(testop[0]):
# Seperate status value
val = s[2:].strip()
query = analyse(val, testop, dat)
break
if query or cmd == "t":
break
progress(" " + status)
if not query:
sys.stderr.write("Test failed in line %d\n" %(linenr))
sys.exit(1)
# Issue a command to the tester
elif cmd == "c":
cmdnr = cmd_opcodes[opc]
# Build command string and sys filename
cmdstr = "%s:%s" %(cmdnr, dat)
fname = "%s%s%s" %(sysfsprefix, tid, commandfile)
if test:
print fname
continue
fcmd = open(fname, 'w')
fcmd.write(cmdstr)
fcmd.close()
except Exception,ex:
sys.stderr.write(str(ex))
sys.stderr.write("\nSyntax error in line %d\n" %(linenr))
if not test:
fd.close()
sys.exit(1)
# Normal exit pass
print "Pass"
sys.exit(0)
| gpl-2.0 |
"""
This example makes custom 'pie charts' as the markers for a scatter plot.

Thanks to Manuel Metz for the example.
"""
import math

import numpy as np
import matplotlib.pyplot as plt

# first define the ratios
r1 = 0.2       # 20%
r2 = r1 + 0.4  # 40%

# define some sizes of the scatter marker
sizes = [60, 80, 120]


def _pie_slice(theta1, theta2):
    """Return marker vertices for a pie slice spanning ``theta1``..``theta2``
    (radians): the origin followed by 10 points on the unit circle."""
    x = [0] + np.cos(np.linspace(theta1, theta2, 10)).tolist()
    y = [0] + np.sin(np.linspace(theta1, theta2, 10)).tolist()
    return list(zip(x, y))


# the three slices together cover the full circle
xy1 = _pie_slice(0, 2 * math.pi * r1)
xy2 = _pie_slice(2 * math.pi * r1, 2 * math.pi * r2)
xy3 = _pie_slice(2 * math.pi * r2, 2 * math.pi)

fig, ax = plt.subplots()
for xy, color in ((xy1, 'blue'), (xy2, 'green'), (xy3, 'red')):
    ax.scatter(np.arange(3), np.arange(3), marker=(xy, 0), s=sizes,
               facecolor=color)
plt.show()
| mit |
philipbl/home-assistant | tests/util/test_distance.py | 42 | 3525 | """Test homeassistant distance utility functions."""
import unittest
import homeassistant.util.distance as distance_util
from homeassistant.const import (LENGTH_KILOMETERS, LENGTH_METERS, LENGTH_FEET,
LENGTH_MILES)
INVALID_SYMBOL = 'bob'
VALID_SYMBOL = LENGTH_KILOMETERS
class TestDistanceUtil(unittest.TestCase):
    """Test the distance utility functions."""

    def test_convert_same_unit(self):
        """Test conversion from any unit to same unit."""
        same_unit_cases = (
            (5, LENGTH_KILOMETERS),
            (2, LENGTH_METERS),
            (10, LENGTH_MILES),
            (9, LENGTH_FEET),
        )
        for value, unit in same_unit_cases:
            self.assertEqual(value, distance_util.convert(value, unit, unit))

    def test_convert_invalid_unit(self):
        """Test exception is thrown for invalid units."""
        for from_unit, to_unit in ((INVALID_SYMBOL, VALID_SYMBOL),
                                   (VALID_SYMBOL, INVALID_SYMBOL)):
            with self.assertRaises(ValueError):
                distance_util.convert(5, from_unit, to_unit)

    def test_convert_nonnumeric_value(self):
        """Test exception is thrown for nonnumeric type."""
        with self.assertRaises(TypeError):
            distance_util.convert('a', LENGTH_KILOMETERS, LENGTH_METERS)

    def test_convert_from_miles(self):
        """Test conversion from miles to other units."""
        miles = 5
        expected = (
            (LENGTH_KILOMETERS, 8.04672),
            (LENGTH_METERS, 8046.72),
            (LENGTH_FEET, 26400.0008448),
        )
        for to_unit, value in expected:
            self.assertEqual(
                distance_util.convert(miles, LENGTH_MILES, to_unit), value)

    def test_convert_from_feet(self):
        """Test conversion from feet to other units."""
        feet = 5000
        expected = (
            (LENGTH_KILOMETERS, 1.524),
            (LENGTH_METERS, 1524),
            (LENGTH_MILES, 0.9469694040000001),
        )
        for to_unit, value in expected:
            self.assertEqual(
                distance_util.convert(feet, LENGTH_FEET, to_unit), value)

    def test_convert_from_kilometers(self):
        """Test conversion from kilometers to other units."""
        km = 5
        expected = (
            (LENGTH_FEET, 16404.2),
            (LENGTH_METERS, 5000),
            (LENGTH_MILES, 3.106855),
        )
        for to_unit, value in expected:
            self.assertEqual(
                distance_util.convert(km, LENGTH_KILOMETERS, to_unit), value)

    def test_convert_from_meters(self):
        """Test conversion from meters to other units."""
        meters = 5000
        expected = (
            (LENGTH_FEET, 16404.2),
            (LENGTH_KILOMETERS, 5),
            (LENGTH_MILES, 3.106855),
        )
        for to_unit, value in expected:
            self.assertEqual(
                distance_util.convert(meters, LENGTH_METERS, to_unit), value)
| mit |
msingh172/youtube-dl | youtube_dl/extractor/soundcloud.py | 23 | 17239 | # encoding: utf-8
from __future__ import unicode_literals
import re
import itertools
from .common import InfoExtractor
from ..compat import (
compat_str,
compat_urlparse,
compat_urllib_parse,
)
from ..utils import (
ExtractorError,
int_or_none,
unified_strdate,
)
class SoundcloudIE(InfoExtractor):
    """Information extractor for soundcloud.com

    To access the media, the uid of the song and a stream token
    must be extracted from the page source and the script must make
    a request to media.soundcloud.com/crossdomain.xml. Then
    the media can be grabbed by requesting from an url composed
    of the stream token and uid
    """
    # Accepted URL shapes (alternation, top to bottom):
    #  1. a public track page: soundcloud.com/<uploader>/<title>[/<token>]
    #     (excluding listing pages such as /tracks, /sets, /likes, ...)
    #  2. a direct API track URL: api.soundcloud.com/tracks/<id>
    #     optionally carrying ?secret_token=... for private tracks
    #  3. an embedded-player URL whose real target is in the query string
    _VALID_URL = r'''(?x)^(?:https?://)?
                    (?:(?:(?:www\.|m\.)?soundcloud\.com/
                            (?P<uploader>[\w\d-]+)/
                            (?!(?:tracks|sets(?:/[^/?#]+)?|reposts|likes|spotlight)/?(?:$|[?#]))
                            (?P<title>[\w\d-]+)/?
                            (?P<token>[^?]+?)?(?:[?].*)?$)
                       |(?:api\.soundcloud\.com/tracks/(?P<track_id>\d+)
                          (?:/?\?secret_token=(?P<secret_token>[^&]+))?)
                       |(?P<player>(?:w|player|p.)\.soundcloud\.com/player/?.*?url=.*)
                    )
                    '''
    IE_NAME = 'soundcloud'
    _TESTS = [
        {
            'url': 'http://soundcloud.com/ethmusic/lostin-powers-she-so-heavy',
            'md5': 'ebef0a451b909710ed1d7787dddbf0d7',
            'info_dict': {
                'id': '62986583',
                'ext': 'mp3',
                'upload_date': '20121011',
                'description': 'No Downloads untill we record the finished version this weekend, i was too pumped n i had to post it , earl is prolly gonna b hella p.o\'d',
                'uploader': 'E.T. ExTerrestrial Music',
                'title': 'Lostin Powers - She so Heavy (SneakPreview) Adrian Ackers Blueprint 1',
                'duration': 143,
            }
        },
        # not streamable song
        {
            'url': 'https://soundcloud.com/the-concept-band/goldrushed-mastered?in=the-concept-band/sets/the-royal-concept-ep',
            'info_dict': {
                'id': '47127627',
                'ext': 'mp3',
                'title': 'Goldrushed',
                'description': 'From Stockholm Sweden\r\nPovel / Magnus / Filip / David\r\nwww.theroyalconcept.com',
                'uploader': 'The Royal Concept',
                'upload_date': '20120521',
                'duration': 227,
            },
            'params': {
                # rtmp
                'skip_download': True,
            },
        },
        # private link
        {
            'url': 'https://soundcloud.com/jaimemf/youtube-dl-test-video-a-y-baw/s-8Pjrp',
            'md5': 'aa0dd32bfea9b0c5ef4f02aacd080604',
            'info_dict': {
                'id': '123998367',
                'ext': 'mp3',
                'title': 'Youtube - Dl Test Video \'\' Ä↭',
                'uploader': 'jaimeMF',
                'description': 'test chars: \"\'/\\ä↭',
                'upload_date': '20131209',
                'duration': 9,
            },
        },
        # private link (alt format)
        {
            'url': 'https://api.soundcloud.com/tracks/123998367?secret_token=s-8Pjrp',
            'md5': 'aa0dd32bfea9b0c5ef4f02aacd080604',
            'info_dict': {
                'id': '123998367',
                'ext': 'mp3',
                'title': 'Youtube - Dl Test Video \'\' Ä↭',
                'uploader': 'jaimeMF',
                'description': 'test chars: \"\'/\\ä↭',
                'upload_date': '20131209',
                'duration': 9,
            },
        },
        # downloadable song
        {
            'url': 'https://soundcloud.com/oddsamples/bus-brakes',
            'md5': '7624f2351f8a3b2e7cd51522496e7631',
            'info_dict': {
                'id': '128590877',
                'ext': 'mp3',
                'title': 'Bus Brakes',
                'description': 'md5:0053ca6396e8d2fd7b7e1595ef12ab66',
                'uploader': 'oddsamples',
                'upload_date': '20140109',
                'duration': 17,
            },
        },
    ]

    # API client ids sent with every request (hard-coded in the site player).
    _CLIENT_ID = 'b45b1aa10f1ac2941910a7f0d10f8e28'
    _IPHONE_CLIENT_ID = '376f225bf427445fc4bfb6b99b72e0bf'

    def report_resolve(self, video_id):
        """Report information extraction."""
        self.to_screen('%s: Resolving id' % video_id)

    @classmethod
    def _resolv_url(cls, url):
        # The resolve endpoint maps a public page URL to its API JSON record.
        return 'http://api.soundcloud.com/resolve.json?url=' + url + '&client_id=' + cls._CLIENT_ID

    def _extract_info_dict(self, info, full_title=None, quiet=False, secret_token=None):
        """Build a youtube-dl info dict from a SoundCloud API track record.

        ``info`` is the decoded JSON track object; ``secret_token`` is
        forwarded to the streams endpoint for private tracks.
        """
        track_id = compat_str(info['id'])
        name = full_title or track_id
        # NOTE(review): extraction is reported only when ``quiet`` is truthy,
        # which reads inverted — confirm intent against callers.
        if quiet:
            self.report_extraction(name)

        thumbnail = info['artwork_url']
        if thumbnail is not None:
            # request the 500x500 artwork variant instead of the default
            thumbnail = thumbnail.replace('-large', '-t500x500')
        ext = 'mp3'
        result = {
            'id': track_id,
            'uploader': info['user']['username'],
            'upload_date': unified_strdate(info['created_at']),
            'title': info['title'],
            'description': info['description'],
            'thumbnail': thumbnail,
            'duration': int_or_none(info.get('duration'), 1000),
            'webpage_url': info.get('permalink_url'),
        }
        formats = []
        if info.get('downloadable', False):
            # We can build a direct link to the song
            format_url = (
                'https://api.soundcloud.com/tracks/{0}/download?client_id={1}'.format(
                    track_id, self._CLIENT_ID))
            formats.append({
                'format_id': 'download',
                'ext': info.get('original_format', 'mp3'),
                'url': format_url,
                'vcodec': 'none',
                'preference': 10,
            })

        # We have to retrieve the url
        streams_url = ('http://api.soundcloud.com/i1/tracks/{0}/streams?'
                       'client_id={1}&secret_token={2}'.format(track_id, self._IPHONE_CLIENT_ID, secret_token))
        format_dict = self._download_json(
            streams_url,
            track_id, 'Downloading track url')

        # Each key names a delivery method ('http_mp3_...', 'rtmp_...').
        for key, stream_url in format_dict.items():
            if key.startswith('http'):
                formats.append({
                    'format_id': key,
                    'ext': ext,
                    'url': stream_url,
                    'vcodec': 'none',
                })
            elif key.startswith('rtmp'):
                # The url doesn't have an rtmp app, we have to extract the playpath
                url, path = stream_url.split('mp3:', 1)
                formats.append({
                    'format_id': key,
                    'url': url,
                    'play_path': 'mp3:' + path,
                    'ext': 'flv',
                    'vcodec': 'none',
                })

        if not formats:
            # We fallback to the stream_url in the original info, this
            # cannot be always used, sometimes it can give an HTTP 404 error
            formats.append({
                'format_id': 'fallback',
                'url': info['stream_url'] + '?client_id=' + self._CLIENT_ID,
                'ext': ext,
                'vcodec': 'none',
            })

        for f in formats:
            if f['format_id'].startswith('http'):
                f['protocol'] = 'http'
            if f['format_id'].startswith('rtmp'):
                f['protocol'] = 'rtmp'

        self._check_formats(formats, track_id)
        self._sort_formats(formats)
        result['formats'] = formats

        return result

    def _real_extract(self, url):
        mobj = re.match(self._VALID_URL, url, flags=re.VERBOSE)
        if mobj is None:
            raise ExtractorError('Invalid URL: %s' % url)

        track_id = mobj.group('track_id')
        token = None
        if track_id is not None:
            # direct API URL: query the track record by numeric id
            info_json_url = 'http://api.soundcloud.com/tracks/' + track_id + '.json?client_id=' + self._CLIENT_ID
            full_title = track_id
            token = mobj.group('secret_token')
            if token:
                info_json_url += "&secret_token=" + token
        elif mobj.group('player'):
            # embedded-player URL: delegate to the URL carried in the query
            query = compat_urlparse.parse_qs(compat_urlparse.urlparse(url).query)
            real_url = query['url'][0]
            # If the token is in the query of the original url we have to
            # manually add it
            if 'secret_token' in query:
                real_url += '?secret_token=' + query['secret_token'][0]
            return self.url_result(real_url)
        else:
            # extract uploader (which is in the url)
            uploader = mobj.group('uploader')
            # extract simple title (uploader + slug of song title)
            slug_title = mobj.group('title')
            token = mobj.group('token')
            full_title = resolve_title = '%s/%s' % (uploader, slug_title)
            if token:
                resolve_title += '/%s' % token

            self.report_resolve(full_title)

            url = 'http://soundcloud.com/%s' % resolve_title
            info_json_url = self._resolv_url(url)
        info = self._download_json(info_json_url, full_title, 'Downloading info JSON')

        return self._extract_info_dict(info, full_title, secret_token=token)
class SoundcloudSetIE(SoundcloudIE):
    _VALID_URL = r'https?://(?:(?:www|m)\.)?soundcloud\.com/(?P<uploader>[\w\d-]+)/sets/(?P<slug_title>[\w\d-]+)(?:/(?P<token>[^?/]+))?'
    IE_NAME = 'soundcloud:set'
    _TESTS = [{
        'url': 'https://soundcloud.com/the-concept-band/sets/the-royal-concept-ep',
        'info_dict': {
            'id': '2284613',
            'title': 'The Royal Concept EP',
        },
        'playlist_mincount': 6,
    }]

    def _real_extract(self, url):
        """Resolve a SoundCloud set page into a playlist of its tracks."""
        match = re.match(self._VALID_URL, url)
        uploader = match.group('uploader')
        slug_title = match.group('slug_title')
        token = match.group('token')

        full_title = '%s/sets/%s' % (uploader, slug_title)
        url = 'http://soundcloud.com/%s/sets/%s' % (uploader, slug_title)
        if token:
            # private sets carry a trailing access token
            full_title = full_title + '/' + token
            url = url + '/' + token

        self.report_resolve(full_title)
        info = self._download_json(self._resolv_url(url), full_title)

        if 'errors' in info:
            messages = [compat_str(err['error_message']) for err in info['errors']]
            raise ExtractorError(
                'unable to download video webpage: %s' % ','.join(messages))

        entries = []
        for track in info['tracks']:
            entries.append(self.url_result(track['permalink_url'], 'Soundcloud'))

        return {
            '_type': 'playlist',
            'entries': entries,
            'id': '%s' % info['id'],
            'title': info['title'],
        }
class SoundcloudUserIE(SoundcloudIE):
    """Extract all tracks/playlists of a user profile (optionally filtered
    by a resource listing: tracks, sets, reposts, likes or spotlight)."""
    _VALID_URL = r'''(?x)
                        https?://
                            (?:(?:www|m)\.)?soundcloud\.com/
                            (?P<user>[^/]+)
                            (?:/
                                (?P<rsrc>tracks|sets|reposts|likes|spotlight)
                            )?
                            /?(?:[?#].*)?$
                    '''
    IE_NAME = 'soundcloud:user'
    _TESTS = [{
        'url': 'https://soundcloud.com/the-akashic-chronicler',
        'info_dict': {
            'id': '114582580',
            'title': 'The Akashic Chronicler (All)',
        },
        'playlist_mincount': 112,
    }, {
        'url': 'https://soundcloud.com/the-akashic-chronicler/tracks',
        'info_dict': {
            'id': '114582580',
            'title': 'The Akashic Chronicler (Tracks)',
        },
        'playlist_mincount': 50,
    }, {
        'url': 'https://soundcloud.com/the-akashic-chronicler/sets',
        'info_dict': {
            'id': '114582580',
            'title': 'The Akashic Chronicler (Playlists)',
        },
        'playlist_mincount': 3,
    }, {
        'url': 'https://soundcloud.com/the-akashic-chronicler/reposts',
        'info_dict': {
            'id': '114582580',
            'title': 'The Akashic Chronicler (Reposts)',
        },
        'playlist_mincount': 9,
    }, {
        'url': 'https://soundcloud.com/the-akashic-chronicler/likes',
        'info_dict': {
            'id': '114582580',
            'title': 'The Akashic Chronicler (Likes)',
        },
        'playlist_mincount': 333,
    }, {
        'url': 'https://soundcloud.com/grynpyret/spotlight',
        'info_dict': {
            'id': '7098329',
            'title': 'Grynpyret (Spotlight)',
        },
        'playlist_mincount': 1,
    }]

    _API_BASE = 'https://api.soundcloud.com'
    _API_V2_BASE = 'https://api-v2.soundcloud.com'

    # resource name -> API listing URL template ('%s' is filled with user id)
    _BASE_URL_MAP = {
        'all': '%s/profile/soundcloud:users:%%s' % _API_V2_BASE,
        'tracks': '%s/users/%%s/tracks' % _API_BASE,
        'sets': '%s/users/%%s/playlists' % _API_V2_BASE,
        'reposts': '%s/profile/soundcloud:users:%%s/reposts' % _API_V2_BASE,
        'likes': '%s/users/%%s/likes' % _API_V2_BASE,
        'spotlight': '%s/users/%%s/spotlight' % _API_V2_BASE,
    }

    # resource name -> human-readable suffix used in the playlist title
    _TITLE_MAP = {
        'all': 'All',
        'tracks': 'Tracks',
        'sets': 'Playlists',
        'reposts': 'Reposts',
        'likes': 'Likes',
        'spotlight': 'Spotlight',
    }

    def _real_extract(self, url):
        mobj = re.match(self._VALID_URL, url)
        uploader = mobj.group('user')

        url = 'http://soundcloud.com/%s/' % uploader
        resolv_url = self._resolv_url(url)
        # resolve the profile page to obtain the numeric user id
        user = self._download_json(
            resolv_url, uploader, 'Downloading user info')

        resource = mobj.group('rsrc') or 'all'
        base_url = self._BASE_URL_MAP[resource] % user['id']

        # Paginate: follow the API's 'next_href' when provided, otherwise
        # fall back to manual offset/limit pagination.
        next_href = None
        entries = []
        for i in itertools.count():
            if not next_href:
                data = compat_urllib_parse.urlencode({
                    'offset': i * 50,
                    'limit': 50,
                    'client_id': self._CLIENT_ID,
                    'linked_partitioning': '1',
                    'representation': 'speedy',
                })
                next_href = base_url + '?' + data

            response = self._download_json(
                next_href, uploader, 'Downloading track page %s' % (i + 1))

            collection = response['collection']
            if not collection:
                self.to_screen('%s: End page received' % uploader)
                break

            def resolve_permalink_url(candidates):
                # Return the first candidate dict carrying a usable
                # http(s) 'permalink_url' (entries may wrap the actual
                # track/playlist object).
                for cand in candidates:
                    if isinstance(cand, dict):
                        permalink_url = cand.get('permalink_url')
                        if permalink_url and permalink_url.startswith('http'):
                            return permalink_url

            for e in collection:
                permalink_url = resolve_permalink_url((e, e.get('track'), e.get('playlist')))
                if permalink_url:
                    entries.append(self.url_result(permalink_url))

            if 'next_href' in response:
                next_href = response['next_href']
                if not next_href:
                    break
            else:
                next_href = None

        return {
            '_type': 'playlist',
            'id': compat_str(user['id']),
            'title': '%s (%s)' % (user['username'], self._TITLE_MAP[resource]),
            'entries': entries,
        }
class SoundcloudPlaylistIE(SoundcloudIE):
    _VALID_URL = r'https?://api\.soundcloud\.com/playlists/(?P<id>[0-9]+)(?:/?\?secret_token=(?P<token>[^&]+?))?$'
    IE_NAME = 'soundcloud:playlist'
    _TESTS = [{
        'url': 'http://api.soundcloud.com/playlists/4110309',
        'info_dict': {
            'id': '4110309',
            'title': 'TILT Brass - Bowery Poetry Club, August \'03 [Non-Site SCR 02]',
            'description': 're:.*?TILT Brass - Bowery Poetry Club',
        },
        'playlist_count': 6,
    }]

    def _real_extract(self, url):
        """Fetch a playlist record from the API and return its tracks."""
        match = re.match(self._VALID_URL, url)
        playlist_id = match.group('id')
        base_url = '%s//api.soundcloud.com/playlists/%s.json?' % (self.http_scheme(), playlist_id)

        query = {
            'client_id': self._CLIENT_ID,
        }
        token = match.group('token')
        if token:
            # private playlists require the secret token
            query['secret_token'] = token

        data = self._download_json(
            base_url + compat_urllib_parse.urlencode(query),
            playlist_id, 'Downloading playlist')

        entries = []
        for track in data['tracks']:
            entries.append(self.url_result(track['permalink_url'], 'Soundcloud'))

        return {
            '_type': 'playlist',
            'id': playlist_id,
            'title': data.get('title'),
            'description': data.get('description'),
            'entries': entries,
        }
| unlicense |
gdub/mysql-connector-python | examples/transaction.py | 15 | 4010 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# MySQL Connector/Python - MySQL driver written in Python.
# Copyright (c) 2009, 2014, Oracle and/or its affiliates. All rights reserved.
# MySQL Connector/Python is licensed under the terms of the GPLv2
# <http://www.gnu.org/licenses/old-licenses/gpl-2.0.html>, like most
# MySQL Connectors. There are special exceptions to the terms and
# conditions of the GPLv2 as it is applied to this software, see the
# FOSS License Exception
# <http://www.mysql.com/about/legal/licensing/foss-exception.html>.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
"""
Example using MySQL Connector/Python showing:
* dropping and creating a table
* using warnings
* doing a transaction, rolling it back and committing one.
"""
import mysql.connector
def main(config):
    """Demonstrate transaction rollback and commit with MySQL/InnoDB.

    Creates a scratch ``names`` table, inserts rows, rolls the insert
    back, re-inserts and commits, then drops the table again.

    Args:
        config: keyword arguments for ``mysql.connector.Connect``.

    Returns:
        A list of strings describing each step (always a list, so the
        caller can safely ``'\\n'.join(...)`` it).
    """
    output = []
    db = mysql.connector.Connect(**config)
    cursor = db.cursor()

    # Drop table if exists, and create it new
    stmt_drop = "DROP TABLE IF EXISTS names"
    cursor.execute(stmt_drop)

    stmt_create = """
    CREATE TABLE names (
        id TINYINT UNSIGNED NOT NULL AUTO_INCREMENT,
        name VARCHAR(30) DEFAULT '' NOT NULL,
        cnt TINYINT UNSIGNED DEFAULT 0,
        PRIMARY KEY (id)
    ) ENGINE=InnoDB"""
    cursor.execute(stmt_create)

    warnings = cursor.fetchwarnings()
    if warnings:
        # Warning 1266 means the table silently fell back to MyISAM,
        # which has no transaction support -- the demo cannot continue.
        ids = [i for (l, i, m) in warnings]
        output.append("Oh oh.. we got warnings..")
        if 1266 in ids:
            output.append("""
    Table was created as MYISAM, no transaction support.
    Bailing out, no use to continue. Make sure InnoDB is available!
    """)
            db.close()
            # BUGFIX: return the collected output instead of bare
            # ``return`` (None) so the caller's '\n'.join() doesn't crash.
            return output

    # Insert 3 records
    output.append("Inserting data")
    names = (('Geert',), ('Jan',), ('Michel',))
    stmt_insert = "INSERT INTO names (name) VALUES (%s)"
    cursor.executemany(stmt_insert, names)

    # Roll back!!!!
    output.append("Rolling back transaction")
    db.rollback()

    # There should be no data!
    stmt_select = "SELECT id, name FROM names ORDER BY id"
    cursor.execute(stmt_select)
    rows = cursor.fetchall()
    if rows == []:
        output.append("No data, all is fine.")
    else:
        output.append("Something is wrong, we have data although we rolled back!")
        # BUGFIX: stringify the rows; appending the raw list would break
        # the caller's '\n'.join() over this list of strings.
        output.append(str(rows))
        cursor.close()
        db.close()
        return output

    # Do the insert again.
    cursor.executemany(stmt_insert, names)

    # Data should be already there
    cursor.execute(stmt_select)
    output.append("Data before commit:")
    for row in cursor.fetchall():
        output.append("%d | %s" % (row[0], row[1]))

    # Do a commit
    db.commit()

    cursor.execute(stmt_select)
    output.append("Data after commit:")
    for row in cursor.fetchall():
        output.append("%d | %s" % (row[0], row[1]))

    # Cleaning up, dropping the table again
    cursor.execute(stmt_drop)

    cursor.close()
    db.close()
    return output
if __name__ == '__main__':
    # Connection settings for the demo; adjust to your local server.
    connection_settings = {
        'host': 'localhost',
        'port': 3306,
        'user': 'root',
        'password': '',
        'database': 'test',
        'charset': 'utf8',
        'use_unicode': True,
        'get_warnings': True,
    }
    print('\n'.join(main(connection_settings)))
| gpl-2.0 |
palladius/gcloud | packages/gsutil/boto/tests/unit/ec2/test_address.py | 10 | 1671 | import mock
import unittest
from boto.ec2.address import Address
class AddressTest(unittest.TestCase):
    """Unit tests for boto.ec2.address.Address."""

    def setUp(self):
        # A fresh Address with a mocked EC2 connection for every test.
        self.address = Address()
        self.address.connection = mock.Mock()
        self.address.public_ip = "192.168.1.1"

    def check_that_attribute_has_been_set(self, name, value, attribute):
        # endElement(name, value) should store ``value`` under ``attribute``.
        self.address.endElement(name, value, None)
        self.assertEqual(getattr(self.address, attribute), value)

    def test_endElement_sets_correct_attributes_with_values(self):
        cases = [
            ("publicIp", "192.168.1.1", "public_ip"),
            ("instanceId", 1, "instance_id"),
            ("domain", "some domain", "domain"),
            ("allocationId", 1, "allocation_id"),
            ("associationId", 1, "association_id"),
            ("somethingRandom", "somethingRandom", "somethingRandom"),
        ]
        for name, value, attribute in cases:
            self.check_that_attribute_has_been_set(name, value, attribute)

    def test_release_calls_connection_release_address_with_correct_args(self):
        self.address.release()
        self.address.connection.release_address.assert_called_with("192.168.1.1")

    def test_associate_calls_connection_associate_address_with_correct_args(self):
        self.address.associate(1)
        self.address.connection.associate_address.assert_called_with(1, "192.168.1.1")

    def test_disassociate_calls_connection_disassociate_address_with_correct_args(self):
        self.address.disassociate()
        self.address.connection.disassociate_address.assert_called_with("192.168.1.1")
# Allow running this test module directly.
if __name__ == "__main__":
    unittest.main()
| gpl-3.0 |
Southpaw-TACTIC/TACTIC | 3rd_party/python2/site-packages/portend.py | 2 | 5087 | # -*- coding: utf-8 -*-
"""
A simple library for managing the availability of ports.
"""
from __future__ import print_function, division
import time
import socket
import argparse
import sys
import itertools
import contextlib
import collections
import platform
from tempora import timing
def client_host(server_host):
    """Return the host on which a client can connect to the given listener."""
    # INADDR_ANY answers on IPv4 localhost.
    if server_host == '0.0.0.0':
        return '127.0.0.1'
    # IN6ADDR_ANY answers on IPv6 localhost; '::0' and '::0.0.0.0' are
    # non-canonical but common spellings of '::'.
    wildcard_v6 = ('::', '::0', '::0.0.0.0')
    return '::1' if server_host in wildcard_v6 else server_host
# BUGFIX: ``collections.Sequence`` moved to ``collections.abc`` in
# Python 3.3 and was removed from ``collections`` in Python 3.10;
# resolve whichever location exists so the check works on all versions.
try:
    from collections.abc import Sequence as _Sequence
except ImportError:  # Python 2
    from collections import Sequence as _Sequence


class Checker(object):
    """Check that TCP addresses are free by attempting to connect to them."""

    def __init__(self, timeout=1.0):
        # Seconds each individual connection attempt may take.
        self.timeout = timeout

    def assert_free(self, host, port=None):
        """
        Assert that the given addr is free
        in that all attempts to connect fail within the timeout
        or raise a PortNotFree exception.

        >>> free_port = find_available_local_port()

        >>> Checker().assert_free('localhost', free_port)
        >>> Checker().assert_free('127.0.0.1', free_port)
        >>> Checker().assert_free('::1', free_port)

        Also accepts an addr tuple

        >>> addr = '::1', free_port, 0, 0
        >>> Checker().assert_free(addr)

        Host might refer to a server bind address like '::', which
        should use localhost to perform the check.

        >>> Checker().assert_free('::', free_port)
        """
        # Accept a (host, port[, ...]) tuple in place of separate args.
        if port is None and isinstance(host, _Sequence):
            host, port = host[:2]
        if platform.system() == 'Windows':
            # Windows cannot connect to a wildcard bind address, so probe
            # the matching localhost address instead.
            host = client_host(host)
        info = socket.getaddrinfo(
            host, port, socket.AF_UNSPEC, socket.SOCK_STREAM,
        )
        # Probe every resolved address; a successful connection makes
        # _connect raise PortNotFree.
        list(itertools.starmap(self._connect, info))

    def _connect(self, af, socktype, proto, canonname, sa):
        s = socket.socket(af, socktype, proto)
        # fail fast with a small timeout
        s.settimeout(self.timeout)
        with contextlib.closing(s):
            try:
                s.connect(sa)
            except socket.error:
                return

        # the connect succeeded, so the port isn't free
        port, host = sa[:2]
        tmpl = "Port {port} is in use on {host}."
        raise PortNotFree(tmpl.format(**locals()))
class Timeout(IOError):
    """Raised when a port does not reach the desired state within the timeout."""
    pass


class PortNotFree(IOError):
    """Raised by Checker when a connection to the probed address succeeds."""
    pass
def free(host, port, timeout=float('Inf')):
    """
    Wait for the specified port to become free (dropping or rejecting
    requests). Return when the port is free or raise a Timeout if timeout has
    elapsed.

    Timeout may be specified in seconds or as a timedelta.
    If timeout is None or ∞, the routine will run indefinitely.

    >>> free('localhost', find_available_local_port())
    """
    if not host:
        raise ValueError("Host values of '' or None are not allowed.")

    deadline = timing.Timer(timeout)
    # Expect a free port, so each probe uses a small timeout.
    checker = Checker(timeout=0.1)
    while not deadline.expired():
        try:
            checker.assert_free(host, port)
        except PortNotFree:
            # Politely wait before probing again.
            time.sleep(0.1)
        else:
            return
    raise Timeout("Port {port} not free on {host}.".format(**locals()))
wait_for_free_port = free
def occupied(host, port, timeout=float('Inf')):
    """
    Wait for the specified port to become occupied (accepting requests).
    Return when the port is occupied or raise a Timeout if timeout has
    elapsed.

    Timeout may be specified in seconds or as a timedelta.
    If timeout is None or ∞, the routine will run indefinitely.

    >>> occupied('localhost', find_available_local_port(), .1) # doctest: +IGNORE_EXCEPTION_DETAIL
    Traceback (most recent call last):
    ...
    Timeout: Port ... not bound on localhost.
    """
    if not host:
        raise ValueError("Host values of '' or None are not allowed.")

    deadline = timing.Timer(timeout)
    while not deadline.expired():
        try:
            Checker(timeout=.5).assert_free(host, port)
        except PortNotFree:
            # Connection succeeded: the port is occupied.
            return
        # Still free; politely wait before probing again.
        time.sleep(0.1)
    raise Timeout("Port {port} not bound on {host}.".format(**locals()))
wait_for_occupied_port = occupied
def find_available_local_port():
    """
    Find a free port on localhost.

    >>> 0 < find_available_local_port() < 65536
    True
    """
    try:
        # Prefer IPv6; on a dual-stack host the wildcard bind covers IPv4 too.
        sock = socket.socket(socket.AF_INET6, socket.SOCK_STREAM)
    except socket.error:
        # BUGFIX: hosts without IPv6 support cannot create an AF_INET6
        # socket; fall back to IPv4 instead of failing outright.
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    with contextlib.closing(sock):
        # Port 0 asks the OS to pick any free ephemeral port.
        sock.bind(('', 0))
        return sock.getsockname()[1]
class HostPort(str):
    """
    A simple representation of a host/port pair as a string

    >>> hp = HostPort('localhost:32768')
    >>> hp.host
    'localhost'
    >>> hp.port
    32768
    >>> len(hp)
    15
    """

    @property
    def host(self):
        # Everything before the first ':'.
        return self.partition(':')[0]

    @property
    def port(self):
        # Everything after the first ':', as an integer.
        return int(self.partition(':')[2])
def _main():
    """Command-line entry point: wait for a host:port to reach a state."""
    parser = argparse.ArgumentParser()

    def lookup_state(key):
        # Map a state name ('free' / 'occupied') to the module-level
        # function of the same name.
        return globals()[key]

    parser.add_argument('target', metavar='host:port', type=HostPort)
    parser.add_argument('func', metavar='state', type=lookup_state)
    parser.add_argument('-t', '--timeout', default=None, type=float)
    args = parser.parse_args()
    try:
        args.func(args.target.host, args.target.port, timeout=args.timeout)
    except Timeout as timeout:
        print(timeout, file=sys.stderr)
        raise SystemExit(1)
_main()
| epl-1.0 |
alxyang/mysql-5.6 | xtrabackup/test/python/testtools/tests/test_spinner.py | 62 | 13061 | # Copyright (c) 2010 testtools developers. See LICENSE for details.
"""Tests for the evil Twisted reactor-spinning we do."""
import os
import signal
from testtools import (
skipIf,
TestCase,
)
from testtools.helpers import try_import
from testtools.matchers import (
Equals,
Is,
MatchesException,
Raises,
)
_spinner = try_import('testtools._spinner')
defer = try_import('twisted.internet.defer')
Failure = try_import('twisted.python.failure.Failure')
class NeedsTwistedTestCase(TestCase):
    """Base class that skips its tests when Twisted is not importable."""

    def setUp(self):
        super(NeedsTwistedTestCase, self).setUp()
        # The optional imports above yield None when Twisted is missing.
        if defer is None or Failure is None:
            self.skipTest("Need Twisted to run")
class TestNotReentrant(NeedsTwistedTestCase):
    """Tests for the ``_spinner.not_reentrant`` decorator."""

    def test_not_reentrant(self):
        # A function decorated as not being re-entrant will raise a
        # _spinner.ReentryError if it is called while it is running.
        call_log = []

        @_spinner.not_reentrant
        def log_something():
            call_log.append(None)
            if len(call_log) < 5:
                log_something()

        self.assertThat(
            log_something, Raises(MatchesException(_spinner.ReentryError)))
        self.assertEqual(1, len(call_log))

    def test_deeper_stack(self):
        # Re-entry is detected even through a deeper mutually-recursive
        # call chain.
        call_log = []

        @_spinner.not_reentrant
        def g():
            call_log.append(None)
            if len(call_log) < 5:
                f()

        @_spinner.not_reentrant
        def f():
            call_log.append(None)
            if len(call_log) < 5:
                g()

        self.assertThat(f, Raises(MatchesException(_spinner.ReentryError)))
        self.assertEqual(2, len(call_log))
class TestExtractResult(NeedsTwistedTestCase):
    """Tests for ``_spinner.extract_result``."""

    def test_not_fired(self):
        # extract_result raises DeferredNotFired when given a Deferred
        # that has not fired.
        self.assertThat(
            lambda: _spinner.extract_result(defer.Deferred()),
            Raises(MatchesException(_spinner.DeferredNotFired)))

    def test_success(self):
        # extract_result returns the value of a Deferred that fired
        # successfully.
        marker = object()
        fired = defer.succeed(marker)
        self.assertThat(_spinner.extract_result(fired), Equals(marker))

    def test_failure(self):
        # extract_result re-raises the exception of a failing Deferred.
        try:
            1 / 0
        except ZeroDivisionError:
            failure = Failure()
        failing = defer.fail(failure)
        self.assertThat(
            lambda: _spinner.extract_result(failing),
            Raises(MatchesException(ZeroDivisionError)))
class TestTrapUnhandledErrors(NeedsTwistedTestCase):
    """Tests for ``_spinner.trap_unhandled_errors``."""

    def test_no_deferreds(self):
        # Without any Deferreds, the callable's result passes straight
        # through and no errors are reported.
        marker = object()
        result, errors = _spinner.trap_unhandled_errors(lambda: marker)
        self.assertEqual([], errors)
        self.assertIs(marker, result)

    def test_unhandled_error(self):
        # A failing Deferred whose errback is never handled is captured
        # and reported alongside the (None) result.
        failures = []

        def make_deferred_but_dont_handle():
            try:
                1 / 0
            except ZeroDivisionError:
                failure = Failure()
                failures.append(failure)
                defer.fail(failure)

        result, errors = _spinner.trap_unhandled_errors(
            make_deferred_but_dont_handle)
        self.assertIs(None, result)
        self.assertEqual(failures, [error.failResult for error in errors])
class TestRunInReactor(NeedsTwistedTestCase):
def make_reactor(self):
from twisted.internet import reactor
return reactor
def make_spinner(self, reactor=None):
if reactor is None:
reactor = self.make_reactor()
return _spinner.Spinner(reactor)
def make_timeout(self):
return 0.01
def test_function_called(self):
# run_in_reactor actually calls the function given to it.
calls = []
marker = object()
self.make_spinner().run(self.make_timeout(), calls.append, marker)
self.assertThat(calls, Equals([marker]))
def test_return_value_returned(self):
# run_in_reactor returns the value returned by the function given to
# it.
marker = object()
result = self.make_spinner().run(self.make_timeout(), lambda: marker)
self.assertThat(result, Is(marker))
def test_exception_reraised(self):
# If the given function raises an error, run_in_reactor re-raises that
# error.
self.assertThat(
lambda:self.make_spinner().run(self.make_timeout(), lambda: 1/0),
Raises(MatchesException(ZeroDivisionError)))
def test_keyword_arguments(self):
# run_in_reactor passes keyword arguments on.
calls = []
function = lambda *a, **kw: calls.extend([a, kw])
self.make_spinner().run(self.make_timeout(), function, foo=42)
self.assertThat(calls, Equals([(), {'foo': 42}]))
def test_not_reentrant(self):
# run_in_reactor raises an error if it is called inside another call
# to run_in_reactor.
spinner = self.make_spinner()
self.assertThat(lambda: spinner.run(
self.make_timeout(), spinner.run, self.make_timeout(),
lambda: None), Raises(MatchesException(_spinner.ReentryError)))
def test_deferred_value_returned(self):
# If the given function returns a Deferred, run_in_reactor returns the
# value in the Deferred at the end of the callback chain.
marker = object()
result = self.make_spinner().run(
self.make_timeout(), lambda: defer.succeed(marker))
self.assertThat(result, Is(marker))
def test_preserve_signal_handler(self):
signals = ['SIGINT', 'SIGTERM', 'SIGCHLD']
signals = filter(
None, (getattr(signal, name, None) for name in signals))
for sig in signals:
self.addCleanup(signal.signal, sig, signal.getsignal(sig))
new_hdlrs = list(lambda *a: None for _ in signals)
for sig, hdlr in zip(signals, new_hdlrs):
signal.signal(sig, hdlr)
spinner = self.make_spinner()
spinner.run(self.make_timeout(), lambda: None)
self.assertEqual(new_hdlrs, map(signal.getsignal, signals))
def test_timeout(self):
# If the function takes too long to run, we raise a
# _spinner.TimeoutError.
timeout = self.make_timeout()
self.assertThat(
lambda:self.make_spinner().run(timeout, lambda: defer.Deferred()),
Raises(MatchesException(_spinner.TimeoutError)))
def test_no_junk_by_default(self):
# If the reactor hasn't spun yet, then there cannot be any junk.
spinner = self.make_spinner()
self.assertThat(spinner.get_junk(), Equals([]))
def test_clean_do_nothing(self):
# If there's nothing going on in the reactor, then clean does nothing
# and returns an empty list.
spinner = self.make_spinner()
result = spinner._clean()
self.assertThat(result, Equals([]))
    def test_clean_delayed_call(self):
        # If there's a delayed call in the reactor, then clean cancels it and
        # returns the cancelled call in the list of removed junk.
        reactor = self.make_reactor()
        spinner = self.make_spinner(reactor)
        call = reactor.callLater(10, lambda: None)
        results = spinner._clean()
        self.assertThat(results, Equals([call]))
        # The call must actually have been cancelled, not merely reported.
        self.assertThat(call.active(), Equals(False))
    def test_clean_delayed_call_cancelled(self):
        # If there's a delayed call that's just been cancelled, then it's no
        # longer there.
        reactor = self.make_reactor()
        spinner = self.make_spinner(reactor)
        call = reactor.callLater(10, lambda: None)
        call.cancel()
        # Already-cancelled calls must not be reported as junk.
        results = spinner._clean()
        self.assertThat(results, Equals([]))
    def test_clean_selectables(self):
        # If there's still a selectable (e.g. a listening socket), then
        # clean() removes it from the reactor's registry.
        #
        # Note that the socket is left open. This emulates a bug in trial.
        from twisted.internet.protocol import ServerFactory
        reactor = self.make_reactor()
        spinner = self.make_spinner(reactor)
        # The port is opened outside run(), so run()'s cleanup records it.
        port = reactor.listenTCP(0, ServerFactory())
        spinner.run(self.make_timeout(), lambda: None)
        results = spinner.get_junk()
        self.assertThat(results, Equals([port]))
    def test_clean_running_threads(self):
        # Threads spawned via callInThread during a run must be joined by the
        # spinner's cleanup, leaving only the threads that existed before.
        import threading
        import time
        current_threads = list(threading.enumerate())
        reactor = self.make_reactor()
        timeout = self.make_timeout()
        spinner = self.make_spinner(reactor)
        spinner.run(timeout, reactor.callInThread, time.sleep, timeout / 2.0)
        # Python before 2.5 has a race condition with thread handling where
        # join() does not remove threads from enumerate before returning - the
        # thread being joined does the removal. This was fixed in Python 2.5
        # but we still support 2.4, so we have to workaround the issue.
        # http://bugs.python.org/issue1703448.
        self.assertThat(
            [thread for thread in threading.enumerate() if thread.isAlive()],
            Equals(current_threads))
    def test_leftover_junk_available(self):
        # If 'run' is given a function that leaves the reactor dirty in some
        # way, 'run' will clean up the reactor and then store information
        # about the junk. This information can be got using get_junk.
        from twisted.internet.protocol import ServerFactory
        reactor = self.make_reactor()
        spinner = self.make_spinner(reactor)
        # listenTCP leaves an open listening port behind -> recorded as junk.
        port = spinner.run(
            self.make_timeout(), reactor.listenTCP, 0, ServerFactory())
        self.assertThat(spinner.get_junk(), Equals([port]))
    def test_will_not_run_with_previous_junk(self):
        # If 'run' is called and there's still junk in the spinner's junk
        # list, then the spinner will refuse to run.
        from twisted.internet.protocol import ServerFactory
        reactor = self.make_reactor()
        spinner = self.make_spinner(reactor)
        timeout = self.make_timeout()
        # The first run leaves a listening port as junk.
        spinner.run(timeout, reactor.listenTCP, 0, ServerFactory())
        self.assertThat(lambda: spinner.run(timeout, lambda: None),
            Raises(MatchesException(_spinner.StaleJunkError)))
    def test_clear_junk_clears_previous_junk(self):
        # clear_junk returns the junk accumulated by previous runs and
        # resets the spinner's junk list, so 'run' may be called again.
        from twisted.internet.protocol import ServerFactory
        reactor = self.make_reactor()
        spinner = self.make_spinner(reactor)
        timeout = self.make_timeout()
        port = spinner.run(timeout, reactor.listenTCP, 0, ServerFactory())
        junk = spinner.clear_junk()
        self.assertThat(junk, Equals([port]))
        self.assertThat(spinner.get_junk(), Equals([]))
@skipIf(os.name != "posix", "Sending SIGINT with os.kill is posix only")
def test_sigint_raises_no_result_error(self):
# If we get a SIGINT during a run, we raise _spinner.NoResultError.
SIGINT = getattr(signal, 'SIGINT', None)
if not SIGINT:
self.skipTest("SIGINT not available")
reactor = self.make_reactor()
spinner = self.make_spinner(reactor)
timeout = self.make_timeout()
reactor.callLater(timeout, os.kill, os.getpid(), SIGINT)
self.assertThat(lambda:spinner.run(timeout * 5, defer.Deferred),
Raises(MatchesException(_spinner.NoResultError)))
self.assertEqual([], spinner._clean())
@skipIf(os.name != "posix", "Sending SIGINT with os.kill is posix only")
def test_sigint_raises_no_result_error_second_time(self):
# If we get a SIGINT during a run, we raise _spinner.NoResultError.
# This test is exactly the same as test_sigint_raises_no_result_error,
# and exists to make sure we haven't futzed with state.
self.test_sigint_raises_no_result_error()
@skipIf(os.name != "posix", "Sending SIGINT with os.kill is posix only")
def test_fast_sigint_raises_no_result_error(self):
# If we get a SIGINT during a run, we raise _spinner.NoResultError.
SIGINT = getattr(signal, 'SIGINT', None)
if not SIGINT:
self.skipTest("SIGINT not available")
reactor = self.make_reactor()
spinner = self.make_spinner(reactor)
timeout = self.make_timeout()
reactor.callWhenRunning(os.kill, os.getpid(), SIGINT)
self.assertThat(lambda:spinner.run(timeout * 5, defer.Deferred),
Raises(MatchesException(_spinner.NoResultError)))
self.assertEqual([], spinner._clean())
@skipIf(os.name != "posix", "Sending SIGINT with os.kill is posix only")
def test_fast_sigint_raises_no_result_error_second_time(self):
self.test_fast_sigint_raises_no_result_error()
def test_suite():
    """Return a TestSuite containing every test defined in this module."""
    import unittest
    loader = unittest.TestLoader()
    return loader.loadTestsFromName(__name__)
| gpl-2.0 |
WillisXChen/django-oscar | src/oscar/apps/voucher/reports.py | 24 | 1551 | from django.utils.translation import ugettext_lazy as _
from oscar.core.loading import get_class, get_model
ReportGenerator = get_class('dashboard.reports.reports', 'ReportGenerator')
ReportCSVFormatter = get_class(
'dashboard.reports.reports', 'ReportCSVFormatter')
ReportHTMLFormatter = get_class(
'dashboard.reports.reports', 'ReportHTMLFormatter')
Voucher = get_model('voucher', 'Voucher')
class VoucherReportCSVFormatter(ReportCSVFormatter):
    """Renders the voucher-performance report as a CSV download."""
    filename_template = 'voucher-performance.csv'

    def generate_csv(self, response, vouchers):
        """Write a translated header row followed by one row per voucher."""
        writer = self.get_csv_writer(response)
        writer.writerow([
            _('Voucher code'),
            _('Added to a basket'),
            _('Used in an order'),
            _('Total discount'),
        ])
        for voucher in vouchers:
            writer.writerow([
                voucher.code,
                voucher.num_basket_additions,
                voucher.num_orders,
                voucher.total_discount,
            ])
class VoucherReportHTMLFormatter(ReportHTMLFormatter):
    """Renders the voucher report via the dashboard partial template."""
    filename_template = 'dashboard/reports/partials/voucher_report.html'
class VoucherReportGenerator(ReportGenerator):
    """Dashboard report on voucher performance (CSV and HTML output)."""
    code = 'vouchers'
    description = _('Voucher performance')
    formatters = {
        'CSV_formatter': VoucherReportCSVFormatter,
        'HTML_formatter': VoucherReportHTMLFormatter}
    def generate(self):
        # Report over every voucher fetched via the default manager.
        vouchers = Voucher._default_manager.all()
        return self.formatter.generate_response(vouchers)
| bsd-3-clause |
navodissa/python-flask | flask/lib/python2.7/site-packages/wtforms/ext/appengine/ndb.py | 174 | 17124 | """
Form generation utilities for App Engine's new ``ndb.Model`` class.
The goal of ``model_form()`` is to provide a clean, explicit and predictable
way to create forms based on ``ndb.Model`` classes. No malabarism or black
magic should be necessary to generate a form for models, and to add custom
non-model related fields: ``model_form()`` simply generates a form class
that can be used as it is, or that can be extended directly or even be used
to create other forms using ``model_form()``.
Example usage:
.. code-block:: python
from google.appengine.ext import ndb
from wtforms.ext.appengine.ndb import model_form
# Define an example model and add a record.
class Contact(ndb.Model):
name = ndb.StringProperty(required=True)
city = ndb.StringProperty()
age = ndb.IntegerProperty(required=True)
is_admin = ndb.BooleanProperty(default=False)
new_entity = Contact(key_name='test', name='Test Name', age=17)
new_entity.put()
# Generate a form based on the model.
ContactForm = model_form(Contact)
# Get a form populated with entity data.
entity = Contact.get_by_key_name('test')
form = ContactForm(obj=entity)
Properties from the model can be excluded from the generated form, or it can
include just a set of properties. For example:
.. code-block:: python
# Generate a form based on the model, excluding 'city' and 'is_admin'.
ContactForm = model_form(Contact, exclude=('city', 'is_admin'))
# or...
# Generate a form based on the model, only including 'name' and 'age'.
ContactForm = model_form(Contact, only=('name', 'age'))
The form can be generated setting field arguments:
.. code-block:: python
ContactForm = model_form(Contact, only=('name', 'age'), field_args={
'name': {
'label': 'Full name',
'description': 'Your name',
},
'age': {
'label': 'Age',
'validators': [validators.NumberRange(min=14, max=99)],
}
})
The class returned by ``model_form()`` can be used as a base class for forms
mixing non-model fields and/or other model forms. For example:
.. code-block:: python
# Generate a form based on the model.
BaseContactForm = model_form(Contact)
# Generate a form based on other model.
ExtraContactForm = model_form(MyOtherModel)
class ContactForm(BaseContactForm):
# Add an extra, non-model related field.
subscribe_to_news = f.BooleanField()
# Add the other model form as a subform.
extra = f.FormField(ExtraContactForm)
The class returned by ``model_form()`` can also extend an existing form
class:
.. code-block:: python
class BaseContactForm(Form):
# Add an extra, non-model related field.
subscribe_to_news = f.BooleanField()
# Generate a form based on the model.
ContactForm = model_form(Contact, base_class=BaseContactForm)
"""
from wtforms import Form, validators, fields as f
from wtforms.compat import string_types
from wtforms.ext.appengine.fields import GeoPtPropertyField, KeyPropertyField, StringListPropertyField, IntegerListPropertyField
def get_TextField(kwargs):
    """
    Build a ``TextField`` capped at the 500-byte length limit that
    ``ndb.StringProperty`` enforces.
    """
    length_check = validators.length(max=500)
    kwargs['validators'].append(length_check)
    return f.TextField(**kwargs)
def get_IntegerField(kwargs):
    """
    Build an ``IntegerField`` constrained to the signed 64-bit range that
    ``ndb.IntegerProperty`` enforces.
    """
    range_check = validators.NumberRange(
        min=-0x8000000000000000, max=0x7fffffffffffffff)
    kwargs['validators'].append(range_check)
    return f.IntegerField(**kwargs)
class ModelConverterBase(object):
    """
    Base class for converters that turn ``ndb.Model`` properties into form
    fields.  Subclasses provide ``convert_<PropertyTypeName>`` methods; the
    constructor collects them into ``self.converters`` keyed by the property
    type name.
    """

    def __init__(self, converters=None):
        """
        Constructs the converter, setting the converter callables.

        :param converters:
            A dictionary of converter callables for each property type. The
            callable must accept the arguments (model, prop, kwargs).
        """
        self.converters = {}
        for name in dir(self):
            if not name.startswith('convert_'):
                continue
            self.converters[name[8:]] = getattr(self, name)

    def convert(self, model, prop, field_args):
        """
        Returns a form field for a single model property.

        :param model:
            The ``db.Model`` class that contains the property.
        :param prop:
            The model property: a ``db.Property`` instance.
        :param field_args:
            Optional keyword arguments (may be None) used to construct the
            field.
        """
        prop_type_name = type(prop).__name__

        # A GenericProperty carries no intrinsic type, so allow the caller to
        # force one via field_args={'type': ...}.  Guard against field_args
        # being None: model_fields() passes field_args.get(name), which is
        # None when no per-field arguments were supplied (previously this
        # raised AttributeError).
        if prop_type_name == "GenericProperty":
            generic_type = field_args.get("type") if field_args else None
            if generic_type:
                prop_type_name = generic_type
            # If no type is found, the generic property uses the string field
            # set in convert_GenericProperty.

        kwargs = {
            'label': prop._code_name.replace('_', ' ').title(),
            'default': prop._default,
            'validators': [],
        }
        if field_args:
            kwargs.update(field_args)

        if prop._required and prop_type_name not in self.NO_AUTO_REQUIRED:
            kwargs['validators'].append(validators.required())

        if kwargs.get('choices', None):
            # Caller-supplied choices win: use them in a select field.
            kwargs['choices'] = [(v, v) for v in kwargs.get('choices')]
            return f.SelectField(**kwargs)

        if prop._choices:
            # Use the property's own choices in a select field.
            kwargs['choices'] = [(v, v) for v in prop._choices]
            return f.SelectField(**kwargs)
        else:
            converter = self.converters.get(prop_type_name, None)
            if converter is not None:
                return converter(model, prop, kwargs)
            else:
                # NOTE(review): fallback_converter is expected to be supplied
                # by a subclass or instance; the base class does not define it.
                return self.fallback_converter(model, prop, kwargs)
class ModelConverter(ModelConverterBase):
    """
    Converts properties from a ``ndb.Model`` class to form fields.

    Default conversions between properties and fields:

    +====================+===================+==============+==================+
    | Property subclass  | Field subclass    | datatype     | notes            |
    +====================+===================+==============+==================+
    | StringProperty     | TextField         | unicode      | repeated support |
    +--------------------+-------------------+--------------+------------------+
    | BooleanProperty    | BooleanField      | bool         |                  |
    +--------------------+-------------------+--------------+------------------+
    | IntegerProperty    | IntegerField      | int or long  | repeated support |
    +--------------------+-------------------+--------------+------------------+
    | FloatProperty      | FloatField        | float        |                  |
    +--------------------+-------------------+--------------+------------------+
    | DateTimeProperty   | DateTimeField     | datetime     | skipped if       |
    |                    |                   |              | auto_now[_add]   |
    +--------------------+-------------------+--------------+------------------+
    | DateProperty       | DateField         | date         | skipped if       |
    |                    |                   |              | auto_now[_add]   |
    +--------------------+-------------------+--------------+------------------+
    | TimeProperty       | DateTimeField     | time         | skipped if       |
    |                    |                   |              | auto_now[_add]   |
    +--------------------+-------------------+--------------+------------------+
    | TextProperty       | TextAreaField     | unicode      |                  |
    +--------------------+-------------------+--------------+------------------+
    | GeoPtProperty      | GeoPtPropertyField| db.GeoPt     |                  |
    +--------------------+-------------------+--------------+------------------+
    | KeyProperty        | KeyPropertyField  | ndb.Key      |                  |
    +--------------------+-------------------+--------------+------------------+
    | BlobKeyProperty    | FileField         | ndb.BlobKey  |                  |
    +--------------------+-------------------+--------------+------------------+
    | UserProperty       | None              | users.User   | always skipped   |
    +--------------------+-------------------+--------------+------------------+
    | StructuredProperty | None              | ndb.Model    | always skipped   |
    +--------------------+-------------------+--------------+------------------+
    | LocalStructuredPro | None              | ndb.Model    | always skipped   |
    +--------------------+-------------------+--------------+------------------+
    | JsonProperty       | None              | unicode      | always skipped   |
    +--------------------+-------------------+--------------+------------------+
    | PickleProperty     | None              | bytedata     | always skipped   |
    +--------------------+-------------------+--------------+------------------+
    | GenericProperty    | TextField         | generic      | type may be set  |
    |                    |                   |              | via field_args   |
    +--------------------+-------------------+--------------+------------------+
    | ComputedProperty   | None              |              | always skipped   |
    +====================+===================+==============+==================+
    """

    # Don't automatically add a required validator for these properties
    NO_AUTO_REQUIRED = frozenset(['ListProperty', 'StringListProperty', 'BooleanProperty'])

    def convert_StringProperty(self, model, prop, kwargs):
        """Returns a form field for a ``ndb.StringProperty``."""
        if prop._repeated:
            return StringListPropertyField(**kwargs)
        # get_TextField already appends the 500-byte length validator;
        # appending it here as well used to produce a duplicate validator.
        return get_TextField(kwargs)

    def convert_BooleanProperty(self, model, prop, kwargs):
        """Returns a form field for a ``ndb.BooleanProperty``."""
        return f.BooleanField(**kwargs)

    def convert_IntegerProperty(self, model, prop, kwargs):
        """Returns a form field for a ``ndb.IntegerProperty``."""
        if prop._repeated:
            return IntegerListPropertyField(**kwargs)
        return get_IntegerField(kwargs)

    def convert_FloatProperty(self, model, prop, kwargs):
        """Returns a form field for a ``ndb.FloatProperty``."""
        return f.FloatField(**kwargs)

    def convert_DateTimeProperty(self, model, prop, kwargs):
        """Returns a form field for a ``ndb.DateTimeProperty``.

        Auto-populated timestamps are skipped (returns None).
        """
        if prop._auto_now or prop._auto_now_add:
            return None
        return f.DateTimeField(format='%Y-%m-%d %H:%M:%S', **kwargs)

    def convert_DateProperty(self, model, prop, kwargs):
        """Returns a form field for a ``ndb.DateProperty``.

        Auto-populated dates are skipped (returns None).
        """
        if prop._auto_now or prop._auto_now_add:
            return None
        return f.DateField(format='%Y-%m-%d', **kwargs)

    def convert_TimeProperty(self, model, prop, kwargs):
        """Returns a form field for a ``ndb.TimeProperty``.

        Auto-populated times are skipped (returns None).
        """
        if prop._auto_now or prop._auto_now_add:
            return None
        return f.DateTimeField(format='%H:%M:%S', **kwargs)

    def convert_RepeatedProperty(self, model, prop, kwargs):
        """Skipped: repeated properties have no generic form field."""
        return None

    def convert_UserProperty(self, model, prop, kwargs):
        """Skipped: ``ndb.UserProperty`` has no form field."""
        return None

    def convert_StructuredProperty(self, model, prop, kwargs):
        """Skipped: ``ndb.StructuredProperty`` has no form field."""
        return None

    def convert_LocalStructuredProperty(self, model, prop, kwargs):
        """Skipped: ``ndb.LocalStructuredProperty`` has no form field."""
        return None

    def convert_JsonProperty(self, model, prop, kwargs):
        """Skipped: ``ndb.JsonProperty`` has no form field."""
        return None

    def convert_PickleProperty(self, model, prop, kwargs):
        """Skipped: ``ndb.PickleProperty`` has no form field."""
        return None

    def convert_GenericProperty(self, model, prop, kwargs):
        """Returns a text form field for a ``ndb.GenericProperty``."""
        # get_TextField already appends the 500-byte length validator;
        # appending it here as well used to produce a duplicate validator.
        return get_TextField(kwargs)

    def convert_BlobKeyProperty(self, model, prop, kwargs):
        """Returns a form field for a ``ndb.BlobKeyProperty``."""
        return f.FileField(**kwargs)

    def convert_TextProperty(self, model, prop, kwargs):
        """Returns a form field for a ``ndb.TextProperty``."""
        return f.TextAreaField(**kwargs)

    def convert_ComputedProperty(self, model, prop, kwargs):
        """Skipped: ``ndb.ComputedProperty`` values are derived, not edited."""
        return None

    def convert_GeoPtProperty(self, model, prop, kwargs):
        """Returns a form field for a ``ndb.GeoPtProperty``."""
        return GeoPtPropertyField(**kwargs)

    def convert_KeyProperty(self, model, prop, kwargs):
        """Returns a form field for a ``ndb.KeyProperty``.

        Resolves a string ``reference_class`` to the actual model class by
        importing it from the owning model's module.
        """
        if 'reference_class' not in kwargs:
            try:
                reference_class = prop._kind
            except AttributeError:
                reference_class = prop._reference_class
            if isinstance(reference_class, string_types):
                # reference class is a string, try to retrieve the model object.
                mod = __import__(model.__module__, None, None, [reference_class], 0)
                reference_class = getattr(mod, reference_class)
            kwargs['reference_class'] = reference_class
        kwargs.setdefault('allow_blank', not prop._required)
        return KeyPropertyField(**kwargs)
def model_fields(model, only=None, exclude=None, field_args=None,
                 converter=None):
    """
    Extracts and returns a dictionary of form fields for a given
    ``db.Model`` class.

    :param model:
        The ``db.Model`` class to extract fields from.
    :param only:
        An optional iterable with the property names that should be included in
        the form. Only these properties will have fields.
    :param exclude:
        An optional iterable with the property names that should be excluded
        from the form. All other properties will have fields.
    :param field_args:
        An optional dictionary of field names mapping to a keyword arguments
        used to construct each field object.
    :param converter:
        A converter to generate the fields based on the model properties. If
        not set, ``ModelConverter`` is used.
    """
    converter = converter or ModelConverter()
    field_args = field_args or {}

    # Order properties by declaration order, then honour only/exclude.
    props = model._properties
    ordered = sorted(props.items(), key=lambda item: item[1]._creation_counter)
    field_names = [name for name, _unused in ordered]
    if only:
        field_names = [name for name in only if name in field_names]
    elif exclude:
        field_names = [name for name in field_names if name not in exclude]

    # Convert each property; converters may return None to skip a field.
    field_dict = {}
    for name in field_names:
        field = converter.convert(model, props[name], field_args.get(name))
        if field is not None:
            field_dict[name] = field
    return field_dict
def model_form(model, base_class=Form, only=None, exclude=None, field_args=None,
               converter=None):
    """
    Creates and returns a dynamic ``wtforms.Form`` class for a given
    ``ndb.Model`` class. The form class can be used as it is or serve as a base
    for extended form classes, which can then mix non-model related fields,
    subforms with other model forms, among other possibilities.

    :param model:
        The ``ndb.Model`` class to generate a form for.
    :param base_class:
        Base form class to extend from. Must be a ``wtforms.Form`` subclass.
    :param only:
        An optional iterable with the property names that should be included in
        the form. Only these properties will have fields.
    :param exclude:
        An optional iterable with the property names that should be excluded
        from the form. All other properties will have fields.
    :param field_args:
        An optional dictionary of field names mapping to keyword arguments
        used to construct each field object.
    :param converter:
        A converter to generate the fields based on the model properties. If
        not set, ``ModelConverter`` is used.
    """
    # Build the field dict, then create a form class named "<Kind>Form" that
    # extends base_class and carries the generated fields as attributes.
    field_dict = model_fields(model, only, exclude, field_args, converter)
    form_name = model._get_kind() + 'Form'
    return type(form_name, (base_class,), field_dict)
| bsd-3-clause |
opencivicdata/scrapers-ca | ca_ab_wood_buffalo/people.py | 1 | 2201 | from utils import CanadianScraper, CanadianPerson as Person
from collections import defaultdict
COUNCIL_PAGE = 'http://www.woodbuffalo.ab.ca/Municipal-Government/Mayor-and-Council/Councillor-Profiles.htm'
class WoodBuffaloPersonScraper(CanadianScraper):
    """Scrapes mayor and councillor profiles for the RM of Wood Buffalo, AB."""
    def scrape(self):
        """Yield one Person for the mayor and each councillor on COUNCIL_PAGE."""
        # Wards 1 and 2 elect multiple councillors, so seats are numbered
        # per ward to keep district names unique.
        seat_numbers = defaultdict(int)
        page = self.lxmlize(COUNCIL_PAGE)
        mayor_url = page.xpath('//li[@id="pageid1075"]/div/a/@href')[0]
        yield self.scrape_mayor(mayor_url)
        wards = page.xpath('//div[@id="content"]//h3')
        assert len(wards), 'No wards found'
        for ward in wards:
            area = ward.text_content()
            councillors = ward.xpath('./following-sibling::ul[1]//a')
            assert len(councillors), 'No councillors found for ward {}'.format(area)
            for councillor in councillors:
                # Names are listed "Last, First"; reverse to "First Last".
                name = ' '.join(reversed(councillor.text.split(', ')))
                url = councillor.attrib['href']
                if area in ('Ward 1', 'Ward 2'):
                    seat_numbers[area] += 1
                    district = '{} (seat {})'.format(area, seat_numbers[area])
                else:
                    district = area
                p = Person(primary_org='legislature', name=name, district=district, role='Councillor')
                p.add_source(COUNCIL_PAGE)
                p.add_source(url)
                # NOTE(review): this rebinds the outer `page` variable to the
                # councillor's profile page; harmless because wards and
                # councillors were extracted above, but fragile.
                page = self.lxmlize(url)
                p.image = page.xpath('//div[@id="content"]//img[contains(@alt, "Councillor")]/@src')[0]
                email = self.get_email(page.xpath('//div[@id="content"]')[0])
                p.add_contact('email', email)
                yield p
    def scrape_mayor(self, url):
        """Return a Person for the mayor, scraped from the given profile URL."""
        page = self.lxmlize(url)
        name = page.xpath('//h1[@id="pagetitle"]/text()')[0].replace('Mayor', '').strip()
        image = page.xpath('//div[@id="content"]//@src')[0]
        p = Person(primary_org='legislature', name=name, district='Wood Buffalo', role='Mayor')
        p.add_source(url)
        p.image = image
        p.add_contact('voice', self.get_phone(page.xpath('//div[@id="icon5"]')[0]), 'legislature')
        # The mayor's email is hard-coded rather than scraped from the page.
        p.add_contact('email', 'mayor@rmwb.ca')
        return p
| mit |
pydanny/django-admin2 | example/blog/actions.py | 1 | 2110 | # -*- coding: utf-8 -*-
from __future__ import division, absolute_import, unicode_literals
from django.contrib import messages
from django.utils.translation import ugettext_lazy, pgettext_lazy
from djadmin2 import permissions
from djadmin2.actions import BaseListAction
class CustomPublishAction(BaseListAction):
    """List action that publishes the items selected in the changelist."""
    # Extend the base action's permission checks with change permission.
    permission_classes = BaseListAction.permission_classes + (
        permissions.ModelChangePermission,
    )
    description = ugettext_lazy('Publish selected items')
    success_message = pgettext_lazy(
        'singular form',
        'Successfully published %(count)s %(items)s')
    success_message_plural = pgettext_lazy(
        'plural form',
        'Successfully published %(count)s %(items)s')
    default_template_name = "actions/publish_selected_items.html"
    def process_queryset(self):
        """Bulk-publish via QuerySet.update (single SQL UPDATE, no save())."""
        self.get_queryset().update(published=True)
class PublishAllItemsAction(BaseListAction):
    """List action that publishes every item, not just the selected ones."""
    permission_classes = BaseListAction.permission_classes + (
        permissions.ModelChangePermission,
    )
    description = ugettext_lazy('Publish all items')
    success_message = pgettext_lazy(
        'singular form',
        'Successfully published %(count)s %(items)s',
    )
    success_message_plural = pgettext_lazy(
        'plural form',
        'Successfully published %(count)s %(items)s',
    )
    default_template_name = "model_list.html"
    # only_selected=False makes the action operate on the full queryset.
    only_selected = False
    def process_queryset(self):
        """Bulk-publish via QuerySet.update (single SQL UPDATE, no save())."""
        self.get_queryset().update(published=True)
def unpublish_items(request, queryset):
    """Mark the selected items as unpublished and flash a confirmation."""
    queryset.update(published=False)
    messages.add_message(
        request,
        messages.INFO,
        ugettext_lazy(u'Items unpublished'),
    )
# Translators : action description
unpublish_items.description = ugettext_lazy('Unpublish selected items')
def unpublish_all_items(request, queryset):
    """Mark every item as unpublished and flash a confirmation."""
    queryset.update(published=False)
    messages.add_message(request, messages.INFO,
                         ugettext_lazy('Items unpublished'))
unpublish_all_items.description = ugettext_lazy('Unpublish all items')
# Applies to the whole queryset, not just the selected rows.
unpublish_all_items.only_selected = False
| bsd-3-clause |
bowlofstew/bii-server | test/model/social_account_test.py | 2 | 2004 | import unittest
from biicode.server.model.social_account import SocialAccount, SocialAccountToken
from biicode.server.model.epoch.utc_datetime import UtcDatetime
import datetime
class SocialAccountTest(unittest.TestCase):
    """Round-trip serialize/deserialize tests for social-account models."""
    def setUp(self):
        # Build a UtcDatetime from the current local time for use in all tests.
        self.utc_datetime = UtcDatetime.deserialize(datetime.datetime.now())
    def test_social_token_serialization(self):
        # A token with both an access string and a secret survives a round trip.
        social_token = SocialAccountToken("xxzc", "zxcc", self.utc_datetime)
        serialized_social_token = social_token.serialize()
        self.assertEquals(SocialAccountToken.deserialize(serialized_social_token), social_token)
    def test_social_token_no_secret_serialization(self):
        # A token with an empty secret must also round-trip cleanly.
        social_token = SocialAccountToken("xxzc", "", self.utc_datetime)
        serialized_social_token = social_token.serialize()
        self.assertEquals(SocialAccountToken.deserialize(serialized_social_token), social_token)
    def test_social_account_serialization(self):
        # An account holding two tokens round-trips.
        tokens = [SocialAccountToken("xxzc", "zxcc", self.utc_datetime),
                  SocialAccountToken("xxzc", "zxcc", self.utc_datetime)]
        social_account = SocialAccount("zcas",
                                       self.utc_datetime,
                                       self.utc_datetime,
                                       tokens,
                                       "zcc")
        serialized_social_account = social_account.serialize()
        self.assertEquals(SocialAccount.deserialize(serialized_social_account), social_account)
    def test_social_account_without_token_serialization(self):
        # An account with no tokens round-trips.
        tokens = []
        social_account = SocialAccount("zcas",
                                       self.utc_datetime,
                                       self.utc_datetime,
                                       tokens,
                                       "zcc")
        serialized_social_account = social_account.serialize()
        self.assertEquals(SocialAccount.deserialize(serialized_social_account), social_account)
| mit |
mmottahedi/neuralnilm_prototype | scripts/e249.py | 2 | 3897 | from __future__ import print_function, division
import matplotlib
matplotlib.use('Agg') # Must be before importing matplotlib.pyplot or pylab!
from neuralnilm import Net, RealApplianceSource, BLSTMLayer, DimshuffleLayer
from lasagne.nonlinearities import sigmoid, rectify
from lasagne.objectives import crossentropy, mse
from lasagne.init import Uniform, Normal
from lasagne.layers import LSTMLayer, DenseLayer, Conv1DLayer, ReshapeLayer, FeaturePoolLayer
from lasagne.updates import nesterov_momentum
from functools import partial
import os
from neuralnilm.source import standardise, discretize, fdiff, power_and_fdiff
from neuralnilm.experiment import run_experiment
from neuralnilm.net import TrainingError
import __main__
from copy import deepcopy
from math import sqrt
NAME = os.path.splitext(os.path.split(__main__.__file__)[1])[0]
PATH = "/homes/dk3810/workspace/python/neuralnilm/figures"
SAVE_PLOT_INTERVAL = 250
GRADIENT_STEPS = 100
"""
e233
based on e131c but with:
* lag=32
* pool
e234
* init final layer and conv layer
235
no lag
236
should be exactly as 131c: no pool, no lag, no init for final and conv layer
237
putting the pool back
238
seems pooling hurts us! disable pooling.
enable lag = 32
239
BLSTM
lag = 20
240
LSTM not BLSTM
various lags
241
output is prediction
ideas for next TODO:
* 3 LSTM layers with smaller conv between them
* why does pooling hurt us?
"""
source_dict = dict(
filename='/data/dk3810/ukdale.h5',
appliances=[
['fridge freezer', 'fridge', 'freezer'],
'hair straighteners',
'television',
'dish washer',
['washer dryer', 'washing machine']
],
max_appliance_powers=[300, 500, 200, 2500, 2400],
on_power_thresholds=[5] * 5,
max_input_power=5900,
min_on_durations=[60, 60, 60, 1800, 1800],
min_off_durations=[12, 12, 12, 1800, 600],
window=("2013-06-01", "2014-07-01"),
seq_length=1500,
output_one_appliance=False,
boolean_targets=False,
train_buildings=[1],
validation_buildings=[1],
# skip_probability=0.0,
n_seq_per_batch=50,
# subsample_target=5,
include_diff=False,
clip_appliance_power=True,
target_is_prediction=True
#lag=0
)
net_dict = dict(
save_plot_interval=SAVE_PLOT_INTERVAL,
loss_function=crossentropy,
layers_config=[
{
'type': LSTMLayer,
'num_units': 10,
'gradient_steps': GRADIENT_STEPS,
'peepholes': False,
'W_in_to_cell': Normal(std=1.)
}
]
)
def exp_x(name, learning_rate):
    """Build a Net for one experiment variant.

    The RealApplianceSource is expensive to construct, so it is created once
    and cached in the module-level global ``source``; the try/except below is
    a "does the global exist yet?" check.

    :param name: full experiment name used for output paths.
    :param learning_rate: learning rate passed to nesterov_momentum.
    """
    global source
    try:
        a = source
    except NameError:
        # First call: the global doesn't exist yet, so build it.
        source = RealApplianceSource(**source_dict)
    net_dict_copy = deepcopy(net_dict)
    net_dict_copy.update(dict(
        experiment_name=name,
        source=source,
        updates=partial(nesterov_momentum, learning_rate=learning_rate)
    ))
    # Final sigmoid dense layer sized to the number of target appliances.
    net_dict_copy['layers_config'].append(
        {
            'type': DenseLayer,
            'num_units': source.n_outputs,
            'nonlinearity': sigmoid,
            'W': Normal(std=(1/sqrt(50)))
        }
    )
    net = Net(**net_dict_copy)
    return net
def main():
    """Run experiments a-e, sweeping the learning rate downwards."""
    for experiment, learning_rate in [('a', 1.0), ('b', 0.1), ('c', 0.01),
                                      ('d', 0.001), ('e', 0.0001)]:
        full_exp_name = NAME + experiment
        path = os.path.join(PATH, full_exp_name)
        print("***********************************")
        print("Preparing", full_exp_name, "...")
        try:
            net = exp_x(full_exp_name, learning_rate)
            run_experiment(net, path, epochs=1000)
        except KeyboardInterrupt:
            # Ctrl-C aborts the whole sweep, not just the current experiment.
            break
        except TrainingError as exception:
            print("EXCEPTION:", exception)
        except Exception as exception:
            # Deliberately broad: log and continue with the next experiment.
            print("EXCEPTION:", exception)
if __name__ == "__main__":
main()
| mit |
twz915/django | django/template/utils.py | 53 | 3665 | import functools
import os
from collections import Counter, OrderedDict
from django.apps import apps
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.utils.functional import cached_property
from django.utils.module_loading import import_string
class InvalidTemplateEngineError(ImproperlyConfigured):
    """Raised when no engine config exists for a requested alias."""
    pass
class EngineHandler:
    """Lazily instantiates and caches the template engine backends declared
    in settings.TEMPLATES (or an explicit list passed to the constructor)."""
    def __init__(self, templates=None):
        """
        templates is an optional list of template engine definitions
        (structured like settings.TEMPLATES).
        """
        self._templates = templates
        # Alias -> instantiated backend; filled lazily by __getitem__.
        self._engines = {}
    @cached_property
    def templates(self):
        """Return an OrderedDict of normalized engine configs keyed by NAME,
        validating BACKEND paths and rejecting duplicate aliases."""
        if self._templates is None:
            self._templates = settings.TEMPLATES
        templates = OrderedDict()
        backend_names = []
        for tpl in self._templates:
            tpl = tpl.copy()
            try:
                # This will raise an exception if 'BACKEND' doesn't exist or
                # isn't a string containing at least one dot.
                default_name = tpl['BACKEND'].rsplit('.', 2)[-2]
            except Exception:
                invalid_backend = tpl.get('BACKEND', '<not defined>')
                raise ImproperlyConfigured(
                    "Invalid BACKEND for a template engine: {}. Check "
                    "your TEMPLATES setting.".format(invalid_backend))
            tpl.setdefault('NAME', default_name)
            tpl.setdefault('DIRS', [])
            tpl.setdefault('APP_DIRS', False)
            tpl.setdefault('OPTIONS', {})
            templates[tpl['NAME']] = tpl
            backend_names.append(tpl['NAME'])
        counts = Counter(backend_names)
        duplicates = [alias for alias, count in counts.most_common() if count > 1]
        if duplicates:
            raise ImproperlyConfigured(
                "Template engine aliases aren't unique, duplicates: {}. "
                "Set a unique NAME for each engine in settings.TEMPLATES."
                .format(", ".join(duplicates)))
        return templates
    def __getitem__(self, alias):
        """Return the engine instance for *alias*, instantiating the backend
        on first access and caching it for later lookups."""
        try:
            return self._engines[alias]
        except KeyError:
            try:
                params = self.templates[alias]
            except KeyError:
                raise InvalidTemplateEngineError(
                    "Could not find config for '{}' "
                    "in settings.TEMPLATES".format(alias))
            # If importing or initializing the backend raises an exception,
            # self._engines[alias] isn't set and this code may get executed
            # again, so we must preserve the original params. See #24265.
            params = params.copy()
            backend = params.pop('BACKEND')
            engine_cls = import_string(backend)
            engine = engine_cls(params)
            self._engines[alias] = engine
            return engine
    def __iter__(self):
        """Iterate over the configured engine aliases."""
        return iter(self.templates)
    def all(self):
        """Return a list of all configured engine instances."""
        return [self[alias] for alias in self]
@functools.lru_cache()
def get_app_template_dirs(dirname):
    """
    Return an iterable of paths of directories to load app templates from.

    dirname is the name of the subdirectory containing templates inside
    installed applications.
    """
    candidate_dirs = (
        os.path.join(app_config.path, dirname)
        for app_config in apps.get_app_configs()
        if app_config.path
    )
    # Immutable return value because it will be cached and shared by callers.
    return tuple(path for path in candidate_dirs if os.path.isdir(path))
| bsd-3-clause |
mlskit/astromlskit | FRONTEND/forestfront.py | 2 | 8530 |
from PyQt4 import QtCore, QtGui
from dtree import *
import numpy as np
# PyQt4 API-compatibility shims (standard pyuic4 boilerplate): fall back to
# identity / legacy-signature variants when the running Qt bindings lack the
# old-style attributes (e.g. under API v2 or Python 3).
try:
    _fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
    def _fromUtf8(s):
        return s
try:
    _encoding = QtGui.QApplication.UnicodeUTF8
    def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
    def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig)
class Ui_RandomForest(object):
def setupUi(self, RandomForest):
RandomForest.setObjectName(_fromUtf8("RandomForest"))
RandomForest.resize(257, 442)
self.groupBox = QtGui.QGroupBox(RandomForest)
self.groupBox.setGeometry(QtCore.QRect(20, 10, 221, 61))
self.groupBox.setObjectName(_fromUtf8("groupBox"))
self.lineEdit = QtGui.QLineEdit(self.groupBox)
self.lineEdit.setGeometry(QtCore.QRect(40, 20, 141, 20))
self.lineEdit.setObjectName(_fromUtf8("lineEdit"))
self.groupBox_2 = QtGui.QGroupBox(RandomForest)
self.groupBox_2.setGeometry(QtCore.QRect(30, 80, 211, 251))
self.groupBox_2.setObjectName(_fromUtf8("groupBox_2"))
self.label = QtGui.QLabel(self.groupBox_2)
self.label.setGeometry(QtCore.QRect(30, 20, 111, 16))
self.label.setObjectName(_fromUtf8("label"))
self.spinBox = QtGui.QSpinBox(self.groupBox_2)
self.spinBox.setGeometry(QtCore.QRect(150, 20, 42, 22))
self.spinBox.setObjectName(_fromUtf8("spinBox"))
self.spinBox.valueChanged.connect(self.notree)
self.spinBox_2 = QtGui.QSpinBox(self.groupBox_2)
self.spinBox_2.setGeometry(QtCore.QRect(150, 50, 42, 22))
self.spinBox_2.setObjectName(_fromUtf8("spinBox_2"))
self.spinBox_2.valueChanged.connect(self.sample)
self.label_2 = QtGui.QLabel(self.groupBox_2)
self.label_2.setGeometry(QtCore.QRect(30, 50, 111, 16))
self.label_2.setObjectName(_fromUtf8("label_2"))
self.comboBox_2 = QtGui.QComboBox(self.groupBox_2)
self.comboBox_2.setGeometry(QtCore.QRect(20, 100, 161, 22))
self.comboBox_2.setObjectName(_fromUtf8("comboBox_2"))
self.comboBox_2.addItem(_fromUtf8(""))
self.comboBox_2.addItem(_fromUtf8(""))
self.comboBox_2.addItem(_fromUtf8(""))
self.comboBox_2.activated[str].connect(self.grow)
self.label_3 = QtGui.QLabel(self.groupBox_2)
self.label_3.setGeometry(QtCore.QRect(60, 80, 111, 16))
self.label_3.setObjectName(_fromUtf8("label_3"))
self.label_4 = QtGui.QLabel(self.groupBox_2)
self.label_4.setGeometry(QtCore.QRect(60, 130, 111, 16))
self.label_4.setObjectName(_fromUtf8("label_4"))
self.comboBox_3 = QtGui.QComboBox(self.groupBox_2)
self.comboBox_3.setGeometry(QtCore.QRect(20, 150, 161, 22))
self.comboBox_3.setObjectName(_fromUtf8("comboBox_3"))
self.comboBox_3.addItem(_fromUtf8(""))
self.comboBox_3.addItem(_fromUtf8(""))
self.comboBox_3.activated[str].connect(self.weigh)
self.label_5 = QtGui.QLabel(self.groupBox_2)
self.label_5.setGeometry(QtCore.QRect(60, 180, 111, 16))
self.label_5.setObjectName(_fromUtf8("label_5"))
self.comboBox_4 = QtGui.QComboBox(self.groupBox_2)
self.comboBox_4.setGeometry(QtCore.QRect(20, 200, 161, 22))
self.comboBox_4.setObjectName(_fromUtf8("comboBox_4"))
self.comboBox_4.addItem(_fromUtf8(""))
self.comboBox_4.addItem(_fromUtf8(""))
self.comboBox_4.addItem(_fromUtf8(""))
self.comboBox_4.activated[str].connect(self.ent)
self.pushButton = QtGui.QPushButton(RandomForest)
self.pushButton.setGeometry(QtCore.QRect(40, 340, 161, 23))
self.pushButton.setObjectName(_fromUtf8("pushButton"))
self.pushButton.clicked.connect(self.takeinput)
self.pushButton_2 = QtGui.QPushButton(RandomForest)
self.pushButton_2.setGeometry(QtCore.QRect(40, 380, 161, 23))
self.pushButton_2.setObjectName(_fromUtf8("pushButton_2"))
self.pushButton_2.clicked.connect(self.taketest)
self.pushButton_3 = QtGui.QPushButton(RandomForest)
self.pushButton_3.setGeometry(QtCore.QRect(40, 410, 161, 23))
self.pushButton_3.setObjectName(_fromUtf8("pushButton_3"))
self.pushButton_3.clicked.connect(self.startrft)
self.retranslateUi(RandomForest)
QtCore.QMetaObject.connectSlotsByName(RandomForest)
def takeinput(self):
self.fname = QtGui.QFileDialog.getOpenFileName(None, 'Open file', 'C:')
print type(self.fname)
def taketest(self):
self.tfname = QtGui.QFileDialog.getOpenFileName(None, 'Open file', 'C:')
print (self.tfname)
def startrft(self):
print 'Testing forest...'
cdata2 = Data(str(self.fname))
cda=Data(str(self.tfname))
cdata2test=list(cda)
print self.no,self.samp
forest = Forest(
data=cdata2,
size=int(self.no), # Grow 10 trees.
sample_ratio=int(self.samp)/10.0, # Train each tree on 80% of all records.
grow_method=GROW_AUTO_INCREMENTAL, # Incrementally grow each tree.
weighting_method=Forest.mean_oob_mae_weight,
#weighting_method=str(self.wei),
tree_kwargs=dict(metric=ENTROPY2),
)
mae = None
for _ in xrange(10):
for row in cdata2test:
#print row
forest.train(row)
mae = forest.test(cdata2test)
print 'Forest MAE:',mae.mean
#if mae.mean==1.0:
#print "WellDone!!!"
from pprint import pprint
trees = list(forest.trees)
trees.sort(key=lambda t:t.out_of_bag_mae.mean)
print 'Best tree:'
pprint(trees[-1].to_dict(), indent=4)
#assertEqual(trees[-1].auto_grow, True)
print "--------- ALL TREES---------------"
for tree in trees:
pprint(tree.to_dict(), indent=4)
print "---done----"
def ent(self,txt):
self.ent=txt
print txt
def grow(self,txt):
self.grw=txt
print txt
def weigh(self,txt):
self.wei=txt
print txt
def notree(self):
self.no=self.spinBox.value()
print self.no
def sample(self):
self.samp=self.spinBox_2.value()
if self.samp>10:
self.samp=10
print self.samp/10.0
def retranslateUi(self, RandomForest):
RandomForest.setWindowTitle(_translate("RandomForest", "Forest Learner", None))
self.groupBox.setTitle(_translate("RandomForest", "Learner/Classifier Name", None))
self.lineEdit.setText(_translate("RandomForest", "Random Forest", None))
self.groupBox_2.setTitle(_translate("RandomForest", "Options", None))
self.label.setText(_translate("RandomForest", "Number of Trees", None))
self.label_2.setText(_translate("RandomForest", "Sample ratio", None))
self.comboBox_2.setItemText(0, _translate("RandomForest", "GROW_RANDOM", None))
self.comboBox_2.setItemText(1, _translate("RandomForest", "GROW_AUTO_INCREMENTAL", None))
self.comboBox_2.setItemText(2, _translate("RandomForest", "GROW_AUTO_MINI_BATCH", None))
self.label_3.setText(_translate("RandomForest", "Grow Method", None))
self.label_4.setText(_translate("RandomForest", "weighting_method", None))
self.comboBox_3.setItemText(0, _translate("RandomForest", "mean_oob_mae_weight", None))
self.comboBox_3.setItemText(1, _translate("RandomForest", "best_oob_mae_weight", None))
self.label_5.setText(_translate("RandomForest", "Entropy metric", None))
self.comboBox_4.setItemText(0, _translate("RandomForest", "ENTROPY1", None))
self.comboBox_4.setItemText(1, _translate("RandomForest", "ENTROPY2", None))
self.comboBox_4.setItemText(2, _translate("RandomForest", "ENTROPY3", None))
self.pushButton.setText(_translate("RandomForest", "Train File", None))
self.pushButton_2.setText(_translate("RandomForest", "Test File", None))
self.pushButton_3.setText(_translate("RandomForest", "Start", None))
if __name__ == "__main__":
    # Standalone launcher: show the forest-configuration dialog.
    import sys
    qt_app = QtGui.QApplication(sys.argv)
    dialog = QtGui.QDialog()
    form = Ui_RandomForest()
    form.setupUi(dialog)
    dialog.show()
    sys.exit(qt_app.exec_())
| gpl-3.0 |
simonwydooghe/ansible | lib/ansible/modules/net_tools/ldap/_ldap_attr.py | 11 | 8838 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2016, Peter Sagerson <psagers@ignorare.net>
# Copyright: (c) 2016, Jiri Tyr <jiri.tyr@gmail.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'status': ['deprecated'],
'supported_by': 'community'
}
DOCUMENTATION = r'''
---
module: ldap_attr
short_description: Add or remove LDAP attribute values
description:
- Add or remove LDAP attribute values.
notes:
- This only deals with attributes on existing entries. To add or remove
whole entries, see M(ldap_entry).
- The default authentication settings will attempt to use a SASL EXTERNAL
bind over a UNIX domain socket. This works well with the default Ubuntu
install for example, which includes a cn=peercred,cn=external,cn=auth ACL
rule allowing root to modify the server configuration. If you need to use
a simple bind to access your server, pass the credentials in I(bind_dn)
and I(bind_pw).
- For I(state=present) and I(state=absent), all value comparisons are
performed on the server for maximum accuracy. For I(state=exact), values
have to be compared in Python, which obviously ignores LDAP matching
rules. This should work out in most cases, but it is theoretically
possible to see spurious changes when target and actual values are
semantically identical but lexically distinct.
version_added: '2.3'
deprecated:
removed_in: '2.14'
why: 'The current "ldap_attr" module does not support LDAP attribute insertions or deletions with objectClass dependencies.'
alternative: 'Use M(ldap_attrs) instead. Deprecated in 2.10.'
author:
- Jiri Tyr (@jtyr)
requirements:
- python-ldap
options:
name:
description:
- The name of the attribute to modify.
type: str
required: true
state:
description:
- The state of the attribute values.
- If C(present), all given values will be added if they're missing.
- If C(absent), all given values will be removed if present.
- If C(exact), the set of values will be forced to exactly those provided and no others.
- If I(state=exact) and I(value) is an empty list, all values for this attribute will be removed.
type: str
choices: [ absent, exact, present ]
default: present
values:
description:
- The value(s) to add or remove. This can be a string or a list of
strings. The complex argument format is required in order to pass
a list of strings (see examples).
type: raw
required: true
params:
description:
- Additional module parameters.
type: dict
extends_documentation_fragment:
- ldap.documentation
'''
EXAMPLES = r'''
- name: Configure directory number 1 for example.com
ldap_attr:
dn: olcDatabase={1}hdb,cn=config
name: olcSuffix
values: dc=example,dc=com
state: exact
# The complex argument format is required here to pass a list of ACL strings.
- name: Set up the ACL
ldap_attr:
dn: olcDatabase={1}hdb,cn=config
name: olcAccess
values:
- >-
{0}to attrs=userPassword,shadowLastChange
by self write
by anonymous auth
by dn="cn=admin,dc=example,dc=com" write
        by * none
- >-
{1}to dn.base="dc=example,dc=com"
by dn="cn=admin,dc=example,dc=com" write
by * read
state: exact
- name: Declare some indexes
ldap_attr:
dn: olcDatabase={1}hdb,cn=config
name: olcDbIndex
values: "{{ item }}"
with_items:
- objectClass eq
- uid eq
- name: Set up a root user, which we can use later to bootstrap the directory
ldap_attr:
dn: olcDatabase={1}hdb,cn=config
name: "{{ item.key }}"
values: "{{ item.value }}"
state: exact
with_dict:
olcRootDN: cn=root,dc=example,dc=com
olcRootPW: "{SSHA}tabyipcHzhwESzRaGA7oQ/SDoBZQOGND"
- name: Get rid of an unneeded attribute
ldap_attr:
dn: uid=jdoe,ou=people,dc=example,dc=com
name: shadowExpire
values: []
state: exact
server_uri: ldap://localhost/
bind_dn: cn=admin,dc=example,dc=com
bind_pw: password
#
# The same as in the previous example but with the authentication details
# stored in the ldap_auth variable:
#
# ldap_auth:
# server_uri: ldap://localhost/
# bind_dn: cn=admin,dc=example,dc=com
# bind_pw: password
- name: Get rid of an unneeded attribute
ldap_attr:
dn: uid=jdoe,ou=people,dc=example,dc=com
name: shadowExpire
values: []
state: exact
params: "{{ ldap_auth }}"
'''
RETURN = r'''
modlist:
description: list of modified parameters
returned: success
type: list
sample: '[[2, "olcRootDN", ["cn=root,dc=example,dc=com"]]]'
'''
import traceback
from ansible.module_utils.basic import AnsibleModule, missing_required_lib
from ansible.module_utils._text import to_native, to_bytes
from ansible.module_utils.ldap import LdapGeneric, gen_specs
LDAP_IMP_ERR = None
try:
import ldap
HAS_LDAP = True
except ImportError:
LDAP_IMP_ERR = traceback.format_exc()
HAS_LDAP = False
class LdapAttr(LdapGeneric):
    """Compute python-ldap modlists to add, remove, or force the exact set of
    values of a single LDAP attribute."""

    def __init__(self, module):
        LdapGeneric.__init__(self, module)
        # Shortcuts
        self.name = self.module.params['name']
        self.state = self.module.params['state']
        # Normalize values into a list of byte strings (python-ldap expects bytes)
        raw_values = self.module.params['values']
        if isinstance(raw_values, list):
            self.values = [to_bytes(item) for item in raw_values]
        else:
            self.values = [to_bytes(raw_values)]

    def add(self):
        """Return a modlist adding only the values not already present."""
        missing = [v for v in self.values if self._is_value_absent(v)]
        if not missing:
            return []
        return [(ldap.MOD_ADD, self.name, missing)]

    def delete(self):
        """Return a modlist removing only the values currently present."""
        present = [v for v in self.values if self._is_value_present(v)]
        if not present:
            return []
        return [(ldap.MOD_DELETE, self.name, present)]

    def exact(self):
        """Return a modlist forcing the attribute to exactly self.values."""
        try:
            results = self.connection.search_s(
                self.dn, ldap.SCOPE_BASE, attrlist=[self.name])
        except ldap.LDAPError as e:
            self.fail("Cannot search for attribute %s" % self.name, e)
        current = results[0][1].get(self.name, [])
        if frozenset(self.values) == frozenset(current):
            return []
        if not current:
            # Attribute absent: add everything.
            return [(ldap.MOD_ADD, self.name, self.values)]
        if not self.values:
            # Empty target set: drop the attribute entirely.
            return [(ldap.MOD_DELETE, self.name, None)]
        return [(ldap.MOD_REPLACE, self.name, self.values)]

    def _is_value_present(self, value):
        """ True if the target attribute has the given value. """
        try:
            return bool(self.connection.compare_s(self.dn, self.name, value))
        except ldap.NO_SUCH_ATTRIBUTE:
            return False

    def _is_value_absent(self, value):
        """ True if the target attribute doesn't have the given value. """
        return not self._is_value_present(value)
def main():
    """Module entry point: compute the modlist for the requested state and
    apply it (unless running in check mode)."""
    module = AnsibleModule(
        argument_spec=gen_specs(
            name=dict(type='str', required=True),
            params=dict(type='dict'),
            state=dict(type='str', default='present', choices=['absent', 'exact', 'present']),
            values=dict(type='raw', required=True),
        ),
        supports_check_mode=True,
    )

    if not HAS_LDAP:
        module.fail_json(msg=missing_required_lib('python-ldap'),
                         exception=LDAP_IMP_ERR)

    # Update module parameters with user's parameters if defined
    if 'params' in module.params and isinstance(module.params['params'], dict):
        module.params.update(module.params['params'])
    # Remove the params
    module.params.pop('params', None)

    # Instantiate the LdapAttr object. Renamed from ``ldap`` to avoid
    # shadowing the imported python-ldap module.
    ldap_attr = LdapAttr(module)

    state = module.params['state']

    # Perform action
    if state == 'present':
        modlist = ldap_attr.add()
    elif state == 'absent':
        modlist = ldap_attr.delete()
    elif state == 'exact':
        modlist = ldap_attr.exact()

    changed = False

    if len(modlist) > 0:
        changed = True

        if not module.check_mode:
            try:
                ldap_attr.connection.modify_s(ldap_attr.dn, modlist)
            except Exception as e:
                module.fail_json(msg="Attribute action failed.", details=to_native(e))

    module.exit_json(changed=changed, modlist=modlist)


if __name__ == '__main__':
    main()
| gpl-3.0 |
disigma/depot_tools | third_party/logilab/common/configuration.py | 85 | 42160 | # copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
# This file is part of logilab-common.
#
# logilab-common is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 2.1 of the License, or (at your option) any
# later version.
#
# logilab-common is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License along
# with logilab-common. If not, see <http://www.gnu.org/licenses/>.
"""Classes to handle advanced configuration in simple to complex applications.
Allows to load the configuration from a file or from command line
options, to generate a sample configuration file or to display
program's usage. Fills the gap between optik/optparse and ConfigParser
by adding data types (which are also available as a standalone optik
extension in the `optik_ext` module).
Quick start: simplest usage
---------------------------
.. python ::
>>> import sys
>>> from logilab.common.configuration import Configuration
>>> options = [('dothis', {'type':'yn', 'default': True, 'metavar': '<y or n>'}),
... ('value', {'type': 'string', 'metavar': '<string>'}),
... ('multiple', {'type': 'csv', 'default': ('yop',),
... 'metavar': '<comma separated values>',
... 'help': 'you can also document the option'}),
... ('number', {'type': 'int', 'default':2, 'metavar':'<int>'}),
... ]
>>> config = Configuration(options=options, name='My config')
>>> print config['dothis']
True
>>> print config['value']
None
>>> print config['multiple']
('yop',)
>>> print config['number']
2
>>> print config.help()
Usage: [options]
Options:
-h, --help show this help message and exit
--dothis=<y or n>
--value=<string>
--multiple=<comma separated values>
you can also document the option [current: none]
--number=<int>
>>> f = open('myconfig.ini', 'w')
>>> f.write('''[MY CONFIG]
... number = 3
... dothis = no
... multiple = 1,2,3
... ''')
>>> f.close()
>>> config.load_file_configuration('myconfig.ini')
>>> print config['dothis']
False
>>> print config['value']
None
>>> print config['multiple']
['1', '2', '3']
>>> print config['number']
3
>>> sys.argv = ['mon prog', '--value', 'bacon', '--multiple', '4,5,6',
... 'nonoptionargument']
>>> print config.load_command_line_configuration()
['nonoptionargument']
>>> print config['value']
bacon
>>> config.generate_config()
# class for simple configurations which don't need the
# manager / providers model and prefer delegation to inheritance
#
# configuration values are accessible through a dict like interface
#
[MY CONFIG]
dothis=no
value=bacon
# you can also document the option
multiple=4,5,6
number=3
Note : starting with Python 2.7 ConfigParser is able to take into
account the order of occurrences of the options into a file (by
using an OrderedDict). If you have two options changing some common
state, like a 'disable-all-stuff' and a 'enable-some-stuff-a', their
order of appearance will be significant : the last specified in the
file wins. For earlier version of python and logilab.common newer
than 0.61 the behaviour is unspecified.
"""
from __future__ import print_function
__docformat__ = "restructuredtext en"
__all__ = ('OptionsManagerMixIn', 'OptionsProviderMixIn',
'ConfigurationMixIn', 'Configuration',
'OptionsManager2ConfigurationAdapter')
import os
import sys
import re
from os.path import exists, expanduser
from copy import copy
from warnings import warn
from six import integer_types, string_types
from six.moves import range, configparser as cp, input
from logilab.common.compat import str_encode as _encode
from logilab.common.deprecation import deprecated
from logilab.common.textutils import normalize_text, unquote
from logilab.common import optik_ext
OptionError = optik_ext.OptionError
REQUIRED = []
class UnsupportedAction(Exception):
    """Raised by set_option for actions it does not know how to handle."""
def _get_encoding(encoding, stream):
encoding = encoding or getattr(stream, 'encoding', None)
if not encoding:
import locale
encoding = locale.getpreferredencoding()
return encoding
# validation functions ########################################################
# validators will return the validated value or raise optparse.OptionValueError
# XXX add to documentation
def choice_validator(optdict, name, value):
    """validate and return a converted value for option of type 'choice'
    """
    if value in optdict['choices']:
        return value
    msg = "option %s: invalid value: %r, should be in %s"
    raise optik_ext.OptionValueError(msg % (name, value, optdict['choices']))
def multiple_choice_validator(optdict, name, value):
    """validate and return a converted value for option of type 'choice'
    """
    allowed = optdict['choices']
    parsed = optik_ext.check_csv(None, name, value)
    for item in parsed:
        if item not in allowed:
            msg = "option %s: invalid value: %r, should be in %s"
            raise optik_ext.OptionValueError(msg % (name, item, allowed))
    return parsed
# Thin wrappers delegating to the optik_ext checkers; each returns the
# validated/converted value or raises optparse.OptionValueError.
def csv_validator(optdict, name, value):
    """validate and return a converted value for option of type 'csv'
    """
    return optik_ext.check_csv(None, name, value)
def yn_validator(optdict, name, value):
    """validate and return a converted value for option of type 'yn'
    """
    return optik_ext.check_yn(None, name, value)
def named_validator(optdict, name, value):
    """validate and return a converted value for option of type 'named'
    """
    return optik_ext.check_named(None, name, value)
def file_validator(optdict, name, value):
    """validate and return a filepath for option of type 'file'"""
    return optik_ext.check_file(None, name, value)
def color_validator(optdict, name, value):
    """validate and return a valid color for option of type 'color'"""
    return optik_ext.check_color(None, name, value)
def password_validator(optdict, name, value):
    """validate and return a string for option of type 'password'"""
    return optik_ext.check_password(None, name, value)
def date_validator(optdict, name, value):
    """validate and return a mx DateTime object for option of type 'date'"""
    return optik_ext.check_date(None, name, value)
def time_validator(optdict, name, value):
    """validate and return a time object for option of type 'time'"""
    return optik_ext.check_time(None, name, value)
def bytes_validator(optdict, name, value):
    """validate and return an integer for option of type 'bytes'"""
    return optik_ext.check_bytes(None, name, value)
# Maps option 'type' names to their validator. Note that 'string'/'font' use
# plain unquote, 'regexp' compiles directly, and 'bool' is an alias of 'yn'.
VALIDATORS = {'string': unquote,
              'int': int,
              'float': float,
              'file': file_validator,
              'font': unquote,
              'color': color_validator,
              'regexp': re.compile,
              'csv': csv_validator,
              'yn': yn_validator,
              'bool': yn_validator,
              'named': named_validator,
              'password': password_validator,
              'date': date_validator,
              'time': time_validator,
              'bytes': bytes_validator,
              'choice': choice_validator,
              'multiple_choice': multiple_choice_validator,
              }
def _call_validator(opttype, optdict, option, value):
    """Dispatch *value* through the VALIDATORS entry for *opttype*.

    Validators are tried first with the (optdict, option, value) signature;
    if that raises TypeError, the single-argument form is tried. Any failure
    of the fallback (other than an explicit OptionValueError) is reported as
    an OptionValueError mentioning the expected type.
    """
    if opttype not in VALIDATORS:
        raise Exception('Unsupported type "%s"' % opttype)
    try:
        return VALIDATORS[opttype](optdict, option, value)
    except TypeError:
        try:
            return VALIDATORS[opttype](value)
        except optik_ext.OptionValueError:
            raise
        except Exception:
            # Narrowed from a bare ``except:`` so KeyboardInterrupt and
            # SystemExit are no longer swallowed and converted into
            # option errors.
            raise optik_ext.OptionValueError('%s value (%r) should be of type %s' %
                                             (option, value, opttype))
# user input functions ########################################################
# user input functions will ask the user for input on stdin then validate
# the result and return the validated value or raise optparse.OptionValueError
# XXX add to documentation
def input_password(optdict, question='password:'):
    """Prompt twice for a password on stdin and return it once both entries
    match (loops until they do)."""
    from getpass import getpass
    while True:
        value = getpass(question)
        value2 = getpass('confirm: ')
        if value == value2:
            return value
        print('password mismatch, try again')
def input_string(optdict, question):
    """Prompt for a free-form string; empty input yields None."""
    value = input(question).strip()
    return value or None
def _make_input_function(opttype):
    """Build an interactive prompt function that validates input as *opttype*,
    re-asking until the value validates; empty input yields None."""
    def input_validator(optdict, question):
        while True:
            value = input(question)
            if not value.strip():
                return None
            try:
                return _call_validator(opttype, optdict, None, value)
            except optik_ext.OptionValueError as ex:
                msg = str(ex).split(':', 1)[-1].strip()
                print('bad value: %s' % msg)
    return input_validator
# Interactive input functions per option type; string/password have bespoke
# implementations, every other validator type gets a generated prompt loop.
INPUT_FUNCTIONS = {
    'string': input_string,
    'password': input_password,
    }
for opttype in VALIDATORS.keys():
    INPUT_FUNCTIONS.setdefault(opttype, _make_input_function(opttype))
# utility functions ############################################################
def expand_default(self, option):
    """monkey patch OptionParser.expand_default since we have a particular
    way to handle defaults to avoid overriding values in the configuration
    file

    Replaces the ``%default`` tag in an option's help string with the
    *current* value held by the option's provider (not optparse's static
    default), falling back to NO_DEFAULT_VALUE when none is set.
    """
    if self.parser is None or not self.default_tag:
        return option.help
    # option name without the leading '--'
    optname = option._long_opts[0][2:]
    try:
        provider = self.parser.options_manager._all_options[optname]
    except KeyError:
        value = None
    else:
        optdict = provider.get_option_def(optname)
        optname = provider.option_attrname(optname, optdict)
        # current value if set on the provider's config, else the option def
        value = getattr(provider.config, optname, optdict)
        value = format_option_value(optdict, value)
    if value is optik_ext.NO_DEFAULT or not value:
        value = self.NO_DEFAULT_VALUE
    return option.help.replace(self.default_tag, str(value))
def _validate(value, optdict, name=''):
    """return a validated value for an option according to its type

    optional argument name is only used for error message formatting
    """
    if 'type' not in optdict:
        # FIXME: options without a declared type are accepted unchanged
        return value
    return _call_validator(optdict['type'], optdict, name, value)

# backward-compatibility alias
convert = deprecated('[0.60] convert() was renamed _validate()')(_validate)
# format and output functions ##################################################
def comment(string):
    """return string as a comment"""
    prefix = '# '
    joiner = '%s%s' % (os.linesep, prefix)
    return prefix + joiner.join(line.strip() for line in string.splitlines())
def format_time(value):
    """Render a duration in seconds as a compact string, picking the largest
    unit (s/min/h/d) that divides it evenly; fractional values keep 2 decimals."""
    if not value:
        return '0'
    if value != int(value):
        return '%.2fs' % value
    seconds = int(value)
    minutes, leftover_seconds = divmod(seconds, 60)
    if leftover_seconds:
        return '%ss' % seconds
    hours, leftover_minutes = divmod(minutes, 60)
    if leftover_minutes:
        return '%smin' % minutes
    days, leftover_hours = divmod(hours, 24)
    if leftover_hours:
        return '%sh' % hours
    return '%sd' % days
def format_bytes(value):
    """Render a byte count as a compact string, picking the largest binary
    unit (B/KB/MB/GB/TB) that divides it evenly; fractional values keep
    2 decimals."""
    if not value:
        return '0'
    if value != int(value):
        return '%.2fB' % value
    value = int(value)
    prevunit = 'B'
    for unit in ('KB', 'MB', 'GB', 'TB'):
        # renamed from ``next``, which shadowed the builtin
        quotient, remainder = divmod(value, 1024)
        if remainder:
            return '%s%s' % (value, prevunit)
        prevunit = unit
        value = quotient
    return '%s%s' % (value, unit)
def format_option_value(optdict, value):
"""return the user input's value from a 'compiled' value"""
if isinstance(value, (list, tuple)):
value = ','.join(value)
elif isinstance(value, dict):
value = ','.join(['%s:%s' % (k, v) for k, v in value.items()])
elif hasattr(value, 'match'): # optdict.get('type') == 'regexp'
# compiled regexp
value = value.pattern
elif optdict.get('type') == 'yn':
value = value and 'yes' or 'no'
elif isinstance(value, string_types) and value.isspace():
value = "'%s'" % value
elif optdict.get('type') == 'time' and isinstance(value, (float, int, long)):
value = format_time(value)
elif optdict.get('type') == 'bytes' and hasattr(value, '__int__'):
value = format_bytes(value)
return value
def ini_format_section(stream, section, options, encoding=None, doc=None):
    """Write one INI section (optional doc comment, header, then options)
    to *stream*."""
    encoding = _get_encoding(encoding, stream)
    if doc:
        section_comment = _encode(comment(doc), encoding)
        print(section_comment, file=stream)
    print('[%s]' % section, file=stream)
    ini_format(stream, options, encoding)
def ini_format(stream, options, encoding):
    """Write (name, optdict, value) triples in INI syntax; unset options are
    emitted as commented-out '#name=' lines."""
    for optname, optdict, value in options:
        value = format_option_value(optdict, value)
        help_text = optdict.get('help')
        # blank separator line, then the wrapped help comment if any
        print(file=stream)
        if help_text:
            wrapped = normalize_text(help_text, line_len=79, indent='# ')
            print(_encode(wrapped, encoding), file=stream)
        if value is None:
            print('#%s=' % optname, file=stream)
        else:
            rendered = _encode(value, encoding).strip()
            print('%s=%s' % (optname, rendered), file=stream)

format_section = ini_format_section
def rest_format_section(stream, section, options, encoding=None, doc=None):
    """format an options section using as ReST formatted output"""
    encoding = _get_encoding(encoding, stream)
    if section:
        underline = "'" * len(section)
        print('%s\n%s' % (section, underline), file=stream)
    if doc:
        print(_encode(normalize_text(doc, line_len=79, indent=''), encoding), file=stream)
        print(file=stream)
    for optname, optdict, value in options:
        print(':%s:' % optname, file=stream)
        help_text = optdict.get('help')
        if help_text:
            wrapped = normalize_text(help_text, line_len=79, indent=' ')
            print(_encode(wrapped, encoding), file=stream)
        if value:
            rendered = _encode(format_option_value(optdict, value), encoding)
            print(file=stream)
            print(' Default: ``%s``' % rendered.replace("`` ", "```` ``"), file=stream)
# Options Manager ##############################################################
class OptionsManagerMixIn(object):
"""MixIn to handle a configuration from both a configuration file and
command line options
"""
    def __init__(self, usage, config_file=None, version=None, quiet=0):
        """Set up the parsers and provider bookkeeping.

        :param usage: usage string for the command line parser
        :param config_file: optional path of the configuration file
        :param version: optional version string (enables --version)
        :param quiet: verbosity flag; non-zero silences informational output
        """
        self.config_file = config_file
        self.reset_parsers(usage, version=version)
        # list of registered options providers
        self.options_providers = []
        # dictionary associating option name to checker
        self._all_options = {}
        self._short_options = {}
        self._nocallback_options = {}
        self._mygroups = dict()
        # verbosity
        self.quiet = quiet
        # highest option "level" registered so far; used to filter help output
        self._maxlevel = 0
    def reset_parsers(self, usage='', version=None):
        """Recreate the config-file and command-line parsers from scratch."""
        # configuration file parser
        self.cfgfile_parser = cp.ConfigParser()
        # command line parser
        self.cmdline_parser = optik_ext.OptionParser(usage=usage, version=version)
        self.cmdline_parser.options_manager = self
        # attribute names optik understands; used to strip custom keys before
        # handing an option definition to optik (see optik_option())
        self._optik_option_attrs = set(self.cmdline_parser.option_class.ATTRS)
    def register_options_provider(self, provider, own_group=True):
        """register an options provider

        Providers are kept ordered by descending priority (priority must be
        <= 0).  Options without an explicit 'group' either go into a group
        named after the provider (own_group=True) or straight into the
        top-level parser; explicitly grouped options are registered group
        by group afterwards.
        """
        assert provider.priority <= 0, "provider's priority can't be >= 0"
        # insert the provider before the first one with a strictly lower priority
        for i in range(len(self.options_providers)):
            if provider.priority > self.options_providers[i].priority:
                self.options_providers.insert(i, provider)
                break
        else:
            self.options_providers.append(provider)
        non_group_spec_options = [option for option in provider.options
                                  if 'group' not in option[1]]
        groups = getattr(provider, 'option_groups', ())
        if own_group and non_group_spec_options:
            self.add_option_group(provider.name.upper(), provider.__doc__,
                                  non_group_spec_options, provider)
        else:
            for opt, optdict in non_group_spec_options:
                self.add_optik_option(provider, self.cmdline_parser, opt, optdict)
        for gname, gdoc in groups:
            gname = gname.upper()
            goptions = [option for option in provider.options
                        if option[1].get('group', '').upper() == gname]
            self.add_option_group(gname, gdoc, goptions, provider)
    def add_option_group(self, group_name, doc, options, provider):
        """add an option group including the listed options

        Reuses an existing optik group of the same name if one was already
        created; otherwise creates the group and the matching config-file
        section.
        """
        assert options
        # add option group to the command line parser
        if group_name in self._mygroups:
            group = self._mygroups[group_name]
        else:
            group = optik_ext.OptionGroup(self.cmdline_parser,
                                          title=group_name.capitalize())
            self.cmdline_parser.add_option_group(group)
            group.level = provider.level
            self._mygroups[group_name] = group
            # add section to the config file
            if group_name != "DEFAULT":
                self.cfgfile_parser.add_section(group_name)
        # add provider's specific options
        for opt, optdict in options:
            self.add_optik_option(provider, group, opt, optdict)
    def add_optik_option(self, provider, optikcontainer, opt, optdict):
        """Register a single option on an optik container (parser or group)
        and record its provider and level."""
        if 'inputlevel' in optdict:
            # 'inputlevel' was renamed 'level' in 0.50; translate and warn
            warn('[0.50] "inputlevel" in option dictionary for %s is deprecated,'
                 ' use "level"' % opt, DeprecationWarning)
            optdict['level'] = optdict.pop('inputlevel')
        args, optdict = self.optik_option(provider, opt, optdict)
        option = optikcontainer.add_option(*args, **optdict)
        self._all_options[opt] = provider
        # track the highest level seen, used to filter help output
        self._maxlevel = max(self._maxlevel, option.level or 0)
    def optik_option(self, provider, opt, optdict):
        """get our personal option definition and return a suitable form for
        use with optik/optparse

        Returns (args, optdict) where args holds the '--long' (and optional
        '-short') strings and optdict only contains keys optik understands.
        """
        optdict = copy(optdict)
        others = {}  # XXX unused
        if 'action' in optdict:
            # option handled directly by optik, no callback involved
            self._nocallback_options[provider] = opt
        else:
            optdict['action'] = 'callback'
            optdict['callback'] = self.cb_set_provider_option
        # default is handled here and *must not* be given to optik if you
        # want the whole machinery to work
        if 'default' in optdict:
            if ('help' in optdict
                and optdict.get('default') is not None
                and not optdict['action'] in ('store_true', 'store_false')):
                optdict['help'] += ' [current: %default]'
            del optdict['default']
        args = ['--' + str(opt)]
        if 'short' in optdict:
            self._short_options[optdict['short']] = opt
            args.append('-' + optdict['short'])
            del optdict['short']
        # cleanup option definition dict before giving it to optik
        for key in list(optdict.keys()):
            if not key in self._optik_option_attrs:
                optdict.pop(key)
        return args, optdict
def cb_set_provider_option(self, option, opt, value, parser):
"""optik callback for option setting"""
if opt.startswith('--'):
# remove -- on long option
opt = opt[2:]
else:
# short option, get its long equivalent
opt = self._short_options[opt[1:]]
# trick since we can't set action='store_true' on options
if value is None:
value = 1
self.global_set_option(opt, value)
    def global_set_option(self, opt, value):
        """set option on the correct option provider"""
        # _all_options maps option name -> provider which declared it;
        # raises KeyError for undeclared options
        self._all_options[opt].set_option(opt, value)
    def generate_config(self, stream=None, skipsections=(), encoding=None):
        """write a configuration file according to the current configuration
        into the given stream or stdout

        :param stream: writable file-like object (default: sys.stdout)
        :param skipsections: section names that must not be emitted
        :param encoding: output encoding; guessed from the stream if None
        """
        options_by_section = {}
        sections = []
        for provider in self.options_providers:
            for section, options in provider.options_by_section():
                if section is None:
                    # ungrouped options go under a section named after
                    # their provider
                    section = provider.name
                if section in skipsections:
                    continue
                # only options with a declared type can be read back from
                # a configuration file, so skip the others
                options = [(n, d, v) for (n, d, v) in options
                           if d.get('type') is not None]
                if not options:
                    continue
                if not section in sections:
                    sections.append(section)
                alloptions = options_by_section.setdefault(section, [])
                alloptions += options
        stream = stream or sys.stdout
        encoding = _get_encoding(encoding, stream)
        printed = False
        for section in sections:
            if printed:
                # blank line between consecutive sections
                print('\n', file=stream)
            format_section(stream, section.upper(), options_by_section[section],
                           encoding)
            printed = True
    def generate_manpage(self, pkginfo, section=1, stream=None):
        """write a man page for the current configuration into the given
        stream or stdout

        :param pkginfo: object carrying distribution metadata for the header
        :param section: man page section number (1 = user commands)
        """
        # expand_default must be patched so %default placeholders in help
        # strings are resolved from our providers rather than by optparse
        self._monkeypatch_expand_default()
        try:
            optik_ext.generate_manpage(self.cmdline_parser, pkginfo,
                                       section, stream=stream or sys.stdout,
                                       level=self._maxlevel)
        finally:
            self._unmonkeypatch_expand_default()
# initialization methods ##################################################
    def load_provider_defaults(self):
        """initialize configuration using each provider's default values"""
        for provider in self.options_providers:
            provider.load_defaults()
    def load_file_configuration(self, config_file=None):
        """load the configuration from file

        convenience wrapper: read the file, then dispatch its values to the
        registered options providers
        """
        self.read_config_file(config_file)
        self.load_config_file()
    def read_config_file(self, config_file=None):
        """read the configuration file but do not load it (i.e. dispatching
        values to each options provider)

        As a side effect, lazily registers one --long-help,
        --long-long-help, ... option per verbosity level reached by the
        registered options.
        """
        helplevel = 1
        while helplevel <= self._maxlevel:
            opt = '-'.join(['long'] * helplevel) + '-help'
            if opt in self._all_options:
                break # already processed
            # helplevel is bound as a default argument so each closure
            # keeps its own level value
            def helpfunc(option, opt, val, p, level=helplevel):
                print(self.help(level))
                sys.exit(0)
            helpmsg = '%s verbose help.' % ' '.join(['more'] * helplevel)
            optdict = {'action' : 'callback', 'callback' : helpfunc,
                       'help' : helpmsg}
            provider = self.options_providers[0]
            self.add_optik_option(provider, self.cmdline_parser, opt, optdict)
            provider.options += ( (opt, optdict), )
            helplevel += 1
        if config_file is None:
            config_file = self.config_file
        if config_file is not None:
            config_file = expanduser(config_file)
        if config_file and exists(config_file):
            parser = self.cfgfile_parser
            parser.read([config_file])
            # normalize sections'title: sections are later looked up in
            # upper case, so alias any lower-case section names
            for sect, values in parser._sections.items():
                if not sect.isupper() and values:
                    parser._sections[sect.upper()] = values
        elif not self.quiet:
            msg = 'No config file found, using default configuration'
            print(msg, file=sys.stderr)
            return
    def input_config(self, onlysection=None, inputlevel=0, stream=None):
        """interactively get configuration values by asking to the user and generate
        a configuration file

        :param onlysection: restrict questions to this section name
        :param inputlevel: skip options whose level is above this value
        :param stream: if given, write the resulting configuration to it
        """
        if onlysection is not None:
            onlysection = onlysection.upper()
        for provider in self.options_providers:
            for section, option, optdict in provider.all_options():
                if onlysection is not None and section != onlysection:
                    continue
                if not 'type' in optdict:
                    # ignore action without type (callback, store_true...)
                    continue
                provider.input_option(option, optdict, inputlevel)
        # now we can generate the configuration file
        if stream is not None:
            self.generate_config(stream)
    def load_config_file(self):
        """dispatch values previously read from a configuration file to each
        options provider
        """
        parser = self.cfgfile_parser
        for section in parser.sections():
            for option, value in parser.items(section):
                try:
                    self.global_set_option(option, value)
                except (KeyError, OptionError):
                    # TODO handle here undeclared options appearing in the config file
                    continue
def load_configuration(self, **kwargs):
"""override configuration according to given parameters
"""
for opt, opt_value in kwargs.items():
opt = opt.replace('_', '-')
provider = self._all_options[opt]
provider.set_option(opt, opt_value)
    def load_command_line_configuration(self, args=None):
        """override configuration according to command line parameters

        return additional arguments

        :param args: argument list (default: sys.argv[1:])
        """
        self._monkeypatch_expand_default()
        try:
            if args is None:
                args = sys.argv[1:]
            else:
                args = list(args)
            (options, args) = self.cmdline_parser.parse_args(args=args)
            # options parsed through the callback machinery were dispatched
            # on the fly; the remaining providers get their values copied
            # from the optparse values object onto their config
            for provider in self._nocallback_options.keys():
                config = provider.config
                for attr in config.__dict__.keys():
                    value = getattr(options, attr, None)
                    if value is None:
                        continue
                    setattr(config, attr, value)
            return args
        finally:
            self._unmonkeypatch_expand_default()
# help methods ############################################################
    def add_help_section(self, title, description, level=0):
        """add a dummy option section for help purpose

        :param level: verbosity level at which the section becomes visible
        """
        group = optik_ext.OptionGroup(self.cmdline_parser,
                                      title=title.capitalize(),
                                      description=description)
        group.level = level
        self._maxlevel = max(self._maxlevel, level)
        self.cmdline_parser.add_option_group(group)
    def _monkeypatch_expand_default(self):
        # monkey patch optik_ext to deal with our default values: the
        # original method is saved so _unmonkeypatch_expand_default can
        # restore it
        try:
            self.__expand_default_backup = optik_ext.HelpFormatter.expand_default
            optik_ext.HelpFormatter.expand_default = expand_default
        except AttributeError:
            # python < 2.4: nothing to be done
            pass
    def _unmonkeypatch_expand_default(self):
        # remove monkey patch installed by _monkeypatch_expand_default
        if hasattr(optik_ext.HelpFormatter, 'expand_default'):
            # unpatch optik_ext to avoid side effects
            optik_ext.HelpFormatter.expand_default = self.__expand_default_backup
    def help(self, level=0):
        """return the usage string for available options

        :param level: verbosity level; options above it are hidden
        """
        self.cmdline_parser.formatter.output_level = level
        self._monkeypatch_expand_default()
        try:
            return self.cmdline_parser.format_help()
        finally:
            self._unmonkeypatch_expand_default()
class Method(object):
    """Late-binding callable wrapper around a method name.

    Lets class-level option definitions reference a method that is only
    resolved against a concrete configuration instance once ``bind`` has
    been called.
    """
    def __init__(self, methname):
        self.method = methname
        self._inst = None
    def bind(self, instance):
        """attach *instance* as the receiver, unless one is already set"""
        if self._inst is not None:
            return
        self._inst = instance
    def __call__(self, *args, **kwargs):
        assert self._inst, 'unbound method'
        bound = getattr(self._inst, self.method)
        return bound(*args, **kwargs)
# Options Provider #############################################################
class OptionsProviderMixIn(object):
    """Mixin to provide options to an OptionsManager

    Subclasses declare their options in the `options` tuple of
    (name, definition dict) pairs; current values are stored as attributes
    of `self.config`.
    """
    # those attributes should be overridden
    priority = -1
    name = 'default'
    options = ()
    level = 0
    def __init__(self):
        self.config = optik_ext.Values()
        for option in self.options:
            try:
                option, optdict = option
            except ValueError:
                raise Exception('Bad option: %r' % option)
            # late-bind Method defaults / callbacks to this instance
            if isinstance(optdict.get('default'), Method):
                optdict['default'].bind(self)
            elif isinstance(optdict.get('callback'), Method):
                optdict['callback'].bind(self)
        self.load_defaults()
    def load_defaults(self):
        """initialize the provider using default values"""
        for opt, optdict in self.options:
            action = optdict.get('action')
            if action != 'callback':
                # callback action have no default
                default = self.option_default(opt, optdict)
                if default is REQUIRED:
                    # no default: the user must supply a value
                    continue
                self.set_option(opt, default, action, optdict)
    def option_default(self, opt, optdict=None):
        """return the default value for an option

        callable defaults (e.g. Method instances) are evaluated here
        """
        if optdict is None:
            optdict = self.get_option_def(opt)
        default = optdict.get('default')
        if callable(default):
            default = default()
        return default
    def option_attrname(self, opt, optdict=None):
        """get the config attribute corresponding to opt
        """
        if optdict is None:
            optdict = self.get_option_def(opt)
        # 'dest' overrides the default dash-to-underscore translation
        return optdict.get('dest', opt.replace('-', '_'))
    option_name = deprecated('[0.60] OptionsProviderMixIn.option_name() was renamed to option_attrname()')(option_attrname)
    def option_value(self, opt):
        """get the current value for the given option"""
        return getattr(self.config, self.option_attrname(opt), None)
    def set_option(self, opt, value, action=None, optdict=None):
        """method called to set an option (registered in the options list)

        :param action: optparse-style action; defaults to the one declared
          in the option definition ('store' if none)
        """
        if optdict is None:
            optdict = self.get_option_def(opt)
        if value is not None:
            value = _validate(value, optdict, opt)
        if action is None:
            action = optdict.get('action', 'store')
        if optdict.get('type') == 'named': # XXX need specific handling
            # merge into the existing mapping instead of replacing it
            optname = self.option_attrname(opt, optdict)
            currentvalue = getattr(self.config, optname, None)
            if currentvalue:
                currentvalue.update(value)
                value = currentvalue
        if action == 'store':
            setattr(self.config, self.option_attrname(opt, optdict), value)
        elif action in ('store_true', 'count'):
            # NOTE(review): initializes the attribute to 0; actual value
            # changes appear to go through the optik callback machinery --
            # confirm before relying on this from new code
            setattr(self.config, self.option_attrname(opt, optdict), 0)
        elif action == 'store_false':
            setattr(self.config, self.option_attrname(opt, optdict), 1)
        elif action == 'append':
            opt = self.option_attrname(opt, optdict)
            _list = getattr(self.config, opt, None)
            if _list is None:
                if isinstance(value, (list, tuple)):
                    _list = value
                elif value is not None:
                    _list = []
                    _list.append(value)
                setattr(self.config, opt, _list)
            elif isinstance(_list, tuple):
                setattr(self.config, opt, _list + (value,))
            else:
                _list.append(value)
        elif action == 'callback':
            optdict['callback'](None, opt, value, None)
        else:
            raise UnsupportedAction(action)
    def input_option(self, option, optdict, inputlevel=99):
        # interactively ask the user for the option's value and store it;
        # skipped when the option's level is above `inputlevel`
        default = self.option_default(option, optdict)
        if default is REQUIRED:
            defaultstr = '(required): '
        elif optdict.get('level', 0) > inputlevel:
            return
        elif optdict['type'] == 'password' or default is None:
            defaultstr = ': '
        else:
            defaultstr = '(default: %s): ' % format_option_value(optdict, default)
        print(':%s:' % option)
        print(optdict.get('help') or option)
        inputfunc = INPUT_FUNCTIONS[optdict['type']]
        value = inputfunc(optdict, defaultstr)
        # keep prompting until required options get a value
        while default is REQUIRED and not value:
            print('please specify a value')
            value = inputfunc(optdict, '%s: ' % option)
        if value is None and default is not None:
            value = default
        self.set_option(option, value, optdict=optdict)
    def get_option_def(self, opt):
        """return the dictionary defining an option given it's name"""
        assert self.options
        for option in self.options:
            if option[0] == opt:
                return option[1]
        raise OptionError('no such option %s in section %r'
                          % (opt, self.name), opt)
    def all_options(self):
        """return an iterator on available options for this provider

        option are actually described by a 3-uple:
        (section, option name, option dictionary)
        """
        for section, options in self.options_by_section():
            if section is None:
                if self.name is None:
                    continue
                section = self.name.upper()
            for option, optiondict, value in options:
                yield section, option, optiondict
    def options_by_section(self):
        """return an iterator on options grouped by section

        (section, [list of (optname, optdict, optvalue)])
        """
        sections = {}
        for optname, optdict in self.options:
            sections.setdefault(optdict.get('group'), []).append(
                (optname, optdict, self.option_value(optname)))
        if None in sections:
            # ungrouped options are yielded first, with a None section
            yield None, sections.pop(None)
        for section, options in sections.items():
            yield section.upper(), options
    def options_and_values(self, options=None):
        """yield (name, definition dict, current value) triples for the
        given options (default: all of this provider's options)
        """
        if options is None:
            options = self.options
        for optname, optdict in options:
            yield (optname, optdict, self.option_value(optname))
# configuration ################################################################
class ConfigurationMixIn(OptionsManagerMixIn, OptionsProviderMixIn):
    """basic mixin for simple configurations which don't need the
    manager / providers model

    The instance acts as its own (single) options provider and exposes a
    dict-like interface over the configuration values.
    """
    def __init__(self, *args, **kwargs):
        if not args:
            kwargs.setdefault('usage', '')
        kwargs.setdefault('quiet', 1)
        OptionsManagerMixIn.__init__(self, *args, **kwargs)
        OptionsProviderMixIn.__init__(self)
        # build option groups from the option definitions unless the class
        # already declared them explicitly
        if not getattr(self, 'option_groups', None):
            self.option_groups = []
            for option, optdict in self.options:
                try:
                    gdef = (optdict['group'].upper(), '')
                except KeyError:
                    # option not assigned to any group
                    continue
                if not gdef in self.option_groups:
                    self.option_groups.append(gdef)
        self.register_options_provider(self, own_group=False)
    def register_options(self, options):
        """add some options to the configuration

        :param options: iterable of (option name, option definition dict)
        """
        # materialize first: `options` may be a one-shot iterator and is
        # consumed twice below
        options = list(options)
        options_by_group = {}
        for optname, optdict in options:
            options_by_group.setdefault(optdict.get('group', self.name.upper()), []).append((optname, optdict))
        # the loop variable must not shadow `options`, else only the last
        # group's options would be appended to self.options below
        for group, group_options in options_by_group.items():
            self.add_option_group(group, None, group_options, self)
        self.options += tuple(options)
    def load_defaults(self):
        OptionsProviderMixIn.load_defaults(self)
    def __iter__(self):
        # dict.iteritems() does not exist on python 3; items() works on
        # both python 2 and 3
        return iter(self.config.__dict__.items())
    def __getitem__(self, key):
        try:
            return getattr(self.config, self.option_attrname(key))
        except (optik_ext.OptionValueError, AttributeError):
            raise KeyError(key)
    def __setitem__(self, key, value):
        self.set_option(key, value)
    def get(self, key, default=None):
        """return the value for `key`, or `default` if the option is unset"""
        try:
            return getattr(self.config, self.option_attrname(key))
        except (OptionError, AttributeError):
            return default
class Configuration(ConfigurationMixIn):
    """class for simple configurations which don't need the
    manager / providers model and prefer delegation to inheritance

    configuration values are accessible through a dict like interface
    """
    def __init__(self, config_file=None, options=None, name=None,
                 usage=None, doc=None, version=None):
        # instance-level overrides for the class attributes expected by
        # the mixins (options, name, docstring used as description)
        if options is not None:
            self.options = options
        if name is not None:
            self.name = name
        if doc is not None:
            self.__doc__ = doc
        super(Configuration, self).__init__(config_file=config_file, usage=usage, version=version)
class OptionsManager2ConfigurationAdapter(object):
    """Adapt an option manager to behave like a
    `logilab.common.configuration.Configuration` instance

    Attribute access is delegated to the wrapped manager; item access
    first resolves the option's provider, then reads or writes the value
    on that provider's config object.
    """
    def __init__(self, provider):
        self.config = provider

    def __getattr__(self, key):
        return getattr(self.config, key)

    def __getitem__(self, key):
        provider = self.config._all_options[key]
        attrname = provider.option_attrname(key)
        try:
            return getattr(provider.config, attrname)
        except AttributeError:
            raise KeyError(key)

    def __setitem__(self, key, value):
        self.config.global_set_option(self.config.option_attrname(key), value)

    def get(self, key, default=None):
        """like __getitem__ but return *default* for unset options"""
        provider = self.config._all_options[key]
        attrname = provider.option_attrname(key)
        try:
            return getattr(provider.config, attrname)
        except AttributeError:
            return default
# other functions ##############################################################
def read_old_config(newconfig, changes, configfile):
    """initialize newconfig from a deprecated configuration file

    possible changes:
    * ('renamed', oldname, newname)
    * ('moved', option, oldgroup, newgroup)
    * ('typechanged', option, oldtype, newvalue)

    NOTE: mutates the caller's `changes` list (it is reversed in place).
    """
    # build an index of changes
    changesindex = {}
    for action in changes:
        if action[0] == 'moved':
            option, oldgroup, newgroup = action[1:]
            changesindex.setdefault(option, []).append((action[0], oldgroup, newgroup))
            continue
        if action[0] == 'renamed':
            oldname, newname = action[1:]
            changesindex.setdefault(newname, []).append((action[0], oldname))
            continue
        if action[0] == 'typechanged':
            option, oldtype, newvalue = action[1:]
            changesindex.setdefault(option, []).append((action[0], oldtype, newvalue))
            continue
        # NOTE(review): checks action[1] while every other branch checks
        # action[0]; looks like the expected entry shape is
        # (option, 'added'/'removed') here -- confirm against callers
        if action[1] in ('added', 'removed'):
            continue # nothing to do here
        raise Exception('unknown change %s' % action[0])
    # build a config object able to read the old config
    options = []
    for optname, optdef in newconfig.options:
        for action in changesindex.pop(optname, ()):
            if action[0] == 'moved':
                oldgroup, newgroup = action[1:]
                optdef = optdef.copy()
                optdef['group'] = oldgroup
            elif action[0] == 'renamed':
                optname = action[1]
            elif action[0] == 'typechanged':
                oldtype = action[1]
                optdef = optdef.copy()
                optdef['type'] = oldtype
        options.append((optname, optdef))
    if changesindex:
        # a change referenced an option unknown to newconfig
        raise Exception('unapplied changes: %s' % changesindex)
    oldconfig = Configuration(options=options, name=newconfig.name)
    # read the old config
    oldconfig.load_file_configuration(configfile)
    # apply values reverting changes
    changes.reverse()
    done = set()
    for action in changes:
        if action[0] == 'renamed':
            oldname, newname = action[1:]
            newconfig[newname] = oldconfig[oldname]
            done.add(newname)
        elif action[0] == 'typechanged':
            optname, oldtype, newvalue = action[1:]
            newconfig[optname] = newvalue
            done.add(optname)
    # copy over every remaining typed option untouched by the changes
    for optname, optdef in newconfig.options:
        if optdef.get('type') and not optname in done:
            newconfig.set_option(optname, oldconfig[optname], optdict=optdef)
def merge_options(options, optgroup=None):
    """Deduplicate *options*, returning a new tuple of (name, dict) pairs.

    The position of each option's *last* occurrence is kept, while option
    dictionaries are merged so that values from earlier occurrences take
    precedence. Dictionaries are copied, so mutating the result never
    touches the caller's definitions. When `optgroup` is given, every kept
    option is assigned to that group (unless a merged-in dictionary
    carries its own 'group' key).
    """
    seen = {}
    merged = []
    for optname, optdict in reversed(list(options)):
        if optname in seen:
            # earlier occurrence of a kept option: its values win
            seen[optname].update(optdict)
            continue
        optdict = optdict.copy()
        seen[optname] = optdict
        if optgroup is not None:
            optdict['group'] = optgroup
        merged.append((optname, optdict))
    merged.reverse()
    return tuple(merged)
| bsd-3-clause |
jagguli/intellij-community | python/helpers/python-skeletons/re.py | 45 | 7418 | """Skeleton for 're' stdlib module."""
def compile(pattern, flags=0):
    """Compile a regular expression pattern, returning a pattern object.

    Skeleton stub for IDE type inference; no runtime behavior.

    :type pattern: bytes | unicode
    :type flags: int
    :rtype: __Regex
    """
    pass
def search(pattern, string, flags=0):
    """Scan through string looking for a match, and return a corresponding
    match instance. Return None if no position in the string matches.

    Skeleton stub for IDE type inference; no runtime behavior.

    :type pattern: bytes | unicode | __Regex
    :type string: T <= bytes | unicode
    :type flags: int
    :rtype: __Match[T] | None
    """
    pass
def match(pattern, string, flags=0):
    """Matches zero or more characters at the beginning of the string.

    Skeleton stub for IDE type inference; no runtime behavior.

    :type pattern: bytes | unicode | __Regex
    :type string: T <= bytes | unicode
    :type flags: int
    :rtype: __Match[T] | None
    """
    pass
def split(pattern, string, maxsplit=0, flags=0):
    """Split string by the occurrences of pattern.

    Skeleton stub for IDE type inference; no runtime behavior.

    :type pattern: bytes | unicode | __Regex
    :type string: T <= bytes | unicode
    :type maxsplit: int
    :type flags: int
    :rtype: list[T]
    """
    pass
def findall(pattern, string, flags=0):
    """Return a list of all non-overlapping matches of pattern in string.

    Skeleton stub for IDE type inference; no runtime behavior.

    :type pattern: bytes | unicode | __Regex
    :type string: T <= bytes | unicode
    :type flags: int
    :rtype: list[T]
    """
    pass
def finditer(pattern, string, flags=0):
    """Return an iterator over all non-overlapping matches for the pattern in
    string. For each match, the iterator returns a match object.

    Skeleton stub for IDE type inference; no runtime behavior.

    :type pattern: bytes | unicode | __Regex
    :type string: T <= bytes | unicode
    :type flags: int
    :rtype: collections.Iterable[__Match[T]]
    """
    pass
def sub(pattern, repl, string, count=0, flags=0):
    """Return the string obtained by replacing the leftmost non-overlapping
    occurrences of pattern in string by the replacement repl.

    Skeleton stub for IDE type inference; no runtime behavior.

    :type pattern: bytes | unicode | __Regex
    :type repl: bytes | unicode | collections.Callable
    :type string: T <= bytes | unicode
    :type count: int
    :type flags: int
    :rtype: T
    """
    pass
def subn(pattern, repl, string, count=0, flags=0):
    """Return the tuple (new_string, number_of_subs_made) found by replacing
    the leftmost non-overlapping occurrences of pattern with the
    replacement repl.

    Skeleton stub for IDE type inference; no runtime behavior.

    :type pattern: bytes | unicode | __Regex
    :type repl: bytes | unicode | collections.Callable
    :type string: T <= bytes | unicode
    :type count: int
    :type flags: int
    :rtype: (T, int)
    """
    pass
def escape(string):
    """Escape all the characters in pattern except ASCII letters and numbers.

    Skeleton stub for IDE type inference; no runtime behavior.

    :type string: T <= bytes | unicode
    :rtype: T
    """
    pass
class __Regex(object):
    """Mock class for a regular expression pattern object.

    Skeleton stand-in for the object returned by re.compile(); exists only
    so IDEs can infer method signatures and return types.
    """

    def __init__(self, flags, groups, groupindex, pattern):
        """Create a new pattern object.

        :type flags: int
        :type groups: int
        :type groupindex: dict[bytes | unicode, int]
        :type pattern: bytes | unicode
        """
        self.flags = flags
        self.groups = groups
        self.groupindex = groupindex
        self.pattern = pattern

    def search(self, string, pos=0, endpos=-1):
        """Scan through string looking for a match, and return a corresponding
        match instance. Return None if no position in the string matches.

        :type string: T <= bytes | unicode
        :type pos: int
        :type endpos: int
        :rtype: __Match[T] | None
        """
        pass

    def match(self, string, pos=0, endpos=-1):
        """Matches zero | more characters at the beginning of the string.

        :type string: T <= bytes | unicode
        :type pos: int
        :type endpos: int
        :rtype: __Match[T] | None
        """
        pass

    def split(self, string, maxsplit=0):
        """Split string by the occurrences of pattern.

        :type string: T <= bytes | unicode
        :type maxsplit: int
        :rtype: list[T]
        """
        pass

    def findall(self, string, pos=0, endpos=-1):
        """Return a list of all non-overlapping matches of pattern in string.

        :type string: T <= bytes | unicode
        :type pos: int
        :type endpos: int
        :rtype: list[T]
        """
        pass

    def finditer(self, string, pos=0, endpos=-1):
        """Return an iterator over all non-overlapping matches for the
        pattern in string. For each match, the iterator returns a
        match object.

        :type string: T <= bytes | unicode
        :type pos: int
        :type endpos: int
        :rtype: collections.Iterable[__Match[T]]
        """
        pass

    def sub(self, repl, string, count=0):
        """Return the string obtained by replacing the leftmost non-overlapping
        occurrences of pattern in string by the replacement repl.

        :type repl: bytes | unicode | collections.Callable
        :type string: T <= bytes | unicode
        :type count: int
        :rtype: T
        """
        pass

    def subn(self, repl, string, count=0):
        """Return the tuple (new_string, number_of_subs_made) found by replacing
        the leftmost non-overlapping occurrences of pattern with the
        replacement repl.

        :type repl: bytes | unicode | collections.Callable
        :type string: T <= bytes | unicode
        :type count: int
        :rtype: (T, int)
        """
        pass
class __Match(object):
    """Mock class for a match object.

    Skeleton stand-in for the object returned by re.match()/re.search();
    exists only so IDEs can infer method signatures and return types.
    """

    def __init__(self, pos, endpos, lastindex, lastgroup, re, string):
        """Create a new match object.

        :type pos: int
        :type endpos: int
        :type lastindex: int | None
        :type lastgroup: int | bytes | unicode | None
        :type re: __Regex
        :type string: bytes | unicode
        :rtype: __Match[T]
        """
        self.pos = pos
        self.endpos = endpos
        self.lastindex = lastindex
        self.lastgroup = lastgroup
        self.re = re
        self.string = string

    def expand(self, template):
        """Return the string obtained by doing backslash substitution on the
        template string template.

        :type template: T
        :rtype: T
        """
        pass

    def group(self, *args):
        """Return one or more subgroups of the match.

        :rtype: T | tuple
        """
        pass

    def groups(self, default=None):
        """Return a tuple containing all the subgroups of the match, from 1 up
        to however many groups are in the pattern.

        :rtype: tuple
        """
        pass

    def groupdict(self, default=None):
        """Return a dictionary containing all the named subgroups of the match,
        keyed by the subgroup name.

        :rtype: dict[bytes | unicode, T]
        """
        pass

    def start(self, group=0):
        """Return the index of the start of the substring matched by group.

        :type group: int | bytes | unicode
        :rtype: int
        """
        pass

    def end(self, group=0):
        """Return the index of the end of the substring matched by group.

        :type group: int | bytes | unicode
        :rtype: int
        """
        pass

    def span(self, group=0):
        """Return a 2-tuple (start, end) for the substring matched by group.

        :type group: int | bytes | unicode
        :rtype: (int, int)
        """
        pass
| apache-2.0 |
honnibal/spaCy | examples/information_extraction/parse_subtrees.py | 1 | 2585 | #!/usr/bin/env python
# coding: utf8
"""This example shows how to navigate the parse tree including subtrees
attached to a word.
Based on issue #252:
"In the documents and tutorials the main thing I haven't found is
examples on how to break sentences down into small sub thoughts/chunks. The
noun_chunks is handy, but having examples on using the token.head to find small
(near-complete) sentence chunks would be neat. Lets take the example sentence:
"displaCy uses CSS and JavaScript to show you how computers understand language"
This sentence has two main parts (XCOMP & CCOMP) according to the breakdown:
[displaCy] uses CSS and Javascript [to + show]
show you how computers understand [language]
I'm assuming that we can use the token.head to build these groups."
Compatible with: spaCy v2.0.0+
Last tested with: v2.1.0
"""
from __future__ import unicode_literals, print_function
import plac
import spacy
@plac.annotations(model=("Model to load", "positional", None, str))
def main(model="en_core_web_sm"):
    """Parse one example sentence and print its xcomp/ccomp subtrees,
    first via token.subtree, then as Span objects built from the left and
    right edges of each clause head.
    """
    nlp = spacy.load(model)
    print("Loaded model '%s'" % model)
    doc = nlp(
        "displaCy uses CSS and JavaScript to show you how computers "
        "understand language"
    )

    # The easiest way is to find the head of the subtree you want, and then use
    # the `.subtree`, `.children`, `.lefts` and `.rights` iterators. `.subtree`
    # is the one that does what you're asking for most directly:
    for word in doc:
        if word.dep_ in ("xcomp", "ccomp"):
            print("".join(w.text_with_ws for w in word.subtree))

    # It'd probably be better for `word.subtree` to return a `Span` object
    # instead of a generator over the tokens. If you want the `Span` you can
    # get it via the `.right_edge` and `.left_edge` properties. The `Span`
    # object is nice because you can easily get a vector, merge it, etc.
    for word in doc:
        if word.dep_ in ("xcomp", "ccomp"):
            subtree_span = doc[word.left_edge.i : word.right_edge.i + 1]
            print(subtree_span.text, "|", subtree_span.root.text)

    # You might also want to select a head, and then select a start and end
    # position by walking along its children. You could then take the
    # `.left_edge` and `.right_edge` of those tokens, and use it to calculate
    # a span.
# allow running as a script: plac builds the CLI from main's annotations
if __name__ == "__main__":
    plac.call(main)
# Expected output:
# to show you how computers understand language
# how computers understand language
# to show you how computers understand language | show
# how computers understand language | understand
| mit |
geary/claslite | web/app/lib/dist/blinker/_saferef.py | 174 | 9223 | # extracted from Louie, http://pylouie.org/
# updated for Python 3
#
# Copyright (c) 2006 Patrick K. O'Brien, Mike C. Fletcher,
# Matthew R. Scott
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
#
# * Neither the name of the <ORGANIZATION> nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
"""Refactored 'safe reference from dispatcher.py"""
import operator
import sys
import traceback
import weakref
try:
    callable
except NameError:
    # Python 3.0/3.1 removed the callable() builtin; provide a fallback
    def callable(object):
        return hasattr(object, '__call__')


# bound-method attribute names differ between Python 2 (im_self/im_func)
# and Python 3 (__self__/__func__); pick the right accessors once
if sys.version_info < (3,):
    get_self = operator.attrgetter('im_self')
    get_func = operator.attrgetter('im_func')
else:
    get_self = operator.attrgetter('__self__')
    get_func = operator.attrgetter('__func__')
def safe_ref(target, on_delete=None):
    """Return a *safe* weak reference to a callable target.

    - ``target``: The object to be weakly referenced, if it's a bound
      method reference, will create a BoundMethodWeakref, otherwise
      creates a simple weakref.

    - ``on_delete``: If provided, will have a hard reference stored to
      the callable to be called after the safe reference goes out of
      scope with the reference object, (either a weakref or a
      BoundMethodWeakref) as argument.
    """
    try:
        # only bound methods carry a __self__/im_self attribute
        im_self = get_self(target)
    except AttributeError:
        # plain callable: an ordinary weakref is sufficient
        if callable(on_delete):
            return weakref.ref(target, on_delete)
        else:
            return weakref.ref(target)
    else:
        if im_self is not None:
            # Turn a bound method into a BoundMethodWeakref instance.
            # Keep track of these instances for lookup by disconnect().
            assert hasattr(target, 'im_func') or hasattr(target, '__func__'), (
                "safe_ref target %r has im_self, but no im_func, "
                "don't know how to create reference" % target)
            reference = BoundMethodWeakref(target=target, on_delete=on_delete)
            return reference
class BoundMethodWeakref(object):
"""'Safe' and reusable weak references to instance methods.
BoundMethodWeakref objects provide a mechanism for referencing a
bound method without requiring that the method object itself
(which is normally a transient object) is kept alive. Instead,
the BoundMethodWeakref object keeps weak references to both the
object and the function which together define the instance method.
Attributes:
- ``key``: The identity key for the reference, calculated by the
class's calculate_key method applied to the target instance method.
- ``deletion_methods``: Sequence of callable objects taking single
argument, a reference to this object which will be called when
*either* the target object or target function is garbage
collected (i.e. when this object becomes invalid). These are
specified as the on_delete parameters of safe_ref calls.
- ``weak_self``: Weak reference to the target object.
- ``weak_func``: Weak reference to the target function.
Class Attributes:
- ``_all_instances``: Class attribute pointing to all live
BoundMethodWeakref objects indexed by the class's
calculate_key(target) method applied to the target objects.
This weak value dictionary is used to short-circuit creation so
that multiple references to the same (object, function) pair
produce the same BoundMethodWeakref instance.
"""
_all_instances = weakref.WeakValueDictionary()
    def __new__(cls, target, on_delete=None, *arguments, **named):
        """Create new instance or return current instance.

        Basically this method of construction allows us to
        short-circuit creation of references to already- referenced
        instance methods.  The key corresponding to the target is
        calculated, and if there is already an existing reference,
        that is returned, with its deletion_methods attribute updated.
        Otherwise the new instance is created and registered in the
        table of already-referenced methods.
        """
        key = cls.calculate_key(target)
        current = cls._all_instances.get(key)
        if current is not None:
            # reuse the existing reference, just record the extra callback
            current.deletion_methods.append(on_delete)
            return current
        else:
            base = super(BoundMethodWeakref, cls).__new__(cls)
            # register before __init__ so re-entrant lookups find it
            cls._all_instances[key] = base
            base.__init__(target, on_delete, *arguments, **named)
            return base
def __init__(self, target, on_delete=None):
    """Return a weak-reference-like instance for a bound method.

    - ``target``: The instance-method target for the weak reference,
      must have im_self and im_func attributes and be
      reconstructable via the following, which is true of built-in
      instance methods::

        target.im_func.__get__( target.im_self )

    - ``on_delete``: Optional callback which will be called when
      this weak reference ceases to be valid (i.e. either the
      object or the function is garbage collected).  Should take a
      single argument, which will be passed a pointer to this
      object.
    """
    def remove(weak, self=self):
        """Set self.isDead to True when method or instance is destroyed."""
        # Fired by either weakref below.  Drain the callback list first so
        # each registered cleanup runs at most once, then drop ourselves
        # from the class-level registry.
        methods = self.deletion_methods[:]
        del self.deletion_methods[:]
        try:
            del self.__class__._all_instances[self.key]
        except KeyError:
            pass
        for function in methods:
            try:
                if callable(function):
                    function(self)
            except Exception:
                # A failing cleanup callback must not stop the others;
                # best-effort reporting only.
                try:
                    traceback.print_exc()
                except AttributeError:
                    e = sys.exc_info()[1]
                    print ('Exception during saferef %s '
                           'cleanup function %s: %s' % (self, function, e))
    self.deletion_methods = [on_delete]
    self.key = self.calculate_key(target)
    im_self = get_self(target)
    im_func = get_func(target)
    # Weak references to both halves of the bound method; losing either
    # one invalidates this reference and triggers remove().
    self.weak_self = weakref.ref(im_self, remove)
    self.weak_func = weakref.ref(im_func, remove)
    self.self_name = str(im_self)
    self.func_name = str(im_func.__name__)
def calculate_key(cls, target):
    """Calculate the reference key for this reference.

    Currently this is a two-tuple of the id()'s of the target
    object and the target function respectively.
    """
    # NOTE: id()-based keys are only unique while both referents are alive;
    # the WeakValueDictionary registry guarantees entries die with them.
    return (id(get_self(target)), id(get_func(target)))
# Py2-style decoration (equivalent to @classmethod above the def).
calculate_key = classmethod(calculate_key)
def __str__(self):
    """Give a friendly representation of the object."""
    # Uses the names captured at construction time, so this stays printable
    # even after the referents have been garbage collected.
    return "%s(%s.%s)" % (
        self.__class__.__name__,
        self.self_name,
        self.func_name,
    )
# repr mirrors str for convenient debugging.
__repr__ = __str__
def __nonzero__(self):
    """Whether we are still a valid reference."""
    # Python 2 truth hook: true as long as __call__ can still rebuild the
    # bound method (both weakrefs alive).
    return self() is not None
def __cmp__(self, other):
    """Compare with another reference."""
    # Python 2 comparison hook: same-class references order by key tuple;
    # anything else orders by class object.
    if not isinstance(other, self.__class__):
        return cmp(self.__class__, type(other))
    return cmp(self.key, other.key)
def __call__(self):
    """Return a strong reference to the bound method.

    If the target cannot be retrieved, then will return None,
    otherwise returns a bound instance method for our object and
    function.

    Note: You may call this method any number of times, as it does
    not invalidate the reference.
    """
    target = self.weak_self()
    if target is not None:
        function = self.weak_func()
        if function is not None:
            # Re-bind the plain function to the (still live) instance.
            return function.__get__(target)
    return None
| unlicense |
lthurlow/Network-Grapher | proj/external/networkx-1.7/build/lib.linux-i686-2.7/networkx/algorithms/bipartite/tests/test_centrality.py | 96 | 6127 | from nose.tools import *
import networkx as nx
from networkx.algorithms import bipartite
class TestBipartiteCentrality(object):
    """Checks bipartite degree/betweenness/closeness centrality against
    hand-computed answers on tiny graphs, and against published values
    (2 decimal places) on the Davis Southern Women two-mode dataset."""

    def setUp(self):
        # Small graphs with easily verified centralities; top_nodes selects
        # the bipartite==0 side of the Davis graph.
        self.P4 = nx.path_graph(4)
        self.K3 = nx.complete_bipartite_graph(3,3)
        self.C4 = nx.cycle_graph(4)
        self.davis = nx.davis_southern_women_graph()
        self.top_nodes = [n for n,d in self.davis.nodes(data=True)
                          if d['bipartite']==0]

    def test_degree_centrality(self):
        # Second argument is the node set treated as one bipartite side.
        d = bipartite.degree_centrality(self.P4, [1,3])
        answer = {0: 0.5, 1: 1.0, 2: 1.0, 3: 0.5}
        assert_equal(d, answer)
        d = bipartite.degree_centrality(self.K3, [0,1,2])
        answer = {0: 1.0, 1: 1.0, 2: 1.0, 3: 1.0, 4: 1.0, 5: 1.0}
        assert_equal(d, answer)
        d = bipartite.degree_centrality(self.C4, [0,2])
        answer = {0: 1.0, 1: 1.0, 2: 1.0, 3: 1.0}
        assert_equal(d,answer)

    def test_betweenness_centrality(self):
        c = bipartite.betweenness_centrality(self.P4, [1,3])
        answer = {0: 0.0, 1: 1.0, 2: 1.0, 3: 0.0}
        assert_equal(c, answer)
        c = bipartite.betweenness_centrality(self.K3, [0,1,2])
        answer = {0: 0.125, 1: 0.125, 2: 0.125, 3: 0.125, 4: 0.125, 5: 0.125}
        assert_equal(c, answer)
        c = bipartite.betweenness_centrality(self.C4, [0,2])
        answer = {0: 0.25, 1: 0.25, 2: 0.25, 3: 0.25}
        assert_equal(c, answer)

    def test_closeness_centrality(self):
        c = bipartite.closeness_centrality(self.P4, [1,3])
        answer = {0: 2.0/3, 1: 1.0, 2: 1.0, 3:2.0/3}
        assert_equal(c, answer)
        c = bipartite.closeness_centrality(self.K3, [0,1,2])
        answer = {0: 1.0, 1: 1.0, 2: 1.0, 3: 1.0, 4: 1.0, 5: 1.0}
        assert_equal(c, answer)
        c = bipartite.closeness_centrality(self.C4, [0,2])
        answer = {0: 1.0, 1: 1.0, 2: 1.0, 3: 1.0}
        assert_equal(c, answer)
        # Degenerate case: two isolated nodes -- closeness of the opposite
        # side is reported as 0.0.
        G = nx.Graph()
        G.add_node(0)
        G.add_node(1)
        c = bipartite.closeness_centrality(G, [0])
        assert_equal(c, {1: 0.0})
        c = bipartite.closeness_centrality(G, [1])
        assert_equal(c, {1: 0.0})

    def test_davis_degree_centrality(self):
        G = self.davis
        deg = bipartite.degree_centrality(G, self.top_nodes)
        answer = {'E8':0.78,
                  'E9':0.67,
                  'E7':0.56,
                  'Nora Fayette':0.57,
                  'Evelyn Jefferson':0.57,
                  'Theresa Anderson':0.57,
                  'E6':0.44,
                  'Sylvia Avondale':0.50,
                  'Laura Mandeville':0.50,
                  'Brenda Rogers':0.50,
                  'Katherina Rogers':0.43,
                  'E5':0.44,
                  'Helen Lloyd':0.36,
                  'E3':0.33,
                  'Ruth DeSand':0.29,
                  'Verne Sanderson':0.29,
                  'E12':0.33,
                  'Myra Liddel':0.29,
                  'E11':0.22,
                  'Eleanor Nye':0.29,
                  'Frances Anderson':0.29,
                  'Pearl Oglethorpe':0.21,
                  'E4':0.22,
                  'Charlotte McDowd':0.29,
                  'E10':0.28,
                  'Olivia Carleton':0.14,
                  'Flora Price':0.14,
                  'E2':0.17,
                  'E1':0.17,
                  'Dorothy Murchison':0.14,
                  'E13':0.17,
                  'E14':0.17}
        # Reference values are rounded, so compare to 2 decimal places.
        for node, value in answer.items():
            assert_almost_equal(value, deg[node], places=2)

    def test_davis_betweenness_centrality(self):
        G = self.davis
        bet = bipartite.betweenness_centrality(G, self.top_nodes)
        answer = {'E8':0.24,
                  'E9':0.23,
                  'E7':0.13,
                  'Nora Fayette':0.11,
                  'Evelyn Jefferson':0.10,
                  'Theresa Anderson':0.09,
                  'E6':0.07,
                  'Sylvia Avondale':0.07,
                  'Laura Mandeville':0.05,
                  'Brenda Rogers':0.05,
                  'Katherina Rogers':0.05,
                  'E5':0.04,
                  'Helen Lloyd':0.04,
                  'E3':0.02,
                  'Ruth DeSand':0.02,
                  'Verne Sanderson':0.02,
                  'E12':0.02,
                  'Myra Liddel':0.02,
                  'E11':0.02,
                  'Eleanor Nye':0.01,
                  'Frances Anderson':0.01,
                  'Pearl Oglethorpe':0.01,
                  'E4':0.01,
                  'Charlotte McDowd':0.01,
                  'E10':0.01,
                  'Olivia Carleton':0.01,
                  'Flora Price':0.01,
                  'E2':0.00,
                  'E1':0.00,
                  'Dorothy Murchison':0.00,
                  'E13':0.00,
                  'E14':0.00}
        for node, value in answer.items():
            assert_almost_equal(value, bet[node], places=2)

    def test_davis_closeness_centrality(self):
        G = self.davis
        clos = bipartite.closeness_centrality(G, self.top_nodes)
        answer = {'E8':0.85,
                  'E9':0.79,
                  'E7':0.73,
                  'Nora Fayette':0.80,
                  'Evelyn Jefferson':0.80,
                  'Theresa Anderson':0.80,
                  'E6':0.69,
                  'Sylvia Avondale':0.77,
                  'Laura Mandeville':0.73,
                  'Brenda Rogers':0.73,
                  'Katherina Rogers':0.73,
                  'E5':0.59,
                  'Helen Lloyd':0.73,
                  'E3':0.56,
                  'Ruth DeSand':0.71,
                  'Verne Sanderson':0.71,
                  'E12':0.56,
                  'Myra Liddel':0.69,
                  'E11':0.54,
                  'Eleanor Nye':0.67,
                  'Frances Anderson':0.67,
                  'Pearl Oglethorpe':0.67,
                  'E4':0.54,
                  'Charlotte McDowd':0.60,
                  'E10':0.55,
                  'Olivia Carleton':0.59,
                  'Flora Price':0.59,
                  'E2':0.52,
                  'E1':0.52,
                  'Dorothy Murchison':0.65,
                  'E13':0.52,
                  'E14':0.52}
        for node, value in answer.items():
            assert_almost_equal(value, clos[node], places=2)
| mit |
bsmedberg/socorro | socorro/processor/processed_transform_rules.py | 1 | 9719 | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
""""
these are the rules that transform a raw crash into a processed crash
"""
from socorro.lib.ver_tools import normalize
from socorro.lib.util import DotDict
from sys import maxint
#==============================================================================
class ProcessedTransformRule(object):
    """the base class for Support Rules.  It provides the framework for the
    rules 'predicate', 'action', and 'version' as well as utilites to help
    rules do their jobs.

    Subclasses override _predicate and _action; the public predicate/action
    wrappers add exception trapping so one broken rule cannot abort crash
    processing.  (This module is Python 2 code: note the
    ``except Exception, x`` handler syntax.)"""

    #--------------------------------------------------------------------------
    def predicate(self, raw_crash, processed_crash, processor):
        """the default predicate for processed_transform invokes any derivied
        _predicate function, trapping any exceptions raised in the process.  We
        are obligated to catch these exceptions to give subsequent rules the
        opportunity act.  An error during the predicate application is a
        failure of the rule, not a failure of the classification system itself
        """
        try:
            return self._predicate(raw_crash, processed_crash, processor)
        except Exception, x:
            # Log and treat the rule as "not applicable" so later rules run.
            processor.config.logger.debug(
                'processed_transform: %s predicate rejection - consideration '
                'of %s failed because of "%s"',
                self.__class__,
                raw_crash.get('uuid', 'unknown uuid'),
                x,
                exc_info=True
            )
            return False

    #--------------------------------------------------------------------------
    def _predicate(self, raw_crash, processed_crash, processor):
        """"The default processed_transform predicate just returns True.  We
        want all the processed_transform rules to run.

        parameters:
            raw_crash - a mapping representing the raw crash data originally
                        submitted by the client
            processed_crash - the ultimate result of the processor, this is the
                              analyzed version of a crash.  It contains the
                              output of the MDSW program for each of the dumps
                              within the crash.
            processor - a reference to the processor object that is assigned
                        to working on the current crash. This object contains
                        resources that might be useful to a classifier rule.
                        'processor.config' is the configuration for the
                        processor in which database connection paramaters can
                        be found.  'processor.config.logger' is useful for any
                        logging of debug information.
                        'processor.c_signature_tool' or
                        'processor.java_signature_tool' contain utilities that
                        might be useful during classification.

        returns:
            True - this rule should be applied
            False - this rule should not be applied
        """
        return True

    #--------------------------------------------------------------------------
    def action(self, raw_crash, processed_crash, processor):
        """the default action for processed_transform invokes any derivied
        _action function, trapping any exceptions raised in the process.  We
        are obligated to catch these exceptions to give subsequent rules the
        opportunity act and perhaps (mitigate the error).  An error during the
        action application is a failure of the rule, not a failure of the
        classification system itself."""
        try:
            return self._action(raw_crash, processed_crash, processor)
        except KeyError, x:
            # Missing keys are expected often enough that they are logged
            # without a traceback.
            processor.config.logger.debug(
                'processed_transform: %s action failure - %s failed because of '
                '"%s"',
                self.__class__,
                raw_crash.get('uuid', 'unknown uuid'),
                x,
            )
        except Exception, x:
            processor.config.logger.debug(
                'processed_transform: %s action failure - %s failed because of '
                '"%s"',
                self.__class__,
                raw_crash.get('uuid', 'unknown uuid'),
                x,
                exc_info=True
            )
        # Reached only when one of the handlers above fired.
        return False

    #--------------------------------------------------------------------------
    def _action(self, raw_crash, processed_crash, processor):
        """Rules derived from this base class ought to override this method
        with an actual classification rule.  Successful application of this
        method should include a call to '_add_classification'.

        parameters:
            raw_crash - a mapping representing the raw crash data originally
                        submitted by the client
            processed_crash - the ultimate result of the processor, this is the
                              analized version of a crash.  It contains the
                              output of the MDSW program for each of the dumps
                              within the crash.
            processor - a reference to the processor object that is assigned
                        to working on the current crash. This object contains
                        resources that might be useful to a classifier rule.
                        'processor.config' is the configuration for the
                        processor in which database connection paramaters can
                        be found.  'processor.config.logger' is useful for any
                        logging of debug information.
                        'processor.c_signature_tool' or
                        'processor.java_signature_tool' contain utilities that
                        might be useful during classification.

        returns:
            True - this rule was applied successfully and no further rules
                   should be applied
            False - this rule did not succeed and further rules should be
                    tried
        """
        return True

    #--------------------------------------------------------------------------
    def version(self):
        """This method should be overridden in a base class."""
        return '0.0'
#==============================================================================
class OOMSignature(ProcessedTransformRule):
    """Tag out-of-memory crashes by rewriting the crash signature.

    Implements Bug 1007530: a crash is considered OOM when the raw crash
    carries an 'OOMAllocationSize' annotation or the signature already
    contains a known OOM marker.  The action prefixes the signature with
    an "OOM | ..." classification and preserves the original signature in
    ``processed_crash.original_signature``.
    """

    # Signature substrings that identify an OOM abort.
    signature_fragments = (
        'NS_ABORT_OOM',
        'mozalloc_handle_oom',
        'CrashAtUnhandlableOOM'
    )

    def version(self):
        return '1.0'

    def _predicate(self, raw_crash, processed_crash, processor):
        """True when the crash shows any evidence of running out of memory."""
        if 'OOMAllocationSize' in raw_crash:
            return True
        return any(fragment in processed_crash.signature
                   for fragment in self.signature_fragments)

    def _action(self, raw_crash, processed_crash, processor):
        """Rewrite the signature with an OOM classification prefix."""
        processed_crash.original_signature = processed_crash.signature
        try:
            allocation_size = int(raw_crash.OOMAllocationSize)
        except (TypeError, AttributeError, KeyError):
            # No usable allocation size annotation.
            processed_crash.signature = (
                "OOM | unknown | " + processed_crash.signature
            )
            return True
        if allocation_size <= 262144:  # 256K
            # Small allocations get a bare classification with no original
            # signature attached.
            processed_crash.signature = "OOM | small"
        else:
            processed_crash.signature = (
                "OOM | large | " + processed_crash.signature
            )
        return True
#==============================================================================
class SigTrunc(ProcessedTransformRule):
    """Cap crash signatures at 255 characters, marking truncation with '...'."""

    def version(self):
        return '1.0'

    def _predicate(self, raw_crash, processed_crash, processor):
        """Only signatures longer than the 255-character limit qualify."""
        return len(processed_crash.signature) > 255

    def _action(self, raw_crash, processed_crash, processor):
        """Truncate to 252 characters plus '...' (exactly 255 total)."""
        processed_crash.signature = processed_crash.signature[:252] + "..."
        return True
#------------------------------------------------------------------------------
# the following tuple of tuples is a structure for loading rules into the
# TransformRules system. The tuples take the form:
# predicate_function, predicate_args, predicate_kwargs,
# action_function, action_args, action_kwargs.
#
# The args and kwargs components are additional information that a predicate
# or an action might need to have to do its job. Providing values for args
# or kwargs essentially acts in a manner similar to functools.partial.
# When the predicate or action functions are invoked, these args and kwags
# values will be passed into the function along with the raw_crash,
# processed_crash and processor objects.
# Each rule class appears twice per entry because the same class supplies
# both the predicate and the action (see the tuple format described above).
default_rules = (
    (OOMSignature, (), {}, OOMSignature, (), {}),
    (SigTrunc, (), {}, SigTrunc, (), {}),
)
| mpl-2.0 |
horazont/aioxmpp | aioxmpp/roster/xso.py | 1 | 4970 | ########################################################################
# File name: xso.py
# This file is part of: aioxmpp
#
# LICENSE
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program. If not, see
# <http://www.gnu.org/licenses/>.
#
########################################################################
import aioxmpp.stanza as stanza
import aioxmpp.nonza as nonza
import aioxmpp.xso as xso
from aioxmpp.utils import namespaces
# XML namespace URIs for the RFC 6121 roster protocol and the roster
# versioning stream feature.
namespaces.rfc6121_roster = "jabber:iq:roster"
namespaces.rfc6121_roster_versioning = "urn:xmpp:features:rosterver"
class Group(xso.XSO):
    """
    A group declaration for a contact in a roster.

    .. attribute:: name

       The name of the group.

    """
    TAG = (namespaces.rfc6121_roster, "group")

    # The group name is carried as the element's text content.
    name = xso.Text(default=None)

    def __init__(self, *, name=None):
        super().__init__()
        self.name = name
class Item(xso.XSO):
    """
    A contact item in a roster.

    .. attribute:: jid

       The bare :class:`~aioxmpp.JID` of the contact.

    .. attribute:: name

       The optional display name of the contact.

    .. attribute:: groups

       A :class:`~aioxmpp.xso.model.XSOList` of :class:`Group` instances which
       describe the roster groups in which the contact is.

    The following attributes represent the subscription status of the
    contact. A client **must not** set these attributes when sending roster
    items to the server. To change subscription status, use presence stanzas of
    the respective type. The only exception is a :attr:`subscription` value of
    ``"remove"``, which is used to remove an entry from the roster.

    .. attribute:: subscription

       Primary subscription status, one of ``"none"`` (the default), ``"to"``,
       ``"from"`` and ``"both"``.

       In addition, :attr:`subscription` can be set to ``"remove"`` to remove
       an item from the roster during a roster set. Removing an entry from the
       roster will also cancel any presence subscriptions from and to that
       entries entity.

    .. attribute:: approved

       Whether the subscription has been pre-approved by the owning entity.

    .. attribute:: ask

       Subscription sub-states, one of ``"subscribe"`` and :data:`None`.

    .. note::

       Do not confuse this class with :class:`~aioxmpp.roster.Item`.

    """
    TAG = (namespaces.rfc6121_roster, "item")

    approved = xso.Attr(
        "approved",
        type_=xso.Bool(),
        default=False,
    )

    # Validated on assignment as well as on parsing (ValidateMode.ALWAYS).
    ask = xso.Attr(
        "ask",
        validator=xso.RestrictToSet({
            None,
            "subscribe",
        }),
        validate=xso.ValidateMode.ALWAYS,
        default=None,
    )

    jid = xso.Attr(
        "jid",
        type_=xso.JID(),
    )

    name = xso.Attr(
        "name",
        default=None,
    )

    subscription = xso.Attr(
        "subscription",
        validator=xso.RestrictToSet({
            "none",
            "to",
            "from",
            "both",
            "remove",
        }),
        validate=xso.ValidateMode.ALWAYS,
        default="none",
    )

    groups = xso.ChildList([Group])

    def __init__(self, jid, *,
                 name=None,
                 groups=(),
                 subscription="none",
                 approved=False,
                 ask=None):
        super().__init__()
        # jid may be None, in which case the descriptor is left untouched --
        # presumably to allow construction during deserialization; confirm
        # against the XSO framework before relying on it.
        if jid is not None:
            self.jid = jid
        self.name = name
        self.groups.extend(groups)
        self.subscription = subscription
        self.approved = approved
        self.ask = ask
# Registered as an IQ payload so it can be carried in <iq/> stanzas.
@stanza.IQ.as_payload_class
class Query(xso.XSO):
    """
    A query which fetches data from the roster or sends new items to the
    roster.

    .. attribute:: ver

       The version of the roster, if any. See the RFC for the detailed
       semantics.

    .. attribute:: items

       The items in the roster query.

    """
    TAG = (namespaces.rfc6121_roster, "query")

    ver = xso.Attr(
        "ver",
        default=None
    )

    items = xso.ChildList([Item])

    def __init__(self, *, ver=None, items=()):
        super().__init__()
        self.ver = ver
        self.items.extend(items)
@nonza.StreamFeatures.as_feature_class
class RosterVersioningFeature(xso.XSO):
    """
    Roster versioning feature.

    .. seealso::

       :class:`aioxmpp.nonza.StreamFeatures`

    """
    # Empty marker element: its presence in the stream features advertises
    # server support for roster versioning.
    TAG = (namespaces.rfc6121_roster_versioning, "ver")
| lgpl-3.0 |
tianzhidao28/pyspider | tests/test_result_worker.py | 72 | 2770 | #!/usr/bin/env python
# -*- encoding: utf-8 -*-
# vim: set et sw=4 ts=4 sts=4 ff=unix fenc=utf8:
# Author: Binux<i@binux.me>
# http://binux.me
# Created on 2014-11-11 20:52:53
import os
import time
import unittest2 as unittest
import logging.config
logging.config.fileConfig("pyspider/logging.conf")
import shutil
from multiprocessing import Queue
from pyspider.database.sqlite import resultdb
from pyspider.result.result_worker import ResultWorker
from pyspider.libs.utils import run_in_thread
class TestProcessor(unittest.TestCase):
    """Integration test of ResultWorker against a real sqlite result DB,
    with the worker consuming a queue in a background thread.

    Tests are numbered (test_10, test_20, ...) because they share worker
    state and rely on running in that order."""

    resultdb_path = './data/tests/result.db'

    @classmethod
    def setUpClass(self):
        shutil.rmtree('./data/tests/', ignore_errors=True)
        os.makedirs('./data/tests/')

        def get_resultdb():
            return resultdb.ResultDB(self.resultdb_path)
        self.resultdb = get_resultdb()
        self.inqueue = Queue(10)

        def run_result_worker():
            # The worker gets its own DB handle; sqlite connections must not
            # be shared across threads.
            self.result_worker = ResultWorker(get_resultdb(), self.inqueue)
            self.result_worker.run()
        self.process = run_in_thread(run_result_worker)
        # Give the worker thread time to start consuming.
        time.sleep(1)

    @classmethod
    def tearDownClass(self):
        if self.process.is_alive():
            self.result_worker.quit()
            self.process.join(2)
        assert not self.process.is_alive()
        shutil.rmtree('./data/tests/', ignore_errors=True)

    def test_10_bad_result(self):
        # A task with only a project name (no taskid/url) must not create
        # any project or row.
        self.inqueue.put(({'project': 'test_project'}, {}))
        self.resultdb._list_project()
        self.assertEqual(len(self.resultdb.projects), 0)
        self.assertEqual(self.resultdb.count('test_project'), 0)

    def test_20_insert_result(self):
        # A well-formed task is persisted and retrievable by taskid.
        data = {
            'a': 'b'
        }
        self.inqueue.put(({
            'project': 'test_project',
            'taskid': 'id1',
            'url': 'url1'
        }, data))
        time.sleep(0.5)
        self.resultdb._list_project()
        self.assertEqual(len(self.resultdb.projects), 1)
        self.assertEqual(self.resultdb.count('test_project'), 1)
        result = self.resultdb.get('test_project', 'id1')
        self.assertEqual(result['result'], data)

    def test_30_overwrite(self):
        # Re-submitting the same taskid replaces the stored result.
        self.inqueue.put(({
            'project': 'test_project',
            'taskid': 'id1',
            'url': 'url1'
        }, "abc"))
        time.sleep(0.1)
        result = self.resultdb.get('test_project', 'id1')
        self.assertEqual(result['result'], "abc")

    def test_40_insert_list(self):
        # Non-dict results (here a list) round-trip through the DB intact.
        self.inqueue.put(({
            'project': 'test_project',
            'taskid': 'id2',
            'url': 'url1'
        }, ['a', 'b']))
        time.sleep(0.1)
        result = self.resultdb.get('test_project', 'id2')
        self.assertEqual(result['result'], ['a', 'b'])
| apache-2.0 |
rlworkgroup/metaworld | metaworld/envs/mujoco/sawyer_xyz/v2/sawyer_sweep_into_goal_v2.py | 1 | 7416 | import numpy as np
from gym.spaces import Box
from scipy.spatial.transform import Rotation
from metaworld.envs import reward_utils
from metaworld.envs.asset_path_utils import full_v2_path_for
from metaworld.envs.mujoco.sawyer_xyz.sawyer_xyz_env import SawyerXYZEnv, _assert_task_is_set
class SawyerSweepIntoGoalEnvV2(SawyerXYZEnv):
    """Sweep-into-goal task: move a small object across the table into a
    hole-shaped goal region (sawyer_table_with_hole.xml)."""

    # Radius of the swept object; used by the gripper-caging reward.
    OBJ_RADIUS = 0.02

    def __init__(self):
        # Hand workspace bounds and object randomization range.  The goal
        # box is degenerate (+-0.0001 around (0, 0.84, 0.02)), so the target
        # is effectively fixed.
        hand_low = (-0.5, 0.40, 0.05)
        hand_high = (0.5, 1, 0.5)
        obj_low = (-0.1, 0.6, 0.02)
        obj_high = (0.1, 0.7, 0.02)
        goal_low = (-.001, 0.8399, 0.0199)
        goal_high = (+.001, 0.8401, 0.0201)

        super().__init__(
            self.model_name,
            hand_low=hand_low,
            hand_high=hand_high,
        )

        self.init_config = {
            'obj_init_pos':np.array([0., 0.6, 0.02]),
            'obj_init_angle': 0.3,
            'hand_init_pos': np.array([0., .6, .2]),
        }
        self.goal = np.array([0., 0.84, 0.02])
        self.obj_init_pos = self.init_config['obj_init_pos']
        self.obj_init_angle = self.init_config['obj_init_angle']
        self.hand_init_pos = self.init_config['hand_init_pos']

        # Random reset samples a 6-vector: object xyz followed by goal xyz.
        self._random_reset_space = Box(
            np.hstack((obj_low, goal_low)),
            np.hstack((obj_high, goal_high)),
        )
        self.goal_space = Box(np.array(goal_low), np.array(goal_high))

    @property
    def model_name(self):
        return full_v2_path_for('sawyer_xyz/sawyer_table_with_hole.xml')

    @_assert_task_is_set
    def evaluate_state(self, obs, action):
        # obs[4:7] is assumed to be the object position per the SawyerXYZEnv
        # observation layout -- TODO confirm against the base class.
        obj = obs[4:7]

        (
            reward,
            tcp_to_obj,
            tcp_opened,
            target_to_obj,
            object_grasped,
            in_place
        ) = self.compute_reward(action, obs)
        grasp_success = float(self.touching_main_object and (tcp_opened > 0))

        info = {
            'success': float(target_to_obj <= 0.05),
            'near_object': float(tcp_to_obj <= 0.03),
            'grasp_reward': object_grasped,
            'grasp_success': grasp_success,
            'in_place_reward': in_place,
            'obj_to_target': target_to_obj,
            'unscaled_reward': reward,
        }

        return reward, info

    def _get_quat_objects(self):
        # Convert the object's rotation matrix from MuJoCo into a quaternion.
        return Rotation.from_matrix(
            self.data.get_geom_xmat('objGeom')
        ).as_quat()

    def _get_pos_objects(self):
        return self.get_body_com('obj')

    def reset_model(self):
        self._reset_hand()
        self._target_pos = self.goal.copy()
        self.obj_init_pos = self.get_body_com('obj')
        self.obj_init_angle = self.init_config['obj_init_angle']
        self.objHeight = self.get_body_com('obj')[2]

        if self.random_init:
            # Resample until the object spawns at least 0.15 (xy) away from
            # the target so the task is never trivially solved at reset.
            goal_pos = self._get_state_rand_vec()
            while np.linalg.norm(goal_pos[:2] - self._target_pos[:2]) < 0.15:
                goal_pos = self._get_state_rand_vec()
            self.obj_init_pos = np.concatenate((goal_pos[:2], [self.obj_init_pos[-1]]))

        self._set_obj_xyz(self.obj_init_pos)
        self.maxPushDist = np.linalg.norm(self.obj_init_pos[:2] - np.array(self._target_pos)[:2])
        return self._get_obs()

    def _gripper_caging_reward(self, action, obj_position, obj_radius):
        """Shaped [0, 1] reward for caging (and then gripping) the object.

        Combines a y-axis pad-closure term with an x/z tcp-approach term via
        Hamacher products; the gripping bonus only activates once the object
        is nearly caged (caging > 0.95).
        """
        pad_success_margin = 0.05
        grip_success_margin = obj_radius + 0.005
        x_z_success_margin = 0.01

        tcp = self.tcp_center
        left_pad = self.get_body_com('leftpad')
        right_pad = self.get_body_com('rightpad')
        # Signed y-distances of each pad from the object.
        delta_object_y_left_pad = left_pad[1] - obj_position[1]
        delta_object_y_right_pad = obj_position[1] - right_pad[1]
        right_caging_margin = abs(abs(obj_position[1] - self.init_right_pad[1]) - pad_success_margin)
        left_caging_margin = abs(abs(obj_position[1] - self.init_left_pad[1]) - pad_success_margin)

        right_caging = reward_utils.tolerance(delta_object_y_right_pad,
                                bounds=(obj_radius, pad_success_margin),
                                margin=right_caging_margin,
                                sigmoid='long_tail',
                                )
        left_caging = reward_utils.tolerance(delta_object_y_left_pad,
                                bounds=(obj_radius, pad_success_margin),
                                margin=left_caging_margin,
                                sigmoid='long_tail',
                                )

        # Gripping uses a tighter success band than caging.
        right_gripping = reward_utils.tolerance(delta_object_y_right_pad,
                                bounds=(obj_radius, grip_success_margin),
                                margin=right_caging_margin,
                                sigmoid='long_tail',
                                )
        left_gripping = reward_utils.tolerance(delta_object_y_left_pad,
                                bounds=(obj_radius, grip_success_margin),
                                margin=left_caging_margin,
                                sigmoid='long_tail',
                                )

        assert right_caging >= 0 and right_caging <= 1
        assert left_caging >= 0 and left_caging <= 1

        y_caging = reward_utils.hamacher_product(right_caging, left_caging)
        y_gripping = reward_utils.hamacher_product(right_gripping, left_gripping)

        assert y_caging >= 0 and y_caging <= 1

        # Project tcp and object into the x/z plane by zeroing the y
        # component, then measure the in-plane approach distance.
        tcp_xz = tcp + np.array([0., -tcp[1], 0.])
        obj_position_x_z = np.copy(obj_position) + np.array([0., -obj_position[1], 0.])
        tcp_obj_norm_x_z = np.linalg.norm(tcp_xz - obj_position_x_z, ord=2)

        init_obj_x_z = self.obj_init_pos + np.array([0., -self.obj_init_pos[1], 0.])
        init_tcp_x_z = self.init_tcp + np.array([0., -self.init_tcp[1], 0.])

        tcp_obj_x_z_margin = np.linalg.norm(init_obj_x_z - init_tcp_x_z, ord=2) - x_z_success_margin
        x_z_caging = reward_utils.tolerance(tcp_obj_norm_x_z,
                                bounds=(0, x_z_success_margin),
                                margin=tcp_obj_x_z_margin,
                                sigmoid='long_tail',)

        assert right_caging >= 0 and right_caging <= 1

        # Gripper close command, clamped to [0, 1].
        gripper_closed = min(max(0, action[-1]), 1)
        assert gripper_closed >= 0 and gripper_closed <= 1
        caging = reward_utils.hamacher_product(y_caging, x_z_caging)
        assert caging >= 0 and caging <= 1

        if caging > 0.95:
            gripping = y_gripping
        else:
            gripping = 0.
        assert gripping >= 0 and gripping <= 1

        caging_and_gripping = (caging + gripping) / 2
        assert caging_and_gripping >= 0 and caging_and_gripping <= 1

        return caging_and_gripping

    def compute_reward(self, action, obs):
        """Return [reward, tcp_to_obj, tcp_opened, obj_to_target,
        object_grasped, in_place]; reward saturates at 10 on success."""
        _TARGET_RADIUS = 0.05
        tcp = self.tcp_center
        obj = obs[4:7]
        tcp_opened = obs[3]
        # Target shares the object's current height, so only xy matters.
        target = np.array([self._target_pos[0], self._target_pos[1], obj[2]])

        obj_to_target = np.linalg.norm(obj - target)
        tcp_to_obj = np.linalg.norm(obj - tcp)
        in_place_margin = np.linalg.norm(self.obj_init_pos - target)

        in_place = reward_utils.tolerance(obj_to_target,
                                    bounds=(0, _TARGET_RADIUS),
                                    margin=in_place_margin,
                                    sigmoid='long_tail',)

        object_grasped = self._gripper_caging_reward(action, obj, self.OBJ_RADIUS)
        in_place_and_object_grasped = reward_utils.hamacher_product(object_grasped,
                                                                    in_place)
        # 2-point grasp term plus 6-point combined term; max shaped value 8,
        # overridden to the full 10 once the object is within the target.
        reward = (2*object_grasped) + (6*in_place_and_object_grasped)

        if obj_to_target < _TARGET_RADIUS:
            reward = 10.

        return [reward, tcp_to_obj, tcp_opened, obj_to_target, object_grasped, in_place]
| mit |
rosswhitfield/mantid | Framework/PythonInterface/test/python/mantid/api/FilePropertyTest.py | 3 | 2196 | # Mantid Repository : https://github.com/mantidproject/mantid
#
# Copyright © 2018 ISIS Rutherford Appleton Laboratory UKRI,
# NScD Oak Ridge National Laboratory, European Spallation Source,
# Institut Laue - Langevin & CSNS, Institute of High Energy Physics, CAS
# SPDX - License - Identifier: GPL - 3.0 +
import unittest
from mantid.api import AlgorithmManager, FileProperty, FileAction, FrameworkManagerImpl
from mantid.kernel import Direction
class FilePropertyTest(unittest.TestCase):
    """Exercises the FileProperty constructor overloads (extensions as a
    list, a single string, with/without direction) and the conversion of an
    algorithm's file property to the Python FileProperty type."""

    @classmethod
    def setUpClass(cls):
        # Starting the framework is required before properties/algorithms
        # can be created.
        FrameworkManagerImpl.Instance()

    def test_constructor_with_name_and_default_and_action(self):
        prop = FileProperty("LoadProperty", "", FileAction.Load)
        # Empty default for a Load property is invalid -> non-empty message.
        self.assertNotEquals("", prop.isValid)
        self.assertEqual(Direction.Input, prop.direction)

    def test_constructor_with_name_and_default_and_action_and_exts_list(self):
        prop = FileProperty("LoadProperty", "", FileAction.Load, ['.nxs', '.raw'])
        self.assertNotEquals("", prop.isValid)
        self.assertEqual(Direction.Input, prop.direction)
        allowed = prop.allowedValues
        self.assertTrue('.nxs' in allowed)
        self.assertTrue('.raw' in allowed)

    def test_constructor_with_name_and_default_and_action_and_single_ext(self):
        prop = FileProperty("LoadProperty", "", FileAction.Load, '.nxs')
        self.assertNotEquals("", prop.isValid)
        self.assertEqual(Direction.Input, prop.direction)
        allowed = prop.allowedValues
        self.assertTrue('.nxs' in allowed)

    def test_constructor_with_name_and_default_and_action_and_single_ext_and_direction(self):
        prop = FileProperty("LoadProperty", "", FileAction.Load, ['.nxs'], Direction.InOut)
        self.assertNotEquals("", prop.isValid)
        self.assertEqual(Direction.InOut, prop.direction)

    def test_alg_get_property_converts_to_this(self):
        alg = AlgorithmManager.createUnmanaged("LoadRaw")
        alg.initialize()
        prop = alg.getProperty("Filename")
        self.assertEqual(type(prop), FileProperty)
        self.assertTrue('value' in dir(prop))  # Do we have a value method
| gpl-3.0 |
aguirreeng/libeng | math/rotations.py | 1 | 2992 | import numpy
def obtain_matrix_from_quaternion(q: numpy.ndarray) -> numpy.ndarray:
    """Convert a quaternion ``q = (w, x, y, z)`` into a 3x3 rotation matrix.

    Args:
        q: Array with exactly four elements, scalar part first.

    Returns:
        The corresponding 3x3 rotation matrix (float64).

    Raises:
        ValueError: If ``q`` does not contain exactly four elements.
    """
    if q.size != 4:
        raise ValueError('Wrong number of elements on the array (q.size != 4)!')
    w, x, y, z = q[0], q[1], q[2], q[3]
    # Standard quaternion-to-matrix expansion, written row by row.
    return numpy.array([
        [1.0 - 2.0 * y * y - 2.0 * z * z,
         2.0 * x * y - 2.0 * z * w,
         2.0 * x * z + 2.0 * y * w],
        [2.0 * x * y + 2.0 * z * w,
         1.0 - 2.0 * x * x - 2.0 * z * z,
         2.0 * y * z - 2.0 * x * w],
        [2.0 * x * z - 2.0 * y * w,
         2.0 * y * z + 2.0 * x * w,
         1.0 - 2.0 * x * x - 2.0 * y * y],
    ], dtype=numpy.float64)
def obtain_angle_vector_from_matrix(m: numpy.ndarray) -> tuple:
    """Recover the rotation angle and unit axis from a 3x3 rotation matrix.

    The angle comes from the matrix trace (``cos a = (tr(m) - 1) / 2``),
    which is valid over the whole range [0, pi].  The previous arcsin-based
    formula silently folded angles above pi/2 back into [0, pi/2] and
    produced NaN axes for the identity (and near-pi) rotations.

    Args:
        m: 3x3 rotation matrix.

    Returns:
        Tuple ``(a, v)`` where ``a`` is the rotation angle in radians in
        [0, pi] and ``v`` is the corresponding unit rotation axis.  For a
        zero rotation the axis is arbitrary; a unit basis vector is returned.
    """
    # Angle from the trace; clip guards against round-off leaving [-1, 1].
    trace = m[0, 0] + m[1, 1] + m[2, 2]
    a = numpy.arccos(numpy.clip((trace - 1.0) / 2.0, -1.0, 1.0))
    # Axis from the skew-symmetric part: entries of (m - m.T) are 2*sin(a)*v.
    v = numpy.array([
        m[2, 1] - m[1, 2],
        m[0, 2] - m[2, 0],
        m[1, 0] - m[0, 1]
    ], dtype=numpy.float64)
    n = numpy.linalg.norm(v)
    if n > 1.0e-12:
        v /= n
    else:
        # sin(a) ~ 0: either a ~ 0 (axis arbitrary) or a ~ pi (skew part
        # vanishes).  For a ~ pi, m + I = 2 * outer(v, v), so any non-zero
        # column of m + I is proportional to the axis; for a ~ 0 this
        # degenerates to a unit basis vector, which is a valid answer.
        p = m + numpy.eye(3)
        j = int(numpy.argmax(numpy.linalg.norm(p, axis=0)))
        v = p[:, j] / numpy.linalg.norm(p[:, j])
    return (a, v)
def obtain_quaternion_from_angle_vector(a: float, v: numpy.ndarray) -> numpy.ndarray:
    """Build the unit quaternion (w, x, y, z) for a rotation of ``a`` radians
    about axis ``v`` (normalized internally).

    A zero angle or a zero-length axis yields the identity quaternion
    (1, 0, 0, 0).
    """
    length = numpy.linalg.norm(v)
    if length > 0.0 and abs(a) > 0.0:
        half = a / 2.0
        return numpy.hstack((numpy.cos(half), numpy.sin(half) * v / length))
    return numpy.array((1.0, 0.0, 0.0, 0.0), dtype=numpy.float64)
def calculate_hamilton_product(q1: numpy.ndarray, q2: numpy.ndarray) -> numpy.ndarray:
    """Return the Hamilton product ``q1 * q2`` of two quaternions stored as
    (w, x, y, z).  Quaternion multiplication is not commutative."""
    w1, x1, y1, z1 = q1[0], q1[1], q1[2], q1[3]
    w2, x2, y2, z2 = q2[0], q2[1], q2[2], q2[3]
    return numpy.array([
        w1 * w2 - x1 * x2 - y1 * y2 - z1 * z2,
        w1 * x2 + x1 * w2 + y1 * z2 - z1 * y2,
        w1 * y2 + y1 * w2 - x1 * z2 + z1 * x2,
        w1 * z2 + z1 * w2 + x1 * y2 - y1 * x2,
    ], dtype=numpy.float64)
def obtain_quaternion_conjugate(q: numpy.ndarray) -> numpy.ndarray:
    """Return the conjugate (w, -x, -y, -z) of quaternion ``q``.

    The input array is left unmodified; a new array is returned.
    """
    return numpy.concatenate((q[:1], -q[1:]))
def calculate_rotated_vector_by_quaternion(
    v: numpy.ndarray,
    q: numpy.ndarray
) -> numpy.ndarray:
    """Rotate 3-vector ``v`` by unit quaternion ``q`` using the sandwich
    product ``q * (0, v) * conj(q)``; returns the rotated 3-vector."""
    # Embed v as a pure quaternion (zero scalar part).
    pure = numpy.hstack((0.0, v))
    rotated = calculate_hamilton_product(
        calculate_hamilton_product(q, pure),
        obtain_quaternion_conjugate(q)
    )
    # Drop the (zero) scalar component again.
    return rotated[1:]
def calculate_rotation_matrix_from_bases(
    b1: numpy.ndarray,
    b2: numpy.ndarray,
    b3: numpy.ndarray,
    v1: numpy.ndarray,
    v2: numpy.ndarray,
    v3: numpy.ndarray,
) -> numpy.ndarray:
    """Best-fit rotation mapping directions (b1, b2, b3) onto (v1, v2, v3).

    Solves the orthogonal Procrustes problem via SVD (Kabsch algorithm).
    The sign correction on the smallest singular direction guarantees a
    proper rotation (det(R) = +1); without it the plain product ``U @ W``
    can be a reflection when the correspondence is improper or noisy.

    Args:
        b1, b2, b3: Source directions (normalized internally).
        v1, v2, v3: Target directions (normalized internally).

    Returns:
        A 3x3 proper rotation matrix R minimizing sum ||v_i - R b_i||^2
        over rotations.
    """
    b1n = b1 / numpy.linalg.norm(b1)
    b2n = b2 / numpy.linalg.norm(b2)
    b3n = b3 / numpy.linalg.norm(b3)
    v1n = v1 / numpy.linalg.norm(v1)
    v2n = v2 / numpy.linalg.norm(v2)
    v3n = v3 / numpy.linalg.norm(v3)
    # Correlation matrix: sum of outer products v_i b_i^T.
    V = (numpy.outer(v1n, b1n)
         + numpy.outer(v2n, b2n)
         + numpy.outer(v3n, b3n))
    U, _, W = numpy.linalg.svd(V)
    # Kabsch reflection fix: flip the least-significant singular direction
    # when U @ W would be improper, so det(R) is always +1.
    d = 1.0 if numpy.linalg.det(U @ W) >= 0.0 else -1.0
    R = U @ numpy.diag([1.0, 1.0, d]) @ W
    return R
| mit |
tensorflow/models | official/nlp/data/tagging_data_lib.py | 1 | 15488 | # Copyright 2021 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Library to process data for tagging task such as NER/POS."""
import collections
import os
from absl import logging
import tensorflow as tf
from official.nlp.bert import tokenization
from official.nlp.data import classifier_data_lib
# A negative label id for the padding label, which will not contribute
# to loss/metrics in training.
_PADDING_LABEL_ID = -1
# The special unknown token, used to substitute a word which has too many
# subwords after tokenization.
_UNK_TOKEN = "[UNK]"
class InputExample(object):
  """One (possibly partial) sentence with per-token label ids."""

  def __init__(self,
               sentence_id,
               sub_sentence_id=0,
               words=None,
               label_ids=None):
    """Initializes the example; words/label_ids default to fresh empty lists."""
    self.sentence_id = sentence_id
    self.sub_sentence_id = sub_sentence_id
    # `or []` keeps the mutable-default trap away while preserving the
    # original "falsy -> new list" behavior.
    self.words = words or []
    self.label_ids = label_ids or []

  def add_word_and_label_id(self, word, label_id):
    """Appends one (word, label_id) pair to the example."""
    self.words.append(word)
    self.label_ids.append(label_id)
def _read_one_file(file_name, label_list):
  """Reads one file and returns a list of `InputExample` instances.

  Each non-empty line is `<token>\t<label>` (train/dev) or `<token>` (test);
  blank lines separate sentences.

  Args:
    file_name: Path of a file readable via tf.io.gfile.
    label_list: Ordered label strings; list position defines the label id.

  Returns:
    A list of `InputExample`s, one per non-empty sentence.

  Raises:
    ValueError: If a non-empty line does not split into 1 or 2 tab-separated
      fields. (Previously an `assert`, which is silently stripped under -O.)
  """
  lines = tf.io.gfile.GFile(file_name, "r").readlines()
  examples = []
  label_id_map = {label: i for i, label in enumerate(label_list)}
  sentence_id = 0
  example = InputExample(sentence_id=0)
  for line in lines:
    line = line.strip("\n")
    if line:
      # The format is: <token>\t<label> for train/dev set and <token> for test.
      items = line.split("\t")
      if len(items) not in (1, 2):
        raise ValueError(
            "Expected 1 or 2 tab-separated fields but got %d in %s: %r" %
            (len(items), file_name, line))
      token = items[0].strip()
      # Assign a dummy label_id for test set
      label_id = label_id_map[items[1].strip()] if len(items) == 2 else 0
      example.add_word_and_label_id(token, label_id)
    else:
      # Empty line indicates a new sentence.
      if example.words:
        examples.append(example)
        sentence_id += 1
        example = InputExample(sentence_id=sentence_id)
  # Flush the trailing sentence when the file does not end with a blank line.
  if example.words:
    examples.append(example)
  return examples
class PanxProcessor(classifier_data_lib.DataProcessor):
  """Processor for the Panx data set."""
  supported_languages = [
      "ar", "he", "vi", "id", "jv", "ms", "tl", "eu", "ml", "ta", "te", "af",
      "nl", "en", "de", "el", "bn", "hi", "mr", "ur", "fa", "fr", "it", "pt",
      "es", "bg", "ru", "ja", "ka", "ko", "th", "sw", "yo", "my", "zh", "kk",
      "tr", "et", "fi", "hu"
  ]

  def __init__(self,
               process_text_fn=tokenization.convert_to_unicode,
               only_use_en_train=True,
               only_use_en_dev=True):
    """See base class.

    Args:
      process_text_fn: See base class.
      only_use_en_train: If True, only use english training data. Otherwise,
        use training data from all languages.
      only_use_en_dev: If True, only use english dev data. Otherwise, use dev
        data from all languages.
    """
    super(PanxProcessor, self).__init__(process_text_fn)
    self.only_use_en_train = only_use_en_train
    self.only_use_en_dev = only_use_en_dev

  def _read_split(self, data_dir, split_name, english_only):
    """Reads `<split>-en.tsv`, plus every other language unless english_only."""
    examples = _read_one_file(
        os.path.join(data_dir, "%s-en.tsv" % split_name), self.get_labels())
    if not english_only:
      for language in self.supported_languages:
        if language != "en":
          examples.extend(
              _read_one_file(
                  os.path.join(data_dir, "%s-%s.tsv" % (split_name, language)),
                  self.get_labels()))
    return examples

  def get_train_examples(self, data_dir):
    """See base class."""
    return self._read_split(data_dir, "train", self.only_use_en_train)

  def get_dev_examples(self, data_dir):
    """See base class."""
    return self._read_split(data_dir, "dev", self.only_use_en_dev)

  def get_test_examples(self, data_dir):
    """Returns a dict mapping language code -> list of test examples."""
    return {
        language: _read_one_file(
            os.path.join(data_dir, "test-%s.tsv" % language), self.get_labels())
        for language in self.supported_languages
    }

  def get_labels(self):
    """Returns the ordered NER label set (position defines the label id)."""
    return ["O", "B-PER", "I-PER", "B-LOC", "I-LOC", "B-ORG", "I-ORG"]

  @staticmethod
  def get_processor_name():
    """Returns the short name recorded in the generated metadata."""
    return "panx"
class UdposProcessor(classifier_data_lib.DataProcessor):
  """Processor for the Udpos data set."""
  supported_languages = [
      "af", "ar", "bg", "de", "el", "en", "es", "et", "eu", "fa", "fi", "fr",
      "he", "hi", "hu", "id", "it", "ja", "kk", "ko", "mr", "nl", "pt", "ru",
      "ta", "te", "th", "tl", "tr", "ur", "vi", "yo", "zh"
  ]

  def __init__(self,
               process_text_fn=tokenization.convert_to_unicode,
               only_use_en_train=True,
               only_use_en_dev=True):
    """See base class.

    Args:
      process_text_fn: See base class.
      only_use_en_train: If True, only use english training data. Otherwise,
        use training data from all languages.
      only_use_en_dev: If True, only use english dev data. Otherwise, use dev
        data from all languages.
    """
    super(UdposProcessor, self).__init__(process_text_fn)
    self.only_use_en_train = only_use_en_train
    self.only_use_en_dev = only_use_en_dev

  def _read_split(self, data_dir, split_name, english_only):
    """Reads one split: english-only file, or a glob over all languages."""
    if english_only:
      return _read_one_file(
          os.path.join(data_dir, "%s-en.tsv" % split_name), self.get_labels())
    examples = []
    # Uses glob because some languages are missing in train.
    pattern = os.path.join(data_dir, "%s-*.tsv" % split_name)
    for filepath in tf.io.gfile.glob(pattern):
      examples.extend(_read_one_file(filepath, self.get_labels()))
    return examples

  def get_train_examples(self, data_dir):
    """See base class."""
    return self._read_split(data_dir, "train", self.only_use_en_train)

  def get_dev_examples(self, data_dir):
    """See base class."""
    return self._read_split(data_dir, "dev", self.only_use_en_dev)

  def get_test_examples(self, data_dir):
    """Returns a dict mapping language code -> list of test examples."""
    return {
        language: _read_one_file(
            os.path.join(data_dir, "test-%s.tsv" % language), self.get_labels())
        for language in self.supported_languages
    }

  def get_labels(self):
    """Returns the ordered universal POS tag set."""
    return [
        "ADJ", "ADP", "ADV", "AUX", "CCONJ", "DET", "INTJ", "NOUN", "NUM",
        "PART", "PRON", "PROPN", "PUNCT", "SCONJ", "SYM", "VERB", "X"
    ]

  @staticmethod
  def get_processor_name():
    """Returns the short name recorded in the generated metadata."""
    return "udpos"
def _tokenize_example(example, max_length, tokenizer, text_preprocessing=None):
  """Tokenizes words and splits an over-long example into several short ones.

  Each output example holds at most `max_length - 2` subwords, reserving room
  for the [CLS]/[SEP] tokens added later. Only the first subword of each word
  keeps the real label; the rest get the padding label.
  """
  budget = max_length - 2
  if any(label < 0 for label in example.label_ids):
    raise ValueError("Unexpected negative label_id: %s" % example.label_ids)
  results = []
  current = InputExample(sentence_id=example.sentence_id, sub_sentence_id=0)
  for word, label in zip(example.words, example.label_ids):
    if text_preprocessing:
      word = text_preprocessing(word)
    subwords = tokenizer.tokenize(word)
    # A non-empty word that tokenizes to nothing (or to too many pieces to
    # ever fit) is replaced by the single [UNK] token.
    if word and (not subwords or len(subwords) > budget):
      subwords = [_UNK_TOKEN]
    if len(current.words) + len(subwords) > budget:
      # Current sub-sentence is full; start the next one.
      results.append(current)
      current = InputExample(
          sentence_id=example.sentence_id,
          sub_sentence_id=current.sub_sentence_id + 1)
    for position, subword in enumerate(subwords):
      # Real label on the first subword, padding label on the remainder.
      current.add_word_and_label_id(
          subword, label if position == 0 else _PADDING_LABEL_ID)
  if current.words:
    results.append(current)
  return results
def _convert_single_example(example, max_seq_length, tokenizer):
  """Converts an `InputExample` instance to a `tf.train.Example` instance."""
  # Surround the sentence with [CLS]/[SEP]; both carry the padding label so
  # they never contribute to the loss.
  tokens = ["[CLS]"] + list(example.words) + ["[SEP]"]
  input_ids = tokenizer.convert_tokens_to_ids(tokens)
  label_ids = ([_PADDING_LABEL_ID] + list(example.label_ids)
               + [_PADDING_LABEL_ID])
  input_mask = [1] * len(input_ids)
  segment_ids = [0] * len(input_ids)

  # Pad every feature out to the fixed sequence length (no-op when already
  # at or above max_seq_length, matching the original while-loop).
  pad_amount = max_seq_length - len(input_ids)
  if pad_amount > 0:
    input_ids = input_ids + [0] * pad_amount
    input_mask = input_mask + [0] * pad_amount
    segment_ids = segment_ids + [0] * pad_amount
    label_ids = label_ids + [_PADDING_LABEL_ID] * pad_amount

  def create_int_feature(values):
    return tf.train.Feature(int64_list=tf.train.Int64List(value=list(values)))

  features = collections.OrderedDict()
  features["input_ids"] = create_int_feature(input_ids)
  features["input_mask"] = create_int_feature(input_mask)
  features["segment_ids"] = create_int_feature(segment_ids)
  features["label_ids"] = create_int_feature(label_ids)
  features["sentence_id"] = create_int_feature([example.sentence_id])
  features["sub_sentence_id"] = create_int_feature([example.sub_sentence_id])
  return tf.train.Example(features=tf.train.Features(feature=features))
def write_example_to_file(examples,
                          tokenizer,
                          max_seq_length,
                          output_file,
                          text_preprocessing=None):
  """Writes `InputExample`s into a tfrecord file with `tf.train.Example` protos.

  Note that the words inside each example will be tokenized and be applied by
  `text_preprocessing` if available. Also, if the length of sentence (plus
  special [CLS] and [SEP] tokens) exceeds `max_seq_length`, the long sentence
  will be broken into multiple short examples. For example:

  Example (text_preprocessing=lowercase, max_seq_length=5)
    words:        ["What", "a", "great", "weekend"]
    labels:       [     7,   5,       9,        10]
    sentence_id:  0
    preprocessed: ["what", "a", "great", "weekend"]
    tokenized:    ["what", "a", "great", "week", "##end"]

  will result in two tf.example protos:

    tokens:      ["[CLS]", "what", "a", "great", "[SEP]"]
    label_ids:   [-1,          7,   5,       9,      -1]
    input_mask:  [ 1,          1,   1,       1,       1]
    segment_ids: [ 0,          0,   0,       0,       0]
    input_ids:   [ tokenizer.convert_tokens_to_ids(tokens) ]
    sentence_id: 0

    tokens:      ["[CLS]", "week", "##end", "[SEP]", "[PAD]"]
    label_ids:   [-1,          10,      -1,      -1,      -1]
    input_mask:  [ 1,           1,       1,       0,       0]
    segment_ids: [ 0,           0,       0,       0,       0]
    input_ids:   [ tokenizer.convert_tokens_to_ids(tokens) ]
    sentence_id: 0

  Note the use of -1 in `label_ids` to indicate that a token should not be
  considered for classification (e.g., trailing ## wordpieces or special
  token). Token classification models should accordingly ignore these when
  calculating loss, metrics, etc...

  Args:
    examples: A list of `InputExample` instances.
    tokenizer: The tokenizer to be applied on the data.
    max_seq_length: Maximum length of generated sequences.
    output_file: The name of the output tfrecord file.
    text_preprocessing: optional preprocessing run on each word prior to
      tokenization.

  Returns:
    The total number of tf.train.Example proto written to file.
  """
  tf.io.gfile.makedirs(os.path.dirname(output_file))
  num_tokenized_examples = 0
  # Use the writer as a context manager so the file is flushed and closed
  # even if tokenization or serialization raises (previously the handle
  # leaked on error).
  with tf.io.TFRecordWriter(output_file) as writer:
    for ex_index, example in enumerate(examples):
      if ex_index % 10000 == 0:
        logging.info("Writing example %d of %d to %s", ex_index, len(examples),
                     output_file)
      tokenized_examples = _tokenize_example(example, max_seq_length, tokenizer,
                                             text_preprocessing)
      num_tokenized_examples += len(tokenized_examples)
      for per_tokenized_example in tokenized_examples:
        tf_example = _convert_single_example(per_tokenized_example,
                                             max_seq_length, tokenizer)
        writer.write(tf_example.SerializeToString())
  return num_tokenized_examples
def token_classification_meta_data(train_data_size,
                                   max_seq_length,
                                   num_labels,
                                   eval_data_size=None,
                                   test_data_size=None,
                                   label_list=None,
                                   processor_type=None):
  """Creates metadata for tagging (token classification) datasets.

  Args:
    train_data_size: Number of training examples written.
    max_seq_length: Fixed sequence length of the serialized features.
    num_labels: Size of the label vocabulary.
    eval_data_size: Optional number of eval examples.
    test_data_size: Optional test size (may be a per-language dict).
    label_list: Optional ordered list of label strings.
    processor_type: Optional short processor name (e.g. "panx").

  Returns:
    A dict with the mandatory fields plus any truthy optional ones.
  """
  meta_data = {
      "train_data_size": train_data_size,
      "max_seq_length": max_seq_length,
      "num_labels": num_labels,
      "task_type": "tagging",
      "label_type": "int",
      "label_shape": [max_seq_length],
  }
  # Optional fields are only recorded when truthy, as before.
  optional_entries = (
      ("eval_data_size", eval_data_size),
      ("test_data_size", test_data_size),
      ("label_list", label_list),
      ("processor_type", processor_type),
  )
  for key, value in optional_entries:
    if value:
      meta_data[key] = value
  return meta_data
def generate_tf_record_from_data_file(processor, data_dir, tokenizer,
                                      max_seq_length, train_data_output_path,
                                      eval_data_output_path,
                                      test_data_output_path,
                                      text_preprocessing):
  """Generates tfrecord files from the raw data.

  Writes train, eval and one-per-language test tfrecords, then returns the
  metadata dict describing the produced dataset.
  """
  shared_kwargs = {
      "tokenizer": tokenizer,
      "max_seq_length": max_seq_length,
      "text_preprocessing": text_preprocessing,
  }
  train_size = write_example_to_file(
      processor.get_train_examples(data_dir),
      output_file=train_data_output_path, **shared_kwargs)
  eval_size = write_example_to_file(
      processor.get_dev_examples(data_dir),
      output_file=eval_data_output_path, **shared_kwargs)
  # Test data is keyed per language; the output path is a format template.
  test_sizes = {}
  for language, language_examples in processor.get_test_examples(
      data_dir).items():
    test_sizes[language] = write_example_to_file(
        language_examples,
        output_file=test_data_output_path.format(language), **shared_kwargs)
  labels = processor.get_labels()
  return token_classification_meta_data(
      train_size,
      max_seq_length,
      len(labels),
      eval_size,
      test_sizes,
      label_list=labels,
      processor_type=processor.get_processor_name())
| apache-2.0 |
webgeodatavore/django | django/core/management/commands/migrate.py | 54 | 13145 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import time
from collections import OrderedDict
from importlib import import_module
from django.apps import apps
from django.core.management.base import BaseCommand, CommandError
from django.core.management.sql import (
emit_post_migrate_signal, emit_pre_migrate_signal,
)
from django.db import DEFAULT_DB_ALIAS, connections, router, transaction
from django.db.migrations.autodetector import MigrationAutodetector
from django.db.migrations.executor import MigrationExecutor
from django.db.migrations.loader import AmbiguityError
from django.db.migrations.state import ProjectState
from django.utils.module_loading import module_has_submodule
class Command(BaseCommand):
    """Implementation of ``manage.py migrate``.

    Applies/unapplies migrations for migrated apps and, with --run-syncdb,
    creates tables directly (old ``syncdb`` behaviour) for apps without
    migrations.
    """
    help = "Updates database schema. Manages both apps with migrations and those without."

    def add_arguments(self, parser):
        """Registers the command-line options accepted by ``migrate``."""
        parser.add_argument('app_label', nargs='?',
            help='App label of an application to synchronize the state.')
        parser.add_argument('migration_name', nargs='?',
            help=(
                'Database state will be brought to the state after that '
                'migration. Use the name "zero" to unapply all migrations.'
            ),
        )
        parser.add_argument('--noinput', '--no-input',
            action='store_false', dest='interactive', default=True,
            help='Tells Django to NOT prompt the user for input of any kind.')
        parser.add_argument('--database', action='store', dest='database',
            default=DEFAULT_DB_ALIAS, help='Nominates a database to synchronize. '
                'Defaults to the "default" database.')
        parser.add_argument('--fake', action='store_true', dest='fake', default=False,
            help='Mark migrations as run without actually running them.')
        parser.add_argument('--fake-initial', action='store_true', dest='fake_initial', default=False,
            help='Detect if tables already exist and fake-apply initial migrations if so. Make sure '
                 'that the current database schema matches your initial migration before using this '
                 'flag. Django will only check for an existing table name.')
        parser.add_argument('--run-syncdb', action='store_true', dest='run_syncdb',
            help='Creates tables for apps without migrations.')

    def handle(self, *args, **options):
        """Plans and executes the requested migration (and optional syncdb) work."""
        self.verbosity = options.get('verbosity')
        self.interactive = options.get('interactive')
        # Import the 'management' module within each installed app, to register
        # dispatcher events.
        for app_config in apps.get_app_configs():
            if module_has_submodule(app_config.module, "management"):
                import_module('.management', app_config.name)
        # Get the database we're operating from
        db = options.get('database')
        connection = connections[db]
        # Hook for backends needing any database preparation
        connection.prepare_database()
        # Work out which apps have migrations and which do not
        executor = MigrationExecutor(connection, self.migration_progress_callback)
        # Before anything else, see if there's conflicting apps and drop out
        # hard if there are any
        conflicts = executor.loader.detect_conflicts()
        if conflicts:
            name_str = "; ".join(
                "%s in %s" % (", ".join(names), app)
                for app, names in conflicts.items()
            )
            raise CommandError(
                "Conflicting migrations detected; multiple leaf nodes in the "
                "migration graph: (%s).\nTo fix them run "
                "'python manage.py makemigrations --merge'" % name_str
            )
        # If they supplied command line arguments, work out what they mean.
        target_app_labels_only = True
        if options['app_label'] and options['migration_name']:
            app_label, migration_name = options['app_label'], options['migration_name']
            if app_label not in executor.loader.migrated_apps:
                raise CommandError(
                    "App '%s' does not have migrations." % app_label
                )
            if migration_name == "zero":
                # The special name "zero" unapplies every migration of the app.
                targets = [(app_label, None)]
            else:
                try:
                    migration = executor.loader.get_migration_by_prefix(app_label, migration_name)
                except AmbiguityError:
                    raise CommandError(
                        "More than one migration matches '%s' in app '%s'. "
                        "Please be more specific." %
                        (migration_name, app_label)
                    )
                except KeyError:
                    raise CommandError("Cannot find a migration matching '%s' from app '%s'." % (
                        migration_name, app_label))
                targets = [(app_label, migration.name)]
            target_app_labels_only = False
        elif options['app_label']:
            app_label = options['app_label']
            if app_label not in executor.loader.migrated_apps:
                raise CommandError(
                    "App '%s' does not have migrations." % app_label
                )
            # No migration name given: migrate this app to its leaf nodes.
            targets = [key for key in executor.loader.graph.leaf_nodes() if key[0] == app_label]
        else:
            targets = executor.loader.graph.leaf_nodes()
        plan = executor.migration_plan(targets)
        run_syncdb = options.get('run_syncdb') and executor.loader.unmigrated_apps
        # Print some useful info
        if self.verbosity >= 1:
            self.stdout.write(self.style.MIGRATE_HEADING("Operations to perform:"))
            if run_syncdb:
                self.stdout.write(
                    self.style.MIGRATE_LABEL(" Synchronize unmigrated apps: ") +
                    (", ".join(executor.loader.unmigrated_apps))
                )
            if target_app_labels_only:
                self.stdout.write(
                    self.style.MIGRATE_LABEL(" Apply all migrations: ") +
                    (", ".join(set(a for a, n in targets)) or "(none)")
                )
            else:
                if targets[0][1] is None:
                    self.stdout.write(self.style.MIGRATE_LABEL(
                        " Unapply all migrations: ") + "%s" % (targets[0][0], )
                    )
                else:
                    self.stdout.write(self.style.MIGRATE_LABEL(
                        " Target specific migration: ") + "%s, from %s"
                        % (targets[0][1], targets[0][0])
                    )
        emit_pre_migrate_signal(self.verbosity, self.interactive, connection.alias)
        # Run the syncdb phase.
        if run_syncdb:
            if self.verbosity >= 1:
                self.stdout.write(self.style.MIGRATE_HEADING("Synchronizing apps without migrations:"))
            self.sync_apps(connection, executor.loader.unmigrated_apps)
        # Migrate!
        if self.verbosity >= 1:
            self.stdout.write(self.style.MIGRATE_HEADING("Running migrations:"))
        if not plan:
            executor.check_replacements()
            if self.verbosity >= 1:
                self.stdout.write(" No migrations to apply.")
            # If there's changes that aren't in migrations yet, tell them how to fix it.
            autodetector = MigrationAutodetector(
                executor.loader.project_state(),
                ProjectState.from_apps(apps),
            )
            changes = autodetector.changes(graph=executor.loader.graph)
            if changes:
                self.stdout.write(self.style.NOTICE(
                    " Your models have changes that are not yet reflected "
                    "in a migration, and so won't be applied."
                ))
                self.stdout.write(self.style.NOTICE(
                    " Run 'manage.py makemigrations' to make new "
                    "migrations, and then re-run 'manage.py migrate' to "
                    "apply them."
                ))
        else:
            fake = options.get("fake")
            fake_initial = options.get("fake_initial")
            executor.migrate(targets, plan, fake=fake, fake_initial=fake_initial)
        # Send the post_migrate signal, so individual apps can do whatever they need
        # to do at this point.
        emit_post_migrate_signal(self.verbosity, self.interactive, connection.alias)

    def migration_progress_callback(self, action, migration=None, fake=False):
        """Reports executor progress to stdout; timings only when verbosity > 1."""
        if self.verbosity >= 1:
            compute_time = self.verbosity > 1
            if action == "apply_start":
                if compute_time:
                    self.start = time.time()
                self.stdout.write(" Applying %s..." % migration, ending="")
                self.stdout.flush()
            elif action == "apply_success":
                elapsed = " (%.3fs)" % (time.time() - self.start) if compute_time else ""
                if fake:
                    self.stdout.write(self.style.MIGRATE_SUCCESS(" FAKED" + elapsed))
                else:
                    self.stdout.write(self.style.MIGRATE_SUCCESS(" OK" + elapsed))
            elif action == "unapply_start":
                if compute_time:
                    self.start = time.time()
                self.stdout.write(" Unapplying %s..." % migration, ending="")
                self.stdout.flush()
            elif action == "unapply_success":
                elapsed = " (%.3fs)" % (time.time() - self.start) if compute_time else ""
                if fake:
                    self.stdout.write(self.style.MIGRATE_SUCCESS(" FAKED" + elapsed))
                else:
                    self.stdout.write(self.style.MIGRATE_SUCCESS(" OK" + elapsed))
            elif action == "render_start":
                if compute_time:
                    self.start = time.time()
                self.stdout.write(" Rendering model states...", ending="")
                self.stdout.flush()
            elif action == "render_success":
                elapsed = " (%.3fs)" % (time.time() - self.start) if compute_time else ""
                self.stdout.write(self.style.MIGRATE_SUCCESS(" DONE" + elapsed))

    def sync_apps(self, connection, app_labels):
        "Runs the old syncdb-style operation on a list of app_labels."
        cursor = connection.cursor()
        try:
            # Get a list of already installed *models* so that references work right.
            tables = connection.introspection.table_names(cursor)
            created_models = set()
            # Build the manifest of apps and models that are to be synchronized
            all_models = [
                (app_config.label,
                    router.get_migratable_models(app_config, connection.alias, include_auto_created=False))
                for app_config in apps.get_app_configs()
                if app_config.models_module is not None and app_config.label in app_labels
            ]

            def model_installed(model):
                opts = model._meta
                converter = connection.introspection.table_name_converter
                # Note that if a model is unmanaged we short-circuit and never try to install it
                return not ((converter(opts.db_table) in tables) or
                    (opts.auto_created and converter(opts.auto_created._meta.db_table) in tables))

            manifest = OrderedDict(
                (app_name, list(filter(model_installed, model_list)))
                for app_name, model_list in all_models
            )
            # Create the tables for each model
            if self.verbosity >= 1:
                self.stdout.write(" Creating tables...\n")
            with transaction.atomic(using=connection.alias, savepoint=connection.features.can_rollback_ddl):
                deferred_sql = []
                for app_name, model_list in manifest.items():
                    for model in model_list:
                        if not model._meta.can_migrate(connection):
                            continue
                        if self.verbosity >= 3:
                            self.stdout.write(
                                " Processing %s.%s model\n" % (app_name, model._meta.object_name)
                            )
                        with connection.schema_editor() as editor:
                            if self.verbosity >= 1:
                                self.stdout.write(" Creating table %s\n" % model._meta.db_table)
                            editor.create_model(model)
                        # Collect index/FK statements to run after all tables exist.
                        deferred_sql.extend(editor.deferred_sql)
                        editor.deferred_sql = []
                        created_models.add(model)
                if self.verbosity >= 1:
                    self.stdout.write(" Running deferred SQL...\n")
                for statement in deferred_sql:
                    cursor.execute(statement)
        finally:
            cursor.close()
        return created_models
| bsd-3-clause |
wlonk/python-semantic-release | tests/test_history.py | 2 | 8852 | from unittest import TestCase
import semantic_release
from semantic_release.history import (evaluate_version_bump, get_current_version, get_new_version,
get_previous_version)
from semantic_release.history.logs import generate_changelog, markdown_changelog
from . import mock
# Single commit-message fixtures, one per conventional-commit kind.
# MAJOR carries a BREAKING CHANGE footer; MAJOR2 adds a body paragraph first.
MAJOR = 'feat(x): Add super-feature\n\nBREAKING CHANGE: Uses super-feature as default instead of ' \
        'dull-feature.'
MAJOR2 = 'feat(x): Add super-feature\n\nSome explanation\n\n' \
         'BREAKING CHANGE: Uses super-feature as default instead of ' \
         'dull-feature.'
MINOR = 'feat(x): Add non-breaking super-feature'
PATCH = 'fix(x): Fix bug in super-feature'
NO_TAG = 'docs(x): Add documentation for super-feature'
UNKNOWN_STYLE = 'random commits are the worst'

# Commit-log fixtures (lists of messages) fed to the mocked get_commit_log.
ALL_KINDS_OF_COMMIT_MESSAGES = [MINOR, MAJOR, MINOR, PATCH]
MINOR_AND_PATCH_COMMIT_MESSAGES = [MINOR, PATCH]
PATCH_COMMIT_MESSAGES = [PATCH, PATCH]
# The bare '1.1.0' entry plays the role of the last release tag in the log.
MAJOR_LAST_RELEASE_MINOR_AFTER = [MINOR, '1.1.0', MAJOR]
class EvaluateVersionBumpTest(TestCase):
    """Checks which semver bump level a commit history maps to."""

    def test_major(self):
        # A BREAKING CHANGE commit anywhere in the log wins over minor/patch.
        with mock.patch('semantic_release.history.logs.get_commit_log',
                        lambda *a, **kw: ALL_KINDS_OF_COMMIT_MESSAGES):
            self.assertEqual(evaluate_version_bump('0.0.0'), 'major')

    def test_minor(self):
        with mock.patch('semantic_release.history.logs.get_commit_log',
                        lambda *a, **kw: MINOR_AND_PATCH_COMMIT_MESSAGES):
            self.assertEqual(evaluate_version_bump('0.0.0'), 'minor')

    def test_patch(self):
        with mock.patch('semantic_release.history.logs.get_commit_log',
                        lambda *a, **kw: PATCH_COMMIT_MESSAGES):
            self.assertEqual(evaluate_version_bump('0.0.0'), 'patch')

    def test_nothing_if_no_tag(self):
        # Messages without a recognised tag must not trigger any release.
        with mock.patch('semantic_release.history.logs.get_commit_log',
                        lambda *a, **kw: ['', '...']):
            self.assertIsNone(evaluate_version_bump('0.0.0'))

    def test_force(self):
        # An explicit force level bypasses commit-log analysis entirely.
        self.assertEqual(evaluate_version_bump('0.0.0', 'major'), 'major')
        self.assertEqual(evaluate_version_bump('0.0.0', 'minor'), 'minor')
        self.assertEqual(evaluate_version_bump('0.0.0', 'patch'), 'patch')

    def test_should_account_for_commits_earlier_than_last_commit(self):
        # Only commits after the '1.1.0' tag entry count; MAJOR is older.
        with mock.patch('semantic_release.history.logs.get_commit_log',
                        lambda *a, **kw: MAJOR_LAST_RELEASE_MINOR_AFTER):
            self.assertEqual(evaluate_version_bump('1.1.0'), 'minor')

    @mock.patch('semantic_release.history.config.getboolean', lambda *x: True)
    @mock.patch('semantic_release.history.logs.get_commit_log', lambda *a, **kw: [NO_TAG])
    def test_should_patch_without_tagged_commits(self):
        # With the config flag enabled, untagged commits still yield a patch.
        self.assertEqual(evaluate_version_bump('1.1.0'), 'patch')

    @mock.patch('semantic_release.history.config.getboolean', lambda *x: False)
    @mock.patch('semantic_release.history.logs.get_commit_log', lambda *a, **kw: [NO_TAG])
    def test_should_return_none_without_tagged_commits(self):
        self.assertIsNone(evaluate_version_bump('1.1.0'))

    @mock.patch('semantic_release.history.logs.get_commit_log', lambda *a, **kw: [])
    def test_should_return_none_without_commits(self):
        """
        Make sure that we do not release if there are no commits since last release.
        """
        with mock.patch('semantic_release.history.config.getboolean', lambda *x: True):
            self.assertIsNone(evaluate_version_bump('1.1.0'))
        with mock.patch('semantic_release.history.config.getboolean', lambda *x: False):
            self.assertIsNone(evaluate_version_bump('1.1.0'))
class GenerateChangelogTests(TestCase):
    """Checks grouping of commit messages into changelog sections."""

    def test_should_generate_all_sections(self):
        with mock.patch('semantic_release.history.logs.get_commit_log',
                        lambda *a, **k: ALL_KINDS_OF_COMMIT_MESSAGES + [MAJOR2, UNKNOWN_STYLE]):
            changelog = generate_changelog('0.0.0')
            self.assertIn('feature', changelog)
            self.assertIn('fix', changelog)
            self.assertIn('documentation', changelog)
            self.assertIn('breaking', changelog)
            self.assertGreater(len(changelog['feature']), 0)
            self.assertGreater(len(changelog['fix']), 0)
            self.assertGreater(len(changelog['breaking']), 0)

    def test_should_only_read_until_given_version(self):
        # Commits before the '1.1.0' tag entry must not appear in the log.
        with mock.patch('semantic_release.history.logs.get_commit_log',
                        lambda *a, **k: MAJOR_LAST_RELEASE_MINOR_AFTER):
            changelog = generate_changelog('1.1.0')
            self.assertGreater(len(changelog['feature']), 0)
            self.assertEqual(len(changelog['fix']), 0)
            self.assertEqual(len(changelog['documentation']), 0)
            self.assertEqual(len(changelog['breaking']), 0)

    def test_should_skip_style_changes(self):
        # style(...) commits are excluded from the changelog entirely.
        with mock.patch('semantic_release.history.logs.get_commit_log',
                        lambda *a, **k: PATCH_COMMIT_MESSAGES + ['style(x): change x']):
            changelog = generate_changelog('0.0.0')
            self.assertNotIn('style', changelog)

    def test_should_skip_chore_changes(self):
        # chore(...) commits are excluded from the changelog entirely.
        with mock.patch('semantic_release.history.logs.get_commit_log',
                        lambda *a, **kw: PATCH_COMMIT_MESSAGES + ['chore(x): change x']):
            changelog = generate_changelog('0.0.0')
            self.assertNotIn('chore', changelog)
def test_current_version_should_return_correct_version():
    """get_current_version() must report the package's own __version__."""
    assert get_current_version() == semantic_release.__version__
class GetPreviousVersionTests(TestCase):
    """Checks extraction of the previous release version from the commit log."""

    @mock.patch('semantic_release.history.get_commit_log', lambda: ['0.10.0', '0.9.0'])
    def test_should_return_correct_version(self):
        self.assertEqual(get_previous_version('0.10.0'), '0.9.0')

    @mock.patch('semantic_release.history.get_commit_log', lambda: ['v0.10.0', 'v0.9.0'])
    def test_should_return_correct_version_with_v(self):
        # Tags may carry a leading 'v'; the returned version must not.
        self.assertEqual(get_previous_version('0.10.0'), '0.9.0')
class GetNewVersionTests(TestCase):
    """Table-driven checks of semver arithmetic in get_new_version()."""

    def _check_bump(self, level, cases):
        """Asserts get_new_version(current, level) == expected for each pair."""
        for current, expected in cases:
            self.assertEqual(get_new_version(current, level), expected)

    def test_major_bump(self):
        self._check_bump('major', [('0.0.0', '1.0.0'), ('0.1.0', '1.0.0'),
                                   ('0.1.9', '1.0.0'), ('10.1.0', '11.0.0')])

    def test_minor_bump(self):
        self._check_bump('minor', [('0.0.0', '0.1.0'), ('1.2.0', '1.3.0'),
                                   ('1.2.1', '1.3.0'), ('10.1.0', '10.2.0')])

    def test_patch_bump(self):
        self._check_bump('patch', [('0.0.0', '0.0.1'), ('0.1.0', '0.1.1'),
                                   ('10.0.9', '10.0.10')])

    def test_none_bump(self):
        # No bump level leaves the version unchanged.
        self._check_bump(None, [('1.0.0', '1.0.0')])
class MarkdownChangelogTests(TestCase):
    """Checks rendering of a changelog dict into markdown text."""

    def test_should_output_all_sections(self):
        # NOTE: 'refactor' entries are expected to be absent from the output.
        markdown = markdown_changelog('0', {
            'refactor': ['Refactor super-feature'],
            'breaking': ['Uses super-feature as default instead of dull-feature.'],
            'feature': ['Add non-breaking super-feature', 'Add super-feature'],
            'fix': ['Fix bug in super-feature'],
            'documentation': ['Document super-feature']
        })
        self.assertEqual(
            markdown,
            '\n'
            '### Feature\n'
            '* Add non-breaking super-feature\n'
            '* Add super-feature\n'
            '\n'
            '### Fix\n'
            '* Fix bug in super-feature\n'
            '\n'
            '### Breaking\n'
            '* Uses super-feature as default instead of dull-feature.\n'
            '\n'
            '### Documentation\n'
            '* Document super-feature\n'
        )

    def test_should_not_include_empty_sections(self):
        self.assertEqual(
            markdown_changelog(
                '1.0.1',
                {'refactor': [], 'breaking': [], 'feature': [], 'fix': [], 'documentation': []},
            ),
            ''
        )

    def test_should_output_heading(self):
        # header=True prepends a '## v<version>' heading.
        self.assertIn(
            '## v1.0.1\n',
            markdown_changelog(
                '1.0.1',
                {'refactor': [], 'breaking': [], 'feature': [], 'fix': [], 'documentation': []},
                header=True
            )
        )

    def test_should_not_output_heading(self):
        # Without header=True no version heading may appear anywhere.
        self.assertNotIn(
            'v1.0.1',
            markdown_changelog(
                '1.0.1',
                {'refactor': [], 'breaking': [], 'feature': [], 'fix': [], 'documentation': []},
            )
        )
| mit |
sinkuri256/python-for-android | python-build/python-libs/gdata/build/lib/gdata/Crypto/PublicKey/RSA.py | 228 | 6974 | #
# RSA.py : RSA encryption/decryption
#
# Part of the Python Cryptography Toolkit
#
# Distribute and use freely; there are no restrictions on further
# dissemination and usage except those imposed by the laws of your
# country of residence. This software is provided "as is" without
# warranty of fitness for use or suitability for any purpose, express
# or implied. Use at your own risk or not at all.
#
__revision__ = "$Id: RSA.py,v 1.20 2004/05/06 12:52:54 akuchling Exp $"
from Crypto.PublicKey import pubkey
from Crypto.Util import number
try:
from Crypto.PublicKey import _fastmath
except ImportError:
_fastmath = None
class error (Exception):
    # Module-specific exception type; lowercase name kept for the toolkit's
    # historical public API.
    pass
def generate(bits, randfunc, progress_func=None):
    """generate(bits:int, randfunc:callable, progress_func:callable)

    Generate an RSA key of length 'bits', using 'randfunc' to get
    random data and 'progress_func', if present, to display
    the progress of the key generation.
    """
    obj=RSAobj()
    # Generate the prime factors of n
    if progress_func:
        progress_func('p,q\n')
    p = q = 1L
    # Keep drawing prime pairs until n = p*q reaches the requested bit size.
    # (Python 2 code: 1L long literals, integer '/' division in bits/2.)
    while number.size(p*q) < bits:
        p = pubkey.getPrime(bits/2, randfunc)
        q = pubkey.getPrime(bits/2, randfunc)
    # p shall be smaller than q (for calc of u)
    if p > q:
        (p, q)=(q, p)
    obj.p = p
    obj.q = q
    if progress_func:
        progress_func('u\n')
    # u = p^-1 mod q; stored for CRT-based consumers.
    obj.u = pubkey.inverse(obj.p, obj.q)
    obj.n = obj.p*obj.q
    # Fixed public exponent F4 = 65537, the conventional choice.
    obj.e = 65537L
    if progress_func:
        progress_func('d\n')
    # Private exponent d = e^-1 mod (p-1)(q-1).
    obj.d=pubkey.inverse(obj.e, (obj.p-1)*(obj.q-1))
    # NOTE(review): `assert` is stripped under python -O, so this size check
    # only runs in debug builds.
    assert bits <= 1+obj.size(), "Generated key is too small"
    return obj
def construct(tuple):
    """construct(tuple:(long,) : RSAobj
    Construct an RSA object from a 2-, 3-, 5-, or 6-tuple of numbers.
    """
    obj=RSAobj()
    if len(tuple) not in [2,3,5,6]:
        raise error, 'argument for construct() wrong length'
    # Fields are assigned positionally: n, e, d, p, q, u (see RSAobj.keydata).
    for i in range(len(tuple)):
        field = obj.keydata[i]
        setattr(obj, field, tuple[i])
    if len(tuple) >= 5:
        # Ensure p is smaller than q
        if obj.p>obj.q:
            (obj.p, obj.q)=(obj.q, obj.p)
        if len(tuple) == 5:
            # u not supplied, so we're going to have to compute it.
            obj.u=pubkey.inverse(obj.p, obj.q)
    return obj
class RSAobj(pubkey.pubkey):
    # Positional field order relied on by construct() and pickling helpers.
    keydata = ['n', 'e', 'd', 'p', 'q', 'u']

    def _encrypt(self, plaintext, K=''):
        # Raw (textbook) RSA: c = m^e mod n. K is ignored (RSA needs no nonce).
        if self.n<=plaintext:
            raise error, 'Plaintext too large'
        return (pow(plaintext, self.e, self.n),)

    def _decrypt(self, ciphertext):
        # m = c^d mod n; requires the private exponent d.
        if (not hasattr(self, 'd')):
            raise error, 'Private key not available in this object'
        if self.n<=ciphertext[0]:
            raise error, 'Ciphertext too large'
        return pow(ciphertext[0], self.d, self.n)

    def _sign(self, M, K=''):
        # Signing is private-key exponentiation of M.
        return (self._decrypt((M,)),)

    def _verify(self, M, sig):
        # Verify by re-encrypting the signature and comparing against M.
        m2=self._encrypt(sig[0])
        if m2[0]==M:
            return 1
        else: return 0

    def _blind(self, M, B):
        # Chaum-style blinding: M' = M * B^e mod n.
        tmp = pow(B, self.e, self.n)
        return (M * tmp) % self.n

    def _unblind(self, M, B):
        # Remove the blinding factor: M = M' * B^-1 mod n.
        tmp = pubkey.inverse(B, self.n)
        return (M * tmp) % self.n

    def can_blind (self):
        """can_blind() : bool
        Return a Boolean value recording whether this algorithm can
        blind data. (This does not imply that this
        particular key object has the private information required to
        to blind a message.)
        """
        return 1

    def size(self):
        """size() : int
        Return the maximum number of bits that can be handled by this key.
        """
        return number.size(self.n) - 1

    def has_private(self):
        """has_private() : bool
        Return a Boolean denoting whether the object contains
        private components.
        """
        if hasattr(self, 'd'):
            return 1
        else: return 0

    def publickey(self):
        """publickey(): RSAobj
        Return a new key object containing only the public key information.
        """
        return construct((self.n, self.e))
class RSAobj_c(pubkey.pubkey):
    # Wrapper that exposes a _fastmath (C extension) RSA key behind the same
    # interface as the pure-Python RSAobj.  Legacy Python 2 code.
    keydata = ['n', 'e', 'd', 'p', 'q', 'u']
    def __init__(self, key):
        self.key = key
    def __getattr__(self, attr):
        # Delegate the numeric key components to the underlying C object.
        if attr in self.keydata:
            return getattr(self.key, attr)
        else:
            if self.__dict__.has_key(attr):
                # NOTE(review): this evaluates the attribute but never returns
                # it, so the branch yields None.  __getattr__ is only invoked
                # after normal lookup already failed, so this condition is
                # effectively always False anyway -- dead code.
                self.__dict__[attr]
            else:
                raise AttributeError, '%s instance has no attribute %s' % (self.__class__, attr)
    def __getstate__(self):
        # Pickle only the plain integers; the C key object is rebuilt on load.
        d = {}
        for k in self.keydata:
            if hasattr(self.key, k):
                d[k]=getattr(self.key, k)
        return d
    def __setstate__(self, state):
        # Rebuild the C key from whichever components were pickled:
        # public-only (n,e), private without CRT (n,e,d), or full (n,e,d,p,q,u).
        n,e = state['n'], state['e']
        if not state.has_key('d'):
            self.key = _fastmath.rsa_construct(n,e)
        else:
            d = state['d']
            if not state.has_key('q'):
                self.key = _fastmath.rsa_construct(n,e,d)
            else:
                p, q, u = state['p'], state['q'], state['u']
                self.key = _fastmath.rsa_construct(n,e,d,p,q,u)
    def _encrypt(self, plain, K):
        return (self.key._encrypt(plain),)
    def _decrypt(self, cipher):
        return self.key._decrypt(cipher[0])
    def _sign(self, M, K):
        return (self.key._sign(M),)
    def _verify(self, M, sig):
        return self.key._verify(M, sig[0])
    def _blind(self, M, B):
        return self.key._blind(M, B)
    def _unblind(self, M, B):
        return self.key._unblind(M, B)
    def can_blind (self):
        return 1
    def size(self):
        return self.key.size()
    def has_private(self):
        return self.key.has_private()
    def publickey(self):
        return construct_c((self.key.n, self.key.e))
def generate_c(bits, randfunc, progress_func = None):
    # C-accelerated key generation: same algorithm as generate(), but the
    # resulting components are handed to _fastmath.rsa_construct().
    # Generate the prime factors of n
    if progress_func:
        progress_func('p,q\n')
    p = q = 1L
    while number.size(p*q) < bits:
        p = pubkey.getPrime(bits/2, randfunc)
        q = pubkey.getPrime(bits/2, randfunc)
    # p shall be smaller than q (for calc of u)
    if p > q:
        (p, q)=(q, p)
    if progress_func:
        progress_func('u\n')
    # u = p^-1 mod q (CRT coefficient), e = F4, d = e^-1 mod (p-1)(q-1).
    u=pubkey.inverse(p, q)
    n=p*q
    e = 65537L
    if progress_func:
        progress_func('d\n')
    d=pubkey.inverse(e, (p-1)*(q-1))
    key = _fastmath.rsa_construct(n,e,d,p,q,u)
    obj = RSAobj_c(key)
##    print p
##    print q
##    print number.size(p), number.size(q), number.size(q*p),
##    print obj.size(), bits
    assert bits <= 1+obj.size(), "Generated key is too small"
    return obj
def construct_c(tuple):
    # Build a C-backed key object; ``apply`` is the legacy Python 2 spelling
    # of _fastmath.rsa_construct(*tuple).
    key = apply(_fastmath.rsa_construct, tuple)
    return RSAobj_c(key)
# Prefer the C implementations when _fastmath imported successfully; the
# pure-Python versions stay reachable under the *_py aliases.
object = RSAobj
generate_py = generate
construct_py = construct
if _fastmath:
    #print "using C version of RSA"
    generate = generate_c
    construct = construct_c
    error = _fastmath.error
| apache-2.0 |
barachka/odoo | addons/point_of_sale/report/__init__.py | 381 | 1238 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import pos_users_product
import account_statement
import pos_receipt
import pos_invoice
import pos_lines
import pos_details
import pos_payment_report
import pos_report
import pos_order_report
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
arnaudsj/Petrel | petrel/petrel/mock.py | 2 | 6112 | from collections import deque, defaultdict, namedtuple
import storm
python_id = id
STORM_TUPLE = 0
LIST = 1
TUPLE = 2
NAMEDTUPLE = 3
class MockSpout(storm.Spout):
    """Spout stub that replays a fixed, canned list of tuples for tests."""

    def __init__(self, output_fields, data):
        self.output_fields = output_fields
        self.data = data
        self.index = 0

    def declareOutputFields(self):
        return self.output_fields

    def nextTuple(self):
        # Emit one canned tuple per call; do nothing once exhausted.
        if self.index >= len(self.data):
            return
        storm.emit(self.data[self.index])
        self.index += 1
class Mock(object):
    # Test double that intercepts storm.emit()/storm.emitMany() while active
    # (used as a context manager) and records emitted tuples per emitter so a
    # topology can be exercised without a running Storm cluster.
    def __init__(self):
        # output_type: emitter id -> namedtuple type built from its output fields.
        # pending: emitter id -> tuples emitted but not yet consumed downstream.
        # processed: emitter id -> tuples already consumed via read().
        self.output_type = {}
        self.pending = defaultdict(deque)
        self.processed = defaultdict(deque)
        self.emitter = None
    def __enter__(self):
        # Monkey-patch the storm module's emit functions for the duration.
        self.old_emit = storm.emit
        storm.emit = self.emit
        self.old_emitMany = storm.emitMany
        storm.emitMany = self.emitMany
        return self
    def __exit__(self, type, value, traceback):
        # Restore the real storm emit functions.
        storm.emit = self.old_emit
        storm.emitMany = self.old_emitMany
    def activate(self, emitter):
        # Mark 'emitter' as the component currently producing output and set
        # storm.MODE accordingly so emit() routes to the right handler.
        self.emitter = emitter
        if isinstance(emitter, storm.Spout):
            storm.MODE = storm.Spout
        elif isinstance(emitter, (storm.Bolt, storm.BasicBolt)):
            storm.MODE = storm.Bolt
        else:
            assert False, "Neither a spout nor a bolt!"
    def emit(self, *args, **kwargs):
        self.__emit(*args, **kwargs)
        #return readTaskIds()
    def __emit(self, *args, **kwargs):
        # Dispatch on the mode set by activate().
        if storm.MODE == storm.Bolt:
            self.emitBolt(*args, **kwargs)
        elif storm.MODE == storm.Spout:
            self.emitSpout(*args, **kwargs)
    def emitMany(self, *args, **kwargs):
        if storm.MODE == storm.Bolt:
            self.emitManyBolt(*args, **kwargs)
        elif storm.MODE == storm.Spout:
            self.emitManySpout(*args, **kwargs)
    def emitManyBolt(self, tuples, stream=None, anchors = [], directTask=None):
        for t in tuples:
            self.emitBolt(t, stream, anchors, directTask)
    def emitManySpout(self, tuples, stream=None, anchors = [], directTask=None):
        # NOTE(review): this passes the *builtin* ``id`` function as the tuple
        # id (emitSpout's third parameter) -- there is no local ``id`` here.
        # Looks like a bug; confirm intended behavior before relying on ids.
        for t in tuples:
            self.emitSpout(t, stream, id, directTask)
    def emitter_id(self, emitter=None):
        # Key used in pending/processed: (class name, object identity).
        if emitter is None:
            emitter = self.emitter
        return type(emitter).__name__, python_id(emitter)
    def emitBolt(self, tup, stream=None, anchors = [], directTask=None):
        # Nice idea, but throws off profiling
        #assert len(tup) == len(self.emitter.declareOutputFields())
        # TODO: We should probably be capturing "anchors" so tests can verify
        # the topology is anchoring output tuples correctly.
        self.pending[self.emitter_id()].append(storm.Tuple(id=None, component=None, stream=stream, task=directTask, values=tup))
    def emitSpout(self, tup, stream=None, id=None, directTask=None):
        # Nice idea, but throws off profiling
        #assert len(tup) == len(self.emitter.declareOutputFields())
        self.pending[self.emitter_id()].append(storm.Tuple(id=id, component=None, stream=stream, task=directTask, values=tup))
    def read(self, source_emitter):
        # Pop the oldest pending tuple from 'source_emitter' and archive it.
        emitter_id = self.emitter_id(source_emitter)
        result = self.pending[emitter_id].popleft()
        self.processed[emitter_id].append(result)
        return result
    def get_output_type(self, emitter):
        # Lazily build (and cache) a namedtuple type from the emitter's
        # declared output fields, e.g. WordCountBoltTuple.
        emitter_id = self.emitter_id(emitter)
        if emitter_id not in self.output_type:
            self.output_type[emitter_id] = namedtuple('%sTuple' % type(emitter).__name__, emitter.declareOutputFields())
        return self.output_type[emitter_id]
    @classmethod
    def run_simple_topology(cls, config, emitters, result_type=NAMEDTUPLE):
        """Tests a simple topology. "Simple" means there it has no branches
        or cycles. "emitters" is a list of emitters, starting with a spout
        followed by 0 or more bolts that run in a chain."""
        # The config is almost always required. The only known reason to pass
        # None is when calling run_simple_topology() multiple times for the
        # same components. This can be useful for testing spout ack() and fail()
        # behavior.
        if config is not None:
            for emitter in emitters:
                emitter.initialize(config, {})
        # 'with cls() as self': the mock instance patches storm.emit while the
        # topology runs, and the bound name is deliberately called 'self'.
        with cls() as self:
            # Read from the spout.
            spout = emitters[0]
            spout_id = self.emitter_id(spout)
            old_length = -1
            # Call nextTuple() until the spout stops producing new output.
            while len(self.pending[spout_id]) > old_length:
                old_length = len(self.pending[spout_id])
                self.activate(spout)
                spout.nextTuple()
            # For each bolt in the sequence, consume all upstream input.
            for i, bolt in enumerate(emitters[1:]):
                previous = emitters[i]
                self.activate(bolt)
                while len(self.pending[self.emitter_id(previous)]) > 0:
                    bolt.process(self.read(previous))
            # Converters from the internal storm.Tuple to the caller's
            # requested result representation.
            def make_storm_tuple(t, emitter):
                return t
            def make_python_list(t, emitter):
                return list(t.values)
            def make_python_tuple(t, emitter):
                return tuple(t.values)
            def make_named_tuple(t, emitter):
                return self.get_output_type(emitter)(*t.values)
            if result_type == STORM_TUPLE:
                make = make_storm_tuple
            elif result_type == LIST:
                make = make_python_list
            elif result_type == NAMEDTUPLE:
                make = make_named_tuple
            else:
                assert False, 'Invalid result type specified: %s' % result_type
            # Upstream emitters report their consumed ('processed') tuples;
            # the final emitter's output was never consumed, so it is pending.
            result_values = \
                [ [ make(t, emitter) for t in self.processed[self.emitter_id(emitter)]] for emitter in emitters[:-1] ] + \
                [ [ make(t, emitters[-1]) for t in self.pending[self.emitter_id(emitters[-1])] ] ]
            return dict((k, v) for k, v in zip(emitters, result_values))
def run_simple_topology(*args, **kwargs):
    """Module-level convenience wrapper around Mock.run_simple_topology()."""
    return Mock.run_simple_topology(*args, **kwargs)
| bsd-3-clause |
Demolisty24/AlexaFood-Backend | venv/Lib/site-packages/wheel/signatures/ed25519py.py | 565 | 1695 | # -*- coding: utf-8 -*-
import warnings
import os
from collections import namedtuple
from . import djbec
__all__ = ['crypto_sign', 'crypto_sign_open', 'crypto_sign_keypair', 'Keypair',
           'PUBLICKEYBYTES', 'SECRETKEYBYTES', 'SIGNATUREBYTES']
# Ed25519 sizes: 32-byte public key, 64-byte secret key (seed || public key,
# see crypto_sign_keypair), 64-byte signature.
PUBLICKEYBYTES=32
SECRETKEYBYTES=64
SIGNATUREBYTES=64
Keypair = namedtuple('Keypair', ('vk', 'sk')) # verifying key, secret key
def crypto_sign_keypair(seed=None):
    """Return (verifying, secret) key from a given seed, or os.urandom(32)"""
    if seed is None:
        seed = os.urandom(PUBLICKEYBYTES)
    else:
        # Caller-supplied seeds defeat random key generation, so warn loudly.
        warnings.warn("ed25519ll should choose random seed.",
                      RuntimeWarning)
    if len(seed) != 32:
        raise ValueError("seed must be 32 random bytes or None.")
    skbytes = seed
    vkbytes = djbec.publickey(skbytes)
    # Secret key is the 32-byte seed with the public key appended (64 bytes).
    return Keypair(vkbytes, skbytes+vkbytes)
def crypto_sign(msg, sk):
    """Sign *msg* with secret key *sk* and return signature || message.

    The first SIGNATUREBYTES bytes of the result are the signature; the
    remainder is a copy of *msg*.
    """
    if len(sk) != SECRETKEYBYTES:
        raise ValueError("Bad signing key length %d" % len(sk))
    # sk layout is seed || public key (see crypto_sign_keypair).
    seed, verify_key = sk[:PUBLICKEYBYTES], sk[PUBLICKEYBYTES:]
    return djbec.signature(msg, seed, verify_key) + msg
def crypto_sign_open(signed, vk):
    """Return message given signature+message and the verifying key."""
    if len(vk) != PUBLICKEYBYTES:
        raise ValueError("Bad verifying key length %d" % len(vk))
    # The signature occupies the first SIGNATUREBYTES bytes; the message follows.
    rc = djbec.checkvalid(signed[:SIGNATUREBYTES], signed[SIGNATUREBYTES:], vk)
    if not rc:
        raise ValueError("rc != True", rc)
    return signed[SIGNATUREBYTES:]
| mit |
tima/ansible | lib/ansible/modules/network/eos/eos_vrf.py | 9 | 9171 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2017, Ansible by Red Hat, inc
#
# This file is part of Ansible by Red Hat
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'network'}
DOCUMENTATION = """
---
module: eos_vrf
version_added: "2.4"
author: "Ricardo Carrillo Cruz (@rcarrillocruz)"
short_description: Manage VRFs on Arista EOS network devices
description:
- This module provides declarative management of VRFs
on Arista EOS network devices.
notes:
- Tested against EOS 4.15
options:
name:
description:
- Name of the VRF.
required: true
rd:
description:
- Route distinguisher of the VRF
interfaces:
description:
- Identifies the set of interfaces that
should be configured in the VRF. Interfaces must be routed
interfaces in order to be placed into a VRF. The name of interface
should be in expanded format and not abbreviated.
aggregate:
description: List of VRFs definitions
purge:
description:
- Purge VRFs not defined in the I(aggregate) parameter.
default: no
delay:
description:
- Time in seconds to wait before checking for the operational state on remote
device. This wait is applicable for operational state arguments.
default: 10
state:
description:
- State of the VRF configuration.
default: present
choices: ['present', 'absent']
extends_documentation_fragment: eos
"""
EXAMPLES = """
- name: Create vrf
eos_vrf:
name: test
rd: 1:200
interfaces:
- Ethernet2
state: present
- name: Delete VRFs
eos_vrf:
name: test
state: absent
- name: Create aggregate of VRFs with purge
eos_vrf:
aggregate:
- { name: test4, rd: "1:204" }
- { name: test5, rd: "1:205" }
state: present
purge: yes
- name: Delete aggregate of VRFs
eos_vrf:
aggregate:
- name: test2
- name: test3
- name: test4
- name: test5
state: absent
"""
RETURN = """
commands:
description: The list of configuration mode commands to send to the device
returned: always
type: list
sample:
- vrf definition test
- rd 1:100
- interface Ethernet1
- vrf forwarding test
"""
import re
import time
from copy import deepcopy
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.common.utils import remove_default_spec
from ansible.module_utils.network.eos.eos import load_config, run_commands
from ansible.module_utils.network.eos.eos import eos_argument_spec, check_args
def search_obj_in_list(name, lst):
    """Return the first mapping in *lst* whose 'name' key equals *name*.

    Returns None when no entry matches.
    """
    return next((entry for entry in lst if entry['name'] == name), None)
def map_obj_to_commands(updates, module):
    """Diff desired state ('want') against device state ('have') and return
    the ordered list of EOS config commands needed to converge.
    """
    commands = list()
    want, have = updates
    state = module.params['state']
    purge = module.params['purge']
    for w in want:
        name = w['name']
        rd = w['rd']
        interfaces = w['interfaces']
        obj_in_have = search_obj_in_list(name, have)
        if state == 'absent':
            # Only remove VRFs that actually exist on the device.
            if obj_in_have:
                commands.append('no vrf definition %s' % name)
        elif state == 'present':
            if not obj_in_have:
                # New VRF: create it, then set rd and interface membership.
                commands.append('vrf definition %s' % name)
                if rd is not None:
                    commands.append('rd %s' % rd)
                if w['interfaces']:
                    for i in w['interfaces']:
                        commands.append('interface %s' % i)
                        commands.append('vrf forwarding %s' % w['name'])
            else:
                # Existing VRF: emit only the commands needed to fix drift.
                if w['rd'] is not None and w['rd'] != obj_in_have['rd']:
                    commands.append('vrf definition %s' % w['name'])
                    commands.append('rd %s' % w['rd'])
                if w['interfaces']:
                    if not obj_in_have['interfaces']:
                        for i in w['interfaces']:
                            commands.append('interface %s' % i)
                            commands.append('vrf forwarding %s' % w['name'])
                    elif set(w['interfaces']) != obj_in_have['interfaces']:
                        # Add wanted interfaces missing from the device;
                        # extra device interfaces are left alone here.
                        missing_interfaces = list(set(w['interfaces']) - set(obj_in_have['interfaces']))
                        for i in missing_interfaces:
                            commands.append('interface %s' % i)
                            commands.append('vrf forwarding %s' % w['name'])
    if purge:
        # Remove device VRFs not present in the desired list.
        for h in have:
            obj_in_want = search_obj_in_list(h['name'], want)
            if not obj_in_want:
                commands.append('no vrf definition %s' % h['name'])
    return commands
def map_config_to_obj(module):
    """Parse 'show vrf' output from the device into a list of dicts with
    'name', 'rd' and 'interfaces' keys (interfaces normalized to lowercase).
    """
    objs = []
    output = run_commands(module, ['show vrf'])
    # Drop the two table-header lines.
    lines = output[0].strip().splitlines()[2:]
    for l in lines:
        if not l:
            continue
        # Columns are separated by runs of two or more spaces.
        splitted_line = re.split(r'\s{2,}', l.strip())
        if len(splitted_line) == 1:
            # NOTE(review): single-column rows (presumably wrapped
            # continuation lines) are skipped -- confirm against real output.
            continue
        else:
            obj = {}
            obj['name'] = splitted_line[0]
            obj['rd'] = splitted_line[1]
            obj['interfaces'] = []
            # The interface list, when present, is the 5th column (comma-separated).
            if len(splitted_line) > 4:
                obj['interfaces'] = []
                for i in splitted_line[4].split(','):
                    obj['interfaces'].append(i.strip().lower())
            objs.append(obj)
    return objs
def map_params_to_obj(module):
    """Build the list of desired VRF dicts from either the 'aggregate'
    parameter or the top-level name/rd/interfaces/state parameters.
    """
    obj = []
    aggregate = module.params.get('aggregate')
    if aggregate:
        for item in aggregate:
            # Fill unset per-item options from the module-level parameters.
            for key in item:
                if item.get(key) is None:
                    item[key] = module.params[key]
            if item.get('interfaces'):
                # Normalize interface names: strip spaces, lowercase.
                item['interfaces'] = [intf.replace(" ", "").lower() for intf in item.get('interfaces') if intf]
            obj.append(item.copy())
    else:
        # Single-VRF form: one entry built from the top-level parameters.
        obj.append({
            'name': module.params['name'],
            'state': module.params['state'],
            'rd': module.params['rd'],
            'interfaces': [intf.replace(" ", "").lower() for intf in module.params['interfaces']] if module.params['interfaces'] else []
        })
    return obj
def check_declarative_intent_params(want, module):
    """After a change, wait 'delay' seconds, re-read device state and fail
    the module if any requested interface is not in its VRF.
    """
    if module.params['interfaces']:
        time.sleep(module.params['delay'])
        have = map_config_to_obj(module)
        for w in want:
            for i in w['interfaces']:
                obj_in_have = search_obj_in_list(w['name'], have)
                if obj_in_have:
                    interfaces = obj_in_have.get('interfaces')
                    if interfaces is not None and i not in interfaces:
                        module.fail_json(msg="Interface %s not configured on vrf %s" % (i, w['name']))
def main():
    """ main entry point for module execution
    """
    # Options valid both at top level and inside each 'aggregate' item.
    element_spec = dict(
        name=dict(),
        interfaces=dict(type='list'),
        delay=dict(default=10, type='int'),
        rd=dict(),
        state=dict(default='present', choices=['present', 'absent'])
    )
    aggregate_spec = deepcopy(element_spec)
    # remove default in aggregate spec, to handle common arguments
    remove_default_spec(aggregate_spec)
    argument_spec = dict(
        aggregate=dict(type='list', elements='dict', options=aggregate_spec),
        purge=dict(default=False, type='bool')
    )
    argument_spec.update(element_spec)
    argument_spec.update(eos_argument_spec)
    # 'name' and 'aggregate' are alternatives: exactly one must be given.
    required_one_of = [['name', 'aggregate']]
    mutually_exclusive = [['name', 'aggregate']]
    module = AnsibleModule(argument_spec=argument_spec,
                           required_one_of=required_one_of,
                           mutually_exclusive=mutually_exclusive,
                           supports_check_mode=True)
    warnings = list()
    check_args(module, warnings)
    result = {'changed': False}
    if warnings:
        result['warnings'] = warnings
    # Desired state from parameters vs. current state from the device.
    want = map_params_to_obj(module)
    have = map_config_to_obj(module)
    commands = map_obj_to_commands((want, have), module)
    result['commands'] = commands
    if commands:
        # Apply the configuration (diff-only when running in check mode).
        commit = not module.check_mode
        response = load_config(module, commands, commit=commit)
        if response.get('diff') and module._diff:
            result['diff'] = {'prepared': response.get('diff')}
        result['session_name'] = response.get('session')
        result['changed'] = True
    if result['changed']:
        # Verify the declared interface membership actually took effect.
        check_declarative_intent_params(want, module)
    module.exit_json(**result)
if __name__ == '__main__':
    main()
| gpl-3.0 |
jameswatt2008/jameswatt2008.github.io | python/Python核心编程/网络编程/截图和代码/概述、SOCKET/多进程copy文件/test-复件/symbol.py | 2 | 2093 | #! /usr/bin/python3.5
"""Non-terminal symbols of Python grammar (from "graminit.h")."""
# This file is automatically generated; please don't muck it up!
#
# To update the symbols in this file, 'cd' to the top directory of
# the python source tree after building the interpreter and run:
#
# ./python Lib/symbol.py
#--start constants--
single_input = 256
file_input = 257
eval_input = 258
decorator = 259
decorators = 260
decorated = 261
async_funcdef = 262
funcdef = 263
parameters = 264
typedargslist = 265
tfpdef = 266
varargslist = 267
vfpdef = 268
stmt = 269
simple_stmt = 270
small_stmt = 271
expr_stmt = 272
testlist_star_expr = 273
augassign = 274
del_stmt = 275
pass_stmt = 276
flow_stmt = 277
break_stmt = 278
continue_stmt = 279
return_stmt = 280
yield_stmt = 281
raise_stmt = 282
import_stmt = 283
import_name = 284
import_from = 285
import_as_name = 286
dotted_as_name = 287
import_as_names = 288
dotted_as_names = 289
dotted_name = 290
global_stmt = 291
nonlocal_stmt = 292
assert_stmt = 293
compound_stmt = 294
async_stmt = 295
if_stmt = 296
while_stmt = 297
for_stmt = 298
try_stmt = 299
with_stmt = 300
with_item = 301
except_clause = 302
suite = 303
test = 304
test_nocond = 305
lambdef = 306
lambdef_nocond = 307
or_test = 308
and_test = 309
not_test = 310
comparison = 311
comp_op = 312
star_expr = 313
expr = 314
xor_expr = 315
and_expr = 316
shift_expr = 317
arith_expr = 318
term = 319
factor = 320
power = 321
atom_expr = 322
atom = 323
testlist_comp = 324
trailer = 325
subscriptlist = 326
subscript = 327
sliceop = 328
exprlist = 329
testlist = 330
dictorsetmaker = 331
classdef = 332
arglist = 333
argument = 334
comp_iter = 335
comp_for = 336
comp_if = 337
encoding_decl = 338
yield_expr = 339
yield_arg = 340
#--end constants--
# Reverse mapping from grammar symbol number to its name, e.g.
# sym_name[256] == 'single_input'.  Built by scanning the integer constants
# defined above in the module globals.
sym_name = {}
for _name, _value in list(globals().items()):
    # isinstance() is the idiomatic integer check; the original
    # ``type(_value) is type(0)`` is a dated Python 2 spelling.  The module's
    # globals at this point contain only the symbol constants and dunder
    # attributes, so the two tests select the same names.
    if isinstance(_value, int):
        sym_name[_value] = _name
def _main():
    """Regenerate this module's constants from CPython's graminit.h."""
    import sys
    import token
    # With no arguments, fall back to the default CPython tree locations.
    default_args = ["Include/graminit.h", "Lib/symbol.py"]
    if len(sys.argv) == 1:
        sys.argv = sys.argv + default_args
    token._main()
if __name__ == "__main__":
    _main()
| gpl-2.0 |
cneill/designate-testing | designate/objects/validation_error.py | 7 | 1411 | # Copyright 2014 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from designate.objects import base
class ValidationError(base.DesignateObject):
    """A single schema validation failure, decoupled from the jsonschema
    library's own error type so it can be stored and serialized.
    """
    FIELDS = {
        'path': {},
        'message': {},
        'validator': {},
        'validator_value': {},
        'raw': {},
    }

    @classmethod
    def from_js_error(cls, js_error):
        """Convert a JSON Schema ValidationError instance into a
        ValidationError instance.
        """
        e = cls()
        # Fixed typo: the jsonschema attribute is 'relative_path' (it was
        # misspelled 'releative_path', which never exists, so getattr always
        # fell back to js_error.path).  jsonschema's .path is an alias for
        # relative_path, so observable behavior is unchanged -- the getattr
        # now simply works as intended.
        e.path = list(getattr(js_error, 'relative_path', js_error.path))
        e.message = js_error.message
        e.validator = js_error.validator
        e.validator_value = js_error.validator_value
        e.raw = js_error._contents()
        return e
class ValidationErrorList(base.ListObjectMixin, base.DesignateObject):
    # Typed list container holding ValidationError items.
    LIST_ITEM_TYPE = ValidationError
| apache-2.0 |
jackytu/newbrandx | src/oscar/apps/analytics/migrations/0002_auto_20140827_1705.py | 49 | 1091 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
    # Adds the relational fields for the analytics models; split out of
    # 0001_initial (presumably to break circular app dependencies -- see
    # the dependencies on catalogue and the swappable user model below).
    dependencies = [
        ('analytics', '0001_initial'),
        ('catalogue', '0001_initial'),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]
    operations = [
        migrations.AddField(
            model_name='userproductview',
            name='product',
            field=models.ForeignKey(verbose_name='Product', to='catalogue.Product'),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='userproductview',
            name='user',
            field=models.ForeignKey(verbose_name='User', to=settings.AUTH_USER_MODEL),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='productrecord',
            name='product',
            field=models.OneToOneField(verbose_name='Product', related_name='stats', to='catalogue.Product'),
            preserve_default=True,
        ),
    ]
| bsd-3-clause |
prds21/barrial-movie | servers/playedto.py | 39 | 4908 | # -*- coding: utf-8 -*-
#------------------------------------------------------------
# pelisalacarta - XBMC Plugin
# Conector para playedto
# http://blog.tvalacarta.info/plugin-xbmc/pelisalacarta/
#------------------------------------------------------------
import urlparse,urllib2,urllib,re
import os
from core import scrapertools
from core import logger
from core import config
def test_video_exists( page_url ):
    """Return (exists, reason): False when played.to reports the file as
    deleted or still being converted (reason strings are user-facing, in
    Spanish, and must stay as-is)."""
    logger.info("[playedto.py] test_video_exists(page_url='%s')" % page_url)
    data = scrapertools.cache_page(page_url,timeout=1)
    if "Reason for deletion" in data:
        return False,"El archivo ha sido borrado de played.to."
    if "The file is being converted" in data:
        return False,"El fichero está en proceso"
    return True,""
def get_video_url( page_url , premium = False , user="" , password="", video_password="" ):
    """Scrape the direct media URL from a played.to page by replaying the
    site's two-step 'continue to video' form submission."""
    logger.info("[playedto.py] url="+page_url)
    # First request: plain page load with a browser-like User-Agent.
    headers = [['User-Agent','Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.8.1.14) Gecko/20080404 Firefox/2.0.0.14']]
    data = scrapertools.cache_page( page_url , headers=headers,timeout=1 )
    #import time
    #time.sleep(10)
    # Second request: simulate clicking the continue banner by re-posting the
    # hidden form fields.  Example payload:
    #op=download1&usr_login=&id=z3nnqbspjyne&fname=Coriolanus_DVDrip_Castellano_by_ARKONADA.avi&referer=&hash=nmnt74bh4dihf4zzkxfmw3ztykyfxb24&imhuman=Continue+to+Video
    op = scrapertools.get_match(data,'<input type="hidden" name="op" value="([^"]+)"')
    id = scrapertools.get_match(data,'<input type="hidden" name="id" value="([^"]+)"')
    fname = scrapertools.get_match(data,'<input type="hidden" name="fname" value="([^"]+)"')
    hashstring = scrapertools.get_match(data,'<input type="hidden" name="hash" value="([^"]*)"')
    imhuman = scrapertools.get_match(data,'<input type="submit" name="imhuman" value="([^"]+)"').replace(" ","+")
    post = "op="+op+"&usr_login=&id="+id+"&fname="+fname+"&referer=&hash="+hashstring+"&imhuman="+imhuman
    headers.append(["Referer",page_url])
    data = scrapertools.cache_page( page_url , post=post, headers=headers,timeout=1 )
    logger.info("data="+data)
    # Extract the media URL from the embedded player configuration.
    media_url = scrapertools.get_match( data , 'file: "([^"]+)"' )
    video_urls = []
    video_urls.append( [ scrapertools.get_filename_from_url(media_url)[-4:]+" [playedto]",media_url])
    for video_url in video_urls:
        logger.info("[playedto.py] %s - %s" % (video_url[0],video_url[1]))
    return video_urls
# Find this server's video links inside the given text
def find_videos(data):
    """Return [title, url, server] triples for every played.to link found in
    *data*, skipping known non-video URLs and duplicates."""
    # Pre-seed the set with known bogus matches (site assets, static pages)
    # so the loose regex below does not report them as videos.
    encontrados = set()
    encontrados.add("http://played.to/embed-theme.html")
    encontrados.add("http://played.to/embed-jquery.html")
    encontrados.add("http://played.to/embed-s.html")
    encontrados.add("http://played.to/embed-images.html")
    encontrados.add("http://played.to/embed-faq.html")
    encontrados.add("http://played.to/embed-embed.html")
    encontrados.add("http://played.to/embed-ri.html")
    encontrados.add("http://played.to/embed-d.html")
    encontrados.add("http://played.to/embed-css.html")
    encontrados.add("http://played.to/embed-js.html")
    encontrados.add("http://played.to/embed-player.html")
    encontrados.add("http://played.to/embed-cgi.html")
    encontrados.add("http://played.to/embed-new.html")
    encontrados.add("http://played.to/embed-make.html")
    encontrados.add("http://played.to/embed-contact.html")
    encontrados.add("http://played.to/embed-privacy.html")
    encontrados.add("http://played.to/embed-dmca.html")
    encontrados.add("http://played.to/embed-tos.html")
    devuelve = []
    # Plain video URLs, e.g.:
    #http://played.to/z3nnqbspjyne
    patronvideos  = 'played.to/([a-z0-9A-Z]+)'
    logger.info("[playedto.py] find_videos #"+patronvideos+"#")
    matches = re.compile(patronvideos,re.DOTALL).findall(data)
    for match in matches:
        titulo = "[playedto]"
        url = "http://played.to/"+match
        if url not in encontrados:
            logger.info("  url="+url)
            devuelve.append( [ titulo , url , 'playedto' ] )
            encontrados.add(url)
        else:
            logger.info("  url duplicada="+url)
    # Embedded-player URLs, e.g.:
    #http://played.to/embed-z3nnqbspjyne.html
    patronvideos  = 'played.to/embed-([a-z0-9A-Z]+)'
    logger.info("[playedto.py] find_videos #"+patronvideos+"#")
    matches = re.compile(patronvideos,re.DOTALL).findall(data)
    for match in matches:
        titulo = "[playedto]"
        url = "http://played.to/"+match
        if url not in encontrados:
            logger.info("  url="+url)
            devuelve.append( [ titulo , url , 'playedto' ] )
            encontrados.add(url)
        else:
            logger.info("  url duplicada="+url)
    return devuelve
def test():
    """Smoke test: scrape a known page and report whether any URL came back."""
    urls = get_video_url("http://played.to/z3nnqbspjyne")
    return len(urls) > 0
| gpl-3.0 |
yawnosnorous/python-for-android | python-build/python-libs/ase/scripts/test.py | 64 | 5166 | import sys
import types
# Test imports.
import android
import BeautifulSoup
import gdata.docs.service
import sqlite3
import termios
import time
import xmpp
droid = android.Android()
def event_loop():
    """Poll the SL4A event queue (10 polls, ~3s apart); True once any event
    arrives, False after the polls are exhausted."""
    for i in range(10):
        e = droid.eventPoll(1)
        if e.result is not None:
            return True
        time.sleep(2)
    return False
def test_clipboard():
    """Round-trip a string through the clipboard, restoring the old value."""
    previous = droid.getClipboard().result
    msg = 'Hello, world!'
    droid.setClipboard(msg)
    echo = droid.getClipboard().result
    droid.setClipboard(previous)
    return echo == msg
def test_gdata():
    """Log in to Google Docs (credentials prompted via dialogs) and check
    that at least one document is listed."""
    # Create a client class which will make HTTP requests with Google Docs server.
    client = gdata.docs.service.DocsService()
    # Authenticate using your Google Docs email address and password.
    username = droid.dialogGetInput('Username').result
    password = droid.dialogGetPassword('Password', 'For ' + username).result
    try:
        client.ClientLogin(username, password)
    except:
        # NOTE(review): bare except hides the real auth failure reason;
        # consider catching the specific gdata exception.
        return False
    # Query the server for an Atom feed containing a list of your documents.
    documents_feed = client.GetDocumentListFeed()
    # Loop through the feed and extract each document entry.
    return bool(list(documents_feed.entry))
def test_gps():
    """Start location tracking and wait for at least one location event."""
    droid.startLocating()
    try:
        return event_loop()
    finally:
        droid.stopLocating()
def test_sensors():
    """Start sensor tracking and wait for at least one sensor event."""
    droid.startSensing()
    try:
        return event_loop()
    finally:
        droid.stopSensing()
def test_speak():
    """Say a phrase via text-to-speech; pass if the API reports no error."""
    result = droid.ttsSpeak('Hello, world!')
    return result.error is None
def test_phone_state():
    """Track telephony state and wait for at least one phone-state event."""
    droid.startTrackingPhoneState()
    try:
        return event_loop()
    finally:
        droid.stopTrackingPhoneState()
def test_ringer_silent():
    """Toggle silent mode twice (ending where we started); pass if no errors."""
    result1 = droid.toggleRingerSilentMode()
    result2 = droid.toggleRingerSilentMode()
    return result1.error is None and result2.error is None
def test_ringer_volume():
    """Read the ringer volume, drop it to 0, then restore the original."""
    get_result = droid.getRingerVolume()
    if get_result.error is not None:
        return False
    droid.setRingerVolume(0)
    set_result = droid.setRingerVolume(get_result.result)
    if set_result.error is not None:
        return False
    return True
def test_get_last_known_location():
    """Query the last known location; pass if the call itself succeeds."""
    result = droid.getLastKnownLocation()
    return result.error is None
def test_geocode():
    """Reverse-geocode (0, 0) asking for one result; pass if no error."""
    result = droid.geocode(0.0, 0.0, 1)
    return result.error is None
def test_wifi():
    """Toggle Wi-Fi twice (ending where we started); pass if no errors."""
    result1 = droid.toggleWifiState()
    result2 = droid.toggleWifiState()
    return result1.error is None and result2.error is None
def test_make_toast():
    """Show a toast notification; pass if no error."""
    result = droid.makeToast('Hello, world!')
    return result.error is None
def test_vibrate():
    """Vibrate the device; pass if no error."""
    result = droid.vibrate()
    return result.error is None
def test_notify():
    """Post a status-bar notification; pass if no error."""
    result = droid.notify('Test Title', 'Hello, world!')
    return result.error is None
def test_get_running_packages():
    """List running packages; pass if the call itself succeeds."""
    result = droid.getRunningPackages()
    return result.error is None
def test_alert_dialog():
title = 'User Interface'
message = 'Welcome to the SL4A integration test.'
droid.dialogCreateAlert(title, message)
droid.dialogSetPositiveButtonText('Continue')
droid.dialogShow()
response = droid.dialogGetResponse().result
return response['which'] == 'positive'
def test_alert_dialog_with_buttons():
title = 'Alert'
message = ('This alert box has 3 buttons and '
'will wait for you to press one.')
droid.dialogCreateAlert(title, message)
droid.dialogSetPositiveButtonText('Yes')
droid.dialogSetNegativeButtonText('No')
droid.dialogSetNeutralButtonText('Cancel')
droid.dialogShow()
response = droid.dialogGetResponse().result
return response['which'] in ('positive', 'negative', 'neutral')
def test_spinner_progress():
title = 'Spinner'
message = 'This is simple spinner progress.'
droid.dialogCreateSpinnerProgress(title, message)
droid.dialogShow()
time.sleep(2)
droid.dialogDismiss()
return True
def test_horizontal_progress():
title = 'Horizontal'
message = 'This is simple horizontal progress.'
droid.dialogCreateHorizontalProgress(title, message, 50)
droid.dialogShow()
for x in range(0, 50):
time.sleep(0.1)
droid.dialogSetCurrentProgress(x)
droid.dialogDismiss()
return True
def test_alert_dialog_with_list():
title = 'Alert'
droid.dialogCreateAlert(title)
droid.dialogSetItems(['foo', 'bar', 'baz'])
droid.dialogShow()
response = droid.dialogGetResponse().result
return True
def test_alert_dialog_with_single_choice_list():
    """Show a single-choice (radio) alert and wait for confirmation."""
    title = 'Alert'
    droid.dialogCreateAlert(title)
    droid.dialogSetSingleChoiceItems(['foo', 'bar', 'baz'])
    droid.dialogSetPositiveButtonText('Yay!')
    droid.dialogShow()
    # Blocks until the button is pressed; the selection is not validated.
    response = droid.dialogGetResponse().result
    return True
def test_alert_dialog_with_multi_choice_list():
    """Show a multi-choice (checkbox) alert and wait for confirmation."""
    title = 'Alert'
    droid.dialogCreateAlert(title)
    # Second argument: indices pre-checked at display time (none here).
    droid.dialogSetMultiChoiceItems(['foo', 'bar', 'baz'], [])
    droid.dialogSetPositiveButtonText('Yay!')
    droid.dialogShow()
    # Blocks until the button is pressed; the selection is not validated.
    response = droid.dialogGetResponse().result
    return True
if __name__ == '__main__':
    # Discover every module-level function named test_* and run it,
    # printing a PASS/FAIL line per test.
    for name, value in globals().items():
        if name.startswith('test_') and isinstance(value, types.FunctionType):
            print 'Running %s...' % name,  # trailing comma keeps result on same line
            sys.stdout.flush()
            if value():
                print ' PASS'
            else:
                print ' FAIL'
| apache-2.0 |
Facetracker-project/facetracker-core | lib/youtube-dl/youtube_dl/extractor/discovery.py | 16 | 1687 | from __future__ import unicode_literals
from .common import InfoExtractor
from ..utils import (
parse_iso8601,
int_or_none,
)
class DiscoveryIE(InfoExtractor):
    """Extractor for discovery.com show video pages."""
    # Matches show video pages; the trailing slug is the video id.
    _VALID_URL = r'http://www\.discovery\.com\/[a-zA-Z0-9\-]*/[a-zA-Z0-9\-]*/videos/(?P<id>[a-zA-Z0-9_\-]*)(?:\.htm)?'
    _TEST = {
        'url': 'http://www.discovery.com/tv-shows/mythbusters/videos/mission-impossible-outtakes.htm',
        'md5': '3c69d77d9b0d82bfd5e5932a60f26504',
        'info_dict': {
            'id': 'mission-impossible-outtakes',
            'ext': 'flv',
            'title': 'Mission Impossible Outtakes',
            'description': ('Watch Jamie Hyneman and Adam Savage practice being'
                            ' each other -- to the point of confusing Jamie\'s dog -- and '
                            'don\'t miss Adam moon-walking as Jamie ... behind Jamie\'s'
                            ' back.'),
            'duration': 156,
            'timestamp': 1303099200,
            'upload_date': '20110418',
        },
    }
    def _real_extract(self, url):
        """Return an info dict built from the page's embedded JSON-LD block."""
        video_id = self._match_id(url)
        webpage = self._download_webpage(url, video_id)
        # The page embeds schema.org metadata as a JSON-LD script tag.
        info = self._parse_json(self._search_regex(
            r'(?s)<script type="application/ld\+json">(.*?)</script>',
            webpage, 'video info'), video_id)
        return {
            'id': video_id,
            'title': info['name'],
            'url': info['contentURL'],
            'description': info.get('description'),
            'thumbnail': info.get('thumbnailUrl'),
            'timestamp': parse_iso8601(info.get('uploadDate')),
            'duration': int_or_none(info.get('duration')),
        }
| gpl-2.0 |
Dandandan/wikiprogramming | jsrepl/extern/python/closured/lib/python2.7/smtpd.py | 76 | 18543 | #! /usr/bin/env python
"""An RFC 2821 smtp proxy.
Usage: %(program)s [options] [localhost:localport [remotehost:remoteport]]
Options:
--nosetuid
-n
This program generally tries to setuid `nobody', unless this flag is
set. The setuid call will fail if this program is not run as root (in
which case, use this flag).
--version
-V
Print the version number and exit.
--class classname
-c classname
Use `classname' as the concrete SMTP proxy class. Uses `PureProxy' by
default.
--debug
-d
Turn on debugging prints.
--help
-h
Print this message and exit.
Version: %(__version__)s
If localhost is not given then `localhost' is used, and if localport is not
given then 8025 is used. If remotehost is not given then `localhost' is used,
and if remoteport is not given, then 25 is used.
"""
# Overview:
#
# This file implements the minimal SMTP protocol as defined in RFC 821. It
# has a hierarchy of classes which implement the backend functionality for the
# smtpd. A number of classes are provided:
#
# SMTPServer - the base class for the backend. Raises NotImplementedError
# if you try to use it.
#
# DebuggingServer - simply prints each message it receives on stdout.
#
# PureProxy - Proxies all messages to a real smtpd which does final
# delivery. One known problem with this class is that it doesn't handle
# SMTP errors from the backend server at all. This should be fixed
# (contributions are welcome!).
#
# MailmanProxy - An experimental hack to work with GNU Mailman
# <www.list.org>. Using this server as your real incoming smtpd, your
# mailhost will automatically recognize and accept mail destined to Mailman
# lists when those lists are created. Every message not destined for a list
# gets forwarded to a real backend smtpd, as with PureProxy. Again, errors
# are not handled correctly yet.
#
# Please note that this script requires Python 2.0
#
# Author: Barry Warsaw <barry@python.org>
#
# TODO:
#
# - support mailbox delivery
# - alias files
# - ESMTP
# - handle error codes from the backend smtpd
import sys
import os
import errno
import getopt
import time
import socket
import asyncore
import asynchat
__all__ = ["SMTPServer","DebuggingServer","PureProxy","MailmanProxy"]
program = sys.argv[0]
__version__ = 'Python SMTP proxy version 0.2'
class Devnull:
    """A file-like sink that silently discards everything written to it."""
    def write(self, msg):
        """Discard *msg*."""
        pass
    def flush(self):
        """Nothing is buffered, so there is nothing to do."""
        pass
DEBUGSTREAM = Devnull()
NEWLINE = '\n'
EMPTYSTRING = ''
COMMASPACE = ', '
def usage(code, msg=''):
    """Print the module help text (the module docstring) to stderr, plus an
    optional extra message, then exit with *code*."""
    print >> sys.stderr, __doc__ % globals()
    if msg:
        print >> sys.stderr, msg
    sys.exit(code)
class SMTPChannel(asynchat.async_chat):
    """Protocol handler for one client connection: parses SMTP commands and
    hands each completed message to the owning SMTPServer."""
    # Parser states: reading command lines vs. collecting the message body.
    COMMAND = 0
    DATA = 1
    def __init__(self, server, conn, addr):
        asynchat.async_chat.__init__(self, conn)
        self.__server = server
        self.__conn = conn
        self.__addr = addr
        self.__line = []          # accumulated chunks of the current line
        self.__state = self.COMMAND
        self.__greeting = 0       # HELO argument once seen, else falsy
        self.__mailfrom = None    # envelope sender of the open transaction
        self.__rcpttos = []       # envelope recipients of the open transaction
        self.__data = ''
        self.__fqdn = socket.getfqdn()
        try:
            self.__peer = conn.getpeername()
        except socket.error, err:
            # a race condition may occur if the other end is closing
            # before we can get the peername
            self.close()
            if err[0] != errno.ENOTCONN:
                raise
            return
        print >> DEBUGSTREAM, 'Peer:', repr(self.__peer)
        # Greet the client immediately with a 220 banner.
        self.push('220 %s %s' % (self.__fqdn, __version__))
        self.set_terminator('\r\n')
    # Overrides base class for convenience
    def push(self, msg):
        """Queue *msg* for sending, appending the SMTP CRLF terminator."""
        asynchat.async_chat.push(self, msg + '\r\n')
    # Implementation of base class abstract method
    def collect_incoming_data(self, data):
        """Buffer incoming bytes until the terminator is seen."""
        self.__line.append(data)
    # Implementation of base class abstract method
    def found_terminator(self):
        """Dispatch a complete command line, or finish the DATA body."""
        line = EMPTYSTRING.join(self.__line)
        print >> DEBUGSTREAM, 'Data:', repr(line)
        self.__line = []
        if self.__state == self.COMMAND:
            if not line:
                self.push('500 Error: bad syntax')
                return
            method = None
            i = line.find(' ')
            if i < 0:
                command = line.upper()
                arg = None
            else:
                command = line[:i].upper()
                arg = line[i+1:].strip()
            # Commands dispatch to smtp_<VERB> methods by name.
            method = getattr(self, 'smtp_' + command, None)
            if not method:
                self.push('502 Error: command "%s" not implemented' % command)
                return
            method(arg)
            return
        else:
            if self.__state != self.DATA:
                self.push('451 Internal confusion')
                return
            # Remove extraneous carriage returns and de-transparency according
            # to RFC 821, Section 4.5.2.
            data = []
            for text in line.split('\r\n'):
                if text and text[0] == '.':
                    data.append(text[1:])
                else:
                    data.append(text)
            self.__data = NEWLINE.join(data)
            status = self.__server.process_message(self.__peer,
                                                   self.__mailfrom,
                                                   self.__rcpttos,
                                                   self.__data)
            # Reset the envelope for the next transaction on this connection.
            self.__rcpttos = []
            self.__mailfrom = None
            self.__state = self.COMMAND
            self.set_terminator('\r\n')
            if not status:
                self.push('250 Ok')
            else:
                self.push(status)
    # SMTP and ESMTP commands
    def smtp_HELO(self, arg):
        """Record the client greeting; reject a duplicate HELO/EHLO."""
        if not arg:
            self.push('501 Syntax: HELO hostname')
            return
        if self.__greeting:
            self.push('503 Duplicate HELO/EHLO')
        else:
            self.__greeting = arg
            self.push('250 %s' % self.__fqdn)
    def smtp_NOOP(self, arg):
        """No-op; NOOP takes no argument."""
        if arg:
            self.push('501 Syntax: NOOP')
        else:
            self.push('250 Ok')
    def smtp_QUIT(self, arg):
        """Say goodbye and close once the reply has been flushed."""
        # args is ignored
        self.push('221 Bye')
        self.close_when_done()
    # factored
    def __getaddr(self, keyword, arg):
        """Extract the address following *keyword* (e.g. 'FROM:') in *arg*,
        stripping an optional <...> wrapper; return None when absent."""
        address = None
        keylen = len(keyword)
        if arg[:keylen].upper() == keyword:
            address = arg[keylen:].strip()
            if not address:
                pass
            elif address[0] == '<' and address[-1] == '>' and address != '<>':
                # Addresses can be in the form <person@dom.com> but watch out
                # for null address, e.g. <>
                address = address[1:-1]
        return address
    def smtp_MAIL(self, arg):
        """Begin a mail transaction by recording the envelope sender."""
        print >> DEBUGSTREAM, '===> MAIL', arg
        address = self.__getaddr('FROM:', arg) if arg else None
        if not address:
            self.push('501 Syntax: MAIL FROM:<address>')
            return
        if self.__mailfrom:
            self.push('503 Error: nested MAIL command')
            return
        self.__mailfrom = address
        print >> DEBUGSTREAM, 'sender:', self.__mailfrom
        self.push('250 Ok')
    def smtp_RCPT(self, arg):
        """Add one envelope recipient; requires a prior MAIL command."""
        print >> DEBUGSTREAM, '===> RCPT', arg
        if not self.__mailfrom:
            self.push('503 Error: need MAIL command')
            return
        address = self.__getaddr('TO:', arg) if arg else None
        if not address:
            self.push('501 Syntax: RCPT TO: <address>')
            return
        self.__rcpttos.append(address)
        print >> DEBUGSTREAM, 'recips:', self.__rcpttos
        self.push('250 Ok')
    def smtp_RSET(self, arg):
        """Abort the current transaction; the HELO greeting is kept."""
        if arg:
            self.push('501 Syntax: RSET')
            return
        # Resets the sender, recipients, and data, but not the greeting
        self.__mailfrom = None
        self.__rcpttos = []
        self.__data = ''
        self.__state = self.COMMAND
        self.push('250 Ok')
    def smtp_DATA(self, arg):
        """Switch to body-collection mode until the lone-dot terminator."""
        if not self.__rcpttos:
            self.push('503 Error: need RCPT command')
            return
        if arg:
            self.push('501 Syntax: DATA')
            return
        self.__state = self.DATA
        self.set_terminator('\r\n.\r\n')
        self.push('354 End data with <CR><LF>.<CR><LF>')
class SMTPServer(asyncore.dispatcher):
    """Listening dispatcher: accepts connections and spawns one SMTPChannel
    per client.  Subclasses must implement process_message()."""
    def __init__(self, localaddr, remoteaddr):
        self._localaddr = localaddr
        self._remoteaddr = remoteaddr
        asyncore.dispatcher.__init__(self)
        try:
            self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
            # try to re-use a server port if possible
            self.set_reuse_addr()
            self.bind(localaddr)
            self.listen(5)
        except:
            # cleanup asyncore.socket_map before raising
            self.close()
            raise
        else:
            print >> DEBUGSTREAM, \
                  '%s started at %s\n\tLocal addr: %s\n\tRemote addr:%s' % (
                self.__class__.__name__, time.ctime(time.time()),
                localaddr, remoteaddr)
    def handle_accept(self):
        """Accept one incoming connection and wrap it in an SMTPChannel."""
        pair = self.accept()
        if pair is not None:
            conn, addr = pair
            print >> DEBUGSTREAM, 'Incoming connection from %s' % repr(addr)
            channel = SMTPChannel(self, conn, addr)
    # API for "doing something useful with the message"
    def process_message(self, peer, mailfrom, rcpttos, data):
        """Override this abstract method to handle messages from the client.
        peer is a tuple containing (ipaddr, port) of the client that made the
        socket connection to our smtp port.
        mailfrom is the raw address the client claims the message is coming
        from.
        rcpttos is a list of raw addresses the client wishes to deliver the
        message to.
        data is a string containing the entire full text of the message,
        headers (if supplied) and all. It has been `de-transparencied'
        according to RFC 821, Section 4.5.2. In other words, a line
        containing a `.' followed by other text has had the leading dot
        removed.
        This function should return None, for a normal `250 Ok' response;
        otherwise it returns the desired response string in RFC 821 format.
        """
        raise NotImplementedError
class DebuggingServer(SMTPServer):
    """SMTP server that just prints each received message to stdout."""
    # Do something with the gathered message
    def process_message(self, peer, mailfrom, rcpttos, data):
        """Dump the message to stdout, injecting an X-Peer header."""
        inheaders = 1
        lines = data.split('\n')
        print '---------- MESSAGE FOLLOWS ----------'
        for line in lines:
            # headers first
            if inheaders and not line:
                # First blank line ends the headers; add X-Peer just before it.
                print 'X-Peer:', peer[0]
                inheaders = 0
            print line
        print '------------ END MESSAGE ------------'
class PureProxy(SMTPServer):
    """Proxy that relays every message, with an added X-Peer header, to the
    configured remote SMTP server."""
    def process_message(self, peer, mailfrom, rcpttos, data):
        """Insert an X-Peer header after the last header line and relay."""
        lines = data.split('\n')
        # Look for the last header
        i = 0
        for line in lines:
            if not line:
                break
            i += 1
        lines.insert(i, 'X-Peer: %s' % peer[0])
        data = NEWLINE.join(lines)
        refused = self._deliver(mailfrom, rcpttos, data)
        # TBD: what to do with refused addresses?
        print >> DEBUGSTREAM, 'we got some refusals:', refused
    def _deliver(self, mailfrom, rcpttos, data):
        """Relay via smtplib to the remote host; return the dict of refused
        recipients (empty on full success)."""
        import smtplib
        refused = {}
        try:
            s = smtplib.SMTP()
            s.connect(self._remoteaddr[0], self._remoteaddr[1])
            try:
                refused = s.sendmail(mailfrom, rcpttos, data)
            finally:
                s.quit()
        except smtplib.SMTPRecipientsRefused, e:
            print >> DEBUGSTREAM, 'got SMTPRecipientsRefused'
            refused = e.recipients
        except (socket.error, smtplib.SMTPException), e:
            print >> DEBUGSTREAM, 'got', e.__class__
            # All recipients were refused. If the exception had an associated
            # error code, use it. Otherwise, fake it with a non-triggering
            # exception code.
            errcode = getattr(e, 'smtp_code', -1)
            errmsg = getattr(e, 'smtp_error', 'ignore')
            for r in rcpttos:
                refused[r] = (errcode, errmsg)
        return refused
class MailmanProxy(PureProxy):
    """Experimental proxy that enqueues list-bound messages directly into a
    local GNU Mailman installation and relays everything else."""
    def process_message(self, peer, mailfrom, rcpttos, data):
        """Split recipients into Mailman lists vs. others; enqueue the former
        with Mailman and relay the latter via PureProxy._deliver()."""
        from cStringIO import StringIO
        from Mailman import Utils
        from Mailman import Message
        from Mailman import MailList
        # If the message is to a Mailman mailing list, then we'll invoke the
        # Mailman script directly, without going through the real smtpd.
        # Otherwise we'll forward it to the local proxy for disposition.
        listnames = []
        for rcpt in rcpttos:
            local = rcpt.lower().split('@')[0]
            # We allow the following variations on the theme
            #   listname
            #   listname-admin
            #   listname-owner
            #   listname-request
            #   listname-join
            #   listname-leave
            parts = local.split('-')
            if len(parts) > 2:
                continue
            listname = parts[0]
            if len(parts) == 2:
                command = parts[1]
            else:
                command = ''
            if not Utils.list_exists(listname) or command not in (
                    '', 'admin', 'owner', 'request', 'join', 'leave'):
                continue
            listnames.append((rcpt, listname, command))
        # Remove all list recipients from rcpttos and forward what we're not
        # going to take care of ourselves.  Linear removal should be fine
        # since we don't expect a large number of recipients.
        for rcpt, listname, command in listnames:
            rcpttos.remove(rcpt)
        # If there's any non-list destined recipients left,
        print >> DEBUGSTREAM, 'forwarding recips:', ' '.join(rcpttos)
        if rcpttos:
            refused = self._deliver(mailfrom, rcpttos, data)
            # TBD: what to do with refused addresses?
            print >> DEBUGSTREAM, 'we got refusals:', refused
        # Now deliver directly to the list commands
        mlists = {}
        s = StringIO(data)
        msg = Message.Message(s)
        # These headers are required for the proper execution of Mailman.  All
        # MTAs in existence seem to add these if the original message doesn't
        # have them.
        if not msg.getheader('from'):
            msg['From'] = mailfrom
        if not msg.getheader('date'):
            msg['Date'] = time.ctime(time.time())
        for rcpt, listname, command in listnames:
            print >> DEBUGSTREAM, 'sending message to', rcpt
            mlist = mlists.get(listname)
            if not mlist:
                mlist = MailList.MailList(listname, lock=0)
                mlists[listname] = mlist
            # dispatch on the type of command
            if command == '':
                # post
                msg.Enqueue(mlist, tolist=1)
            elif command == 'admin':
                msg.Enqueue(mlist, toadmin=1)
            elif command == 'owner':
                msg.Enqueue(mlist, toowner=1)
            elif command == 'request':
                msg.Enqueue(mlist, torequest=1)
            elif command in ('join', 'leave'):
                # TBD: this is a hack!
                if command == 'join':
                    msg['Subject'] = 'subscribe'
                else:
                    msg['Subject'] = 'unsubscribe'
                msg.Enqueue(mlist, torequest=1)
class Options:
    """Runtime options with their defaults; fields are filled in/overridden
    by parseargs()."""
    setuid = 1                 # drop privileges to 'nobody' unless -n given
    classname = 'PureProxy'    # name of the SMTPServer subclass to run
def parseargs():
    """Parse command-line flags and the positional host:port specs; return a
    populated Options instance.  Exits via usage() on any error."""
    global DEBUGSTREAM
    try:
        opts, args = getopt.getopt(
            sys.argv[1:], 'nVhc:d',
            ['class=', 'nosetuid', 'version', 'help', 'debug'])
    except getopt.error, e:
        usage(1, e)
    options = Options()
    for opt, arg in opts:
        if opt in ('-h', '--help'):
            usage(0)
        elif opt in ('-V', '--version'):
            print >> sys.stderr, __version__
            sys.exit(0)
        elif opt in ('-n', '--nosetuid'):
            options.setuid = 0
        elif opt in ('-c', '--class'):
            options.classname = arg
        elif opt in ('-d', '--debug'):
            DEBUGSTREAM = sys.stderr
    # parse the rest of the arguments
    if len(args) < 1:
        localspec = 'localhost:8025'
        remotespec = 'localhost:25'
    elif len(args) < 2:
        localspec = args[0]
        remotespec = 'localhost:25'
    elif len(args) < 3:
        localspec = args[0]
        remotespec = args[1]
    else:
        usage(1, 'Invalid arguments: %s' % COMMASPACE.join(args))
    # split into host/port pairs
    i = localspec.find(':')
    if i < 0:
        usage(1, 'Bad local spec: %s' % localspec)
    options.localhost = localspec[:i]
    try:
        options.localport = int(localspec[i+1:])
    except ValueError:
        usage(1, 'Bad local port: %s' % localspec)
    i = remotespec.find(':')
    if i < 0:
        usage(1, 'Bad remote spec: %s' % remotespec)
    options.remotehost = remotespec[:i]
    try:
        options.remoteport = int(remotespec[i+1:])
    except ValueError:
        usage(1, 'Bad remote port: %s' % remotespec)
    return options
if __name__ == '__main__':
    options = parseargs()
    # Become nobody
    if options.setuid:
        try:
            import pwd
        except ImportError:
            print >> sys.stderr, \
                  'Cannot import module "pwd"; try running with -n option.'
            sys.exit(1)
        nobody = pwd.getpwnam('nobody')[2]
        try:
            os.setuid(nobody)
        except OSError, e:
            if e.errno != errno.EPERM: raise
            print >> sys.stderr, \
                  'Cannot setuid "nobody"; try running with -n option.'
            sys.exit(1)
    # Resolve the proxy class: a dotted path imports a module, otherwise the
    # name is looked up in this module itself.
    classname = options.classname
    if "." in classname:
        lastdot = classname.rfind(".")
        mod = __import__(classname[:lastdot], globals(), locals(), [""])
        classname = classname[lastdot+1:]
    else:
        import __main__ as mod
    class_ = getattr(mod, classname)
    proxy = class_((options.localhost, options.localport),
                   (options.remotehost, options.remoteport))
    try:
        asyncore.loop()
    except KeyboardInterrupt:
        pass
| mit |
epitron/youtube-dl | youtube_dl/extractor/regiotv.py | 99 | 2261 | # coding: utf-8
from __future__ import unicode_literals
from .common import InfoExtractor
from ..utils import (
sanitized_Request,
xpath_text,
xpath_with_ns,
)
class RegioTVIE(InfoExtractor):
    """Extractor for regio-tv.de video pages (videos hosted via v.telvi.de)."""
    _VALID_URL = r'https?://(?:www\.)?regio-tv\.de/video/(?P<id>[0-9]+)'
    _TESTS = [{
        'url': 'http://www.regio-tv.de/video/395808.html',
        'info_dict': {
            'id': '395808',
            'ext': 'mp4',
            'title': 'Wir in Ludwigsburg',
            'description': 'Mit unseren zuckersüßen Adventskindern, außerdem besuchen wir die Abendsterne!',
        }
    }, {
        'url': 'http://www.regio-tv.de/video/395808',
        'only_matching': True,
    }]
    def _real_extract(self, url):
        """Fetch the page, then query the telvi.de SOAP endpoint with the
        page-embedded key to obtain the actual video URL."""
        video_id = self._match_id(url)
        webpage = self._download_webpage(url, video_id)
        # The SOAP access key is embedded in the page as: key: '<value>'
        key = self._search_regex(
            r'key\s*:\s*(["\'])(?P<key>.+?)\1', webpage, 'key', group='key')
        title = self._og_search_title(webpage)
        SOAP_TEMPLATE = '<?xml version="1.0" encoding="utf-8"?><soap:Envelope xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:soap="http://schemas.xmlsoap.org/soap/envelope/"><soap:Body><{0} xmlns="http://v.telvi.de/"><key xsi:type="xsd:string">{1}</key></{0}></soap:Body></soap:Envelope>'
        request = sanitized_Request(
            'http://v.telvi.de/',
            SOAP_TEMPLATE.format('GetHTML5VideoData', key).encode('utf-8'))
        video_data = self._download_xml(request, video_id, 'Downloading video XML')
        NS_MAP = {
            'xsi': 'http://www.w3.org/2001/XMLSchema-instance',
            'soap': 'http://schemas.xmlsoap.org/soap/envelope/',
        }
        # The SOAP response carries <video> (URL, mandatory) and <image>.
        video_url = xpath_text(
            video_data, xpath_with_ns('.//video', NS_MAP), 'video url', fatal=True)
        thumbnail = xpath_text(
            video_data, xpath_with_ns('.//image', NS_MAP), 'thumbnail')
        description = self._og_search_description(
            webpage) or self._html_search_meta('description', webpage)
        return {
            'id': video_id,
            'url': video_url,
            'title': title,
            'description': description,
            'thumbnail': thumbnail,
        }
| unlicense |
ProfessorX/Config | .PyCharm30/system/python_stubs/-1247972723/PyQt4/QtGui/QMovie.py | 2 | 5661 | # encoding: utf-8
# module PyQt4.QtGui
# from /usr/lib/python2.7/dist-packages/PyQt4/QtGui.so
# by generator 1.135
# no doc
# imports
import PyQt4.QtCore as __PyQt4_QtCore
class QMovie(__PyQt4_QtCore.QObject):
    """
    QMovie(QObject parent=None)
    QMovie(QIODevice, QByteArray format=QByteArray(), QObject parent=None)
    QMovie(QString, QByteArray format=QByteArray(), QObject parent=None)
    """
    # NOTE: auto-generated introspection stub for IDE use only.  The bodies
    # below are placeholders restored from docstrings; the real implementation
    # lives in the compiled PyQt4.QtGui extension module.
    def backgroundColor(self): # real signature unknown; restored from __doc__
        """ QMovie.backgroundColor() -> QColor """
        return QColor
    def cacheMode(self): # real signature unknown; restored from __doc__
        """ QMovie.cacheMode() -> QMovie.CacheMode """
        pass
    def currentFrameNumber(self): # real signature unknown; restored from __doc__
        """ QMovie.currentFrameNumber() -> int """
        return 0
    def currentImage(self): # real signature unknown; restored from __doc__
        """ QMovie.currentImage() -> QImage """
        return QImage
    def currentPixmap(self): # real signature unknown; restored from __doc__
        """ QMovie.currentPixmap() -> QPixmap """
        return QPixmap
    def device(self): # real signature unknown; restored from __doc__
        """ QMovie.device() -> QIODevice """
        pass
    def error(self, *args, **kwargs): # real signature unknown
        """ QMovie.error[QImageReader.ImageReaderError] [signal] """
        pass
    def fileName(self): # real signature unknown; restored from __doc__
        """ QMovie.fileName() -> QString """
        pass
    def finished(self, *args, **kwargs): # real signature unknown
        """ QMovie.finished [signal] """
        pass
    def format(self): # real signature unknown; restored from __doc__
        """ QMovie.format() -> QByteArray """
        pass
    def frameChanged(self, *args, **kwargs): # real signature unknown
        """ QMovie.frameChanged[int] [signal] """
        pass
    def frameCount(self): # real signature unknown; restored from __doc__
        """ QMovie.frameCount() -> int """
        return 0
    def frameRect(self): # real signature unknown; restored from __doc__
        """ QMovie.frameRect() -> QRect """
        pass
    def isValid(self): # real signature unknown; restored from __doc__
        """ QMovie.isValid() -> bool """
        return False
    def jumpToFrame(self, p_int): # real signature unknown; restored from __doc__
        """ QMovie.jumpToFrame(int) -> bool """
        return False
    def jumpToNextFrame(self): # real signature unknown; restored from __doc__
        """ QMovie.jumpToNextFrame() -> bool """
        return False
    def loopCount(self): # real signature unknown; restored from __doc__
        """ QMovie.loopCount() -> int """
        return 0
    def nextFrameDelay(self): # real signature unknown; restored from __doc__
        """ QMovie.nextFrameDelay() -> int """
        return 0
    def resized(self, *args, **kwargs): # real signature unknown
        """ QMovie.resized[QSize] [signal] """
        pass
    def scaledSize(self): # real signature unknown; restored from __doc__
        """ QMovie.scaledSize() -> QSize """
        pass
    def setBackgroundColor(self, QColor): # real signature unknown; restored from __doc__
        """ QMovie.setBackgroundColor(QColor) """
        pass
    def setCacheMode(self, QMovie_CacheMode): # real signature unknown; restored from __doc__
        """ QMovie.setCacheMode(QMovie.CacheMode) """
        pass
    def setDevice(self, QIODevice): # real signature unknown; restored from __doc__
        """ QMovie.setDevice(QIODevice) """
        pass
    def setFileName(self, QString): # real signature unknown; restored from __doc__
        """ QMovie.setFileName(QString) """
        pass
    def setFormat(self, QByteArray): # real signature unknown; restored from __doc__
        """ QMovie.setFormat(QByteArray) """
        pass
    def setPaused(self, bool): # real signature unknown; restored from __doc__
        """ QMovie.setPaused(bool) """
        pass
    def setScaledSize(self, QSize): # real signature unknown; restored from __doc__
        """ QMovie.setScaledSize(QSize) """
        pass
    def setSpeed(self, p_int): # real signature unknown; restored from __doc__
        """ QMovie.setSpeed(int) """
        pass
    def speed(self): # real signature unknown; restored from __doc__
        """ QMovie.speed() -> int """
        return 0
    def start(self): # real signature unknown; restored from __doc__
        """ QMovie.start() """
        pass
    def started(self, *args, **kwargs): # real signature unknown
        """ QMovie.started [signal] """
        pass
    def state(self): # real signature unknown; restored from __doc__
        """ QMovie.state() -> QMovie.MovieState """
        pass
    def stateChanged(self, *args, **kwargs): # real signature unknown
        """ QMovie.stateChanged[QMovie.MovieState] [signal] """
        pass
    def stop(self): # real signature unknown; restored from __doc__
        """ QMovie.stop() """
        pass
    def supportedFormats(self): # real signature unknown; restored from __doc__
        """ QMovie.supportedFormats() -> list-of-QByteArray """
        pass
    def updated(self, *args, **kwargs): # real signature unknown
        """ QMovie.updated[QRect] [signal] """
        pass
    def __init__(self, *__args): # real signature unknown; restored from __doc__ with multiple overloads
        pass
    # Enum values and nested enum-type placeholders from the C++ class.
    CacheAll = 1
    CacheMode = None # (!) real value is ''
    CacheNone = 0
    MovieState = None # (!) real value is ''
    NotRunning = 0
    Paused = 1
    Running = 2
| gpl-2.0 |
alvin319/CarnotKE | jyhton/Lib/xml/sax/__init__.py | 117 | 3413 | """Simple API for XML (SAX) implementation for Python.
This module provides an implementation of the SAX 2 interface;
information about the Java version of the interface can be found at
http://www.megginson.com/SAX/. The Python version of the interface is
documented at <...>.
This package contains the following modules:
handler -- Base classes and constants which define the SAX 2 API for
the 'client-side' of SAX for Python.
saxutils -- Implementation of the convenience classes commonly used to
work with SAX.
xmlreader -- Base classes and constants which define the SAX 2 API for
the parsers used with SAX for Python.
drivers2 -- Contains the driver for that wraps a Java sax implementation in python
objects.
"""
from xmlreader import InputSource
from handler import ContentHandler, ErrorHandler
from _exceptions import SAXException, SAXNotRecognizedException, \
SAXParseException, SAXNotSupportedException, \
SAXReaderNotAvailable
def parse(source, handler, errorHandler=None):
    """Parse *source* with a freshly created default reader.

    source -- an InputSource, file-like object, or system identifier.
    handler -- the ContentHandler receiving document events.
    errorHandler -- optional ErrorHandler; a fresh default instance is
    created per call when omitted or None.
    """
    # Defect fixed: the old signature default `ErrorHandler()` was evaluated
    # once at import time and shared by every call; create it lazily instead,
    # matching parseString's None-handling (passing None keeps working).
    if errorHandler is None:
        errorHandler = ErrorHandler()
    parser = make_parser()
    parser.setContentHandler(handler)
    parser.setErrorHandler(errorHandler)
    parser.parse(source)
def parseString(string, handler, errorHandler=ErrorHandler()):
    """Parse the XML document held in *string* using a default reader."""
    try:
        from cStringIO import StringIO
    except ImportError:
        from StringIO import StringIO
    # Callers may explicitly pass None to request a fresh default handler.
    if errorHandler is None:
        errorHandler = ErrorHandler()
    source = InputSource()
    source.setByteStream(StringIO(string))
    reader = make_parser()
    reader.setContentHandler(handler)
    reader.setErrorHandler(errorHandler)
    reader.parse(source)
# this is the parser list used by the make_parser function if no
# alternatives are given as parameters to the function
default_parser_list = ["xml.sax.drivers2.drv_javasax"]
# tell modulefinder that importing sax potentially imports expatreader
_false = 0
if _false:
    import xml.sax.drivers2.drv_javasax
import os, sys
if os.environ.has_key("PY_SAX_PARSER"):
    # Environment override: comma-separated list of driver module names.
    default_parser_list = os.environ["PY_SAX_PARSER"].split(",")
del os
_key = "python.xml.sax.parser"
if sys.platform[:4] == "java" and sys.registry.containsKey(_key):
    # On Jython, the registry property may also name preferred drivers.
    default_parser_list = sys.registry.getProperty(_key).split(",")
def make_parser(parser_list = []):
    """Creates and returns a SAX parser.
    Creates the first parser it is able to instantiate of the ones
    given in the list created by doing parser_list +
    default_parser_list.  The lists must contain the names of Python
    modules containing both a SAX parser and a create_parser function."""
    # NOTE(review): the mutable default [] is safe here because it is only
    # read, never mutated.
    for parser_name in parser_list + default_parser_list:
        try:
            return _create_parser(parser_name)
        except ImportError,e:
            import sys
            if sys.modules.has_key(parser_name):
                # The parser module was found, but importing it
                # failed unexpectedly, pass this exception through
                raise
        except SAXReaderNotAvailable:
            # The parser module detected that it won't work properly,
            # so try the next one
            pass
    raise SAXReaderNotAvailable("No parsers found", None)
# --- Internal utility methods used by make_parser
def _create_parser(parser_name):
drv_module = __import__(parser_name,{},{},['create_parser'])
return drv_module.create_parser()
del sys
| apache-2.0 |
brodeau/aerobulk | python/plot_tests/plot_station_asf.py | 1 | 9926 | #!/usr/bin/env python
# -*- Mode: Python; coding: utf-8; indent-tabs-mode: nil; tab-width: 4 -*-
# Post-diagnostic of STATION_ASF / L. Brodeau, 2019
import sys
from os import path as path
#from string import replace
import math
import numpy as nmp
from netCDF4 import Dataset,num2date
import matplotlib as mpl
mpl.use('Agg')
import matplotlib.pyplot as plt
import matplotlib.dates as mdates
reload(sys)
sys.setdefaultencoding('utf8')
cy1 = '2016' ; # First year
cy2 = '2018' ; # Last year
jt0 = 0
jt0 = 17519
dir_figs='.'
size_fig=(13,7)
fig_ext='png'
clr_red = '#AD0000'
clr_blu = '#3749A3'
clr_gre = '#548F64'
clr_sat = '#ffed00'
clr_mod = '#008ab8'
rDPI=200.
L_ALGOS = [ 'COARE3p6' , 'ECMWF' , 'NCAR' ]
l_xtrns = [ '-noskin' , '-noskin' , '' ] ; # string to add to algo name (L_ALGOS) to get version without skin params turned on
l_color = [ '#ffed00' , '#008ab8' , '0.4' ] ; # colors to differentiate algos on the plot
l_width = [ 3 , 2 , 1 ] ; # line-width to differentiate algos on the plot
l_style = [ '-' , '-' , '--' ] ; # line-style
L_VNEM = [ 'qla' , 'qsb' , 'qt' , 'qlw' , 'taum' , 'dt_skin' ]
L_VARO = [ 'Qlat' , 'Qsen' , 'Qnet' , 'Qlw' , 'Tau' , 'dT_skin' ] ; # name of variable on figure
L_VARL = [ r'$Q_{lat}$', r'$Q_{sens}$' , r'$Q_{net}$' , r'$Q_{lw}$' , r'$|\tau|$' , r'$\Delta T_{skin}$' ] ; # name of variable in latex mode
L_VUNT = [ r'$W/m^2$' , r'$W/m^2$' , r'$W/m^2$' , r'$W/m^2$' , r'$N/m^2$' , 'K' ]
L_VMAX = [ 75. , 75. , 800. , 25. , 1.2 , -0.7 ]
L_VMIN = [ -250. , -125. , -400. , -150. , 0. , 0.7 ]
L_ANOM = [ True , True , True , True , True , False ]
#L_VNEM = [ 'qlw' ]
#L_VARO = [ 'Qlw' ] ; # name of variable on figure
#L_VARL = [ r'$Q_{lw}$' ] ; # name of variable in latex mode
#L_VUNT = [ r'$W/m^2$' ]
#L_VMAX = [ 25. ]
#L_VMIN = [ -150. ]
#L_ANOM = [ True ]
nb_algos = len(L_ALGOS) ; print(nb_algos)
# Getting arguments:
narg = len(sys.argv)
if narg != 2:
print 'Usage: '+sys.argv[0]+' <DIR_OUT_SASF>'; sys.exit(0)
cdir_data = sys.argv[1]
# >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
# Populating and checking existence of files to be read
# >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
def chck4f(cf):
    """Exit the script with an error message if file path *cf* does not exist."""
    cmesg = 'ERROR: File '+cf+' does not exist !!!'
    if not path.exists(cf): print cmesg ; sys.exit(0)
###cf_in = nmp.empty((), dtype="S10")
cf_in = [] ; cf_in_ns = []
for ja in range(nb_algos):
cfi = cdir_data+'/output/'+'STATION_ASF-'+L_ALGOS[ja]+'_1h_'+cy1+'0101_'+cy2+'1231_gridT.nc'
chck4f(cfi)
cf_in.append(cfi)
# Same but without skin params:
for ja in range(nb_algos):
cfi = cdir_data+'/output/'+'STATION_ASF-'+L_ALGOS[ja]+l_xtrns[ja]+'_1h_'+cy1+'0101_'+cy2+'1231_gridT.nc'
chck4f(cfi)
cf_in_ns.append(cfi)
print('Files we are goin to use:')
for ja in range(nb_algos): print(cf_in[ja])
print(' --- same without cool-skin/warm-layer:')
for ja in range(nb_algos): print(cf_in_ns[ja])
#-----------------------------------------------------------------
# Getting time array from the first file:
id_in = Dataset(cf_in[0])
vt = id_in.variables['time_counter'][jt0:]
cunit_t = id_in.variables['time_counter'].units
clndr_t = id_in.variables['time_counter'].calendar
id_in.close()
Nt = len(vt)
print(' "time" => units = '+cunit_t+', calendar = "'+clndr_t+'"')
vtime = num2date(vt, units=cunit_t) ; # something understandable!
ii=Nt/300
ib=max(ii-ii%10,1)
xticks_d=int(30*ib)
# Common font/style settings for the annotation boxes drawn on every figure.
font_inf = { 'fontname':'Open Sans', 'fontweight':'normal', 'fontsize':14 }

nb_var = len(L_VNEM)

# xF[time, algo] : raw time series of the current variable, one column per
# bulk algorithm; xFa holds the anomaly with respect to the per-time mean
# over all algorithms.
xF = nmp.zeros((Nt,nb_algos))
xFa = nmp.zeros((Nt,nb_algos))

# One pass with the skin scheme enabled, one with it disabled:
for ctest in ['skin','noskin']:

    for jv in range(nb_var):
        print('\n *** Treating variable: '+L_VARO[jv]+' ('+ctest+') !')

        # Read the series of variable jv for every algorithm from the
        # corresponding netCDF file:
        for ja in range(nb_algos):
            #
            if ctest == 'skin': id_in = Dataset(cf_in[ja])
            if ctest == 'noskin': id_in = Dataset(cf_in_ns[ja])
            xF[:,ja] = id_in.variables[L_VNEM[jv]][jt0:,1,1] # only the center point of the 3x3 spatial domain!
            if ja == 0: cvar_lnm = id_in.variables[L_VNEM[jv]].long_name
            id_in.close()

        # Raw time-series plot: all algorithms on the same axes.
        fig = plt.figure(num = jv, figsize=size_fig, facecolor='w', edgecolor='k')
        ax1 = plt.axes([0.07, 0.22, 0.9, 0.75])
        ax1.set_xticks(vtime[::xticks_d])
        ax1.xaxis.set_major_formatter(mdates.DateFormatter('%Y-%m-%d %H:%M:%S'))
        plt.xticks(rotation='60')
        for ja in range(nb_algos):
            plt.plot(vtime, xF[:,ja], '-', color=l_color[ja], linestyle=l_style[ja], linewidth=l_width[ja], label=L_ALGOS[ja], zorder=10+ja)
        ax1.set_ylim(L_VMIN[jv], L_VMAX[jv]) ; ax1.set_xlim(vtime[0],vtime[Nt-1])
        plt.ylabel(L_VARL[jv]+' ['+L_VUNT[jv]+']')
        ax1.grid(color='k', linestyle='-', linewidth=0.3)
        plt.legend(bbox_to_anchor=(0.45, 0.2), ncol=1, shadow=True, fancybox=True)
        ax1.annotate(cvar_lnm+' ('+ctest+')', xy=(0.3, 0.97), xycoords='axes fraction', bbox={'facecolor':'w', 'alpha':1., 'pad':10}, zorder=50, **font_inf)
        plt.savefig(L_VARO[jv]+'_'+ctest+'.'+fig_ext, dpi=int(rDPI), transparent=False)
        plt.close(jv)

        if L_ANOM[jv]:

            # Anomaly of each algorithm w.r.t. the mean over all algorithms:
            for ja in range(nb_algos): xFa[:,ja] = xF[:,ja] - nmp.mean(xF,axis=1)

            if nmp.sum(xFa[:,:]) == 0.0:
                print(' Well! Seems that for variable '+L_VARO[jv]+', choice of algo has no impact a all!')
                print(' ==> skipping anomaly plot...')
            else:
                # Want a symetric y-range that makes sense for the anomaly we're looking at:
                rmax = nmp.max(xFa) ; rmin = nmp.min(xFa)
                rmax = max( abs(rmax) , abs(rmin) )
                romagn = math.floor(math.log(rmax, 10)) ; # order of magnitude of the anomaly we're dealing with
                rmlt = 10.**(int(romagn)) / 2.
                yrng = math.copysign( math.ceil(abs(rmax)/rmlt)*rmlt , rmax)
                #print 'yrng = ', yrng ; #sys.exit(0)

                # Anomaly time-series plot (figure number offset by 10 to
                # avoid clashing with the raw-series figure of the same jv):
                fig = plt.figure(num = 10+jv, figsize=size_fig, facecolor='w', edgecolor='k')
                ax1 = plt.axes([0.07, 0.22, 0.9, 0.75])
                ax1.set_xticks(vtime[::xticks_d])
                ax1.xaxis.set_major_formatter(mdates.DateFormatter('%Y-%m-%d %H:%M:%S'))
                plt.xticks(rotation='60')
                for ja in range(nb_algos):
                    plt.plot(vtime, xFa[:,ja], '-', color=l_color[ja], linewidth=l_width[ja], label=L_ALGOS[ja], zorder=10+ja)
                ax1.set_ylim(-yrng,yrng) ; ax1.set_xlim(vtime[0],vtime[Nt-1])
                plt.ylabel(L_VARL[jv]+' ['+L_VUNT[jv]+']')
                ax1.grid(color='k', linestyle='-', linewidth=0.3)
                plt.legend(bbox_to_anchor=(0.45, 0.2), ncol=1, shadow=True, fancybox=True)
                ax1.annotate('Anomaly of '+cvar_lnm+' ('+ctest+')', xy=(0.3, 0.97), xycoords='axes fraction', bbox={'facecolor':'w', 'alpha':1., 'pad':10}, zorder=50, **font_inf)
                plt.savefig(L_VARO[jv]+'_'+ctest+'_anomaly.'+fig_ext, dpi=int(rDPI), transparent=False)
                plt.close(10+jv)
# Difference skin vs noskin:
# For each variable and each algorithm (the last algorithm is skipped --
# presumably it has no skin parameterization; TODO confirm), plot the time
# series of (skin run - noskin run) for the same algorithm.
xFns = nmp.zeros((Nt,nb_algos))

for jv in range(nb_var-1):
    print('\n *** Treating variable: '+L_VARO[jv]+' ('+ctest+') !')

    for ja in range(nb_algos-1):
        id_in = Dataset(cf_in[ja])
        xF[:,ja] = id_in.variables[L_VNEM[jv]][jt0:,1,1] # only the center point of the 3x3 spatial domain!
        if ja == 0: cvar_lnm = id_in.variables[L_VNEM[jv]].long_name
        id_in.close()
        #
        id_in = Dataset(cf_in_ns[ja])
        xFns[:,ja] = id_in.variables[L_VNEM[jv]][jt0:,1,1] # only the center point of the 3x3 spatial domain!
        if ja == 0: cvar_lnm = id_in.variables[L_VNEM[jv]].long_name
        id_in.close()
        xFa[:,ja] = xF[:,ja] - xFns[:,ja] ; # difference!

    # Want a symetric y-range that makes sense for the anomaly we're looking at:
    rmax = nmp.max(xFa) ; rmin = nmp.min(xFa)
    rmax = max( abs(rmax) , abs(rmin) )
    romagn = math.floor(math.log(rmax, 10)) ; # order of magnitude of the anomaly we're dealing with
    rmlt = 10.**(int(romagn)) / 2.
    yrng = math.copysign( math.ceil(abs(rmax)/rmlt)*rmlt , rmax)
    # FIX: this was a Python 2 print statement ("print 'yrng = ', yrng"),
    # a SyntaxError under Python 3; the rest of the script uses print().
    print('yrng = ', yrng) ; #sys.exit(0)

    for ja in range(nb_algos-1):
        calgo = L_ALGOS[ja]
        if nmp.sum(xFa[:,ja]) == 0.0:
            print(' Well! Seems that for variable '+L_VARO[jv]+', and algo '+calgo+', skin param has no impact')
            print(' ==> skipping difference plot...')
        else:
            fig = plt.figure(num = jv, figsize=size_fig, facecolor='w', edgecolor='k')
            ax1 = plt.axes([0.07, 0.22, 0.9, 0.75])
            ax1.set_xticks(vtime[::xticks_d])
            ax1.xaxis.set_major_formatter(mdates.DateFormatter('%Y-%m-%d %H:%M:%S'))
            plt.xticks(rotation='60')
            plt.plot(vtime, xFa[:,ja], '-', color=l_color[ja], linestyle=l_style[ja], linewidth=l_width[ja], label=None, zorder=10+ja)
            ax1.set_ylim(-yrng,yrng) ; ax1.set_xlim(vtime[0],vtime[Nt-1])
            plt.ylabel(L_VARL[jv]+' ['+L_VUNT[jv]+']')
            ax1.grid(color='k', linestyle='-', linewidth=0.3)
            #plt.legend(bbox_to_anchor=(0.45, 0.2), ncol=1, shadow=True, fancybox=True)
            ax1.annotate(cvar_lnm+' ('+ctest+')', xy=(0.3, 0.97), xycoords='axes fraction', bbox={'facecolor':'w', 'alpha':1., 'pad':10}, zorder=50, **font_inf)
            plt.savefig('diff_skin-noskin_'+L_VARO[jv]+'_'+calgo+'_'+ctest+'.'+fig_ext, dpi=int(rDPI), transparent=False)
            plt.close(jv)
| gpl-3.0 |
florianfesti/boxes | boxes/generators/openbox.py | 1 | 1741 | #!/usr/bin/env python3
# Copyright (C) 2013-2014 Florian Festi
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from boxes import *
class OpenBox(Boxes):
    """Box with top and front open"""

    ui_group = "Box"

    def __init__(self):
        Boxes.__init__(self)
        self.buildArgParser("x", "y", "h", "outside")
        self.addSettingsArgs(edges.FingerJointSettings)

    def render(self):
        # Requested dimensions and material thickness.
        x, y, h = self.x, self.y, self.h
        t = self.thickness

        if self.outside:
            # Dimensions were given as outside measurements: shrink them so
            # the assembled box ends up with the requested outer size.
            x, y, h = (self.adjustSize(x),
                       self.adjustSize(y, False),
                       self.adjustSize(h, False))

        # Bed-bolt layouts; immediately disabled by the unconditional reset.
        d2, d3 = [edges.Bolts(2)], [edges.Bolts(3)]
        d2 = d3 = None

        self.rectangularWall(x, h, "FFeF", bedBolts=d2, move="right")
        self.rectangularWall(y, h, "Feef", bedBolts=d3, move="up")
        self.rectangularWall(y, h, "Feef", bedBolts=d3)
        #self.rectangularWall(x, h, "FFeF", bedBolts=d2, move="left up")
        self.rectangularWall(x, y, "efff", bedBolts=[d2, d3, d2, d3], move="left")
        #self.rectangularWall(x, y, "ffff", bedBolts=[d2, d3, d2, d3])
| gpl-3.0 |
alexbruy/QGIS | python/plugins/processing/algs/qgis/Gridify.py | 1 | 7375 | # -*- coding: utf-8 -*-
"""
***************************************************************************
Gridify.py
---------------------
Date : May 2010
Copyright : (C) 2010 by Michael Minn
Email : pyqgis at michaelminn dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Michael Minn'
__date__ = 'May 2010'
__copyright__ = '(C) 2010, Michael Minn'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
from qgis.core import Qgis, QgsGeometry, QgsFeature, QgsPoint, QgsWkbTypes
from processing.core.GeoAlgorithm import GeoAlgorithm
from processing.core.GeoAlgorithmExecutionException import GeoAlgorithmExecutionException
from processing.core.ProcessingLog import ProcessingLog
from processing.core.parameters import ParameterVector
from processing.core.parameters import ParameterNumber
from processing.core.outputs import OutputVector
from processing.tools import dataobjects, vector
class Gridify(GeoAlgorithm):
    """Snap the vertices of vector geometries to a regular grid.

    Every vertex is moved to the nearest multiple of the horizontal /
    vertical spacing; duplicate or degenerate vertices created by the
    snapping are removed afterwards (see _gridify()).
    """

    INPUT = 'INPUT'
    HSPACING = 'HSPACING'
    VSPACING = 'VSPACING'
    OUTPUT = 'OUTPUT'

    def defineCharacteristics(self):
        self.name, self.i18n_name = self.trAlgorithm('Snap points to grid')
        self.group, self.i18n_group = self.trAlgorithm('Vector general tools')

        self.addParameter(ParameterVector(self.INPUT,
                                          self.tr('Input Layer'), [ParameterVector.VECTOR_TYPE_ANY]))
        self.addParameter(ParameterNumber(self.HSPACING,
                                          self.tr('Horizontal spacing'), default=0.1))
        self.addParameter(ParameterNumber(self.VSPACING,
                                          self.tr('Vertical spacing'), default=0.1))

        self.addOutput(OutputVector(self.OUTPUT, self.tr('Snapped')))

    def processAlgorithm(self, progress):
        """Snap every feature of the input layer and write it to the output.

        Features whose geometry collapses entirely under the snapping are
        logged and skipped rather than written.
        """
        layer = dataobjects.getObjectFromUri(self.getParameterValue(self.INPUT))
        hSpacing = self.getParameterValue(self.HSPACING)
        vSpacing = self.getParameterValue(self.VSPACING)

        if hSpacing <= 0 or vSpacing <= 0:
            raise GeoAlgorithmExecutionException(
                self.tr('Invalid grid spacing: %s/%s' % (hSpacing, vSpacing)))

        writer = self.getOutputFromName(self.OUTPUT).getVectorWriter(
            layer.fields(), layer.wkbType(), layer.crs())

        features = vector.features(layer)
        # Guard against an empty layer so the progress scaling below cannot
        # divide by zero.
        total = 100.0 / len(features) if len(features) > 0 else 1
        for current, f in enumerate(features):
            geom = f.geometry()
            geomType = geom.wkbType()

            if geomType == QgsWkbTypes.Point:
                points = self._gridify([geom.asPoint()], hSpacing, vSpacing)
                newGeom = QgsGeometry.fromPoint(points[0])
            elif geomType == QgsWkbTypes.MultiPoint:
                # FIX: was geom.aMultiPoint(), which raised AttributeError
                # for every multi-point feature (the method is asMultiPoint).
                points = self._gridify(geom.asMultiPoint(), hSpacing, vSpacing)
                newGeom = QgsGeometry.fromMultiPoint(points)
            elif geomType == QgsWkbTypes.LineString:
                points = self._gridify(geom.asPolyline(), hSpacing, vSpacing)
                if len(points) < 2:
                    # A line needs at least two distinct snapped vertices.
                    ProcessingLog.addToLog(ProcessingLog.LOG_INFO,
                                           self.tr('Failed to gridify feature with FID %s' % f.id()))
                    newGeom = None
                else:
                    newGeom = QgsGeometry.fromPolyline(points)
            elif geomType == QgsWkbTypes.MultiLineString:
                polyline = []
                for line in geom.asMultiPolyline():
                    points = self._gridify(line, hSpacing, vSpacing)
                    if len(points) > 1:
                        polyline.append(points)
                if len(polyline) <= 0:
                    ProcessingLog.addToLog(ProcessingLog.LOG_INFO,
                                           self.tr('Failed to gridify feature with FID %s' % f.id()))
                    newGeom = None
                else:
                    newGeom = QgsGeometry.fromMultiPolyline(polyline)
            elif geomType == QgsWkbTypes.Polygon:
                polygon = []
                for line in geom.asPolygon():
                    points = self._gridify(line, hSpacing, vSpacing)
                    if len(points) > 1:
                        polygon.append(points)
                if len(polygon) <= 0:
                    ProcessingLog.addToLog(ProcessingLog.LOG_INFO,
                                           self.tr('Failed to gridify feature with FID %s' % f.id()))
                    newGeom = None
                else:
                    newGeom = QgsGeometry.fromPolygon(polygon)
            elif geomType == QgsWkbTypes.MultiPolygon:
                multipolygon = []
                for polygon in geom.asMultiPolygon():
                    newPolygon = []
                    for line in polygon:
                        points = self._gridify(line, hSpacing, vSpacing)
                        if len(points) > 2:
                            newPolygon.append(points)
                    if len(newPolygon) > 0:
                        multipolygon.append(newPolygon)
                if len(multipolygon) <= 0:
                    ProcessingLog.addToLog(ProcessingLog.LOG_INFO,
                                           self.tr('Failed to gridify feature with FID %s' % f.id()))
                    newGeom = None
                else:
                    newGeom = QgsGeometry.fromMultiPolygon(multipolygon)

            if newGeom is not None:
                feat = QgsFeature()
                feat.setGeometry(newGeom)
                feat.setAttributes(f.attributes())
                writer.addFeature(feat)

            progress.setPercentage(int(current * total))

        del writer

    def _gridify(self, points, hSpacing, vSpacing):
        """Snap a list of QgsPoint to the grid and clean up the result.

        Removes consecutive duplicates, out-and-back spikes, and duplicated
        start/end points produced by the snapping.
        """
        nPoints = []
        for p in points:
            nPoints.append(QgsPoint(round(p.x() / hSpacing, 0) * hSpacing,
                                    round(p.y() / vSpacing, 0) * vSpacing))

        i = 0
        # Delete overlapping points
        while i < len(nPoints) - 2:
            if nPoints[i] == nPoints[i + 1]:
                nPoints.pop(i + 1)
            else:
                i += 1

        i = 0
        # Delete line points that go out and return to the same place
        while i < len(nPoints) - 3:
            if nPoints[i] == nPoints[i + 2]:
                nPoints.pop(i + 1)
                nPoints.pop(i + 1)
                # Step back to catch arcs
                if i > 0:
                    i -= 1
            else:
                i += 1

        i = 0
        # Delete overlapping start/end points
        while len(nPoints) > 1 and nPoints[0] == nPoints[len(nPoints) - 1]:
            nPoints.pop(len(nPoints) - 1)

        return nPoints
| gpl-2.0 |
yencarnacion/jaikuengine | .google_appengine/lib/endpoints-1.0/endpoints/users_id_token.py | 2 | 17916 | #!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Utility library for reading user information from an id_token.
This is an experimental library that can temporarily be used to extract
a user from an id_token. The functionality provided by this library
will be provided elsewhere in the future.
"""
import base64
try:
    import json
except ImportError:
    # Python 2.5 runtime: fall back to the bundled simplejson.
    import simplejson as json
import logging
import os
import re
import time
import urllib

try:
    from google.appengine.api import memcache
    from google.appengine.api import oauth
    from google.appengine.api import urlfetch
    from google.appengine.api import users
except ImportError:
    # NOTE(review): this fallback imports exactly the same modules as the
    # try-branch, so it can never succeed where the try-branch failed --
    # presumably a vestige of an alternate import path; confirm upstream.
    from google.appengine.api import memcache
    from google.appengine.api import oauth
    from google.appengine.api import urlfetch
    from google.appengine.api import users

try:
    # PyCrypto is optional; without it id_token signatures can't be checked.
    from Crypto.Hash import SHA256
    from Crypto.PublicKey import RSA
    _CRYPTO_LOADED = True
except ImportError:
    _CRYPTO_LOADED = False
__all__ = ['get_current_user',
           'InvalidGetUserCall',
           'SKIP_CLIENT_ID_CHECK']

# Sentinel: pass as allowed_client_ids to skip the client-ID check.
# Only honored for OAuth bearer tokens, never for id_tokens.
SKIP_CLIENT_ID_CHECK = ['*']
# Tolerated clock skew (seconds) when checking iat/exp claims.
_CLOCK_SKEW_SECS = 300
# Tokens whose exp lies further than this (seconds) in the future are rejected.
_MAX_TOKEN_LIFETIME_SECS = 86400
# Google's federated sign-on public certs, in raw modulus/exponent JSON form.
_DEFAULT_CERT_URI = ('https://www.googleapis.com/service_accounts/v1/metadata/'
                     'raw/federated-signon@system.gserviceaccount.com')
# os.environ keys used to hand auth info from the dispatcher to
# get_current_user().
_ENV_USE_OAUTH_SCOPE = 'ENDPOINTS_USE_OAUTH_SCOPE'
_ENV_AUTH_EMAIL = 'ENDPOINTS_AUTH_EMAIL'
_ENV_AUTH_DOMAIN = 'ENDPOINTS_AUTH_DOMAIN'
_EMAIL_SCOPE = 'https://www.googleapis.com/auth/userinfo.email'
_TOKENINFO_URL = 'https://www.googleapis.com/oauth2/v1/tokeninfo'
# Extracts the max-age value from a Cache-Control header directive.
_MAX_AGE_REGEX = re.compile(r'\s*max-age\s*=\s*(\d+)\s*')
_CERT_NAMESPACE = '__verify_jwt'
class _AppIdentityError(Exception):
    """Raised internally when an id_token fails parsing or verification."""
    pass
class InvalidGetUserCall(Exception):
    """Called get_current_user when the environment was not set up for it.

    Typically means get_current_user() was invoked outside an Endpoints
    request handler, where the auth environment variables are not populated.
    """
def get_current_user():
    """Return the User authenticated for the current Endpoints request.

    Must be called from within an @endpoints.method handler whose decorator
    includes the userinfo.email scope. Depending on how the dispatcher
    validated the request, the user comes either from the oauth service
    (scope recorded in ENDPOINTS_USE_OAUTH_SCOPE) or from the email/domain
    pair extracted from a valid id_token.

    Returns:
      A users.User (only the email field is guaranteed), or None if no
      valid token accompanied the request.

    Raises:
      InvalidGetUserCall: if the auth environment variables are not set,
        i.e. this was called outside an Endpoints request handler.
    """
    if not _is_auth_info_available():
        raise InvalidGetUserCall('No valid endpoints user in environment.')

    # An oauth bearer token was validated: defer to the oauth service.
    if _ENV_USE_OAUTH_SCOPE in os.environ:
        return oauth.get_current_user(os.environ[_ENV_USE_OAUTH_SCOPE])

    if _ENV_AUTH_EMAIL not in os.environ or _ENV_AUTH_DOMAIN not in os.environ:
        return None
    email = os.environ[_ENV_AUTH_EMAIL]
    if not email:
        # Variables were initialized but no valid token was found.
        return None
    return users.User(email, os.environ[_ENV_AUTH_DOMAIN] or None)
def _is_auth_info_available():
    """Check if user auth info has been set in environment variables."""
    if _ENV_USE_OAUTH_SCOPE in os.environ:
        return True
    return (_ENV_AUTH_EMAIL in os.environ and
            _ENV_AUTH_DOMAIN in os.environ)
def _maybe_set_current_user_vars(method, api_info=None, request=None):
    """Get user information from the id_token or oauth token in the request.

    Used internally by Endpoints to set up environment variables for user
    authentication.

    Args:
      method: The class method that's handling this request.  This method
        should be annotated with @endpoints.method.
      api_info: An api_config._ApiInfo instance. Optional. If None, will attempt
        to parse api_info from the implicit instance of the method.
      request: The current request, or None.
    """
    if _is_auth_info_available():
        return

    # Initialize the email/domain variables so _is_auth_info_available()
    # becomes true even when no valid token is found below.
    os.environ[_ENV_AUTH_EMAIL] = ''
    os.environ[_ENV_AUTH_DOMAIN] = ''

    try:
        api_info = api_info or method.im_self.api_info
    except AttributeError:
        # Unbound method: no api-level defaults are reachable, so fall back
        # to the method-level settings only.
        logging.warning('AttributeError when accessing %s.im_self. An unbound '
                        'method was probably passed as an endpoints handler.',
                        method.__name__)
        scopes = method.method_info.scopes
        audiences = method.method_info.audiences
        allowed_client_ids = method.method_info.allowed_client_ids
    else:
        # Method-level settings override the api-level ones when present.
        scopes = (method.method_info.scopes
                  if method.method_info.scopes is not None
                  else api_info.scopes)
        audiences = (method.method_info.audiences
                     if method.method_info.audiences is not None
                     else api_info.audiences)
        allowed_client_ids = (method.method_info.allowed_client_ids
                              if method.method_info.allowed_client_ids is not None
                              else api_info.allowed_client_ids)

    if not scopes and not audiences and not allowed_client_ids:
        # no auth check is needed for this method
        return

    token = _get_token(request)
    if not token:
        return None

    # id_tokens are only accepted when the method asks for exactly the email
    # scope and restricts client IDs.
    if ((scopes == [_EMAIL_SCOPE] or scopes == (_EMAIL_SCOPE,)) and
            allowed_client_ids):
        logging.debug('Checking for id_token.')
        time_now = long(time.time())
        user = _get_id_token_user(token, audiences, allowed_client_ids, time_now,
                                  memcache)
        if user:
            os.environ[_ENV_AUTH_EMAIL] = user.email()
            os.environ[_ENV_AUTH_DOMAIN] = user.auth_domain()
            return

    # Otherwise treat the token as an oauth bearer token.
    if scopes:
        logging.debug('Checking for oauth token.')
        if _is_local_dev():
            _set_bearer_user_vars_local(token, allowed_client_ids, scopes)
        else:
            _set_bearer_user_vars(allowed_client_ids, scopes)
def _get_token(request):
    """Extract the auth token for the current request.

    Lookup order: (1) the Authorization header, (2) the bearer_token query
    parameter, (3) the access_token query parameter.

    Args:
      request: The current request, or None.

    Returns:
      The token string, or None if no token was supplied.
    """
    header = os.environ.get('HTTP_AUTHORIZATION')
    if header:
        # If an Authorization header is present but its scheme is not
        # recognized, the query parameters are deliberately NOT consulted.
        for scheme in ('OAuth', 'Bearer'):
            if header.startswith(scheme):
                return header[len(scheme) + 1:]
        return None

    if request:
        for param in ('bearer_token', 'access_token'):
            value, _ = request.get_unrecognized_field_info(param)
            if value:
                return value
    return None
def _get_id_token_user(token, audiences, allowed_client_ids, time_now, cache):
    """Get a User for the given id token, if the token is valid.

    Args:
      token: The id_token to check.
      audiences: List of audiences that are acceptable.
      allowed_client_ids: List of client IDs that are acceptable.
      time_now: The current time as a long (eg. long(time.time())).
      cache: Cache to use (eg. the memcache module).

    Returns:
      A User if the token is valid, None otherwise.
    """
    # First check the signature and time window of the JWT itself...
    try:
        parsed_token = _verify_signed_jwt_with_certs(token, time_now, cache)
    except _AppIdentityError, e:
        logging.debug('id_token verification failed: %s', e)
        return None
    except:
        # Any other failure (malformed token, cert fetch, etc.) is treated
        # as "not authenticated" rather than propagated.
        logging.debug('id_token verification failed.')
        return None

    # ...then check the claims (issuer, audience, client id, email).
    if _verify_parsed_token(parsed_token, audiences, allowed_client_ids):
        email = parsed_token['email']
        return users.User(email)
def _set_oauth_user_vars(token_info, audiences, allowed_client_ids, scopes,
                         local_dev):
    """Deprecated alias for _set_bearer_user_vars.

    The token_info, audiences and local_dev arguments are accepted for
    backward compatibility but ignored.
    """
    logging.warning('_set_oauth_user_vars is deprecated and will be removed '
                    'soon.')
    return _set_bearer_user_vars(allowed_client_ids, scopes)
def _set_bearer_user_vars(allowed_client_ids, scopes):
    """Validate the oauth bearer token and record the matching scope.

    On success ENDPOINTS_USE_OAUTH_SCOPE is set in os.environ, which is
    all endpoints.get_current_user() needs to resolve the user later.

    Args:
      allowed_client_ids: List of client IDs that are acceptable.
      scopes: List of acceptable scopes.
    """
    skip_client_check = list(allowed_client_ids) == SKIP_CLIENT_ID_CHECK

    for scope in scopes:
        try:
            client_id = oauth.get_client_id(scope)
        except oauth.Error:
            # The token was not issued for this scope; try the next one.
            continue

        if not skip_client_check and client_id not in allowed_client_ids:
            logging.warning('Client ID is not allowed: %s', client_id)
            return

        os.environ[_ENV_USE_OAUTH_SCOPE] = scope
        logging.debug('Returning user from matched oauth_user.')
        return

    logging.debug('Oauth framework user didn\'t match oauth token user.')
    return None
def _set_bearer_user_vars_local(token, allowed_client_ids, scopes):
    """Validate the oauth bearer token on the dev server.

    Since the functions in the oauth module return only example results in local
    development, this hits the tokeninfo endpoint and attempts to validate the
    token.  If it's valid, we'll set _ENV_AUTH_EMAIL and _ENV_AUTH_DOMAIN so we
    can get the user from the token.

    Args:
      token: String with the oauth token to validate.
      allowed_client_ids: List of client IDs that are acceptable.
      scopes: List of acceptable scopes.
    """
    # Ask Google's tokeninfo endpoint to describe the token.
    result = urlfetch.fetch(
        '%s?%s' % (_TOKENINFO_URL, urllib.urlencode({'access_token': token})))
    if result.status_code != 200:
        try:
            error_description = json.loads(result.content)['error_description']
        except (ValueError, KeyError):
            error_description = ''
        logging.error('Token info endpoint returned status %s: %s',
                      result.status_code, error_description)
        return

    token_info = json.loads(result.content)

    # The token must carry a verified email address...
    if 'email' not in token_info:
        logging.warning('Oauth token doesn\'t include an email address.')
        return
    if not token_info.get('verified_email'):
        logging.warning('Oauth token email isn\'t verified.')
        return

    # ...be issued to an allowed client...
    client_id = token_info.get('issued_to')
    if (list(allowed_client_ids) != SKIP_CLIENT_ID_CHECK and
            client_id not in allowed_client_ids):
        logging.warning('Client ID is not allowed: %s', client_id)
        return

    # ...and grant at least one of the scopes the method requires.
    token_scopes = token_info.get('scope', '').split(' ')
    if not any(scope in scopes for scope in token_scopes):
        logging.warning('Oauth token scopes don\'t match any acceptable scopes.')
        return

    os.environ[_ENV_AUTH_EMAIL] = token_info['email']
    os.environ[_ENV_AUTH_DOMAIN] = ''
    logging.debug('Local dev returning user from token.')
    return
def _is_local_dev():
    """True when running under the local dev server rather than production."""
    server_software = os.environ.get('SERVER_SOFTWARE', '')
    return server_software.startswith('Development')
def _verify_parsed_token(parsed_token, audiences, allowed_client_ids):
    """Check the claims of an already signature-verified id_token.

    Validates issuer, audience, authorized-party client ID and the presence
    of an email claim. Returns True only when every check passes.
    """
    # The token must come from Google's federated sign-on service.
    if parsed_token.get('iss') != 'accounts.google.com':
        logging.warning('Issuer was not valid: %s', parsed_token.get('iss'))
        return False

    audience = parsed_token.get('aud')
    if not audience:
        logging.warning('No aud field in token')
        return False

    client_id = parsed_token.get('azp')
    # The audience must be the client itself or one explicitly allowed.
    if audience != client_id and audience not in audiences:
        logging.warning('Audience not allowed: %s', audience)
        return False

    # Unlike bearer tokens, id_tokens never allow skipping the client check.
    if list(allowed_client_ids) == SKIP_CLIENT_ID_CHECK:
        logging.warning('Client ID check can\'t be skipped for ID tokens.  '
                        'Id_token cannot be verified.')
        return False
    if not client_id or client_id not in allowed_client_ids:
        logging.warning('Client ID is not allowed: %s', client_id)
        return False

    return 'email' in parsed_token
def _urlsafe_b64decode(b64string):
    """Decode a URL-safe base64 string, tolerating missing '=' padding."""
    encoded = b64string.encode('ascii')
    # JWT segments strip trailing padding; restore it before decoding.
    padding = '=' * ((4 - len(encoded)) % 4)
    return base64.urlsafe_b64decode(encoded + padding)
def _get_cert_expiration_time(headers):
    """Compute how long (seconds) a certs response may be cached.

    Derived from the Cache-Control max-age directive, reduced by the Age
    header when present. Returns 0 (do not cache) when no max-age can be
    extracted.

    Args:
      headers: A dict containing the response headers from the request to get
        certs.

    Returns:
      An integer >= 0 with the number of seconds the cert should be cached.
    """
    max_age = None
    for directive in headers.get('Cache-Control', '').split(','):
        match = _MAX_AGE_REGEX.match(directive)
        if match:
            max_age = int(match.group(1))
            break
    if max_age is None:
        return 0

    age_header = headers.get('Age')
    if age_header is not None:
        try:
            age = int(age_header)
        except ValueError:
            # Malformed Age header: treat the response as fresh.
            age = 0
        max_age -= age

    return max(0, max_age)
def _get_cached_certs(cert_uri, cache):
    """Fetch Google's signing certs, caching them per their Cache-Control.

    Args:
      cert_uri: URI serving the certs as JSON (modulus/exponent form).
      cache: Cache to use (eg. the memcache module).

    Returns:
      The parsed certs dict, or None if they could not be retrieved.
    """
    certs = cache.get(cert_uri, namespace=_CERT_NAMESPACE)
    if certs is None:
        logging.debug('Cert cache miss')
        try:
            result = urlfetch.fetch(cert_uri)
        except AssertionError:
            # NOTE(review): presumably raised by the urlfetch stub when no
            # network is available (e.g. unit tests) -- confirm before
            # relying on this behavior.
            return None

        if result.status_code == 200:
            certs = json.loads(result.content)
            expiration_time_seconds = _get_cert_expiration_time(result.headers)
            if expiration_time_seconds:
                cache.set(cert_uri, certs, time=expiration_time_seconds,
                          namespace=_CERT_NAMESPACE)
        else:
            logging.error(
                'Certs not available, HTTP request returned %d', result.status_code)

    return certs
def _b64_to_long(b):
    """Decode a base64-encoded big-endian value into a long integer."""
    b = b.encode('ascii')
    # Restore any stripped '=' padding before decoding.
    b += '=' * ((4 - len(b)) % 4)
    b = base64.b64decode(b)
    # Python 2 only: str.encode('hex') and long() do not exist on Python 3.
    return long(b.encode('hex'), 16)
def _verify_signed_jwt_with_certs(
    jwt, time_now, cache,
    cert_uri=_DEFAULT_CERT_URI):
    """Verify a JWT against public certs.

    See http://self-issued.info/docs/draft-jones-json-web-token.html.

    The PyCrypto library included with Google App Engine is severely limited and
    so you have to use it very carefully to verify JWT signatures. The first
    issue is that the library can't read X.509 files, so we make a call to a
    special URI that has the public cert in modulus/exponent form in JSON.

    The second issue is that the RSA.verify method doesn't work, at least for
    how the JWT tokens are signed, so we have to manually verify the signature
    of the JWT, which means hashing the signed part of the JWT and comparing
    that to the signature that's been encrypted with the public key.

    Args:
      jwt: string, A JWT.
      time_now: The current time, as a long (eg. long(time.time())).
      cache: Cache to use (eg. the memcache module).
      cert_uri: string, URI to get cert modulus and exponent in JSON format.

    Returns:
      dict, The deserialized JSON payload in the JWT.

    Raises:
      _AppIdentityError: if any checks are failed.
    """
    # A JWT is header.payload.signature, each segment base64url-encoded.
    segments = jwt.split('.')
    if len(segments) != 3:
        raise _AppIdentityError('Wrong number of segments in token: %s' % jwt)
    signed = '%s.%s' % (segments[0], segments[1])

    signature = _urlsafe_b64decode(segments[2])
    # Python 2 only: bytes-to-integer via str.encode('hex').
    lsignature = long(signature.encode('hex'), 16)

    header_body = _urlsafe_b64decode(segments[0])
    try:
        header = json.loads(header_body)
    except:
        raise _AppIdentityError('Can\'t parse header: %s' % header_body)
    # Only RS256 signatures are supported.
    if header.get('alg') != 'RS256':
        raise _AppIdentityError('Unexpected encryption algorithm: %s' %
                                header.get('alg'))

    json_body = _urlsafe_b64decode(segments[1])
    try:
        parsed = json.loads(json_body)
    except:
        raise _AppIdentityError('Can\'t parse token: %s' % json_body)

    certs = _get_cached_certs(cert_uri, cache)
    if certs is None:
        raise _AppIdentityError(
            'Unable to retrieve certs needed to verify the signed JWT: %s' % jwt)

    if not _CRYPTO_LOADED:
        raise _AppIdentityError('Unable to load pycrypto library.  Can\'t verify '
                                'id_token signature.  See http://www.pycrypto.org '
                                'for more information on pycrypto.')

    local_hash = SHA256.new(signed).hexdigest()

    # Try each published key: "encrypting" the signature with the public key
    # performs the RSA modular exponentiation; the last 64 hex digits of the
    # result are compared to the SHA-256 digest of the signed portion.
    verified = False
    for keyvalue in certs['keyvalues']:
        modulus = _b64_to_long(keyvalue['modulus'])
        exponent = _b64_to_long(keyvalue['exponent'])
        key = RSA.construct((modulus, exponent))
        hexsig = '%064x' % key.encrypt(lsignature, '')[0]
        hexsig = hexsig[-64:]
        verified = (hexsig == local_hash)
        if verified:
            break
    if not verified:
        raise _AppIdentityError('Invalid token signature: %s' % jwt)

    # Time-window checks: iat/exp with a fixed skew allowance, plus an upper
    # bound on the total token lifetime.
    iat = parsed.get('iat')
    if iat is None:
        raise _AppIdentityError('No iat field in token: %s' % json_body)
    earliest = iat - _CLOCK_SKEW_SECS

    exp = parsed.get('exp')
    if exp is None:
        raise _AppIdentityError('No exp field in token: %s' % json_body)
    if exp >= time_now + _MAX_TOKEN_LIFETIME_SECS:
        raise _AppIdentityError('exp field too far in future: %s' % json_body)
    latest = exp + _CLOCK_SKEW_SECS

    if time_now < earliest:
        raise _AppIdentityError('Token used too early, %d < %d: %s' %
                                (time_now, earliest, json_body))
    if time_now > latest:
        raise _AppIdentityError('Token used too late, %d > %d: %s' %
                                (time_now, latest, json_body))

    return parsed
| apache-2.0 |
whatsbcn/shellforge4 | shellforge/shellforge/cpu_i386.py | 1 | 12307 | #############################################################################
## ##
## cpu_i386.py --- Classes for i386 CPUs ##
## see http://www.secdev.org/projects/shellforge/ ##
## for more informations ##
## ##
## Copyright (C) 2003 Philippe Biondi <phil@secdev.org> ##
## ##
## This program is free software; you can redistribute it and/or modify it ##
## under the terms of the GNU General Public License version 2 as ##
## published by the Free Software Foundation. ##
## ##
## This program is distributed in the hope that it will be useful, but ##
## WITHOUT ANY WARRANTY; without even the implied warranty of ##
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ##
## General Public License for more details. ##
## ##
#############################################################################
# $Id$
from cpu_any import *
import shellforge as sf
import struct
class Loaders_i386(Loaders):
    # Shellcode loaders/encoders for i386 targets (Python 2 code: relies on
    # str bytes, range() list concatenation and the py2 print statement in
    # a commented line).

    def loader_xor(self, shcode, avoid="\x00"):
        # XOR-encode shcode so that none of the bytes listed in `avoid`
        # (comma-separated chars or 0xNN values) appear, and prepend a
        # small decoder stub. Returns shcode unchanged when no avoided
        # byte occurs in it.
        avd = []
        for a in avoid.split(","):
            if a.startswith("0x") and len(a) == 4:
                avd.append(int(a[2:],16))
            else:
                avd += map(lambda x: ord(x),list(a))
        # Encoding is only needed if an avoided byte actually occurs.
        needloader=0
        for c in avd:
            if chr(c) in shcode:
                needloader=1
                break
        if not needloader:
            return shcode
        # Pick the first key whose XOR image of the payload contains no
        # avoided byte (keys 64..255 are tried before 1..63).
        for i in range(64,256)+range(1,64):
            ok=1
            for c in avd:
                if chr(c^i) in shcode:
                    ok=0
                    break
            if ok:
                key=i
                break
        if not ok:
            error("xor loader: no suitable xor key found.")
        sf.sflog.info("Applying xor loader. key=%#02x" % key)
        shcode = "".join(map(lambda x: chr(ord(x)^key), shcode))
        length = len(shcode)
        if length < 0x100:
            # Short decoder: 8-bit length in %cl.
            ld = ("\xeb\x0e\x90\x5e\x31\xc9\xb1"+chr(length)+"\x80\x36"+
                  chr(key)+"\x46\xe2\xfa\xeb\x05\xe8\xee\xff\xff\xff")
        else:
            # 16-bit length in %cx; a length with a zero low byte is bumped
            # by one so no NUL appears in the stub.
            if length & 0xff == 0:
                length += 1
            ld = ("\xeb\x0f\x5e\x31\xc9\x66\xb9"+
                  chr(length&0xff)+chr(length>>8)+
                  "\x80\x36"+chr(key)+
                  "\x46\xe2\xfa\xeb\x05\xe8\xec\xff\xff\xff")
        # The decoder stub itself must also be free of avoided bytes.
        ok=1
        for c in avd:
            if chr(c) in ld:
                ok=0
                break
        if not ok:
            error("xor loader: no suitable xor loader found")
        return ld+shcode

    def loader_alpha(self,shcode):
        # Re-encode shcode as printable-alphanumeric x86 that rebuilds the
        # original payload on the stack (4 bytes at a time, via push/xor).
        def mkcpl(x):
            # Find printable (c, d) with c^d == x, possibly via the 0xff
            # complement; returns (complement_flag, c, d).
            x = ord(x)
            set="0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"
            for c in set:
                d = ord(c)^x
                if chr(d) in set:
                    return 0,c,chr(d)
                if chr(0xff^d) in set:
                    return 1,c,chr(0xff^d)
            raise Exception,"No encoding found for %#02x"%x
        sf.sflog.info("Applying alpha loader")
        s="hAAAAX5AAAAHPPPPPPPPa"
        # The payload is emitted in reverse, dword by dword, because it is
        # reconstructed with pushes.
        shcode=list(shcode)
        shcode.reverse()
        shcode = "".join(shcode)
        shcode += "\x90"*((-len(shcode))%4)
        for b in range(len(shcode)/4):
            T,C,D = 0,"",""
            for i in range(4):
                t,c,d = mkcpl(shcode[4*b+i])
                T += t << i
                C = c+C
                D = d+D
            # "h..X5..P": push imm32 / pop %eax / xor %eax,imm32 / push %eax
            s += "h%sX5%sP" % (C,D)
            if T > 0:
                # Some bytes needed the 0xff complement: patch them in place
                # with dword xors at successive byte offsets.
                s += "TY"
                T = (2*T^T)%16
                for i in range(4):
                    if T & 1:
                        s += "19"
                    T >>= 1
                    if T == 0:
                        break
                    s += "I"
        return s+"T\xc3"

    def loader_stackreloc(self,shcode,smart=1):
        # Prepend a stub that (optionally only when needed) relocates %esp
        # near the shellcode so stack pushes cannot overwrite it.
        loader = ("\x58"+ # pop %eax
                  "\xe8\x00\x00\x00\x00"+ # call +0
                  "\x5b"+ # pop %ebx
                  "\x50"+ # push %eax
                  "\x83\xc3\xfa") # add $0xfffffffa,%ebx
        if smart != "0":
            # "Smart" mode: only move %esp when it is in a different 64k
            # region than the code (jne skips the mov below).
            loader += (
                "\x89\xd8"+ # mov %ebx,%eax
                "\x31\xe0"+ # xor %esp,%eax
                "\xc1\xe8\x10"+ # shr $0x10,%eax
                "\x85\xc0"+ # test %eax,%eax
                "\x75\x02") # jne +2
        loader += "\x89\xdc" # mov %ebx,%esp
        return loader+shcode

    def loader_compress(self, shcode, histo='5',length='2'):
        # LZ-style compressor matched by the in-shellcode decompressor stub
        # below (compress.nasm r44): back-references of up to 2**length-1
        # bytes within a 2**histo-byte window; bytes >= 0x80 are escaped
        # with a 0xff prefix.
        loader = "\xeb\x38\x5e\x46\xfc\xad\x89\xc1\x89\xf7\x01\xcf\x31\xc0\xac\xa8\x80\x78\x05\xaa\xe2\xf8\xeb\x27\x3d\xff\x00\x00\x00\x75\x03\xac\xe2\xf1\x51\x56\x89\xc1\x24\x1f\x89\xfe\x29\xc6\x80\xe1\x7f\xc0\xe9\x05\xf3\xa4\x5e\x59\xe2\xd6\xeb\x05\xe8\xc3\xff\xff\xff\xe9" # compress.nasm r44
        comp = ""
        histo = int(histo,0)
        length = int(length,0)
        if histo != 5 or length != 2:
            # The decoder stub above is hard-wired for histo=5, length=2.
            sf.sflog.warning("Compress: only works for histo and length default values")
        sf.sflog.info("Compress: histo=%i length=%i" % (histo, length))
        i = 0
        while i < len(shcode):
            c = shcode[i]
            if ord(c)&0x80:
                c = "\xff"+c
            # Longest back-reference first.
            j = min(2**length-1, i, len(shcode)-i+1)
            while j > 0:
                p = shcode[:i].rfind(shcode[i:i+j])
                # print >>sys.stderr,"%02i %4i %r %r" % (i,p,shcode[:i], shcode[i:i+j])
                if p >= 0 and p >= i-2**histo:
                    sf.sflog.debug("Compress: found @%i %i %r in %r" % (i,p-i, shcode[i:i+j],shcode[max(0,i-2**histo):i]))
                    # High bit set + length in high bits + distance in low bits.
                    c = chr(0x80|(j<<histo)|(i-p))
                    i += j-1
                    break
                j -= 1
            comp += c
            i += 1
        comp = loader+struct.pack("I",len(comp))+comp
        sf.sflog.info("Compress: [%i bytes] ==(C)==> [%i bytes]" % (len(shcode), len(comp)))
        return comp
class CodeTuner_gcc_i386(CodeTuner):
    # Post-processes gcc -S output for i386 so the resulting code is fully
    # position independent and self-contained (Python 2 code).

    def __init__(self, stack_reloc = 0, save_regs = 0):
        CodeTuner.__init__(self)
        # stack_reloc: prepend code moving %esp next to the shellcode.
        self.stack_reloc = stack_reloc
        # save_regs: wrap main's body in pusha/popa.
        self.save_regs = save_regs

    def ____tune(self, code):
        # State machine over the assembler lines; states:
        #  0 preamble, 1 .rodata, 2 .text preamble, 3 main: stack-frame setup,
        #  4 optional "sub ..,%esp", 5 before GOT setup, 6 GOT setup (%ebx),
        #  7 body, 8 after leave, 9 trailer.  Lines are collected per state
        #  and reassembled with extra stubs at the end.
        sf.sflog.info("Tuning assembler code")
        codelines = code.splitlines()
        preamb = []
        rodata = []
        textpream = []
        mkstkframe = []
        beforeebx = []
        setebx = []
        afterebx = []
        afterleave = []
        end = []
        out = [["# Modified by shellforge v%s\n"%VERSION]]
        # NOTE(review): st1/st2/st3 and setebx are filled/declared but never
        # used below -- presumably leftovers.
        st1 = []
        st2 = []
        st3 = []

        state=0
        for l in codelines:
            sf.sflog.debug("[%i] %s"% (state, l))
            # @PLT relocations cannot be resolved in a standalone shellcode.
            if l.find("@PLT") >= 0:
                error("Error at [%s]: Symbol not found" % (l.strip()), err=2)
            # Turn GOT loads into GOT-relative address computations.
            if l.find("@GOT(") >= 0:
                l = l.replace("mov","lea").replace("GOT","GOTOFF")
            if state == 0:
                if l.find(".rodata") >= 0:
                    state = 1
                    continue
                elif l.find(".text") >= 0:
                    state = 2
                else:
                    preamb.append(l);
            if state == 1:
                if l.find(".text") >= 0:
                    state = 2
                else:
                    rodata.append(l)
            if state == 2:
                textpream.append(l)
                if l.find("main:") >= 0:
                    state = 3
                    continue
            if state == 3:
                mkstkframe.append(l)
                # End of the prologue: "mov %esp,%ebp".
                if l.find("mov") >=0 and l.find("%esp") >= 0 and l.find("%ebp") >= 0:
                    state = 4
                    continue
            if state == 4:
                if l.find("sub") >=0 and l.find(",%esp") >=0:
                    mkstkframe.append(l)
                else:
                    # GOT setup is only expected when there is a .rodata section.
                    if rodata:
                        state = 5
                    else:
                        state = 7
            if state == 5:
                if l.find("call") >= 0 and l.find(".L") >= 0:
                    state = 6
                else:
                    beforeebx.append(l)
            if state == 6:
                setebx.append(l)
                if l.find("GLOBAL_OFFSET_TABLE") >= 0:
                    state = 7
                    continue
            if state == 7:
                if l.find("leave") >= 0:
                    state = 8
                else:
                    afterebx.append(l)
            if state == 8:
                if (l.find(".Lfe1:") >= 0 or
                    (l.find(".size") >= 0 and l.find("main") >= 0)):
                    state = 9
                else:
                    afterleave.append(l)
            if state == 9:
                end.append(l)
        if state != 9:
            self.automaton_error()

        out += [preamb, textpream]
        if self.stack_reloc:
            # Compute main's address into %ebx and move %esp there, but only
            # when %esp is not already within the same 64k region.
            out += [[ "\tpopl %eax",
                      "\tcall .L649",
                      ".L649:",
                      "\tpopl %ebx",
                      "\tpushl %eax",
                      "\taddl $[main-.L649],%ebx",
                      "\tmovl %ebx, %eax",
                      "\txorl %esp, %eax",
                      "\tshrl $16, %eax",
                      "\ttest %eax, %eax",
                      "\tjnz .Lnotinstack",
                      "\tmovl %ebx,%esp",
                      ".Lnotinstack:" ], mkstkframe, beforeebx ]
        else:
            out += [mkstkframe]
        if self.save_regs:
            out += [["\tpusha"]]
        out += [beforeebx]
        if rodata:
            # Replace gcc's GOT setup with a call/pop trick computing the
            # address of main into %ebx (data is addressed relative to it).
            out += [["\tcall .L649",
                     ".L649:",
                     "\tpopl %ebx",
                     "\taddl $[main-.L649],%ebx" ]]
        out += [afterebx]
        if self.save_regs:
            out += [["\tpopa"]]
        out += [afterleave, rodata, end]
        out = reduce(lambda x,y: x+["#---------"]+y, out)
        return "\n".join(out)+"\n"
class Binutils_i386(Binutils_BinaryTarget):
    """Binary target producing 32-bit x86 output through binutils.

    Specializes the generic Binutils_BinaryTarget by forcing plain i386
    code generation and position-independent code, which the shellcode
    post-processing in this module relies on.
    """
    def __init__(self, *args, **kargs):
        # Delegate all generic option handling to the base target.
        Binutils_BinaryTarget.__init__(self, *args, **kargs)
        # -march=i386: restrict to plain i386 instructions;
        # -fPIC: position-independent code so the payload runs anywhere.
        # NOTE(review): no leading space before "-march" -- assumes
        # CFLAGS already ends with whitespace; verify in the base class.
        self.CFLAGS += "-march=i386 -fPIC"
        # OSX tests #
        # self.CFLAGS += " -fPIC -fPIE -march=i386"
        # self.ASSEMBLEFLAGS += " -fPIC -fPIE -march=i386"
        # self.CFLAGS += " -march=i386 -fPIC -fno-zero-initialized-in-bss"
##########################
## ASM used for loaders ##
##########################
# XOR decrypt for shellcodes < 256 bytes
# --------------------------------------
#
# "\xeb\x0d\x5e\x31\xc9\xb1" LENGTH "\x80\x36" XORKEY
# "\x46\xe2\xfa\xeb\x05\xe8\xee\xff\xff\xff"
#
# .text
# .align 4
# .globl main
# .type main,@function
#
# main:
# jmp .l2
# .l1:
# pop %esi
# xorl %ecx, %ecx
# movb LENGTH, %cl
# .loop:
# xorb XORKEY, (%esi)
# incl %esi
# loop .loop
# jmp .l3
# .l2:
# call .l1
# .l3:
# #SHELLCODE HERE
# XOR decrypt for shellcodes >= 256 bytes
# --------------------------------------
# "\xeb\x0f\x5e\x31\xc9\x66\xb9" LENGTHW "\x80\x36" XORKEY
# "\x46\xe2\xfa\xeb\x05\xe8\xec\xff\xff\xff"
#
# .text
# .align 4
# .globl main
# .type main,@function
#
# main:
# jmp .l2
# .l1:
# pop %esi
# xorl %ecx, %ecx
# movw LENGTH, %cx
# .loop:
# xorb XORKEY, (%esi)
# incl %esi
# loop .loop
# jmp .l3
# .l2:
# call .l1
# .l3:
# #SHELLCODE
| gpl-2.0 |
mortbauer/openfoam-extend-Breeder-other-scripting-PyFoam | PyFoam/Infrastructure/CTestRun.py | 1 | 41961 | # ICE Revision: $Id$
"""A wrapper to run a solver as a CTest"""
import sys
import os
from os import path
import subprocess
import shutil
import traceback
import inspect
from PyFoam.ThirdParty.six.moves import cPickle as pickle
import time
from PyFoam.Applications.CloneCase import CloneCase
from PyFoam.Applications.Runner import Runner
from PyFoam.RunDictionary.SolutionDirectory import SolutionDirectory
from PyFoam.RunDictionary.ParsedParameterFile import ParsedParameterFile
from PyFoam.Applications.SamplePlot import SamplePlot
from PyFoam.Applications.TimelinePlot import TimelinePlot
from PyFoam.Applications.Decomposer import Decomposer
from PyFoam.Basics.Data2DStatistics import Data2DStatistics
from PyFoam.ThirdParty.six import print_,PY3,iteritems
# Registry of method names that CTestRun wraps with exception handling.
callbackMethods = []


def isCallback(func):
    """Decorator: register *func*'s name as a callback method.

    The decorated function itself is returned unchanged; only its name
    is recorded in the module-level ``callbackMethods`` list so that
    ``CTestRun.wrapCallbacks`` can later wrap it.
    """
    name = func.__name__
    callbackMethods.append(name)
    return func
class CTestRun(object):
"""This class runs a solver on a test case, examines the results
and fails if they don't live up the expectations"""
def __init__(self):
pass
def __new__(cls,*args,**kwargs):
obj=super(CTestRun,cls).__new__(cls,*args,**kwargs)
obj.__parameters={}
obj.__parametersClosedForWriting=False
obj.setParameters(sizeClass="unknown",
parallel=False,
autoDecompose=True,
doReconstruct=True,
nrCpus=None,
originalCaseBasis=None)
obj.__addToClone=[]
called=[]
obj.__recursiveInit(obj.__class__,called)
obj.__setParameterAsUsed(["nrCpus","autoDecompose","doReconstruct"])
obj.__parametersClosedForWriting=True
return obj
def __recursiveInit(self,theClass,called):
"""Automatically call the 'init'-method of the whole tree"""
# print_(theClass)
for b in theClass.__bases__:
if b not in [object,CTestRun]:
self.__recursiveInit(b,called)
# subclass overwrites superclasses
if "init" in dir(theClass):
# make sure this is only called once
if PY3:
toCall=theClass.init.__call__
else:
toCall=theClass.init.im_func
if not toCall in called:
theClass.init(self)
called.append(toCall)
# print_("Calling init for",theClass)
def addToClone(self,*args):
for a in args:
self.__addToClone.append(a)
def setParameters(self,**kwargs):
"""Update the parameters with a set of keyword-arguments"""
if self.__parametersClosedForWriting:
self.warn("Tried to modify parameters after the initialization phase",
kwargs)
caller=inspect.stack()[1]
setter="Set by %s in %s line %d" % (caller[3],caller[1],caller[2])
for k,v in iteritems(kwargs):
self.__parameters[k]={"value":v,
"setter":setter,
"used":False}
def parameterValues(self):
vals={}
for k,v in iteritems(self.__parameters):
vals[k]=v["value"]
return vals
def __setParameterAsUsed(self,keys):
for k in keys:
if k in self.__parameters:
self.__parameters[k]["used"]=True
def __getitem__(self,key):
"""Get a parameter"""
try:
parameter=self.__parameters[key]
except KeyError:
e = sys.exc_info()[1] # Needed because python 2.5 does not support 'as e'
print_("Unknown parameter",key,"(Parameters:",list(self.__parameters.keys()),")")
raise e
parameter["used"]=True
return parameter["value"]
def shortTestName(self):
return type(self).__name__
def testName(self):
"""Return the full test name with which this test is identified"""
result=self.shortTestName()+"_"+self["solver"]
if self["parallel"]:
result+="_parallel_"+str(self["nrCpus"])+"Cpus"
else:
result+="_serial"
result+="_"+self["sizeClass"]
return result
timeoutDefinitions=[
("unknown",60),
("tiny",60), # a minute
("small",300), # 5 minutes
("medium",1800), # half an hour
("big",7200), # 2 hours
("huge",43200), # 12 hours
("monster",172800), # 2 days
("unlimited",2592000) # 30 days
]
def sizeClassString(self):
    """Return a readable listing of all size classes with their timeouts."""
    entries = []
    for name, seconds in CTestRun.timeoutDefinitions:
        entries.append("%s = %ds" % (name, seconds))
    return ", ".join(entries)
def setTimeout(self, quiet=False):
    """Derive ``self.timeout`` (seconds) from the 'sizeClass' parameter.

    Also sets ``self.proposedSizeClass`` and ``self.toSmallTimeout``:
    the size class two steps smaller and its timeout, used after the
    run to suggest a smaller class when the test finished much faster
    than its allotted time.

    @param quiet: suppress the warning for an unset sizeClass
    """
    if self["sizeClass"] == "unknown":
        if not quiet:
            self.warn("The parameter 'sizeClass' has not been set yet. Assuming 'tiny'")
    # Always initialize these. The original set them only for the
    # "unknown" class or for classes with index > 2, so e.g. "tiny"
    # and "small" left toSmallTimeout undefined and run() later
    # failed with an AttributeError when comparing against it.
    self.toSmallTimeout = 0
    self.proposedSizeClass = "unknown"
    try:
        self.timeout = dict(CTestRun.timeoutDefinitions)[self["sizeClass"]]
        index = -1
        for i, v in enumerate(CTestRun.timeoutDefinitions):
            if v[0] == self["sizeClass"]:
                index = i
                break
        if index > 2:
            self.proposedSizeClass, self.toSmallTimeout = \
                CTestRun.timeoutDefinitions[index - 2]
    except KeyError:
        self.fatalFail("sizeClass is specified as", self["sizeClass"],
                       ". Valid values are with their timeout values",
                       self.sizeClassString())
        # Fallback so the test can continue when fatalFail does not
        # exit. The original wrote dict(CTestRun.timeoutDefinitions["unknown"]),
        # which indexes the *list* of tuples with a string and raises
        # TypeError; build the dict first, then look the key up.
        self.timeout = dict(CTestRun.timeoutDefinitions)["unknown"]
def __doInit(self,
solver,
originalCase,
minimumRunTime=None,
referenceData=None,
tailLength=50,
headLength=50,
**kwargs):
"""Initialzation method to be called before running the actual
test (purpose of this method is to avoid cascaded of
constructor-calls
@param solver: name of the solver to test
@param originalCase: location of the original case files (they
will be copied)
@param minimumRuntime: the solver has to run at least to this time
to be considered "ran successful"
@param referenceData: directory with data that is used for testing
@param tailLength: output that many lines from the end of the solver output
@param headLength: output that many lines from the beginning of the solver output
"""
print_("Creating test",self.testName())
self.__setParameterAsUsed(["solver","originalCase","minimumRunTime",
"referenceData","tailLength","headLength"])
self.__failed=False
self.__failMessage=""
self.__runInfo=None
self.__tailLength=tailLength
self.__headLength=headLength
self.setTimeout()
self.solver=self.which(solver)
if not self.solver:
self.fatalFail("Solver",solver,"not in PATH")
print_("Using solver",self.solver)
if self["originalCaseBasis"]:
originalCase=path.join(self["originalCaseBasis"],originalCase)
print_("Expanding original case path with",self["originalCaseBasis"])
self.originalCase=path.expandvars(originalCase)
if not path.exists(self.originalCase):
self.fatalFail("Original case",self.originalCase,"does not exist")
print_("Original case",self.originalCase)
self.caseDir=path.join(self.workDir(),self.testName()+"_runDir")
print_("Running case in",self.caseDir)
if path.exists(self.caseDir):
if self.removeOldCase:
self.warn("Removing old case",self.caseDir)
shutil.rmtree(self.caseDir)
elif self.doClone:
self.fatalFail(self.caseDir,"already existing")
else:
self.fail(self.caseDir,"already existing")
if referenceData:
self.referenceData=path.join(self.dataDir(),referenceData)
if not path.exists(self.referenceData):
self.fatalFail("Data directory",self.referenceData,"does not exist")
print_("Using reference data from")
else:
self.referenceData=None
print_("No reference data specified")
if self.doReadRunInfo:
print_("Attempting to read the runInfo-file")
self.readRunInfo()
self.minimumRunTime=minimumRunTime
print_()
def readRunInfo(self):
    """Read the runInfo from the pickle-file in the case directory.

    Sets ``self.__runInfo`` from ``<caseDir>/runInfo.pickle``.
    """
    # 'with' closes the file handle; the original leaked it.
    with open(path.join(self.caseDir, "runInfo.pickle"), "rb") as fh:
        self.__runInfo = pickle.Unpickler(fh).load()
def writeRunInfo(self):
    """Write the runInfo to the pickle-file in the case directory.

    (The original docstring said "read"; this method pickles
    ``self.__runInfo`` out to ``<caseDir>/runInfo.pickle``.)
    """
    # 'with' flushes and closes the file handle; the original leaked it,
    # risking a truncated pickle if the process exited early.
    with open(path.join(self.caseDir, "runInfo.pickle"), "wb") as fh:
        pickle.Pickler(fh).dump(self.__runInfo)
def wrapACallback(self,name):
"""Has to be a separate method because the loop in
wrapCallbacks didn't work"""
original=getattr(self,name)
if PY3:
original_callable=getattr(original,'__func__')
else:
original_callable=getattr(original,'im_func')
def wrapped(*args,**kwargs):
# print_("Wrapping",name,args,kwargs,original_callable)
return self.runAndCatchExceptions(original_callable,self,*args,**kwargs)
setattr(self,name,wrapped)
def wrapCallbacks(self):
"""Wrap the callback methods with a Python exception handler.
This is not done here so that methoids that the child classes
overwrote will be wrapped to"""
# not yet working
for m in callbackMethods:
print_("Wrapping method",m)
# setattr(self,m,original)
self.wrapACallback(m)
def processOptions(self):
"""Select which phase of the test should be run"""
from optparse import OptionParser,OptionGroup
parser = OptionParser(usage="%prog: [options]")
phases=OptionGroup(parser,
"Phase",
"Select which phases to run")
parser.add_option_group(phases)
phases.add_option("--no-clone",
action="store_false",
dest="doClone",
default=True,
help="Skip cloning phase")
phases.add_option("--no-preparation",
action="store_false",
dest="doPreparation",
default=True,
help="Skip preparation phase")
phases.add_option("--no-serial-pre-tests",
action="store_false",
dest="doSerialPreTests",
default=True,
help="Skip pre-run test phase")
phases.add_option("--no-decompose",
action="store_false",
dest="doDecompose",
default=True,
help="Skip decomposition phase")
phases.add_option("--no-parallel-preparation",
action="store_false",
dest="doParallelPreparation",
default=True,
help="Skip the parallel preparation phase")
phases.add_option("--no-pre-tests",
action="store_false",
dest="doPreTests",
default=True,
help="Skip pre-run test phase")
phases.add_option("--no-simulation",
action="store_false",
dest="doSimulation",
default=True,
help="Skip simulation phase")
phases.add_option("--no-postprocessing",
action="store_false",
dest="doPostprocessing",
default=True,
help="Skip postprocessing phase")
phases.add_option("--no-post-tests",
action="store_false",
dest="doPostTests",
default=True,
help="Skip post-run test phase")
phases.add_option("--no-reconstruction",
action="store_false",
dest="doReconstruction",
default=True,
help="Skip reconstruction phase")
phases.add_option("--no-serial-post-tests",
action="store_false",
dest="doSerialPostTests",
default=True,
help="Skip serial post-run test phase")
phases.add_option("--jump-to-tests",
action="store_true",
dest="jumpToTests",
default=False,
help="Skip everything except the final tests")
behave=OptionGroup(parser,
"Behaviour",
"Determine the behaviour")
parser.add_option_group(behave)
behave.add_option("--fatal-not-fatal",
action="store_true",
dest="fatalIsNotFatal",
default=False,
help="Continue running the tests although a fatal error occured")
behave.add_option("--remove-old-case",
action="store_true",
dest="removeOldCase",
default=False,
help="Remove the case directory if it exists")
info=OptionGroup(parser,
"Info",
"Information about the test (all these options print to the screen and stop the test before doing anything)")
parser.add_option_group(info)
info.add_option("--parameter-value",
action="store",
dest="parameterValue",
default=None,
help="Just print the value of a parameter. Nothing if the parameter does not exist")
info.add_option("--dump-parameters",
action="store_true",
dest="dumpParameters",
default=False,
help="Dump all the parameter values")
info.add_option("--verbose-dump-parameters",
action="store_true",
dest="verboseDumpParameters",
default=False,
help="Dump all the parameter values plus the information where they were set")
data=OptionGroup(parser,
"Data",
"Reading and writing data that allows rerunning cases")
parser.add_option_group(data)
data.add_option("--read-run-info",
action="store_true",
dest="readRunInfo",
default=False,
help="Read the runInfo-File if it exists in the runDirectory (mostly used to test tests without running the solver)")
data.add_option("--print-run-info",
action="store_true",
dest="printRunInfo",
default=False,
help="Print the runInfo when it becomes available")
script=OptionGroup(parser,
"Script parameters",
"Information about the test (all these options print to the screen and stop the test before doing anything and can be used as input in scripts)")
parser.add_option_group(script)
script.add_option("--print-test-name",
action="store_true",
dest="printTestName",
default=False,
help="Print the test name under which this test will be known to the world")
script.add_option("--timeout",
action="store_true",
dest="timeout",
default=False,
help="Print the timeout for this test")
(options, args) = parser.parse_args()
if options.parameterValue:
try:
print_(self[options.parameterValue])
sys.exit(0)
except KeyError:
sys.exit(1)
if options.printTestName:
print_(self.testName())
sys.exit(0)
if options.timeout:
self.setTimeout(quiet=True)
print_(self.timeout)
sys.exit(0)
if options.dumpParameters or options.verboseDumpParameters:
keys=list(self.__parameters.keys())
keys.sort()
maxLen=max([len(n) for n in keys])
for k in keys:
print_(k," "*(maxLen-len(k)),":",self[k])
if options.verboseDumpParameters:
print_(" ",self.__parameters[k]["setter"])
print_()
sys.exit(0)
self.doReadRunInfo=options.readRunInfo
self.doPrintRunInfo=options.printRunInfo
self.doClone=options.doClone
self.doPreparation=options.doPreparation
self.doSerialPreTests=options.doSerialPreTests
self.doDecompose=options.doDecompose
self.doParallelPreparation=options.doParallelPreparation
self.doPreTests=options.doPreTests
self.doSimulation=options.doSimulation
self.doPostprocessing=options.doPostprocessing
self.doPostTests=options.doPostTests
self.doReconstruction=options.doReconstruction
self.doSerialPostTests=options.doSerialPostTests
if options.jumpToTests:
self.doClone=False
self.doPreparation=False
self.doSerialPreTests=False
self.doDecompose=False
self.doParallelPreparation=False
self.doPreTests=False
self.doSimulation=False
self.doPostprocessing=False
self.fatalIsNotFatal=options.fatalIsNotFatal
self.removeOldCase=options.removeOldCase
def run(self):
"""Run the actual test"""
startTime=time.time()
self.processOptions()
self.__doInit(**self.parameterValues())
self.wrapCallbacks()
self.__runParallel=False
if self.doClone:
self.status("Cloning case")
clone=CloneCase([self.originalCase,self.caseDir]+
["--add="+a for a in self.__addToClone])
else:
self.status("Skipping cloning")
if self.doPreparation:
if self.referenceData:
if path.exists(path.join(self.referenceData,"copyToCase")):
self.status("Copying reference data")
self.cloneData(path.join(self.referenceData,"copyToCase"),
self.caseDir)
else:
self.status("No reference data - No 'copyToCase' in",self.referenceData)
if path.exists(path.join(self.referenceData,"additionalFunctionObjects")):
self.status("Adding function objects")
self.addFunctionObjects(path.join(self.referenceData,"additionalFunctionObjects"))
else:
self.status("No additional function objects - No 'additionalFunctionObjects' in",self.referenceData)
self.status("Preparing mesh")
self.meshPrepare()
self.status("Preparing case")
self.casePrepare()
else:
self.status("Skipping case preparation")
if self.doSerialPreTests:
self.status("Running serial pre-run tests")
self.runTests("serialPreRunTest",warnSerial=True)
else:
self.status("Skipping the serial pre-tests")
if self["parallel"]:
if self.doDecompose:
self.status("Decomposing the case")
if self["autoDecompose"]:
self.autoDecompose()
else:
self.decompose()
else:
self.status("Skipping the decomposition")
self.__runParallel=self["parallel"]
if self["parallel"]:
if self.doParallelPreparation:
self.status("Parallel preparation of the case")
self.parallelPrepare()
else:
self.status("Skipping parallel preparation")
if self.doPreTests:
self.status("Running pre-run tests")
self.runTests("preRunTest")
else:
self.status("Skipping the pre-tests")
if self.doSimulation:
self.status("Run solver")
self.__runInfo=dict(self.execute(self.solver).getData())
self.writeRunInfo()
print_()
if not self.runInfo()["OK"]:
self.fail("Solver",self.solver,"ended with an error")
else:
try:
self.status("Solver ran until time",self.runInfo()["time"])
except KeyError:
self.fail("No information how long the solver ran")
else:
self.status("Skipping running of the simulation")
if self.doPrintRunInfo:
print_()
print_("runInfo used in further tests")
import pprint
printer=pprint.PrettyPrinter()
printer.pprint(self.__runInfo)
if self.doPostprocessing:
self.status("Running postprocessing tools")
self.postprocess()
else:
self.status("Skipping the postprocessing tools")
if self.doPostTests:
self.status("Running post-run tests")
self.runTests("postRunTest")
else:
self.status("Skipping the post-run tests")
self.__runParallel=False
if self["parallel"]:
if self.doReconstruction and self["doReconstruct"]:
self.status("Reconstructing the case")
if self["autoDecompose"]:
self.autoReconstruct()
else:
self.reconstruct()
else:
self.status("Skipping the reconstruction")
if self.doSerialPostTests:
self.status("Running serial post-run tests")
self.runTests("serialPostRunTest",warnSerial=True)
else:
self.status("Skipping the serial post-tests")
if self.minimumRunTime:
try:
if float(self.runInfo()["time"])<self.minimumRunTime:
self.fail("Solver only ran to",self.runInfo()["time"],
"but should at least run to",self.minimumRunTime)
except KeyError:
self.fail("No information about run-time. Should have run to",
self.minimumRunTime)
except TypeError:
self.warn("Silently ignoring missing runInfo()")
runTime=time.time()-startTime
self.status("Total running time",runTime,"seconds")
if runTime>self.timeout:
self.warn("Running time",runTime,"bigger than assigned timeout",
self.timeout,". Consider other sizeclass than",
self["sizeClass"],"from sizeclasses",self.sizeClassString())
elif runTime<self.toSmallTimeout:
self.warn("Running time",runTime,"much smaller than assigned timeout",
self.timeout,". Consider other sizeclass than",
self["sizeClass"],"from sizeclasses",
self.sizeClassString(),"for instance",self.proposedSizeClass)
return self.endTest()
# make configuration-dependent
def workDir(self):
try:
return os.environ["PYFOAM_CTESTRUN_WORKDIR"]
except KeyError:
if not hasattr(self,"__checkWorkDir"):
self.__checkWorkDir=None
self.warn("No environment variable PYFOAM_CTESTRUN_WORKDIR defined. Using current directory")
return path.curdir
# make configuration-dependent
def dataDir(self):
    """Directory holding the reference data for the tests.

    Taken from the environment variable PYFOAM_CTESTRUN_DATADIR;
    falls back to the current directory (warning only once).
    """
    try:
        return os.environ["PYFOAM_CTESTRUN_DATADIR"]
    except KeyError:
        # Warn only on the first call. The original checked
        # hasattr(self, "__checkDataDir"), but the attribute is stored
        # under its name-mangled form (_CTestRun__checkDataDir), so the
        # guard never matched and the warning fired on every call.
        if not hasattr(self, "_warnedDataDir"):
            self._warnedDataDir = True
            self.warn("No environment variable PYFOAM_CTESTRUN_DATADIR defined. Using current directory")
        return path.curdir
def addFunctionObjects(self,templateFile):
"""Add entries for libraries and functionObjects to the controlDict
(if they don't exist
@param templateFile: file withe the data that should be added
"""
tf=ParsedParameterFile(templateFile)
cd=self.controlDict()
touchedCD=False
if "libs" in tf:
touchedCD=True
if not "libs" in cd:
cd["libs"]=[]
for l in tf["libs"]:
if l in cd["libs"]:
self.warn(l,"already in 'libs' in the controlDict")
else:
cd["libs"].append(l)
if "functions" in tf:
touchedCD=True
if not "functions" in cd:
cd["functions"]={}
for k,v in iteritems(tf["functions"]):
if k in cd["functions"]:
self.warn("Overwriting function object",k)
cd["functions"][k]=v
if touchedCD:
cd.writeFile()
def cloneData(self,src,dst):
"""Copy files recurivly into a case
@param src: the source directory the files come fro
@param dst: the destination directory the files go to"""
for f in os.listdir(src):
if f[0]=='.':
self.warn("Ignoring dot-file",path.join(src,f))
continue
if path.isdir(path.join(src,f)):
if not path.exists(path.join(dst,f)):
os.mkdir(path.join(dst,f))
self.cloneData(path.join(src,f),path.join(dst,f))
else:
if path.exists(path.join(dst,f)):
self.warn("File",path.join(dst,f),"exists already in case. Overwritten")
shutil.copy(path.join(src,f),path.join(dst,f))
def runCommand(self, *args):
    """Run a shell command and print its combined stdout/stderr.

    Registers a (non-fatal) failure if the command exits with a
    non-zero status.

    @param args: parts of the command line; non-strings are converted
    """
    # Build the command line once; callers pass non-strings (e.g. the
    # integer tail/head line counts), so every part must go through str().
    cmd = " ".join(str(a) for a in args)
    p = subprocess.Popen(cmd,
                         shell=True,
                         stdout=subprocess.PIPE,
                         stderr=subprocess.STDOUT)
    print_(p.communicate()[0])
    sts = p.returncode
    if sts != 0:
        # The original re-joined the raw args here, which raised a
        # TypeError on non-string arguments, masking the real failure.
        self.fail("Command", cmd, "ended with status", sts)
def shell(self, *args, **kwargs):
    """Run a command in a directory and let it write directly to the output.

    @param args: parts of the command line; non-strings are converted
    @param workingDirectory: (keyword only) change to this directory
    before running the command; defaults to the case directory. The
    original docstring promised this parameter but the signature
    never accepted it -- it is now supported, backward-compatibly.
    """
    workingDirectory = kwargs.pop("workingDirectory", None)
    if kwargs:
        self.fail("shell(): unknown keyword arguments:", list(kwargs.keys()))
    if not workingDirectory:
        workingDirectory = self.caseDir
    cmd = " ".join(str(a) for a in args)
    self.status("Executing", cmd, "in", workingDirectory)
    oldDir = os.getcwd()
    os.chdir(workingDirectory)
    try:
        p = subprocess.Popen(cmd,
                             shell=True,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.STDOUT)
        self.status("Output of the command")
        self.line()
        print_(p.communicate()[0])
        self.line()
        sts = p.returncode
        if sts != 0:
            self.fail("Command", cmd, "ended with status", sts)
        else:
            self.status(cmd, "ran OK")
    finally:
        # Restore the working directory even if something above raised.
        os.chdir(oldDir)
def execute(self,*args,**kwargs):
"""Execute the passed arguments on the case and check if
everything went alright
@param regexps: a list of regular expressions that the output should be scanned for"""
try:
regexps=kwargs["regexps"]
if type(regexps)!=list:
self.fail(regexps,"is not a list of strings")
raise KeyError
except KeyError:
regexps=None
if len(args)==1 and type(args[0])==str:
args=[a.replace("%case%",self.solution().name) for a in args[0].split()]
pyArgs=["--silent","--no-server-process"]
if self.__runParallel:
pyArgs+=["--procnr=%d" % self["nrCpus"]]
argList=list(args)+\
["-case",self.caseDir]
self.status("Executing"," ".join(argList))
if regexps:
self.status("Also looking for the expressions",'"'+('" "'.join(regexps))+'"')
pyArgs+=[r'--custom-regexp=%s' % r for r in regexps]
runner=Runner(args=pyArgs+argList)
self.status("Execution ended")
if not runner["OK"]:
self.fail("Running "," ".join(argList),"failed")
else:
self.status("Execution was OK")
if "warnings" in runner:
self.status(runner["warnings"],"during execution")
print_()
self.status("Output of"," ".join(argList),":")
if runner["lines"]>(self.__tailLength+self.__headLength):
self.status("The first",self.__headLength,"lines of the output.",
"Of a total of",runner["lines"])
self.line()
self.runCommand("head","-n",self.__headLength,runner["logfile"])
self.line()
print_()
self.status("The last",self.__tailLength,"lines of the output.",
"Of a total of",runner["lines"])
self.line()
self.runCommand("tail","-n",self.__tailLength,runner["logfile"])
self.line()
else:
self.line()
self.runCommand("cat",runner["logfile"])
self.line()
self.status("End of output")
print_()
return runner
def runInfo(self):
"""return the run information. If the solver was actually run"""
if self.__runInfo==None:
self.fatalFail("runInfo() called although solver was not yet run")
else:
return self.__runInfo
def solution(self):
"""Access to a SolutionDirectory-object that represents the
current solution"""
if not hasattr(self,"_solution"):
self._solution=SolutionDirectory(self.caseDir,
archive=None)
return self._solution
def controlDict(self):
"""Access a representation of the controlDict of the case"""
if not hasattr(self,"_controlDict"):
self._controlDict=ParsedParameterFile(self.solution().controlDict())
return self._controlDict
def line(self):
self.status("/\\"*int((78-len("TEST "+self.shortTestName()+" :"))/2))
def status(self, *args):
    """Print a status message about the test, prefixed with its name.

    @param args: parts of the message
    """
    print_("TEST", self.shortTestName(), ":", end="")
    # NOTE(review): each arg is printed in its own print_ call with
    # end="", so consecutive args appear with NO separating space.
    # A single print_("TEST", name, ":", *args) was probably intended;
    # confirm against expected log output before changing.
    for a in args:
        print_(a, end="")
    print_()
def messageGeneral(self, prefix, say, *args):
    """Emit a classified message and record it for the final summary.

    Everything that passes through this method is repeated at the end
    of the test run (via the accumulated fail-message buffer).

    @param prefix: general classification of the message (upper-cased)
    @param say: lead-in word(s) printed before the message
    @param args: arbitrary number of arguments that build the message
    """
    # Build "PREFIX: arg0 arg1 ..." -- requires at least one arg.
    msg = prefix.upper() + ": " + str(args[0])
    for a in args[1:]:
        msg += " " + str(a)
    print_()
    print_(say, msg)
    print_()
    # Accumulate for the summary printed by endTest().
    self.__failMessage += msg + "\n"
def failGeneral(self,prefix,*args):
"""@param args: arbitrary number of arguments that build the
fail-message
@param prefix: General classification of the failure
"""
self.__failed=True
self.messageGeneral(prefix,"Test failed:",*args)
def warn(self,*args):
"""@param args: arbitrary number of arguments that build the
warning-message"""
self.messageGeneral("warning","",*args)
def fail(self,*args):
"""To be called if the test failed but other tests should be tried
@param args: arbitrary number of arguments that build the
fail-message"""
self.failGeneral("failure",*args)
def fatalFail(self,*args):
"""@param args: arbitrary number of arguments that build the
fail-message"""
self.failGeneral("fatal failure",*args)
if not self.fatalIsNotFatal:
self.endTest()
def endTest(self):
unused=[]
for k,v in iteritems(self.__parameters):
if not v["used"]:
unused.append(k)
if len(unused)>0:
self.warn("Unused parameters (possible typo):",unused)
print_()
if self.__failed:
print_("Test failed.")
print_()
print_("Summary of failures")
print_(self.__failMessage)
print_()
sys.exit(1)
else:
print_("Test successful")
print_()
if len(self.__failMessage)>0:
print_("Summary of warnings")
print_(self.__failMessage)
print_()
sys.exit(0)
def which(self, command):
    """Locate *command* on the PATH (like the Unix 'which' utility).

    @param command: bare name of the executable
    @return: full path to the executable, or None if it is not found
    """
    for directory in os.environ["PATH"].split(os.pathsep):
        candidate = path.join(directory, command)
        if path.exists(candidate):
            return candidate
    return None
def runAndCatchExceptions(self, func, *args, **kwargs):
    """Run a callable and catch Python exceptions if they occur.

    @param func: the actual thing to be run
    @return: True on success, False if a regular Exception was caught;
    a SystemExit is recorded as a failure and then re-raised
    """
    try:
        func(*args, **kwargs)
        return True
    except SystemExit:
        e = sys.exc_info()[1]  # Needed because python 2.5 does not support 'as e'
        self.fail("sys.exit() called somewhere while executing",
                  func.__name__, ":", e)
        traceback.print_exc()
        # Re-raise deliberately: an explicit exit must still terminate.
        raise e
    except Exception:
        e = sys.exc_info()[1]  # Needed because python 2.5 does not support 'as e'
        self.fail("Python problem during execution of",
                  func.__name__, ":", e)
        traceback.print_exc()
        return False
def runTests(self,namePrefix,warnSerial=False):
"""Run all methods that fit a certain name prefix"""
self.status("Looking for tests that fit the prefix",namePrefix)
cnt=0
for n in dir(self):
if n.find(namePrefix)==0:
meth=getattr(self,n)
if not inspect.ismethod(meth):
self.fail("Found attribute",n,
"that fits the prefix",namePrefix,
"in test class but it is not a method")
else:
self.status("Running the test",n)
if not self["parallel"] and warnSerial:
self.warn("This is a serial test. No need to have special serial tests like",n)
self.runAndCatchExceptions(meth)
cnt+=1
if cnt>0:
self.status(cnt,"tests with prefix",namePrefix,"run")
else:
self.status("No test fit the prefix",namePrefix)
def generalTest(self,
                testFunction,
                args,
                *message):
    """Run a generic check and register a failure if it does not hold.

    @param testFunction: callable implementing the check
    @param args: positional arguments for testFunction; must be an
    iterable (it is unpacked with *), so a single value has to be
    wrapped in a one-element tuple
    @param message: parts of the failure message
    """
    if not testFunction(*args):
        self.fail(*message)
def compareSamples(self,
data,
reference,
fields,
time=None,
line=None,
scaleData=1,
offsetData=0,
scaleX=1,
offsetX=0,
useReferenceForComparison=False):
"""Compare sample data and return the statistics
@param data: the name of the data directory
@param reference:the name of the directory with the reference data
@param fields: list of the fields to compare
@param time: the time to compare for. If empty the latest time is used"""
timeOpt=["--latest-time"]
if time:
timeOpt=["--time="+str(time)]
if line:
timeOpt+=["--line=%s" % line]
addOpt=[]
if useReferenceForComparison:
addOpt.append("--use-reference-for-comparison")
sample=SamplePlot(args=[self.caseDir,
"--silent",
"--dir="+data,
"--reference-dir="+reference,
"--tolerant-reference-time",
"--compare",
"--index-tolerant-compare",
"--common-range-compare",
"--metrics",
"--scale-data=%f" % scaleData,
"--scale-x=%f" % scaleX,
"--offset-data=%f" % offsetData,
"--offset-x=%f" % offsetX
]+
timeOpt+
addOpt+
["--field="+f for f in fields])
return Data2DStatistics(metrics=sample["metrics"],
compare=sample["compare"],
noStrings=True,
failureValue=0)
def compareTimelines(self,
data,
reference,
fields):
"""Compare timeline data and return the statistics
@param data: the name of the data directory
@param reference:the name of the directory with the reference data
@param fields: list of the fields to compare"""
sample=TimelinePlot(args=[self.caseDir,
"--silent",
"--dir="+data,
"--reference-dir="+reference,
"--compare",
"--basic-mode=lines",
"--metrics"]+
["--field="+f for f in fields])
return Data2DStatistics(metrics=sample["metrics"],
compare=sample["compare"],
noStrings=True,
failureValue=0)
def isNotEqual(self,value,target=0,tolerance=1e-10,message=""):
self.generalTest(
lambda x,y:abs(x-y)>tolerance,
(value,target),
message,"( value",value,"within tolerance",tolerance,
"of target",target,")")
def isEqual(self,value,target=0,tolerance=1e-10,message=""):
self.generalTest(
lambda x,y:abs(x-y)<tolerance,
(value,target),
message,"( value",value," not within tolerance",tolerance,
"of target",target,")")
def isBigger(self, value, threshold=0, message=""):
    """Check that value > threshold; register a failure otherwise.

    @param value: the value under test
    @param threshold: lower bound that must be exceeded
    @param message: prefix for the failure message
    """
    self.generalTest(
        lambda x: x > threshold,
        # Must be a tuple: the original passed (value), which is just
        # the scalar itself, and generalTest unpacks its args with *,
        # raising a TypeError for non-iterable values.
        (value,),
        message, "( value", value, " not bigger than", threshold, ")")
def isSmaller(self, value, threshold=0, message=""):
    """Check that value < threshold; register a failure otherwise.

    @param value: the value under test
    @param threshold: upper bound that must not be reached
    @param message: prefix for the failure message
    """
    self.generalTest(
        lambda x: x < threshold,
        # Must be a tuple: the original passed (value), which is just
        # the scalar itself, and generalTest unpacks its args with *,
        # raising a TypeError for non-iterable values.
        (value,),
        message, "( value", value, " not smaller than", threshold, ")")
def preRunTestCheckMesh(self):
"""This test is always run. If this is not desirable it has to
be overridden in a child-class"""
self.execute("checkMesh")
def autoDecompose(self):
"""Decomposition used if no callback is specified"""
deco=Decomposer(args=[self.caseDir,
str(self["nrCpus"]),
"--all-regions"])
def autoReconstruct(self):
"""Reconstruction used if no callback is specified"""
self.execute("reconstructPar","-latestTime")
@isCallback
def meshPrepare(self):
"""Callback to prepare the mesh for the case. Default
behaviour is to run blockMesh on the case"""
result=self.execute("blockMesh")
if not result["OK"]:
self.fatalFail("blockMesh was not able to create a mesh")
@isCallback
def casePrepare(self):
"""Callback to prepare the case. Default behaviour is to do
nothing"""
pass
@isCallback
def parallelPrepare(self):
"""Callback to prepare the case in parallel (after it was decomposed).
Default behaviour is to do nothing"""
pass
@isCallback
def postprocess(self):
"""Callback to run after the solver has finished. Default
behaviour is to do nothing"""
pass
@isCallback
def decompose(self):
"""Callback to do the decomposition (if automatic is not sufficient)"""
self.fatalFail("Manual decomposition specified but no callback for manual decomposition specified")
@isCallback
def reconstruct(self):
"""Callback to do the reconstruction (if automatic is not sufficient)"""
self.warn("Manual decomposition specified, but no callback 'reconstruct' implemented. Using the automatic reconstruction")
self.autoReconstruct()
# Should work with Python3 and Python2
| gpl-2.0 |
yunli2004/OpenClos | jnpr/openclos/tests/unit/test_l3Clos.py | 1 | 20266 | '''
Created on Jul 22, 2014
@author: moloyc
'''
import os
import sys
sys.path.insert(0,os.path.abspath(os.path.dirname(__file__) + '/' + '../..')) #trick to make it run from CLI
import unittest
import shutil
from flexmock import flexmock
from jnpr.openclos.l3Clos import L3ClosMediation
from jnpr.openclos.model import Pod, Device, InterfaceLogical, InterfaceDefinition, TrapGroup
from test_dao import InMemoryDao
from jnpr.openclos.exception import PodNotFound
def getPodDict():
    """Return a fresh pod-definition dict used as a fixture by the tests."""
    return dict(
        devicePassword="Embe1mpls",
        leafCount=3,
        leafSettings=[{"deviceType": "qfx5100-48s-6q"}],
        spineAS=100,
        spineCount=2,
        spineDeviceType="qfx5100-24q-2p",
        interConnectPrefix="192.168.0.0/24",
        vlanPrefix="172.16.0.0/22",
        topologyType="threeStage",
        loopbackPrefix="10.0.0.0/24",
        leafAS=200,
        managementPrefix="192.168.48.216/24",
        hostOrVmCountPerLeaf=254,
        inventory="inventoryUnitTest.json",
    )
class TestL3Clos(unittest.TestCase):
    """Unit tests for L3ClosMediation.

    Exercises pod creation/update, cabling-plan generation, IP/ASN
    allocation and configlet generation against an in-memory DAO, so no
    real database, file inventory or device is required.
    """

    def setUp(self):
        # Minimal configuration: an 'out' directory next to this test file
        # plus the port layout of the supported device families.
        self._conf = {}
        self._conf['outputDir'] = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'out')
        self._conf['deviceFamily'] = {
            "qfx5100-24q-2p": {
                "ports": 'et-0/0/[0-23]'
            },
            "qfx5100-48s-6q": {
                "uplinkPorts": 'et-0/0/[48-53]',
                "downlinkPorts": 'xe-0/0/[0-47]'
            },
            "ex4300-24p": {
                "uplinkPorts": 'et-0/1/[0-3]',
                "downlinkPorts": 'ge-0/0/[0-23]'
            }
        }
        self._dao = InMemoryDao.getInstance()
        self.l3ClosMediation = L3ClosMediation(self._conf, InMemoryDao)

    def tearDown(self):
        ''' Deletes 'out' folder under test dir'''
        shutil.rmtree(self._conf['outputDir'], ignore_errors=True)
        InMemoryDao._destroy()
        self.l3ClosMediation = None

    def testLoadClosDefinition(self):
        pods = self.l3ClosMediation.loadClosDefinition()
        self.assertEqual(2, len(pods))

    def testLoadNonExistingClosDefinition(self):
        # A missing definition file must not create any Pod rows.
        pods = self.l3ClosMediation.loadClosDefinition('non-existing.yaml')
        self.assertIsNone(pods)
        with self._dao.getReadSession() as session:
            self.assertEqual(0, len(self._dao.getAll(session, Pod)))

    def testCreatePod(self):
        podDict = getPodDict()
        self.l3ClosMediation.createPod('pod1', podDict)
        with self._dao.getReadSession() as session:
            self.assertEqual(1, session.query(Pod).count())

    def testUpdatePod(self):
        # Updating a pod with an explicit inventory should create one Device
        # per inventory entry and preserve each entry's deployStatus.
        podDict = getPodDict()
        pod = self.l3ClosMediation.createPod('pod1', podDict)
        inventoryDict = {
            "spines" : [
                { "name" : "spine-01", "macAddress" : "10:0e:7e:af:35:41", "deployStatus": "deploy" },
                { "name" : "spine-02", "macAddress" : "10:0e:7e:af:50:c1" }
            ],
            "leafs" : [
                { "name" : "leaf-01", "family" : "qfx5100-48s-6q", "macAddress" : "88:e0:f3:1c:d6:01", "deployStatus": "deploy" },
                { "name" : "leaf-02", "family" : "qfx5100-48s-6q", "macAddress" : "10:0e:7e:b8:9d:01" },
                { "name" : "leaf-03", "family" : "ex4300-24p", "macAddress" : "10:0e:7e:b8:9d:01" }
            ]
        }
        self.l3ClosMediation.updatePod(pod.id, podDict, inventoryDict)
        with self._dao.getReadSession() as session:
            pod = session.query(Pod).one()
            self.assertEqual(5, len(pod.devices))
            deployCount = 0
            for device in pod.devices:
                if device.deployStatus == "deploy":
                    deployCount += 1
            self.assertEqual(2, deployCount)

    def testUpdatePodInvalidId(self):
        with self.assertRaises(PodNotFound) as ve:
            self.l3ClosMediation.updatePod("invalid_id", None)

    def createPodSpineLeaf(self):
        # Helper: create a pod from the template dict (3 leaves, 2 spines).
        podDict = getPodDict()
        pod = self.l3ClosMediation.createPod('pod1', podDict)
        return pod

    def testCablingPlanAndDeviceConfig(self):
        self._conf['DOT'] = {'ranksep' : '5 equally', 'colors': ['red', 'green', 'blue']}
        self.l3ClosMediation = L3ClosMediation(self._conf, InMemoryDao)
        pod = self.createPodSpineLeaf()
        self.assertEqual(True, self.l3ClosMediation.createCablingPlan(pod.id))
        self.assertEqual(True, self.l3ClosMediation.createDeviceConfig(pod.id))

    def testCreateLinks(self):
        # Verifies the generated spine<->leaf wiring: spine port et-0/0/N
        # must peer with the uplink port of leaf N.
        pod = self.createPodSpineLeaf()
        # force close current session and get new session to make sure merge and flush took place properly
        podId = pod.id
        with self._dao.getReadSession() as session:
            spine01Port0 = session.query(InterfaceDefinition).join(Device).filter(InterfaceDefinition.name == 'et-0/0/0').filter(Device.name == 'spine-01').filter(Device.pod_id == podId).one()
            self.assertIsNotNone(spine01Port0.peer)
            self.assertEqual('et-0/0/48', spine01Port0.peer.name)
            self.assertEqual('leaf-01', spine01Port0.peer.device.name)
            spine02Port0 = session.query(InterfaceDefinition).join(Device).filter(InterfaceDefinition.name == 'et-0/0/0').filter(Device.name == 'spine-02').filter(Device.pod_id == podId).one()
            self.assertIsNotNone(spine02Port0.peer)
            self.assertEqual('et-0/0/49', spine02Port0.peer.name)
            self.assertEqual('leaf-01', spine02Port0.peer.device.name)
            spine01Port1 = session.query(InterfaceDefinition).join(Device).filter(InterfaceDefinition.name == 'et-0/0/1').filter(Device.name == 'spine-01').filter(Device.pod_id == podId).one()
            self.assertIsNotNone(spine01Port1.peer)
            self.assertEqual('et-0/0/48', spine01Port1.peer.name)
            self.assertEqual('leaf-02', spine01Port1.peer.device.name)
            spine02Port1 = session.query(InterfaceDefinition).join(Device).filter(InterfaceDefinition.name == 'et-0/0/1').filter(Device.name == 'spine-02').filter(Device.pod_id == podId).one()
            self.assertIsNotNone(spine02Port1.peer)
            self.assertEqual('et-0/0/49', spine02Port1.peer.name)
            self.assertEqual('leaf-02', spine02Port1.peer.device.name)
            # leaf-03 has no family-specific ports yet, so generic
            # 'uplink-N' placeholder names are expected.
            spine01Port2 = session.query(InterfaceDefinition).join(Device).filter(InterfaceDefinition.name == 'et-0/0/2').filter(Device.name == 'spine-01').filter(Device.pod_id == podId).one()
            self.assertIsNotNone(spine01Port2.peer)
            self.assertEqual('uplink-0', spine01Port2.peer.name)
            self.assertEqual('leaf-03', spine01Port2.peer.device.name)
            spine02Port2 = session.query(InterfaceDefinition).join(Device).filter(InterfaceDefinition.name == 'et-0/0/2').filter(Device.name == 'spine-02').filter(Device.pod_id == podId).one()
            self.assertIsNotNone(spine02Port2.peer)
            self.assertEqual('uplink-1', spine02Port2.peer.name)
            self.assertEqual('leaf-03', spine02Port2.peer.device.name)

    def testCreateLeafAndIfds(self):
        # One uplink IFD per spine is expected for the new leaf
        # (spineCount=6 -> 6 interfaces).
        with self._dao.getReadSession() as session:
            from test_model import createPod
            pod = createPod('test', session)
            pod.spineCount = 6
            leaves = [{ "name" : "leaf-01", "family" : "ex4300-24p", "macAddress" : "88:e0:f3:1c:d6:01", "deployStatus": "deploy" }]
            self.l3ClosMediation._createLeafAndIfds(session, pod, leaves)
            interfaces = session.query(InterfaceDefinition).all()
            self.assertEqual(6, len(interfaces))

    def testGetLeafSpineFromPod(self):
        self.createPodSpineLeaf()
        with self._dao.getReadSession() as session:
            pod = session.query(Pod).one()
            leafSpineDict = self.l3ClosMediation._getLeafSpineFromPod(pod)
            self.assertEqual(3, len(leafSpineDict['leafs']))
            self.assertEqual(2, len(leafSpineDict['spines']))

    def testAllocateLoopback(self):
        # Loopback /32s are carved sequentially from loopbackPrefix;
        # the pod keeps the covering block (/29 for 5 devices).
        pod = self.createPodSpineLeaf()
        with self._dao.getReadSession() as session:
            ifl = session.query(InterfaceLogical).join(Device).filter(InterfaceLogical.name == 'lo0.0').filter(Device.name == 'leaf-01').filter(Device.pod_id == pod.id).one()
            self.assertEqual('10.0.0.1/32', ifl.ipaddress)
            ifl = session.query(InterfaceLogical).join(Device).filter(InterfaceLogical.name == 'lo0.0').filter(Device.name == 'spine-02').filter(Device.pod_id == pod.id).one()
            self.assertEqual('10.0.0.5/32', ifl.ipaddress)
            self.assertEqual('10.0.0.0/29', pod.allocatedLoopbackBlock)
            self.assertEqual(5, session.query(InterfaceLogical).join(Device).filter(InterfaceLogical.name == 'lo0.0').filter(Device.pod_id == pod.id).count())

    def testAllocateIrb(self):
        # Each leaf gets one /24 IRB subnet out of vlanPrefix.
        pod = self.createPodSpineLeaf()
        with self._dao.getReadSession() as session:
            self.assertEqual('172.16.0.0/22', pod.allocatedIrbBlock)
            ifl = session.query(InterfaceLogical).join(Device).filter(InterfaceLogical.name == 'irb.1').filter(Device.name == 'leaf-01').filter(Device.pod_id == pod.id).one()
            self.assertEqual('172.16.0.1/24', ifl.ipaddress)
            ifl = session.query(InterfaceLogical).join(Device).filter(InterfaceLogical.name == 'irb.1').filter(Device.name == 'leaf-02').filter(Device.pod_id == pod.id).one()
            self.assertEqual('172.16.1.1/24', ifl.ipaddress)

    def testAllocateInterconnect(self):
        # Spine-leaf links get /31 point-to-point addresses, with the IFL
        # stacked on top of the corresponding physical IFD.
        pod = self.createPodSpineLeaf()
        with self._dao.getReadSession() as session:
            ifl = session.query(InterfaceLogical).join(Device).filter(InterfaceLogical.name == 'et-0/0/0.0').filter(Device.name == 'spine-01').filter(Device.pod_id == pod.id).one()
            belowIfd = session.query(InterfaceDefinition).filter(InterfaceDefinition.id == ifl.layer_below_id).one()
            self.assertEqual('et-0/0/0', belowIfd.name)
            self.assertEqual('192.168.0.0/31', ifl.ipaddress)
            ifl = session.query(InterfaceLogical).join(Device).filter(InterfaceLogical.name == 'et-0/0/48.0').filter(Device.name == 'leaf-01').filter(Device.pod_id == pod.id).one()
            belowIfd = session.query(InterfaceDefinition).filter(InterfaceDefinition.id == ifl.layer_below_id).one()
            self.assertEqual('et-0/0/48', belowIfd.name)
            self.assertEqual('192.168.0.1/31', ifl.ipaddress)

    def testAllocateAsNumber(self):
        # Spines number from spineAS (100), leaves from leafAS (200).
        self.createPodSpineLeaf()
        with self._dao.getReadSession() as session:
            self.assertEqual(100, session.query(Device).filter(Device.role == 'spine').all()[0].asn)
            self.assertEqual(201, session.query(Device).filter(Device.role == 'leaf').all()[1].asn)

    def testCreatePolicyOptionSpine(self):
        # Spine policy exports the pod-wide IRB and loopback blocks only.
        self.createPodSpineLeaf()
        with self._dao.getReadSession() as session:
            pod = session.query(Pod).one()
            device = Device("test", "qfx5100-24q-2p", "user", "pwd", "spine", "mac", "mgmtIp", pod)
            device.pod.allocatedIrbBlock = '10.0.0.0/28'
            device.pod.allocatedLoopbackBlock = '11.0.0.0/28'
            configlet = self.l3ClosMediation._createPolicyOption(session, device)
            self.assertTrue('irb_in' not in configlet and '10.0.0.0/28' in configlet)
            self.assertTrue('lo0_in' not in configlet and '11.0.0.0/28' in configlet)
            self.assertTrue('lo0_out' not in configlet)
            self.assertTrue('irb_out' not in configlet)

    def testCreatePolicyOptionLeaf(self):
        # Leaf policy additionally exports its own lo0 address, here mocked
        # to 12.0.0.0/28 via flexmock on the session query chain.
        self.createPodSpineLeaf()
        with self._dao.getReadSession() as session:
            pod = session.query(Pod).one()
            device = Device("test", "qfx5100-48s-6q", "user", "pwd", "leaf", "mac", "mgmtIp", pod)
            device.pod.allocatedIrbBlock = '10.0.0.0/28'
            device.pod.allocatedLoopbackBlock = '11.0.0.0/28'
            mockSession = flexmock(session)
            mockSession.should_receive('query.join.filter.filter.one').and_return(InterfaceLogical("test", device, '12.0.0.0/28'))
            configlet = self.l3ClosMediation._createPolicyOption(session, device)
            self.assertTrue('irb_in' not in configlet and '10.0.0.0/28' in configlet)
            self.assertTrue('lo0_in' not in configlet and '11.0.0.0/28' in configlet)
            self.assertTrue('lo0_out' not in configlet and '12.0.0.0/28' in configlet)
            self.assertTrue('irb_out' not in configlet)

    def testInitWithTemplate(self):
        from jinja2 import TemplateNotFound
        self.assertIsNotNone(self.l3ClosMediation._templateEnv.get_template('protocolBgp.txt'))
        with self.assertRaises(TemplateNotFound) as e:
            self.l3ClosMediation._templateEnv.get_template('unknown-template')
        self.assertTrue('unknown-template' in e.exception.message)

    def createTrapGroupsInDb(self, dao):
        # Helper: store two targets for both known trap groups.
        newtargets = []
        for newtarget in ['1.2.3.4', '1.2.3.5']:
            newtargets.append ( TrapGroup ( 'networkdirector_trap_group', newtarget, int('10162') ) )
            newtargets.append ( TrapGroup ( 'openclos_trap_group', newtarget, 20162 ) )
        with self._dao.getReadWriteSession() as session:
            self._dao.createObjects(session, newtargets)

    def testGetOpenClosTrapGroupSettingsNoStagedZtp(self):
        with self._dao.getReadSession() as session:
            self.assertEqual(0, len(self.l3ClosMediation._getOpenclosTrapGroupSettings(session)))

    def testGetOpenClosTrapGroupSettingsWithStagedZtp(self):
        # With staged ZTP on, trap group settings come from the config file
        # first, then from the DB once TrapGroup rows exist.
        self._conf['deploymentMode'] = {'ztpStaged': True}
        self._conf['snmpTrap'] = {'openclos_trap_group': {'port': 20162, 'target': '1.2.3.4'}}
        self.l3ClosMediation = L3ClosMediation(self._conf, InMemoryDao)
        with self._dao.getReadSession() as session:
            self.assertEqual(1, len(self.l3ClosMediation._getOpenclosTrapGroupSettings(session)))
            self.assertEqual(1, len(self.l3ClosMediation._getOpenclosTrapGroupSettings(session)[0]['targetIp']))
            self.assertEqual('1.2.3.4', self.l3ClosMediation._getOpenclosTrapGroupSettings(session)[0]['targetIp'][0])
        self.createTrapGroupsInDb(self._dao)
        with self._dao.getReadSession() as session:
            self.assertEqual(1, len(self.l3ClosMediation._getOpenclosTrapGroupSettings(session)))
            self.assertEqual(20162, self.l3ClosMediation._getOpenclosTrapGroupSettings(session)[0]['port'])
            self.assertEqual(2, len(self.l3ClosMediation._getOpenclosTrapGroupSettings(session)[0]['targetIp']))

    def testCreateSnmpTrapAndEventSpine(self):
        # Spines never get a trap configlet.
        self._conf['snmpTrap'] = {'openclos_trap_group': {'port': 20162, 'target': '1.2.3.4'}}
        self.l3ClosMediation = L3ClosMediation(self._conf, InMemoryDao)
        self.createTrapGroupsInDb(self._dao)
        device = Device("test", "qfx5100-48s-6q", "user", "pwd", "spine", "mac", "mgmtIp", None)
        with self._dao.getReadSession() as session:
            configlet = self.l3ClosMediation._createSnmpTrapAndEvent(session, device)
            self.assertEqual('', configlet)

    def testCreateSnmpTrapAndEventLeafNoStagedZtp(self):
        self._conf['snmpTrap'] = {'openclos_trap_group': {'port': 20162, 'target': '1.2.3.4'}}
        self.l3ClosMediation = L3ClosMediation(self._conf, InMemoryDao)
        self.createTrapGroupsInDb(self._dao)
        device = Device("test", "qfx5100-48s-6q", "user", "pwd", "leaf", "mac", "mgmtIp", None)
        with self._dao.getReadSession() as session:
            configlet = self.l3ClosMediation._createSnmpTrapAndEvent(session, device)
            self.assertEqual('', configlet)

    def testCreateSnmpTrapAndEventLeafWith2ndStageZtp(self):
        self._conf['deploymentMode'] = {'ztpStaged': True}
        self._conf['snmpTrap'] = {'openclos_trap_group': {'port': 20162, 'target': '1.2.3.4'}}
        self.l3ClosMediation = L3ClosMediation(self._conf, InMemoryDao)
        self.createTrapGroupsInDb(self._dao)
        device = Device("test", "qfx5100-48s-6q", "user", "pwd", "leaf", "mac", "mgmtIp", None)
        with self._dao.getReadSession() as session:
            configlet = self.l3ClosMediation._createSnmpTrapAndEvent(session, device)
            self.assertTrue('' == configlet)

    def testCreateRoutingOptionsStatic(self):
        # Two out-of-band prefixes -> one 'static' stanza with two routes.
        self.createPodSpineLeaf()
        with self._dao.getReadSession() as session:
            pod = session.query(Pod).one()
            device = Device("test", "qfx5100-48s-6q", "user", "pwd", "leaf", "mac", "mgmtIp", pod)
            device.pod.outOfBandGateway = '10.0.0.254'
            device.pod.outOfBandAddressList = '10.0.10.5/32, 10.0.20.5/32'
            configlet = self.l3ClosMediation._createRoutingOptionsStatic(session, device)
            self.assertEquals(1, configlet.count('static'))
            self.assertEquals(2, configlet.count('route'))

    def testCreateAccessInterface(self):
        # qfx5100-48s-6q: 48 xe downlinks + (inherited) 48 ge -> 96 stanzas.
        with self._dao.getReadSession() as session:
            from test_model import createPod
            pod = createPod('test', session)
            device = Device("test", "qfx5100-48s-6q", "user", "pwd", "leaf", "mac", "mgmtIp", pod)
            configlet = self.l3ClosMediation._createAccessPortInterfaces(session, device)
            self.assertEquals(96, configlet.count('family ethernet-switching'))
            self.assertTrue('xe-0/0/0' in configlet)
            self.assertTrue('xe-0/0/47' in configlet)
            self.assertTrue('ge-0/0/0' in configlet)
            self.assertTrue('ge-0/0/47' in configlet)

    def testCreateAccessInterfaceEx4300(self):
        self._conf['deviceFamily']['ex4300-48p'] = {
            "uplinkPorts": 'et-0/0/[48-51]',
            "downlinkPorts": 'ge-0/0/[0-47]'
        }
        self.l3ClosMediation = L3ClosMediation(self._conf, InMemoryDao)
        with self._dao.getReadSession() as session:
            from test_model import createPod
            pod = createPod('test', session)
            device = Device("test", "ex4300-48p", "user", "pwd", "leaf", "mac", "mgmtIp", pod)
            configlet = self.l3ClosMediation._createAccessPortInterfaces(session, device)
            self.assertEquals(48, configlet.count('family ethernet-switching'))
            self.assertTrue('ge-0/0/0' in configlet)
            self.assertTrue('ge-0/0/47' in configlet)

    def testCreateLeafGenericConfig(self):
        # 2-stage ZTP generic leaf config must contain the trap group and
        # the out-of-band static routing stanza.
        self._conf['snmpTrap'] = {'openclos_trap_group': {'port': 20162, 'target': '5.6.7.8'}}
        self._conf['deviceFamily']['ex4300-24p'] = {"uplinkPorts": 'et-0/1/[0-3]', "downlinkPorts": 'ge-0/0/[0-23]'}
        self._conf['deploymentMode'] = {'ztpStaged': True}
        self.l3ClosMediation = L3ClosMediation(self._conf, InMemoryDao)
        self.createPodSpineLeaf()
        with self._dao.getReadSession() as session:
            pod = session.query(Pod).one()
            pod.outOfBandGateway = '10.0.0.254'
            pod.outOfBandAddressList = '10.0.10.5/32'
            leafSettings = self.l3ClosMediation._createLeafGenericConfigsFor2Stage(session, pod)
            self.assertTrue(1, len(leafSettings))
            configlet = leafSettings[0].config
            self.assertTrue('' != configlet)
            #print configlet
            self.assertTrue('trap-group openclos_trap_group' in configlet)
            self.assertEquals(1, configlet.count('static'))
            self.assertEquals(2, configlet.count('route'))

    def testGetSnmpTrapTargets(self):
        with self._dao.getReadSession() as session:
            self.assertEqual(0, len(self.l3ClosMediation._getSnmpTrapTargets(session)))

    def testGetSnmpTrapTargetsWithStagedZtp(self):
        self._conf['snmpTrap'] = {'openclos_trap_group': {'port': 20162, 'target': '5.6.7.8'}}
        self._conf['deploymentMode'] = {'ztpStaged': True}
        self.l3ClosMediation = L3ClosMediation(self._conf, InMemoryDao)
        with self._dao.getReadSession() as session:
            self.assertEqual(1, len(self.l3ClosMediation._getSnmpTrapTargets(session)))
            self.assertEqual('5.6.7.8', self.l3ClosMediation._getSnmpTrapTargets(session)[0])
if __name__ == '__main__':
unittest.main() | apache-2.0 |
jhjguxin/PyCDC | Karrigell-2.3.5/Karrigell_GUI.py | 1 | 2316 | """Karrigell HTTP Server
Written by Pierre Quentel quentel.pierre@wanadoo.fr
Published under the BSD licence. See the file LICENCE.txt
This script launches Karrigell with webservers.SimpleAsyncHTTPServer.Server as web server.
It is built on the asyncore/asynchat framework (non-blocking sockets, use of
the select() function) and partly copied from the medusa web server
References :
- medusa : http://www.amk.ca/python/code/medusa.html for medusa
- Sockets HOWTO on www.python.org
Requests are handled by class RequestHandler (one instance per request)
"""
import webservers.SimpleAsyncHTTPServer
import KarrigellRequestHandler
import k_config
import traceback
import sys
class asyncRequestHandler(webservers.SimpleAsyncHTTPServer.DialogManager,
    KarrigellRequestHandler.KarrigellRequestHandler):
    # Glue class: plugs Karrigell's request handler into the asynchronous
    # (asyncore-based) dialog manager. One instance handles one request.

    def handle_data(self):
        # Delegate request processing to Karrigell's handler once the
        # request data has been fully received.
        KarrigellRequestHandler.KarrigellRequestHandler.handle_data(self)

    def send_error(self, code, message=None):
        # Delegate HTTP error responses to Karrigell's handler.
        KarrigellRequestHandler.KarrigellRequestHandler.send_error(self,code,message)

    def handle_error(self):
        # Print unexpected exceptions to stderr instead of letting
        # asyncore's default handler hide the traceback.
        traceback.print_exc(file=sys.stderr)
# Launch the server
# Bind on all interfaces at the configured port; each request is served by
# an asyncRequestHandler instance.
s=webservers.SimpleAsyncHTTPServer.Server(('',k_config.port),asyncRequestHandler)
print "Karrigell %s running on port %s" %(KarrigellRequestHandler.__version__,k_config.port)
if k_config.debug:
    print "Debug level %s" %k_config.debug
if k_config.silent:
    print "Silent mode"
import thread
# start the server in a different thread
# (the asyncore loop blocks, and the main thread must stay free to run the
# Tkinter GUI below)
thread.start_new_thread(s.loop, ())
# GUI to stop the server and log
from Tkinter import *
from ScrolledText import ScrolledText
import tkFont
class Output:
    """File-like adapter that appends written text to a Tkinter text widget.

    Instances are installed as ``sys.stderr`` so server log output shows up
    in the GUI. Only the last ``maxlines`` lines are kept in the widget to
    bound memory use, and the view is kept scrolled to the bottom.

    NOTE(review): write() is called from the server thread while the widget
    lives in the Tkinter main loop; Tkinter is not thread-safe — confirm
    this is acceptable for this app.
    """

    # Maximum number of lines retained in the widget.
    maxlines = 100

    def __init__(self,textWidget):
        self.textWidget = textWidget

    def write(self,data):
        # Append at the end, trim the oldest lines beyond maxlines, and
        # keep the latest output visible.
        self.textWidget.insert(END,data)
        l = int(self.textWidget.index(END).split('.')[0])
        if l > self.maxlines:
            self.textWidget.delete(1.0,'%s.0' %(l-self.maxlines))
        self.textWidget.see(END)

    def flush(self):
        # File objects used as sys.stderr must provide flush(); callers such
        # as the logging module and interpreter shutdown invoke it. The
        # widget is updated immediately on write(), so this is a no-op.
        pass
def stop_server():
    # Invoked by the "Stop server" button: close all channels held by the
    # asyncore server, then terminate the process. sys.exit() raises
    # SystemExit from within the Tkinter callback, ending the main loop.
    s.close_all()
    sys.exit()
# Build the control window: a stop button plus a scrolling log pane.
root = Tk()
Button(root,text="Stop server",command = stop_server).pack()
tw = ScrolledText(root,width=80,height=40,bg="black",foreground="white",
    font=tkFont.Font(family="courier",size=10,weight="bold"))
tw.pack()
# Redirect stderr into the text pane so tracebacks from the server thread
# appear in the GUI (see Output; written to from another thread).
sys.stderr = Output(tw)
root.mainloop()
40223249-1/-w16b_test | static/Brython3.1.1-20150328-091302/Lib/_abcoll.py | 688 | 5155 | # Copyright 2007 Google, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.
"""Abstract Base Classes (ABCs) for collections, according to PEP 3119.
DON'T USE THIS MODULE DIRECTLY! The classes here should be imported
via collections; they are defined here only to alleviate certain
bootstrapping issues. Unit tests are in test_collections.
"""
from abc import ABCMeta, abstractmethod
import sys
__all__ = ["Hashable", "Iterable", "Iterator",
"Sized", "Container", "Callable",
"Set", "MutableSet",
"Mapping", "MutableMapping",
"MappingView", "KeysView", "ItemsView", "ValuesView",
"Sequence", "MutableSequence",
"ByteString",
]
"""
### collection related types which are not exposed through builtin ###
## iterators ##
#fixme brython
#bytes_iterator = type(iter(b''))
bytes_iterator = type(iter(''))
#fixme brython
#bytearray_iterator = type(iter(bytearray()))
#callable_iterator = ???
dict_keyiterator = type(iter({}.keys()))
dict_valueiterator = type(iter({}.values()))
dict_itemiterator = type(iter({}.items()))
list_iterator = type(iter([]))
list_reverseiterator = type(iter(reversed([])))
range_iterator = type(iter(range(0)))
set_iterator = type(iter(set()))
str_iterator = type(iter(""))
tuple_iterator = type(iter(()))
zip_iterator = type(iter(zip()))
## views ##
dict_keys = type({}.keys())
dict_values = type({}.values())
dict_items = type({}.items())
## misc ##
dict_proxy = type(type.__dict__)
"""
def abstractmethod(self):
    # Brython bootstrap shim: shadows abc.abstractmethod with an identity
    # decorator so the ABCs below can be defined without ABCMeta support.
    return self
### ONE-TRICK PONIES ###
#class Iterable(metaclass=ABCMeta):
# Upstream CPython declares this with metaclass=ABCMeta; omitted here.
class Iterable:
    """ABC for objects usable in a for-loop (define ``__iter__``)."""

    @abstractmethod
    def __iter__(self):
        # Abstract placeholder: an empty generator.
        while False:
            yield None

    @classmethod
    def __subclasshook__(cls, C):
        # Structural check: any class with __iter__ anywhere in its MRO
        # counts as Iterable.
        if cls is Iterable:
            for base in C.__mro__:
                if "__iter__" in base.__dict__:
                    return True
        return NotImplemented
#class Sized(metaclass=ABCMeta):
# Upstream CPython declares this with metaclass=ABCMeta; omitted here.
class Sized:
    """ABC for objects supporting ``len()`` (define ``__len__``)."""

    @abstractmethod
    def __len__(self):
        # Abstract placeholder.
        return 0

    @classmethod
    def __subclasshook__(cls, C):
        # Structural check: any class with __len__ anywhere in its MRO
        # counts as Sized.
        if cls is Sized:
            for base in C.__mro__:
                if "__len__" in base.__dict__:
                    return True
        return NotImplemented
#class Container(metaclass=ABCMeta):
# Upstream CPython declares this with metaclass=ABCMeta; omitted here.
class Container:
    """ABC for objects supporting ``in`` tests (define ``__contains__``)."""

    @abstractmethod
    def __contains__(self, x):
        # Abstract placeholder.
        return False

    @classmethod
    def __subclasshook__(cls, C):
        # Structural check: any class with __contains__ anywhere in its
        # MRO counts as a Container.
        if cls is Container:
            for base in C.__mro__:
                if "__contains__" in base.__dict__:
                    return True
        return NotImplemented
### MAPPINGS ###
class Mapping(Sized, Iterable, Container):
    """Read-only mapping ABC.

    Concrete subclasses must provide __getitem__, __len__ and __iter__;
    the remaining mapping API is derived from those three.
    """

    @abstractmethod
    def __getitem__(self, key):
        raise KeyError

    def get(self, key, default=None):
        # EAFP: a single lookup, falling back to *default* on KeyError.
        try:
            return self[key]
        except KeyError:
            return default

    def __contains__(self, key):
        # Membership is defined by lookup success, not by iteration.
        try:
            self[key]
        except KeyError:
            return False
        else:
            return True

    def keys(self):
        # Dynamic view over the keys (KeysView is defined later in the file).
        return KeysView(self)

    def items(self):
        # Dynamic view over (key, value) pairs.
        return ItemsView(self)

    def values(self):
        # Dynamic view over the values.
        return ValuesView(self)

    def __eq__(self, other):
        # Two mappings are equal iff they have the same key/value pairs;
        # comparison against non-mappings is delegated to the other side.
        if not isinstance(other, Mapping):
            return NotImplemented
        return dict(self.items()) == dict(other.items())

    def __ne__(self, other):
        return not (self == other)
class MutableMapping(Mapping):
    """Mutable mapping ABC.

    Concrete subclasses must additionally provide __setitem__ and
    __delitem__; pop/popitem/clear/update/setdefault are derived.
    """

    @abstractmethod
    def __setitem__(self, key, value):
        raise KeyError

    @abstractmethod
    def __delitem__(self, key):
        raise KeyError

    # Private sentinel so pop() can distinguish "no default supplied"
    # from an explicit default of None.
    __marker = object()

    def pop(self, key, default=__marker):
        # Remove *key* and return its value; return *default* (if given)
        # when the key is missing, otherwise re-raise KeyError.
        try:
            value = self[key]
        except KeyError:
            if default is self.__marker:
                raise
            return default
        else:
            del self[key]
            return value

    def popitem(self):
        # Remove and return an arbitrary (key, value) pair; KeyError when
        # the mapping is empty (StopIteration is translated).
        try:
            key = next(iter(self))
        except StopIteration:
            raise KeyError
        value = self[key]
        del self[key]
        return key, value

    def clear(self):
        # Repeatedly pop until the mapping is empty.
        try:
            while True:
                self.popitem()
        except KeyError:
            pass

    def update(*args, **kwds):
        # *args trick (no explicit 'self') so that a key literally named
        # 'self' can be passed via **kwds without clashing.
        if len(args) > 2:
            raise TypeError("update() takes at most 2 positional "
                            "arguments ({} given)".format(len(args)))
        elif not args:
            raise TypeError("update() takes at least 1 argument (0 given)")
        self = args[0]
        other = args[1] if len(args) >= 2 else ()
        # Accept a Mapping, an object with keys(), or an iterable of pairs.
        if isinstance(other, Mapping):
            for key in other:
                self[key] = other[key]
        elif hasattr(other, "keys"):
            for key in other.keys():
                self[key] = other[key]
        else:
            for key, value in other:
                self[key] = value
        for key, value in kwds.items():
            self[key] = value

    def setdefault(self, key, default=None):
        # Return self[key], inserting *default* first when missing.
        try:
            return self[key]
        except KeyError:
            self[key] = default
        return default
#MutableMapping.register(dict)
| agpl-3.0 |
atruberg/django-custom | django/contrib/gis/geos/linestring.py | 224 | 5607 | from django.contrib.gis.geos.base import numpy
from django.contrib.gis.geos.coordseq import GEOSCoordSeq
from django.contrib.gis.geos.error import GEOSException
from django.contrib.gis.geos.geometry import GEOSGeometry
from django.contrib.gis.geos.point import Point
from django.contrib.gis.geos import prototypes as capi
from django.utils.six.moves import xrange
class LineString(GEOSGeometry):
    # C-level constructor and the minimum number of points for a valid
    # linestring.
    _init_func = capi.create_linestring
    _minlength = 2

    #### Python 'magic' routines ####
    def __init__(self, *args, **kwargs):
        """
        Initializes on the given sequence -- may take lists, tuples, NumPy arrays
        of X,Y pairs, or Point objects. If Point objects are used, ownership is
        _not_ transferred to the LineString object.

        Examples:
         ls = LineString((1, 1), (2, 2))
         ls = LineString([(1, 1), (2, 2)])
         ls = LineString(array([(1, 1), (2, 2)]))
         ls = LineString(Point(1, 1), Point(2, 2))
        """
        # If only one argument provided, set the coords array appropriately
        if len(args) == 1: coords = args[0]
        else: coords = args

        if isinstance(coords, (tuple, list)):
            # Getting the number of coords and the number of dimensions -- which
            #  must stay the same, e.g., no LineString((1, 2), (1, 2, 3)).
            ncoords = len(coords)
            if coords: ndim = len(coords[0])
            else: raise TypeError('Cannot initialize on empty sequence.')
            self._checkdim(ndim)
            # Incrementing through each of the coordinates and verifying
            for i in xrange(1, ncoords):
                if not isinstance(coords[i], (tuple, list, Point)):
                    raise TypeError('each coordinate should be a sequence (list or tuple)')
                if len(coords[i]) != ndim: raise TypeError('Dimension mismatch.')
            numpy_coords = False
        elif numpy and isinstance(coords, numpy.ndarray):
            shape = coords.shape # Using numpy's shape.
            if len(shape) != 2: raise TypeError('Too many dimensions.')
            self._checkdim(shape[1])
            ncoords = shape[0]
            ndim = shape[1]
            numpy_coords = True
        else:
            raise TypeError('Invalid initialization input for LineStrings.')

        # Creating a coordinate sequence object because it is easier to
        # set the points using GEOSCoordSeq.__setitem__().
        cs = GEOSCoordSeq(capi.create_cs(ncoords, ndim), z=bool(ndim==3))

        for i in xrange(ncoords):
            if numpy_coords: cs[i] = coords[i,:]
            elif isinstance(coords[i], Point): cs[i] = coords[i].tuple
            else: cs[i] = coords[i]

        # If SRID was passed in with the keyword arguments
        srid = kwargs.get('srid', None)

        # Calling the base geometry initialization with the returned pointer
        #  from the function.
        super(LineString, self).__init__(self._init_func(cs.ptr), srid=srid)

    def __iter__(self):
        "Allows iteration over this LineString."
        for i in xrange(len(self)):
            yield self[i]

    def __len__(self):
        "Returns the number of points in this LineString."
        return len(self._cs)

    def _get_single_external(self, index):
        # Point access is delegated to the underlying coordinate sequence;
        # the same representation is used internally and externally.
        return self._cs[index]

    _get_single_internal = _get_single_external

    def _set_list(self, length, items):
        # Replace all coordinates at once: build a fresh coordinate
        # sequence, then swap the underlying GEOS geometry pointer.
        ndim = self._cs.dims #
        hasz = self._cs.hasz # I don't understand why these are different
        # create a new coordinate sequence and populate accordingly
        cs = GEOSCoordSeq(capi.create_cs(length, ndim), z=hasz)
        for i, c in enumerate(items):
            cs[i] = c

        ptr = self._init_func(cs.ptr)
        if ptr:
            # Free the old geometry before adopting the new pointer.
            capi.destroy_geom(self.ptr)
            self.ptr = ptr
            self._post_init(self.srid)
        else:
            # can this happen?
            raise GEOSException('Geometry resulting from slice deletion was invalid.')

    def _set_single(self, index, value):
        # Bounds-checked single point assignment.
        self._checkindex(index)
        self._cs[index] = value

    def _checkdim(self, dim):
        # Only 2D and 3D coordinates are supported.
        if dim not in (2, 3): raise TypeError('Dimension mismatch.')

    #### Sequence Properties ####
    @property
    def tuple(self):
        "Returns a tuple version of the geometry from the coordinate sequence."
        return self._cs.tuple
    coords = tuple

    def _listarr(self, func):
        """
        Internal routine that returns a sequence (list) corresponding with
        the given function.  Will return a numpy array if possible.
        """
        lst = [func(i) for i in xrange(len(self))]
        if numpy: return numpy.array(lst) # ARRRR!
        else: return lst

    @property
    def array(self):
        "Returns a numpy array for the LineString."
        return self._listarr(self._cs.__getitem__)

    @property
    def merged(self):
        "Returns the line merge of this LineString."
        return self._topology(capi.geos_linemerge(self.ptr))

    @property
    def x(self):
        "Returns a list or numpy array of the X variable."
        return self._listarr(self._cs.getX)

    @property
    def y(self):
        "Returns a list or numpy array of the Y variable."
        return self._listarr(self._cs.getY)

    @property
    def z(self):
        "Returns a list or numpy array of the Z variable."
        if not self.hasz: return None
        else: return self._listarr(self._cs.getZ)
# LinearRings are LineStrings used within Polygons.
class LinearRing(LineString):
    # A ring must be closed, hence at least 4 points (first == last).
    _minLength = 4
    # Use the GEOS linearring constructor instead of the linestring one.
    _init_func = capi.create_linearring
| bsd-3-clause |
NicolasLM/sauna | sauna/scheduler.py | 1 | 3161 | import time
import fractions
from functools import reduce
from logging import getLogger
logger = getLogger(__name__)
class Scheduler:
    """Runs a set of Jobs, each with its own periodicity, on a common tick.

    The tick is the GCD of all periodicities (the coarsest interval at
    which every job deadline can still be hit exactly); the schedule
    repeats after lcm(periodicities) seconds.
    """

    def __init__(self, jobs):
        """
        Create a new Scheduler.

        >>> s = Scheduler([Job(1, max, 100, 200)])
        >>> for jobs in s:
        ...     time.sleep(s.tick_duration)

        :param jobs: Sequence of jobs to schedule
        """
        # Periodicities are assumed to be integral seconds (see Job).
        # math.gcd replaces fractions.gcd, which was removed in Python 3.9.
        periodicities = {job.periodicity for job in jobs}
        self.tick_duration = reduce(gcd, periodicities)
        self._ticks = self.find_minimum_ticks_required(self.tick_duration,
                                                       periodicities)
        self._jobs = jobs
        self._current_tick = 0
        logger.debug('Scheduler has {} ticks, each one is {} seconds'.
                     format(self._ticks, self.tick_duration))

    @staticmethod
    def find_minimum_ticks_required(tick_duration, periodicities):
        """Return the minimum number of ticks after which the whole
        schedule repeats, i.e. lcm(periodicities) // tick_duration.

        The previous implementation compared a tick count against a
        periodicity in seconds and could over-estimate the cycle (e.g.
        24 instead of 12 for periodicities {4, 6} at a 1 s tick); using
        the least common multiple yields the true minimum.
        """
        cycle_seconds = 1
        for periodicity in periodicities:
            # lcm(a, b) == a * b // gcd(a, b)
            cycle_seconds = cycle_seconds * periodicity // gcd(cycle_seconds,
                                                               periodicity)
        return cycle_seconds // tick_duration

    def __iter__(self):
        return self

    def __next__(self):
        """Return the jobs due on the current tick and execute them."""
        # A job is due when the elapsed time is a multiple of its period.
        jobs = [job for job in self._jobs
                if ((self._current_tick * self.tick_duration)
                    % job.periodicity) == 0
                ]
        if jobs:
            logger.debug('Tick {}, scheduled {}'.
                         format(self._current_tick, jobs))
        # Advance and wrap at the end of the repeating cycle.
        self._current_tick += 1
        if self._current_tick >= self._ticks:
            self._current_tick = 0
        for job in jobs:
            job()
        return jobs

    def run(self):
        """Shorthand for iterating over all jobs forever.

        >>> print_time = lambda: print(time.time())
        >>> s = Scheduler([Job(1, print_time)])
        >>> s.run()
        1470146095.0748773
        1470146096.076028
        """
        for _ in self:
            time.sleep(self.tick_duration)
class Job:
    """A periodic unit of work: a callable plus the arguments to call it
    with, executed by a Scheduler every *periodicity* seconds."""

    def __init__(self, periodicity, func, *func_args, **func_kwargs):
        """
        Create a new Job to be scheduled and run periodically.

        :param periodicity: Number of seconds to wait between job runs
        :param func: callable that perform the job action
        :param func_args: arguments of the callable
        :param func_kwargs: keyword arguments of the callable
        """
        if not callable(func):
            raise ValueError('func attribute must be callable')
        self.func = func
        self.func_args = func_args
        self.func_kwargs = func_kwargs
        self.periodicity = periodicity

    def __repr__(self):
        # Fall back to 'unknown' for callables without a __name__
        # (e.g. instances implementing __call__).
        label = getattr(self.func, '__name__', 'unknown')
        return '<Job {} every {} seconds>'.format(label, self.periodicity)

    def __call__(self, *args, **kwargs):
        # Call-time arguments are deliberately ignored: the job always runs
        # with the arguments captured at construction time.
        self.func(*self.func_args, **self.func_kwargs)
| bsd-2-clause |
tareqalayan/ansible | lib/ansible/modules/storage/netapp/na_cdot_volume.py | 23 | 15029 | #!/usr/bin/python
# (c) 2017, NetApp, Inc
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
module: na_cdot_volume
short_description: Manage NetApp cDOT volumes
extends_documentation_fragment:
- netapp.ontap
version_added: '2.3'
author: Sumit Kumar (sumit4@netapp.com)
description:
- Create or destroy volumes on NetApp cDOT
options:
state:
description:
- Whether the specified volume should exist or not.
required: true
choices: ['present', 'absent']
name:
description:
- The name of the volume to manage.
required: true
infinite:
description:
- Set True if the volume is an Infinite Volume.
type: bool
default: 'no'
online:
description:
- Whether the specified volume is online, or not.
type: bool
default: 'yes'
aggregate_name:
description:
- The name of the aggregate the flexvol should exist on. Required when C(state=present).
size:
description:
- The size of the volume in (size_unit). Required when C(state=present).
size_unit:
description:
- The unit used to interpret the size parameter.
choices: ['bytes', 'b', 'kb', 'mb', 'gb', 'tb', 'pb', 'eb', 'zb', 'yb']
default: 'gb'
vserver:
description:
- Name of the vserver to use.
required: true
junction_path:
description:
- Junction path where to mount the volume
required: false
version_added: '2.6'
export_policy:
description:
- Export policy to set for the specified junction path.
required: false
default: default
version_added: '2.6'
snapshot_policy:
description:
- Snapshot policy to set for the specified volume.
required: false
default: default
version_added: '2.6'
'''
EXAMPLES = """
- name: Create FlexVol
na_cdot_volume:
state: present
name: ansibleVolume
infinite: False
aggregate_name: aggr1
size: 20
size_unit: mb
vserver: ansibleVServer
hostname: "{{ netapp_hostname }}"
username: "{{ netapp_username }}"
password: "{{ netapp_password }}"
junction_path: /ansibleVolume
export_policy: all_nfs_networks
snapshot_policy: daily
- name: Make FlexVol offline
na_cdot_volume:
state: present
name: ansibleVolume
infinite: False
online: False
vserver: ansibleVServer
hostname: "{{ netapp_hostname }}"
username: "{{ netapp_username }}"
password: "{{ netapp_password }}"
"""
RETURN = """
"""
import traceback
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_native
import ansible.module_utils.netapp as netapp_utils
HAS_NETAPP_LIB = netapp_utils.has_netapp_lib()
class NetAppCDOTVolume(object):
    # Ansible module implementation that creates, resizes, (de)activates and
    # deletes FlexVols / Infinite Volumes on a NetApp clustered Data ONTAP
    # system through the ZAPI wrappers provided by netapp_utils.

    def __init__(self):
        # Multipliers used to convert the user-supplied size/size_unit pair
        # into bytes, which is what the ZAPI size fields expect.
        self._size_unit_map = dict(
            bytes=1,
            b=1,
            kb=1024,
            mb=1024 ** 2,
            gb=1024 ** 3,
            tb=1024 ** 4,
            pb=1024 ** 5,
            eb=1024 ** 6,
            zb=1024 ** 7,
            yb=1024 ** 8
        )
        # Start from the standard ONTAP connection options (hostname,
        # username, password, ...) and add this module's own parameters.
        self.argument_spec = netapp_utils.ontap_sf_host_argument_spec()
        self.argument_spec.update(dict(
            state=dict(required=True, choices=['present', 'absent']),
            name=dict(required=True, type='str'),
            is_infinite=dict(required=False, type='bool', default=False, aliases=['infinite']),
            is_online=dict(required=False, type='bool', default=True, aliases=['online']),
            size=dict(type='int'),
            size_unit=dict(default='gb',
                           choices=['bytes', 'b', 'kb', 'mb', 'gb', 'tb',
                                    'pb', 'eb', 'zb', 'yb'], type='str'),
            aggregate_name=dict(type='str'),
            # NOTE(review): default=None on a required option is redundant and
            # is flagged by Ansible's argument-spec sanity checks.
            vserver=dict(required=True, type='str', default=None),
            junction_path=dict(required=False, type='str', default=None),
            export_policy=dict(required=False, type='str', default='default'),
            snapshot_policy=dict(required=False, type='str', default='default'),
        ))
        self.module = AnsibleModule(
            argument_spec=self.argument_spec,
            # aggregate_name and size are only meaningful (and mandatory)
            # when the volume is being created.
            required_if=[
                ('state', 'present', ['aggregate_name', 'size'])
            ],
            supports_check_mode=True
        )
        p = self.module.params
        # set up state variables
        self.state = p['state']
        self.name = p['name']
        self.is_infinite = p['is_infinite']
        self.is_online = p['is_online']
        self.size_unit = p['size_unit']
        self.vserver = p['vserver']
        self.junction_path = p['junction_path']
        self.export_policy = p['export_policy']
        self.snapshot_policy = p['snapshot_policy']
        # Normalize the requested size to bytes; None means "not supplied"
        # (allowed when state=absent).
        if p['size'] is not None:
            self.size = p['size'] * self._size_unit_map[self.size_unit]
        else:
            self.size = None
        self.aggregate_name = p['aggregate_name']

        if HAS_NETAPP_LIB is False:
            self.module.fail_json(msg="the python NetApp-Lib module is required")
        else:
            # All subsequent ZAPI calls go against the requested vserver.
            self.server = netapp_utils.setup_ontap_zapi(module=self.module, vserver=self.vserver)

    def get_volume(self):
        """
        Return details about the volume
        :param:
            name : Name of the volume

        :return: Details about the volume. None if not found.
        :rtype: dict
        """
        # Build a volume-get-iter request whose query matches on name only.
        volume_info = netapp_utils.zapi.NaElement('volume-get-iter')
        volume_attributes = netapp_utils.zapi.NaElement('volume-attributes')
        volume_id_attributes = netapp_utils.zapi.NaElement('volume-id-attributes')
        volume_id_attributes.add_new_child('name', self.name)
        volume_attributes.add_child_elem(volume_id_attributes)

        query = netapp_utils.zapi.NaElement('query')
        query.add_child_elem(volume_attributes)
        volume_info.add_child_elem(query)

        result = self.server.invoke_successfully(volume_info, True)

        return_value = None

        # Only the first matching record is inspected.
        if result.get_child_by_name('num-records') and \
                int(result.get_child_content('num-records')) >= 1:

            volume_attributes = result.get_child_by_name(
                'attributes-list').get_child_by_name(
                'volume-attributes')
            # Get volume's current size
            volume_space_attributes = volume_attributes.get_child_by_name(
                'volume-space-attributes')
            current_size = volume_space_attributes.get_child_content('size')

            # Get volume's state (online/offline)
            volume_state_attributes = volume_attributes.get_child_by_name(
                'volume-state-attributes')
            current_state = volume_state_attributes.get_child_content('state')
            # is_online stays None for any other state, which callers treat
            # as "unknown" and skip the online/offline reconciliation.
            is_online = None
            if current_state == "online":
                is_online = True
            elif current_state == "offline":
                is_online = False
            return_value = {
                'name': self.name,
                'size': current_size,
                'is_online': is_online,
            }

        return return_value
def create_volume(self):
create_parameters = {'volume': self.name,
'containing-aggr-name': self.aggregate_name,
'size': str(self.size),
}
if self.junction_path:
create_parameters['junction-path'] = str(self.junction_path)
if self.export_policy != 'default':
create_parameters['export-policy'] = str(self.export_policy)
if self.snapshot_policy != 'default':
create_parameters['snapshot-policy'] = str(self.snapshot_policy)
volume_create = netapp_utils.zapi.NaElement.create_node_with_children(
'volume-create', **create_parameters)
try:
self.server.invoke_successfully(volume_create,
enable_tunneling=True)
except netapp_utils.zapi.NaApiError as e:
self.module.fail_json(msg='Error provisioning volume %s of size %s: %s' % (self.name, self.size, to_native(e)),
exception=traceback.format_exc())
def delete_volume(self):
if self.is_infinite:
volume_delete = netapp_utils.zapi.NaElement.create_node_with_children(
'volume-destroy-async', **{'volume-name': self.name})
else:
volume_delete = netapp_utils.zapi.NaElement.create_node_with_children(
'volume-destroy', **{'name': self.name, 'unmount-and-offline':
'true'})
try:
self.server.invoke_successfully(volume_delete,
enable_tunneling=True)
except netapp_utils.zapi.NaApiError as e:
self.module.fail_json(msg='Error deleting volume %s: %s' % (self.name, to_native(e)),
exception=traceback.format_exc())
def rename_volume(self):
"""
Rename the volume.
Note: 'is_infinite' needs to be set to True in order to rename an
Infinite Volume.
"""
if self.is_infinite:
volume_rename = netapp_utils.zapi.NaElement.create_node_with_children(
'volume-rename-async',
**{'volume-name': self.name, 'new-volume-name': str(
self.name)})
else:
volume_rename = netapp_utils.zapi.NaElement.create_node_with_children(
'volume-rename', **{'volume': self.name, 'new-volume-name': str(
self.name)})
try:
self.server.invoke_successfully(volume_rename,
enable_tunneling=True)
except netapp_utils.zapi.NaApiError as e:
self.module.fail_json(msg='Error renaming volume %s: %s' % (self.name, to_native(e)),
exception=traceback.format_exc())
def resize_volume(self):
"""
Re-size the volume.
Note: 'is_infinite' needs to be set to True in order to rename an
Infinite Volume.
"""
if self.is_infinite:
volume_resize = netapp_utils.zapi.NaElement.create_node_with_children(
'volume-size-async',
**{'volume-name': self.name, 'new-size': str(
self.size)})
else:
volume_resize = netapp_utils.zapi.NaElement.create_node_with_children(
'volume-size', **{'volume': self.name, 'new-size': str(
self.size)})
try:
self.server.invoke_successfully(volume_resize,
enable_tunneling=True)
except netapp_utils.zapi.NaApiError as e:
self.module.fail_json(msg='Error re-sizing volume %s: %s' % (self.name, to_native(e)),
exception=traceback.format_exc())
def change_volume_state(self):
"""
Change volume's state (offline/online).
Note: 'is_infinite' needs to be set to True in order to change the
state of an Infinite Volume.
"""
state_requested = None
if self.is_online:
# Requested state is 'online'.
state_requested = "online"
if self.is_infinite:
volume_change_state = netapp_utils.zapi.NaElement.create_node_with_children(
'volume-online-async',
**{'volume-name': self.name})
else:
volume_change_state = netapp_utils.zapi.NaElement.create_node_with_children(
'volume-online',
**{'name': self.name})
else:
# Requested state is 'offline'.
state_requested = "offline"
if self.is_infinite:
volume_change_state = netapp_utils.zapi.NaElement.create_node_with_children(
'volume-offline-async',
**{'volume-name': self.name})
else:
volume_change_state = netapp_utils.zapi.NaElement.create_node_with_children(
'volume-offline',
**{'name': self.name})
try:
self.server.invoke_successfully(volume_change_state,
enable_tunneling=True)
except netapp_utils.zapi.NaApiError as e:
self.module.fail_json(msg='Error changing the state of volume %s to %s: %s' %
(self.name, state_requested, to_native(e)),
exception=traceback.format_exc())
    def apply(self):
        # Reconcile the actual volume state with the requested one.  All
        # decisions are made first; mutating ZAPI calls happen afterwards and
        # are skipped entirely in check mode.
        changed = False
        volume_exists = False
        # NOTE(review): rename_volume is never set to True anywhere in this
        # method, so the rename branch near the bottom is currently dead code.
        rename_volume = False
        resize_volume = False
        volume_detail = self.get_volume()
        if volume_detail:
            volume_exists = True
            if self.state == 'absent':
                changed = True
            elif self.state == 'present':
                # get_volume() reports the size as a string of bytes; compare
                # both sides as strings to sidestep type mismatches.
                if str(volume_detail['size']) != str(self.size):
                    resize_volume = True
                    changed = True
                if (volume_detail['is_online'] is not None) and (volume_detail['is_online'] != self.is_online):
                    changed = True
                    if self.is_online is False:
                        # Volume is online, but requested state is offline
                        pass
                    else:
                        # Volume is offline but requested state is online
                        pass
        else:
            if self.state == 'present':
                changed = True
        if changed:
            if self.module.check_mode:
                # Check mode: report the pending change without applying it.
                pass
            else:
                if self.state == 'present':
                    if not volume_exists:
                        self.create_volume()
                    else:
                        # Resize, then state change, then (dead) rename.
                        if resize_volume:
                            self.resize_volume()
                        if volume_detail['is_online'] is not \
                                None and volume_detail['is_online'] != \
                                self.is_online:
                            self.change_volume_state()
                        # Ensure re-naming is the last change made.
                        if rename_volume:
                            self.rename_volume()
                elif self.state == 'absent':
                    self.delete_volume()
        self.module.exit_json(changed=changed)
def main():
    """Module entry point: build the volume manager and reconcile state."""
    volume_manager = NetAppCDOTVolume()
    volume_manager.apply()


if __name__ == '__main__':
    main()
| gpl-3.0 |
vitorio/pygrow | grow/deployments/stats/stats.py | 1 | 1214 | import collections
import os
from protorpc import protojson
from . import messages
class Stats(object):
    """Summary statistics (file counts, locales, translations) for a pod."""

    def __init__(self, pod, paths_to_contents=None):
        self.pod = pod
        # Exporting a pod renders every file and is expensive, so only do it
        # when the caller did not hand us a precomputed path -> content map.
        if paths_to_contents is None:
            paths_to_contents = pod.export()
        self.paths_to_contents = paths_to_contents

    def get_num_files_per_type(self):
        """Return one FileCountMessage per file extension in the export."""
        file_counts = collections.defaultdict(int)
        # Iterate the mapping directly instead of materializing .keys().
        for path in self.paths_to_contents:
            ext = os.path.splitext(path)[-1]
            file_counts[ext] += 1
        return [messages.FileCountMessage(ext=ext, count=count)
                for ext, count in file_counts.iteritems()]

    def to_message(self):
        """Assemble the full StatsMessage describing this pod."""
        message = messages.StatsMessage()
        message.num_collections = len(self.pod.list_collections())
        message.num_files_per_type = self.get_num_files_per_type()
        message.locales = [str(locale) for locale in self.pod.list_locales()]
        message.langs = self.pod.catalogs.list_locales()
        # num_messages counts entries in the template catalog, or 0 when the
        # pod has no translation catalogs at all.
        if self.pod.catalogs.exists:
            catalog = self.pod.catalogs.get_template()
            message.num_messages = len(catalog)
        else:
            message.num_messages = 0
        return message

    def to_string(self):
        """Serialize the stats message to JSON via protorpc's protojson."""
        return protojson.encode_message(self.to_message())
| mit |
ridfrustum/lettuce | tests/integration/lib/Django-1.2.5/tests/regressiontests/decorators/tests.py | 39 | 5210 | from unittest import TestCase
from sys import version_info
try:
from functools import wraps
except ImportError:
from django.utils.functional import wraps # Python 2.4 fallback.
from django.http import HttpResponse, HttpRequest
from django.utils.functional import allow_lazy, lazy, memoize
from django.views.decorators.http import require_http_methods, require_GET, require_POST
from django.views.decorators.vary import vary_on_headers, vary_on_cookie
from django.views.decorators.cache import cache_page, never_cache, cache_control
from django.utils.decorators import method_decorator
from django.contrib.auth.decorators import login_required, permission_required, user_passes_test
from django.contrib.admin.views.decorators import staff_member_required
# Test fixture: a trivial view wrapped by (nearly) every decorator Django
# ships, so DecoratorsTest can verify that __name__, __doc__ and __dict__
# survive the full decorator stack.
def fully_decorated(request):
    """Expected __doc__"""
    return HttpResponse('<html><body>dummy</body></html>')
fully_decorated.anything = "Expected __dict__"

# django.views.decorators.http
fully_decorated = require_http_methods(["GET"])(fully_decorated)
fully_decorated = require_GET(fully_decorated)
fully_decorated = require_POST(fully_decorated)

# django.views.decorators.vary
fully_decorated = vary_on_headers('Accept-language')(fully_decorated)
fully_decorated = vary_on_cookie(fully_decorated)

# django.views.decorators.cache
fully_decorated = cache_page(60*15)(fully_decorated)
fully_decorated = cache_control(private=True)(fully_decorated)
fully_decorated = never_cache(fully_decorated)

# django.contrib.auth.decorators
# Apply user_passes_test twice to check #9474
fully_decorated = user_passes_test(lambda u:True)(fully_decorated)
fully_decorated = login_required(fully_decorated)
fully_decorated = permission_required('change_world')(fully_decorated)

# django.contrib.admin.views.decorators
fully_decorated = staff_member_required(fully_decorated)

# django.utils.functional
fully_decorated = memoize(fully_decorated, {}, 1)
fully_decorated = allow_lazy(fully_decorated)
fully_decorated = lazy(fully_decorated)
class DecoratorsTest(TestCase):

    def test_attributes(self):
        """
        Tests that django decorators set certain attributes of the wrapped
        function.
        """
        # Only check __name__ on Python 2.4 or later since __name__ can't be
        # assigned to in earlier Python versions.
        # BUG FIX: the previous guard `version_info[0] >= 2 and
        # version_info[1] >= 4` compared major and minor independently, so a
        # hypothetical 3.0 (minor 0) was wrongly excluded; tuple comparison
        # expresses ">= 2.4" correctly.  Also assertEquals -> assertEqual
        # (the former is a deprecated alias).
        if version_info >= (2, 4):
            self.assertEqual(fully_decorated.__name__, 'fully_decorated')
        self.assertEqual(fully_decorated.__doc__, 'Expected __doc__')
        self.assertEqual(fully_decorated.__dict__['anything'], 'Expected __dict__')

    def test_user_passes_test_composition(self):
        """
        Test that the user_passes_test decorator can be applied multiple times
        (#9474).
        """
        def test1(user):
            user.decorators_applied.append('test1')
            return True

        def test2(user):
            user.decorators_applied.append('test2')
            return True

        def callback(request):
            return request.user.decorators_applied

        # Decorators run outermost-first, so test2 fires before test1.
        callback = user_passes_test(test1)(callback)
        callback = user_passes_test(test2)(callback)

        class DummyUser(object): pass
        class DummyRequest(object): pass

        request = DummyRequest()
        request.user = DummyUser()
        request.user.decorators_applied = []
        response = callback(request)

        self.assertEqual(response, ['test2', 'test1'])

    def test_cache_page_new_style(self):
        """
        Test that we can call cache_page the new way
        """
        def my_view(request):
            return "response"
        my_view_cached = cache_page(123)(my_view)
        self.assertEqual(my_view_cached(HttpRequest()), "response")
        my_view_cached2 = cache_page(123, key_prefix="test")(my_view)
        self.assertEqual(my_view_cached2(HttpRequest()), "response")

    def test_cache_page_old_style(self):
        """
        Test that we can call cache_page the old way
        """
        def my_view(request):
            return "response"
        # Every legacy calling convention must keep working.
        my_view_cached = cache_page(my_view, 123)
        self.assertEqual(my_view_cached(HttpRequest()), "response")
        my_view_cached2 = cache_page(my_view, 123, key_prefix="test")
        self.assertEqual(my_view_cached2(HttpRequest()), "response")
        my_view_cached3 = cache_page(my_view)
        self.assertEqual(my_view_cached3(HttpRequest()), "response")
        my_view_cached4 = cache_page()(my_view)
        self.assertEqual(my_view_cached4(HttpRequest()), "response")
# For testing method_decorator, a decorator that assumes a single argument.
# We will get type arguments if there is a mismatch in the number of arguments.
def simple_dec(func):
    """Decorator: call *func* with its single argument prefixed by "test:"."""
    @wraps(func)
    def wrapper(arg):
        return func("test:" + arg)
    return wrapper
# Method-decorator wrapper around simple_dec; exercised by MethodDecoratorTests.
simple_dec_m = method_decorator(simple_dec)
class MethodDecoratorTests(TestCase):
    """
    Tests for method_decorator
    """
    def test_method_decorator(self):
        class Test(object):
            @simple_dec_m
            def say(self, arg):
                return arg

        # method_decorator must handle `self` transparently, so the plain
        # single-argument decorator applies cleanly to a bound method.
        instance = Test()
        self.assertEqual("test:hello", instance.say("hello"))
| gpl-3.0 |
VFoteinos/e-science | tests/web/ClusterTest.py | 5 | 8011 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
unittest class used in Selenium tests. Every test_cluster_* inherits methods
from ClusterTest.
@author: Ioannis Stenos, Nick Vrionis
'''
from selenium import webdriver
import sys, os
from os.path import join, dirname, abspath
sys.path.append(join(dirname(abspath(__file__)), '../../webapp'))
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "backend.settings")
from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support.ui import Select
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.common.exceptions import NoSuchElementException
from selenium.common.exceptions import NoAlertPresentException
from ConfigParser import RawConfigParser, NoSectionError
from backend.okeanos_utils import check_quota, check_credentials
from random import randint
import unittest, time, re
# Repository root, resolved relative to this test file.
BASE_DIR = join(dirname(abspath(__file__)), "../..")


class ClusterTest(unittest.TestCase):
    '''
    setUp method is common for all test_cluster tests.
    Defines the path to okeanos token and the base url for
    the selenium test.
    '''
    def setUp(self):
        # Fresh browser per test; generous implicit wait because the
        # deployment under test can be slow to render.
        self.driver = webdriver.Firefox()
        self.driver.implicitly_wait(30)
        self.verificationErrors = []
        self.accept_next_alert = True
        parser = RawConfigParser()
        # Credentials are kept out of source control in .private/.config.txt.
        config_file = join(BASE_DIR, '.private/.config.txt')
        self.name = 'testcluster'
        parser.read(config_file)
        try:
            self.token = parser.get('cloud \"~okeanos\"', 'token')
            self.auth_url = parser.get('cloud \"~okeanos\"', 'url')
            self.base_url = parser.get('deploy', 'url')
            self.project_name = parser.get('project', 'name')
            auth = check_credentials(self.token)
            try:
                list_of_projects = auth.get_projects(state='active')
            except Exception:
                self.assertTrue(False,'Could not get list of projects')
            # Resolve the configured project name to its ~okeanos id.
            # NOTE(review): if no project matches, self.project_id is never
            # assigned and login() will later fail with AttributeError.
            for project in list_of_projects:
                if project['name'] == self.project_name:
                    self.project_id = project['id']
        except NoSectionError:
            # Missing config file/section: fall back to sentinel values so
            # the test fails with a readable message rather than a parser
            # traceback.
            self.token = 'INVALID_TOKEN'
            self.auth_url = "INVALID_AUTH_URL"
            self.base_url = "INVALID_APP_URL"
            self.project_name = "INVALID_PROJECT_NAME"
            print 'Current authentication details are kept off source control. ' \
                  '\nUpdate your .config.txt file in <projectroot>/.private/'

    def login(self):
        '''Method used for login by all test_cluster tests'''
        driver = self.driver
        driver.get(self.base_url + "#/homepage")
        driver.find_element_by_id("id_login").click()
        # Poll up to 30s for the token entry page to appear.
        for i in range(30):
            try:
                if "~Okeanos Token" == driver.find_element_by_css_selector("h2").text: break
            except: pass
            time.sleep(1)
        else: self.fail("time out")
        driver.find_element_by_id("token").clear()
        driver.find_element_by_id("token").send_keys(self.token)
        driver.find_element_by_xpath("//button[@type='login']").click()
        if (self.is_element_present(By.XPATH, "//div[@id='id_alert_wrongtoken']/strong") == True):
            self.assertTrue(False,'Invalid token')
        # Wait for the welcome page, then navigate to cluster creation.
        try:
            element = WebDriverWait(driver, 30).until(
                EC.presence_of_element_located((By.ID, "id_title_user_welcome_route"))
            )
        except: self.fail("time out")
        driver.find_element_by_id("id_services_dd").click()
        driver.find_element_by_id("id_create_cluster").click()
        try:
            element = WebDriverWait(driver, 30).until(
                EC.presence_of_element_located((By.ID, "id_title_cluster_create_route"))
            )
        except: self.fail("time out")
        # NOTE(review): user_quota is computed but never used in this method.
        user_quota = check_quota(self.token, self.project_id)
        # Select the configured project from the dropdown.
        # NOTE(review): 'list' shadows the builtin; rename on next touch.
        list = Select(driver.find_element_by_id("project_id")).options
        no_project = True
        for index in range(0,len(list)):
            if re.match(self.project_name, list[index].text):
                Select(driver.find_element_by_id("project_id")).select_by_visible_text(list[index].text)
                no_project = False
                break
        if no_project:
            self.assertTrue(False,'No project found with given project name')
        driver.find_element_by_id("cluster_name").clear()
        # Random suffix keeps concurrently created test clusters distinct.
        cluster_name = 'test_cluster' + str(randint(0,9999))
        driver.find_element_by_id("cluster_name").send_keys(cluster_name)
        hadoop_image = 'Hadoop-2.5.2'
        Select(driver.find_element_by_id("os_systems")).select_by_visible_text(hadoop_image)
        return driver
def is_element_present(self, how, what):
try: self.driver.find_element(by=how, value=what)
except NoSuchElementException, e: return False
return True
def is_alert_present(self):
try: self.driver.switch_to_alert()
except NoAlertPresentException, e: return False
return True
def close_alert_and_get_its_text(self):
try:
alert = self.driver.switch_to_alert()
alert_text = alert.text
if self.accept_next_alert:
alert.accept()
else:
alert.dismiss()
return alert_text
finally: self.accept_next_alert = True
    def tearDown(self):
        # Always release the browser, then surface any verification errors
        # queued by helper assertions during the test body.
        self.driver.quit()
        self.assertEqual([], self.verificationErrors)
def calculate_cluster_resources(self, resource_list, available_resource):
'''
Method used by test_cluster_cpu, test_cluster_memory and
test_cluster_disk to calculate the resources needed to be binded
during each test in cluster/create screen. Those resources are
the buttons pressed by selenium test and are not really binded
in ~okeanos. The result is that much less real resources are binded
in ~okeanos for the tests.
'''
avail = available_resource
cluster_size = 0
vms = []
# Create a vms list with values the combinations of resource size
# and number of vms we will bind in selenium with a particular,
# resource flavor e.g for test_cluster_disk vms = [{100:2}, {80:1}]
# means 2 vms with 100 disk size each and 1 vm with 80 disk size.
for resource in reversed(resource_list):
if (available_resource/resource) >= 1:
vms.append({resource: available_resource/resource})
if available_resource%resource ==0:
break
available_resource = available_resource - resource*(available_resource/resource)
# If the vms list has two or more elements
if len(vms) >= 2:
# Find the remaining resource that we will bind in ~okeanos for the test.
remaining_resource = avail - vms[0].values()[0] *vms[0].keys()[0] - vms[1].values()[0] * vms[1].keys()[0]
# Calculate the cluster_size we will use as input in selenium
cluster_size = vms[0].values()[0] + vms[1].values()[0]
# Select the buttons selenium will press
# in create_cluster screen
slave = str(resource_list.index(vms[0].keys()[0]) + 1)
master = str(resource_list.index(vms[1].keys()[0])+ 1)
# If the vms list has zero elements
elif len(vms) == 0:
raise RuntimeError
# If the vms list has only one element
else:
remaining_resource = 0
cluster_size = cluster_size + vms[0].values()[0]
slave = str(resource_list.index(vms[0].keys()[0]) + 1)
master = str(resource_list.index(vms[0].keys()[0]) + 1)
return cluster_size, master, slave, remaining_resource
| agpl-3.0 |
VShangxiao/tornado | demos/benchmark/gen_benchmark.py | 99 | 1189 | #!/usr/bin/env python
#
# A simple benchmark of the tornado.gen module.
# Runs in two modes, testing new-style (@coroutine and Futures)
# and old-style (@engine and Tasks) coroutines.
from timeit import Timer
from tornado import gen
from tornado.options import options, define, parse_command_line
define('num', default=10000, help='number of iterations')

# These benchmarks are delicate. They hit various fast-paths in the gen
# machinery in order to stay synchronous so we don't need an IOLoop.
# This removes noise from the results, but it's easy to change things
# in a way that completely invalidates the results.

# Old-style callback coroutines: e1 drives e2 ten times via gen.Task.
@gen.engine
def e2(callback):
    callback()

@gen.engine
def e1():
    for i in range(10):
        yield gen.Task(e2)

# New-style Future-based coroutines: c1 awaits c2 ten times directly.
@gen.coroutine
def c2():
    pass

@gen.coroutine
def c1():
    for i in range(10):
        yield c2()
def main():
    """Parse CLI flags, then time both coroutine styles and report the
    average cost per iteration in milliseconds."""
    parse_command_line()

    def report(label_fmt, func):
        # Average seconds per call over options.num runs.
        per_call = Timer(func).timeit(options.num) / options.num
        print(label_fmt % (per_call * 1000))

    report('engine: %0.3f ms per iteration', e1)
    report('coroutine: %0.3f ms per iteration', c1)

if __name__ == '__main__':
    main()
| apache-2.0 |
m4ll0k/Infoga | recon/google.py | 1 | 1041 | #!/usr/bin/env python
# -*- coding:utf-8 -*-
#
# @name : Infoga - Email Information Gathering
# @url : http://github.com/m4ll0k
# @author : Momo Outaadi (m4ll0k)
from lib.output import *
from lib.request import *
from lib.parser import *
class Google(Request):
    """Scrape Google web search for e-mail addresses on the target domain."""

    def __init__(self, target):
        Request.__init__(self)
        # Domain whose mailboxes are being hunted, e.g. "example.com".
        self.target = target

    def search(self):
        """Query Google and return a de-duplicated list of harvested e-mails."""
        test('Searching "%s" in Google...'%(self.target))
        base_url = 'https://www.google.com/search?q=intext:%22%40{target}%22&num=50'.format(
            target=self.target)
        mails = []
        # First 350 results (page 0 to 6)
        for page in range(0, 7):
            # BUG FIX: Google's "start" parameter is a result *offset*, not a
            # page index.  With num=50 results per page the offsets must be
            # 0, 50, ..., 300; the old code sent start=0..6 and re-fetched
            # almost the same first page seven times.
            url = base_url + "&start=" + str(page * 50)
            try:
                resp = self.send(
                    method = 'GET',
                    url = url
                )
                # Google throttles scrapers; stop as soon as we are blocked.
                if "detected unusual traffic" in resp.text:
                    break
                for email in self.getemail(resp.content, self.target):
                    if email not in mails:
                        mails.append(email)
            except Exception:
                # Best effort: a failed page must not abort the whole scan,
                # but KeyboardInterrupt/SystemExit are no longer swallowed.
                pass
        return mails

    def getemail(self, content, target):
        """Delegate raw-HTML e-mail extraction to the shared parser."""
        return parser(content, target).email()
| gpl-3.0 |
efortuna/AndroidSDKClone | ndk_experimental/prebuilt/linux-x86_64/lib/python2.7/lib-tk/Tkinter.py | 43 | 157840 | """Wrapper functions for Tcl/Tk.
Tkinter provides classes which allow the display, positioning and
control of widgets. Toplevel widgets are Tk and Toplevel. Other
widgets are Frame, Label, Entry, Text, Canvas, Button, Radiobutton,
Checkbutton, Scale, Listbox, Scrollbar, OptionMenu, Spinbox
LabelFrame and PanedWindow.
Properties of the widgets are specified with keyword arguments.
Keyword arguments have the same name as the corresponding resource
under Tk.
Widgets are positioned with one of the geometry managers Place, Pack
or Grid. These managers can be called with methods place, pack, grid
available in every Widget.
Actions are bound to events by resources (e.g. keyword argument
command) or with the method bind.
Example (Hello, World):
import Tkinter
from Tkconstants import *
tk = Tkinter.Tk()
frame = Tkinter.Frame(tk, relief=RIDGE, borderwidth=2)
frame.pack(fill=BOTH,expand=1)
label = Tkinter.Label(frame, text="Hello, World")
label.pack(fill=X, expand=1)
button = Tkinter.Button(frame,text="Exit",command=tk.destroy)
button.pack(side=BOTTOM)
tk.mainloop()
"""
__version__ = "$Revision: 81008 $"
import sys
if sys.platform == "win32":
# Attempt to configure Tcl/Tk without requiring PATH
import FixTk
import _tkinter # If this fails your Python may not be configured for Tk
tkinter = _tkinter # b/w compat for export
TclError = _tkinter.TclError
from types import *
from Tkconstants import *
import re
# Module-level flag consulted elsewhere in this file (outside this chunk);
# kept at its historical default of 1.
wantobjects = 1

# Tk/Tcl versions reported by the C extension, as floats (e.g. 8.5).
TkVersion = float(_tkinter.TK_VERSION)
TclVersion = float(_tkinter.TCL_VERSION)

# Mask constants re-exported from _tkinter for file-handler callbacks.
READABLE = _tkinter.READABLE
WRITABLE = _tkinter.WRITABLE
EXCEPTION = _tkinter.EXCEPTION

# These are not always defined, e.g. not on Win32 with Tk 8.0 :-(
try: _tkinter.createfilehandler
except AttributeError: _tkinter.createfilehandler = None
try: _tkinter.deletefilehandler
except AttributeError: _tkinter.deletefilehandler = None

# Characters that must be backslash-escaped (_magic_re) or brace-quoted
# (_space_re) when converting Python values to Tcl strings; used by
# _stringify below.
_magic_re = re.compile(r'([\\{}])')
_space_re = re.compile(r'([\s])')
def _join(value):
    """Internal function: stringify each element of *value* and join the
    results with single spaces, producing one Tcl list string."""
    return ' '.join(_stringify(item) for item in value)
def _stringify(value):
    """Internal function."""
    # Convert an arbitrary Python value into a single Tcl-compatible string,
    # brace-quoting or backslash-escaping where Tcl syntax requires it.
    if isinstance(value, (list, tuple)):
        if len(value) == 1:
            # Single-element sequence: stringify the element; re-brace it if
            # the result itself is a braced Tcl word.
            value = _stringify(value[0])
            if value[0] == '{':
                value = '{%s}' % value
        else:
            value = '{%s}' % _join(value)
    else:
        if isinstance(value, basestring):
            value = unicode(value)
        else:
            value = str(value)
        if not value:
            # Empty string becomes the empty Tcl word.
            value = '{}'
        elif _magic_re.search(value):
            # add '\' before special characters and spaces
            value = _magic_re.sub(r'\\\1', value)
            value = _space_re.sub(r'\\\1', value)
        elif value[0] == '"' or _space_re.search(value):
            # Quote words that contain whitespace or start with a quote.
            value = '{%s}' % value
    return value

def _flatten(tuple):
    """Internal function."""
    # Recursively flatten nested tuples/lists into one flat tuple, dropping
    # None items.  The parameter deliberately shadows the builtin name
    # (historical Tkinter style).  Exact type() checks are intentional:
    # subclasses of tuple/list are NOT flattened.
    res = ()
    for item in tuple:
        if type(item) in (TupleType, ListType):
            res = res + _flatten(item)
        elif item is not None:
            res = res + (item,)
    return res

# Prefer the C implementation from _tkinter when available.
try: _flatten = _tkinter._flatten
except AttributeError: pass

def _cnfmerge(cnfs):
    """Internal function."""
    # Merge a configuration argument that may be a dict, a string, None or a
    # (possibly nested) sequence of dicts into a single dict.
    if type(cnfs) is DictionaryType:
        return cnfs
    elif type(cnfs) in (NoneType, StringType):
        # Strings and None pass through untouched.
        return cnfs
    else:
        cnf = {}
        for c in _flatten(cnfs):
            try:
                cnf.update(c)
            except (AttributeError, TypeError), msg:
                # Fall back to per-item copying for dict-likes whose update()
                # rejects this argument; the print is a debugging aid.
                print "_cnfmerge: fallback due to:", msg
                for k, v in c.items():
                    cnf[k] = v
        return cnf

# Prefer the C implementation from _tkinter when available.
try: _cnfmerge = _tkinter._cnfmerge
except AttributeError: pass

class Event:
    """Container for the properties of an event.

    Instances of this type are generated if one of the following events occurs:

    KeyPress, KeyRelease - for keyboard events
    ButtonPress, ButtonRelease, Motion, Enter, Leave, MouseWheel - for mouse events
    Visibility, Unmap, Map, Expose, FocusIn, FocusOut, Circulate,
    Colormap, Gravity, Reparent, Property, Destroy, Activate,
    Deactivate - for window events.

    If a callback function for one of these events is registered
    using bind, bind_all, bind_class, or tag_bind, the callback is
    called with an Event as first argument. It will have the
    following attributes (in braces are the event types for which
    the attribute is valid):

    serial - serial number of event
    num - mouse button pressed (ButtonPress, ButtonRelease)
    focus - whether the window has the focus (Enter, Leave)
    height - height of the exposed window (Configure, Expose)
    width - width of the exposed window (Configure, Expose)
    keycode - keycode of the pressed key (KeyPress, KeyRelease)
    state - state of the event as a number (ButtonPress, ButtonRelease,
                            Enter, KeyPress, KeyRelease,
                            Leave, Motion)
    state - state as a string (Visibility)
    time - when the event occurred
    x - x-position of the mouse
    y - y-position of the mouse
    x_root - x-position of the mouse on the screen
             (ButtonPress, ButtonRelease, KeyPress, KeyRelease, Motion)
    y_root - y-position of the mouse on the screen
             (ButtonPress, ButtonRelease, KeyPress, KeyRelease, Motion)
    char - pressed character (KeyPress, KeyRelease)
    send_event - see X/Windows documentation
    keysym - keysym of the event as a string (KeyPress, KeyRelease)
    keysym_num - keysym of the event as a number (KeyPress, KeyRelease)
    type - type of the event as a number
    widget - widget in which the event occurred
    delta - delta of wheel movement (MouseWheel)
    """
    # All attributes are assigned dynamically when the event is dispatched;
    # the class body is intentionally empty.
    pass
# Module state: whether an implicit default root window may be created, and
# the root itself once one exists.
_support_default_root = 1
_default_root = None

def NoDefaultRoot():
    """Inhibit setting of default root window.

    Call this function to inhibit that the first instance of
    Tk is used for windows without an explicit parent window.
    """
    global _support_default_root, _default_root
    _support_default_root = 0
    # Clear and then remove the module-level binding entirely.
    _default_root = None
    del _default_root
def _tkerror(err):
"""Internal function."""
pass
def _exit(code=0):
"""Internal function. Calling it will raise the exception SystemExit."""
try:
code = int(code)
except ValueError:
pass
raise SystemExit, code
# Counter used to generate unique default Tcl variable names (PY_VAR0, ...).
_varnum = 0

class Variable:
    """Class to define value holders for e.g. buttons.

    Subclasses StringVar, IntVar, DoubleVar, BooleanVar are specializations
    that constrain the type of the value returned from get()."""
    # Value written into Tcl when no initial value is supplied; subclasses
    # override this with a type-appropriate default.
    _default = ""
    def __init__(self, master=None, value=None, name=None):
        """Construct a variable

        MASTER can be given as master widget.
        VALUE is an optional value (defaults to "")
        NAME is an optional Tcl name (defaults to PY_VARnum).

        If NAME matches an existing variable and VALUE is omitted
        then the existing value is retained.
        """
        global _varnum
        if not master:
            master = _default_root
        self._master = master
        self._tk = master.tk
        if name:
            self._name = name
        else:
            # No explicit name: generate a unique Tcl-side name.
            self._name = 'PY_VAR' + repr(_varnum)
            _varnum += 1
        if value is not None:
            self.set(value)
        elif not self._tk.call("info", "exists", self._name):
            # Named variable that does not exist yet: give it the default;
            # an already-existing Tcl variable keeps its current value.
            self.set(self._default)
    def __del__(self):
        """Unset the variable in Tcl."""
        self._tk.globalunsetvar(self._name)
    def __str__(self):
        """Return the name of the variable in Tcl."""
        return self._name
    def set(self, value):
        """Set the variable to VALUE."""
        return self._tk.globalsetvar(self._name, value)
    def get(self):
        """Return value of variable."""
        return self._tk.globalgetvar(self._name)
    def trace_variable(self, mode, callback):
        """Define a trace callback for the variable.

        MODE is one of "r", "w", "u" for read, write, undefine.
        CALLBACK must be a function which is called when
        the variable is read, written or undefined.

        Return the name of the callback.
        """
        # The callback is registered with the master widget so Tcl can call
        # back into Python; the registered name is what Tcl traces.
        cbname = self._master._register(callback)
        self._tk.call("trace", "variable", self._name, mode, cbname)
        return cbname
    # Backwards-compatible alias for trace_variable.
    trace = trace_variable
    def trace_vdelete(self, mode, cbname):
        """Delete the trace callback for a variable.

        MODE is one of "r", "w", "u" for read, write, undefine.
        CBNAME is the name of the callback returned from trace_variable or trace.
        """
        self._tk.call("trace", "vdelete", self._name, mode, cbname)
        # Also drop the Python-side command registration.
        self._master.deletecommand(cbname)
    def trace_vinfo(self):
        """Return all trace callback information."""
        return map(self._tk.split, self._tk.splitlist(
            self._tk.call("trace", "vinfo", self._name)))
    def __eq__(self, other):
        """Comparison for equality (==).

        Note: if the Variable's master matters to behavior
        also compare self._master == other._master
        """
        # Equal iff same concrete subclass name and same Tcl variable name.
        return self.__class__.__name__ == other.__class__.__name__ \
            and self._name == other._name
class StringVar(Variable):
"""Value holder for strings variables."""
_default = ""
def __init__(self, master=None, value=None, name=None):
"""Construct a string variable.
MASTER can be given as master widget.
VALUE is an optional value (defaults to "")
NAME is an optional Tcl name (defaults to PY_VARnum).
If NAME matches an existing variable and VALUE is omitted
then the existing value is retained.
"""
Variable.__init__(self, master, value, name)
def get(self):
"""Return value of variable as string."""
value = self._tk.globalgetvar(self._name)
if isinstance(value, basestring):
return value
return str(value)
class IntVar(Variable):
    """Value holder for integer variables."""
    # Fresh variables start out as zero.
    _default = 0

    def __init__(self, master=None, value=None, name=None):
        """Construct an integer variable.

        MASTER can be given as master widget.
        VALUE is an optional value (defaults to 0)
        NAME is an optional Tcl name (defaults to PY_VARnum).

        If NAME matches an existing variable and VALUE is omitted
        then the existing value is retained.
        """
        Variable.__init__(self, master, value, name)

    def set(self, value):
        """Set the variable to value, converting booleans to integers."""
        # Tcl has no boolean object type; store True/False as 1/0.
        return Variable.set(
            self, int(value) if isinstance(value, bool) else value)

    def get(self):
        """Return the value of the variable as an integer."""
        return getint(self._tk.globalgetvar(self._name))
class DoubleVar(Variable):
    """Value holder for float variables."""
    # Fresh variables start out as 0.0.
    _default = 0.0

    def __init__(self, master=None, value=None, name=None):
        """Construct a float variable.

        MASTER can be given as master widget.
        VALUE is an optional value (defaults to 0.0)
        NAME is an optional Tcl name (defaults to PY_VARnum).

        If NAME matches an existing variable and VALUE is omitted
        then the existing value is retained.
        """
        Variable.__init__(self, master, value, name)

    def get(self):
        """Return the value of the variable as a float."""
        # getdouble is the module-level alias for float().
        return getdouble(self._tk.globalgetvar(self._name))
class BooleanVar(Variable):
    """Value holder for boolean variables."""
    # Fresh variables start out as False.
    _default = False

    def __init__(self, master=None, value=None, name=None):
        """Construct a boolean variable.

        MASTER can be given as master widget.
        VALUE is an optional value (defaults to False)
        NAME is an optional Tcl name (defaults to PY_VARnum).

        If NAME matches an existing variable and VALUE is omitted
        then the existing value is retained.
        """
        Variable.__init__(self, master, value, name)

    def get(self):
        """Return the value of the variable as a bool."""
        # Let Tcl interpret its many truth spellings ("yes", "on", "1", ...).
        return self._tk.getboolean(self._tk.globalgetvar(self._name))
def mainloop(n=0):
    """Run the main loop of Tcl.

    Requires that a Tk root has already been created (which sets the
    module-level _default_root)."""
    _default_root.tk.mainloop(n)

# Module-level conversion helpers: the Tcl "get*" conversions map
# directly onto Python builtins for int and float.
getint = int

getdouble = float

def getboolean(s):
    """Convert true and false to integer values 1 and 0."""
    # Delegates to Tcl so all of Tcl's truth spellings are accepted.
    return _default_root.tk.getboolean(s)
# Methods defined on both toplevel and interior widgets
class Misc:
"""Internal class.
Base class which defines methods common for interior widgets."""
# XXX font command?
_tclCommands = None
def destroy(self):
    """Internal function.

    Delete all Tcl commands created for
    this widget in the Tcl interpreter."""
    if self._tclCommands is not None:
        for name in self._tclCommands:
            self.tk.deletecommand(name)
        self._tclCommands = None

def deletecommand(self, name):
    """Internal function.

    Delete the Tcl command provided in NAME."""
    self.tk.deletecommand(name)
    # The name may belong to another widget's bookkeeping; ignore that.
    try:
        self._tclCommands.remove(name)
    except ValueError:
        pass
def tk_strictMotif(self, boolean=None):
    """Set Tcl internal variable, whether the look and feel
    should adhere to Motif.

    A parameter of 1 means adhere to Motif (e.g. no color
    change if mouse passes over slider).
    Returns the set value."""
    return self.tk.getboolean(self.tk.call(
        'set', 'tk_strictMotif', boolean))

def tk_bisque(self):
    """Change the color scheme to light brown as used in Tk 3.6 and before."""
    self.tk.call('tk_bisque')

def tk_setPalette(self, *args, **kw):
    """Set a new color scheme for all widget elements.

    A single color as argument will cause that all colors of Tk
    widget elements are derived from this.
    Alternatively several keyword parameters and its associated
    colors can be given. The following keywords are valid:
    activeBackground, foreground, selectColor,
    activeForeground, highlightBackground, selectBackground,
    background, highlightColor, selectForeground,
    disabledForeground, insertBackground, troughColor."""
    # Positional colors and keyword pairs are flattened into one Tcl call.
    self.tk.call(('tk_setPalette',)
          + _flatten(args) + _flatten(kw.items()))

def tk_menuBar(self, *args):
    """Do not use. Needed in Tk 3.6 and earlier."""
    pass # obsolete since Tk 4.0
def wait_variable(self, name='PY_VAR'):
    """Wait until the variable is modified.

    A parameter of type IntVar, StringVar, DoubleVar or
    BooleanVar must be given."""
    # Blocks inside a local Tcl event loop until the variable changes.
    self.tk.call('tkwait', 'variable', name)
waitvar = wait_variable # XXX b/w compat

def wait_window(self, window=None):
    """Wait until a WIDGET is destroyed.

    If no parameter is given self is used."""
    if window is None:
        window = self
    self.tk.call('tkwait', 'window', window._w)

def wait_visibility(self, window=None):
    """Wait until the visibility of a WIDGET changes
    (e.g. it appears).

    If no parameter is given self is used."""
    if window is None:
        window = self
    self.tk.call('tkwait', 'visibility', window._w)

def setvar(self, name='PY_VAR', value='1'):
    """Set Tcl variable NAME to VALUE."""
    self.tk.setvar(name, value)

def getvar(self, name='PY_VAR'):
    """Return value of Tcl variable NAME."""
    return self.tk.getvar(name)

# Conversion helpers mirrored onto the widget for convenience.
getint = int

getdouble = float

def getboolean(self, s):
    """Return a boolean value for Tcl boolean values true and false given as parameter."""
    return self.tk.getboolean(s)
def focus_set(self):
    """Direct input focus to this widget.

    If the application currently does not have the focus
    this widget will get the focus if the application gets
    the focus through the window manager."""
    self.tk.call('focus', self._w)
focus = focus_set # XXX b/w compat?

def focus_force(self):
    """Direct input focus to this widget even if the
    application does not have the focus. Use with
    caution!"""
    self.tk.call('focus', '-force', self._w)

def focus_get(self):
    """Return the widget which has currently the focus in the
    application.

    Use focus_displayof to allow working with several
    displays. Return None if application does not have
    the focus."""
    name = self.tk.call('focus')
    # Tcl reports "none" (or an empty string) when nothing has focus.
    if name == 'none' or not name: return None
    return self._nametowidget(name)

def focus_displayof(self):
    """Return the widget which has currently the focus on the
    display where this widget is located.

    Return None if the application does not have the focus."""
    name = self.tk.call('focus', '-displayof', self._w)
    if name == 'none' or not name: return None
    return self._nametowidget(name)

def focus_lastfor(self):
    """Return the widget which would have the focus if top level
    for this widget gets the focus from the window manager."""
    name = self.tk.call('focus', '-lastfor', self._w)
    if name == 'none' or not name: return None
    return self._nametowidget(name)

def tk_focusFollowsMouse(self):
    """The widget under mouse will get automatically focus. Can not
    be disabled easily."""
    self.tk.call('tk_focusFollowsMouse')

def tk_focusNext(self):
    """Return the next widget in the focus order which follows
    widget which has currently the focus.

    The focus order first goes to the next child, then to
    the children of the child recursively and then to the
    next sibling which is higher in the stacking order.  A
    widget is omitted if it has the takefocus resource set
    to 0."""
    name = self.tk.call('tk_focusNext', self._w)
    if not name: return None
    return self._nametowidget(name)

def tk_focusPrev(self):
    """Return previous widget in the focus order. See tk_focusNext for details."""
    name = self.tk.call('tk_focusPrev', self._w)
    if not name: return None
    return self._nametowidget(name)
def after(self, ms, func=None, *args):
    """Call function once after given time.

    MS specifies the time in milliseconds. FUNC gives the
    function which shall be called. Additional parameters
    are given as parameters to the function call.  Return
    identifier to cancel scheduling with after_cancel."""
    if not func:
        # I'd rather use time.sleep(ms*0.001)
        self.tk.call('after', ms)
    else:
        def callit():
            try:
                func(*args)
            finally:
                # The command fired exactly once; unregister it so the
                # Tcl command table does not grow without bound.
                try:
                    self.deletecommand(name)
                except TclError:
                    pass
        name = self._register(callit)
        return self.tk.call('after', ms, name)

def after_idle(self, func, *args):
    """Call FUNC once if the Tcl main loop has no event to
    process.

    Return an identifier to cancel the scheduling with
    after_cancel."""
    # 'idle' takes the place of the millisecond delay in the Tcl call.
    return self.after('idle', func, *args)
def after_cancel(self, id):
"""Cancel scheduling of function identified with ID.
Identifier returned by after or after_idle must be
given as first parameter."""
try:
data = self.tk.call('after', 'info', id)
# In Tk 8.3, splitlist returns: (script, type)
# In Tk 8.4, splitlist may return (script, type) or (script,)
script = self.tk.splitlist(data)[0]
self.deletecommand(script)
except TclError:
pass
self.tk.call('after', 'cancel', id)
def bell(self, displayof=0):
    """Ring a display's bell."""
    self.tk.call(('bell',) + self._displayof(displayof))

# Clipboard handling:
def clipboard_get(self, **kw):
    """Retrieve data from the clipboard on window's display.

    The window keyword defaults to the root window of the Tkinter
    application.

    The type keyword specifies the form in which the data is
    to be returned and should be an atom name such as STRING
    or FILE_NAME.  Type defaults to STRING, except on X11, where the default
    is to try UTF8_STRING and fall back to STRING.

    This command is equivalent to:

    selection_get(CLIPBOARD)
    """
    if 'type' not in kw and self._windowingsystem == 'x11':
        # Prefer the Unicode-capable atom on X11, falling back to the
        # plain STRING type if the owner does not provide it.
        try:
            kw['type'] = 'UTF8_STRING'
            return self.tk.call(('clipboard', 'get') + self._options(kw))
        except TclError:
            del kw['type']
    return self.tk.call(('clipboard', 'get') + self._options(kw))

def clipboard_clear(self, **kw):
    """Clear the data in the Tk clipboard.

    A widget specified for the optional displayof keyword
    argument specifies the target display."""
    if 'displayof' not in kw: kw['displayof'] = self._w
    self.tk.call(('clipboard', 'clear') + self._options(kw))

def clipboard_append(self, string, **kw):
    """Append STRING to the Tk clipboard.

    A widget specified at the optional displayof keyword
    argument specifies the target display. The clipboard
    can be retrieved with selection_get."""
    if 'displayof' not in kw: kw['displayof'] = self._w
    # '--' terminates option parsing so STRING may begin with a dash.
    self.tk.call(('clipboard', 'append') + self._options(kw)
          + ('--', string))
# XXX grab current w/o window argument
def grab_current(self):
    """Return widget which has currently the grab in this application
    or None."""
    name = self.tk.call('grab', 'current', self._w)
    if not name: return None
    return self._nametowidget(name)

def grab_release(self):
    """Release grab for this widget if currently set."""
    self.tk.call('grab', 'release', self._w)

def grab_set(self):
    """Set grab for this widget.

    A grab directs all events to this and descendant
    widgets in the application."""
    self.tk.call('grab', 'set', self._w)

def grab_set_global(self):
    """Set global grab for this widget.

    A global grab directs all events to this and
    descendant widgets on the display. Use with caution -
    other applications do not get events anymore."""
    self.tk.call('grab', 'set', '-global', self._w)

def grab_status(self):
    """Return None, "local" or "global" if this widget has
    no, a local or a global grab."""
    status = self.tk.call('grab', 'status', self._w)
    # Normalize Tcl's "none" to Python's None.
    if status == 'none': status = None
    return status
def option_add(self, pattern, value, priority = None):
    """Set a VALUE (second parameter) for an option
    PATTERN (first parameter).

    An optional third parameter gives the numeric priority
    (defaults to 80)."""
    self.tk.call('option', 'add', pattern, value, priority)

def option_clear(self):
    """Clear the option database.

    It will be reloaded if option_add is called."""
    self.tk.call('option', 'clear')

def option_get(self, name, className):
    """Return the value for an option NAME for this widget
    with CLASSNAME.

    Values with higher priority override lower values."""
    return self.tk.call('option', 'get', self._w, name, className)

def option_readfile(self, fileName, priority = None):
    """Read file FILENAME into the option database.

    An optional second parameter gives the numeric
    priority."""
    self.tk.call('option', 'readfile', fileName, priority)
def selection_clear(self, **kw):
    """Clear the current X selection."""
    if 'displayof' not in kw: kw['displayof'] = self._w
    self.tk.call(('selection', 'clear') + self._options(kw))

def selection_get(self, **kw):
    """Return the contents of the current X selection.

    A keyword parameter selection specifies the name of
    the selection and defaults to PRIMARY.  A keyword
    parameter displayof specifies a widget on the display
    to use. A keyword parameter type specifies the form of data to be
    fetched, defaulting to STRING except on X11, where UTF8_STRING is tried
    before STRING."""
    if 'displayof' not in kw: kw['displayof'] = self._w
    if 'type' not in kw and self._windowingsystem == 'x11':
        # Prefer the Unicode-capable atom on X11; fall back to STRING.
        try:
            kw['type'] = 'UTF8_STRING'
            return self.tk.call(('selection', 'get') + self._options(kw))
        except TclError:
            del kw['type']
    return self.tk.call(('selection', 'get') + self._options(kw))

def selection_handle(self, command, **kw):
    """Specify a function COMMAND to call if the X
    selection owned by this widget is queried by another
    application.

    This function must return the contents of the
    selection. The function will be called with the
    arguments OFFSET and LENGTH which allows the chunking
    of very long selections. The following keyword
    parameters can be provided:
    selection - name of the selection (default PRIMARY),
    type - type of the selection (e.g. STRING, FILE_NAME)."""
    name = self._register(command)
    self.tk.call(('selection', 'handle') + self._options(kw)
          + (self._w, name))

def selection_own(self, **kw):
    """Become owner of X selection.

    A keyword parameter selection specifies the name of
    the selection (default PRIMARY)."""
    self.tk.call(('selection', 'own') +
             self._options(kw) + (self._w,))

def selection_own_get(self, **kw):
    """Return owner of X selection.

    The following keyword parameter can
    be provided:
    selection - name of the selection (default PRIMARY),
    type - type of the selection (e.g. STRING, FILE_NAME)."""
    if 'displayof' not in kw: kw['displayof'] = self._w
    name = self.tk.call(('selection', 'own') + self._options(kw))
    if not name: return None
    return self._nametowidget(name)

def send(self, interp, cmd, *args):
    """Send Tcl command CMD to different interpreter INTERP to be executed."""
    return self.tk.call(('send', interp, cmd) + args)
def lower(self, belowThis=None):
    """Lower this widget in the stacking order."""
    self.tk.call('lower', self._w, belowThis)

def tkraise(self, aboveThis=None):
    """Raise this widget in the stacking order."""
    # Named tkraise because "raise" is a Python keyword.
    self.tk.call('raise', self._w, aboveThis)
lift = tkraise

def colormodel(self, value=None):
    """Useless. Not implemented in Tk."""
    return self.tk.call('tk', 'colormodel', self._w, value)
# The winfo_* family wraps Tcl's "winfo" introspection command.
def winfo_atom(self, name, displayof=0):
    """Return integer which represents atom NAME."""
    args = ('winfo', 'atom') + self._displayof(displayof) + (name,)
    return getint(self.tk.call(args))

def winfo_atomname(self, id, displayof=0):
    """Return name of atom with identifier ID."""
    args = ('winfo', 'atomname') \
           + self._displayof(displayof) + (id,)
    return self.tk.call(args)

def winfo_cells(self):
    """Return number of cells in the colormap for this widget."""
    return getint(
        self.tk.call('winfo', 'cells', self._w))

def winfo_children(self):
    """Return a list of all widgets which are children of this widget."""
    result = []
    for child in self.tk.splitlist(
        self.tk.call('winfo', 'children', self._w)):
        try:
            # Tcl sometimes returns extra windows, e.g. for
            # menus; those need to be skipped
            result.append(self._nametowidget(child))
        except KeyError:
            pass
    return result

def winfo_class(self):
    """Return window class name of this widget."""
    return self.tk.call('winfo', 'class', self._w)

def winfo_colormapfull(self):
    """Return true if at the last color request the colormap was full."""
    return self.tk.getboolean(
        self.tk.call('winfo', 'colormapfull', self._w))

def winfo_containing(self, rootX, rootY, displayof=0):
    """Return the widget which is at the root coordinates ROOTX, ROOTY."""
    args = ('winfo', 'containing') \
           + self._displayof(displayof) + (rootX, rootY)
    name = self.tk.call(args)
    if not name: return None
    return self._nametowidget(name)

def winfo_depth(self):
    """Return the number of bits per pixel."""
    return getint(self.tk.call('winfo', 'depth', self._w))

def winfo_exists(self):
    """Return true if this widget exists."""
    return getint(
        self.tk.call('winfo', 'exists', self._w))

def winfo_fpixels(self, number):
    """Return the number of pixels for the given distance NUMBER
    (e.g. "3c") as float."""
    return getdouble(self.tk.call(
        'winfo', 'fpixels', self._w, number))

def winfo_geometry(self):
    """Return geometry string for this widget in the form "widthxheight+X+Y"."""
    return self.tk.call('winfo', 'geometry', self._w)

def winfo_height(self):
    """Return height of this widget."""
    return getint(
        self.tk.call('winfo', 'height', self._w))

def winfo_id(self):
    """Return identifier ID for this widget."""
    # Uses tk.getint (not the plain int alias) so that platform window
    # handles are converted by Tcl itself.
    return self.tk.getint(
        self.tk.call('winfo', 'id', self._w))

def winfo_interps(self, displayof=0):
    """Return the name of all Tcl interpreters for this display."""
    args = ('winfo', 'interps') + self._displayof(displayof)
    return self.tk.splitlist(self.tk.call(args))

def winfo_ismapped(self):
    """Return true if this widget is mapped."""
    return getint(
        self.tk.call('winfo', 'ismapped', self._w))

def winfo_manager(self):
    """Return the window manager name for this widget."""
    return self.tk.call('winfo', 'manager', self._w)

def winfo_name(self):
    """Return the name of this widget."""
    return self.tk.call('winfo', 'name', self._w)

def winfo_parent(self):
    """Return the name of the parent of this widget."""
    return self.tk.call('winfo', 'parent', self._w)

def winfo_pathname(self, id, displayof=0):
    """Return the pathname of the widget given by ID."""
    args = ('winfo', 'pathname') \
           + self._displayof(displayof) + (id,)
    return self.tk.call(args)

def winfo_pixels(self, number):
    """Rounded integer value of winfo_fpixels."""
    return getint(
        self.tk.call('winfo', 'pixels', self._w, number))

def winfo_pointerx(self):
    """Return the x coordinate of the pointer on the root window."""
    return getint(
        self.tk.call('winfo', 'pointerx', self._w))

def winfo_pointerxy(self):
    """Return a tuple of x and y coordinates of the pointer on the root window."""
    return self._getints(
        self.tk.call('winfo', 'pointerxy', self._w))

def winfo_pointery(self):
    """Return the y coordinate of the pointer on the root window."""
    return getint(
        self.tk.call('winfo', 'pointery', self._w))
def winfo_reqheight(self):
    """Return requested height of this widget."""
    return getint(
        self.tk.call('winfo', 'reqheight', self._w))

def winfo_reqwidth(self):
    """Return requested width of this widget."""
    return getint(
        self.tk.call('winfo', 'reqwidth', self._w))

def winfo_rgb(self, color):
    """Return tuple of decimal values for red, green, blue for
    COLOR in this widget."""
    return self._getints(
        self.tk.call('winfo', 'rgb', self._w, color))

def winfo_rootx(self):
    """Return x coordinate of upper left corner of this widget on the
    root window."""
    return getint(
        self.tk.call('winfo', 'rootx', self._w))

def winfo_rooty(self):
    """Return y coordinate of upper left corner of this widget on the
    root window."""
    return getint(
        self.tk.call('winfo', 'rooty', self._w))

def winfo_screen(self):
    """Return the screen name of this widget."""
    return self.tk.call('winfo', 'screen', self._w)

def winfo_screencells(self):
    """Return the number of the cells in the colormap of the screen
    of this widget."""
    return getint(
        self.tk.call('winfo', 'screencells', self._w))

def winfo_screendepth(self):
    """Return the number of bits per pixel of the root window of the
    screen of this widget."""
    return getint(
        self.tk.call('winfo', 'screendepth', self._w))

def winfo_screenheight(self):
    """Return the number of pixels of the height of the screen of this widget
    in pixel."""
    return getint(
        self.tk.call('winfo', 'screenheight', self._w))

def winfo_screenmmheight(self):
    """Return the number of pixels of the height of the screen of
    this widget in mm."""
    return getint(
        self.tk.call('winfo', 'screenmmheight', self._w))

def winfo_screenmmwidth(self):
    """Return the number of pixels of the width of the screen of
    this widget in mm."""
    return getint(
        self.tk.call('winfo', 'screenmmwidth', self._w))

def winfo_screenvisual(self):
    """Return one of the strings directcolor, grayscale, pseudocolor,
    staticcolor, staticgray, or truecolor for the default
    colormodel of this screen."""
    return self.tk.call('winfo', 'screenvisual', self._w)

def winfo_screenwidth(self):
    """Return the number of pixels of the width of the screen of
    this widget in pixel."""
    return getint(
        self.tk.call('winfo', 'screenwidth', self._w))

def winfo_server(self):
    """Return information of the X-Server of the screen of this widget in
    the form "XmajorRminor vendor vendorVersion"."""
    return self.tk.call('winfo', 'server', self._w)

def winfo_toplevel(self):
    """Return the toplevel widget of this widget."""
    return self._nametowidget(self.tk.call(
        'winfo', 'toplevel', self._w))

def winfo_viewable(self):
    """Return true if the widget and all its higher ancestors are mapped."""
    return getint(
        self.tk.call('winfo', 'viewable', self._w))

def winfo_visual(self):
    """Return one of the strings directcolor, grayscale, pseudocolor,
    staticcolor, staticgray, or truecolor for the
    colormodel of this widget."""
    return self.tk.call('winfo', 'visual', self._w)

def winfo_visualid(self):
    """Return the X identifier for the visual for this widget."""
    return self.tk.call('winfo', 'visualid', self._w)

def winfo_visualsavailable(self, includeids=0):
    """Return a list of all visuals available for the screen
    of this widget.

    Each item in the list consists of a visual name (see winfo_visual), a
    depth and if INCLUDEIDS=1 is given also the X identifier."""
    data = self.tk.split(
        self.tk.call('winfo', 'visualsavailable', self._w,
                 includeids and 'includeids' or None))
    # A single visual comes back as a bare string; normalize to a list.
    if type(data) is StringType:
        data = [self.tk.split(data)]
    return map(self.__winfo_parseitem, data)

def __winfo_parseitem(self, t):
    """Internal function.

    Convert one visualsavailable entry: keep the name, convert the
    depth (and optional id) to integers."""
    return t[:1] + tuple(map(self.__winfo_getint, t[1:]))

def __winfo_getint(self, x):
    """Internal function."""
    # Base 0 accepts decimal, hex (0x...) and octal spellings.
    return int(x, 0)
def winfo_vrootheight(self):
    """Return the height of the virtual root window associated with this
    widget in pixels. If there is no virtual root window return the
    height of the screen."""
    return getint(
        self.tk.call('winfo', 'vrootheight', self._w))

def winfo_vrootwidth(self):
    """Return the width of the virtual root window associated with this
    widget in pixel. If there is no virtual root window return the
    width of the screen."""
    return getint(
        self.tk.call('winfo', 'vrootwidth', self._w))

def winfo_vrootx(self):
    """Return the x offset of the virtual root relative to the root
    window of the screen of this widget."""
    return getint(
        self.tk.call('winfo', 'vrootx', self._w))

def winfo_vrooty(self):
    """Return the y offset of the virtual root relative to the root
    window of the screen of this widget."""
    return getint(
        self.tk.call('winfo', 'vrooty', self._w))

def winfo_width(self):
    """Return the width of this widget."""
    return getint(
        self.tk.call('winfo', 'width', self._w))

def winfo_x(self):
    """Return the x coordinate of the upper left corner of this widget
    in the parent."""
    return getint(
        self.tk.call('winfo', 'x', self._w))

def winfo_y(self):
    """Return the y coordinate of the upper left corner of this widget
    in the parent."""
    return getint(
        self.tk.call('winfo', 'y', self._w))
def update(self):
    """Enter event loop until all pending events have been processed by Tcl."""
    self.tk.call('update')

def update_idletasks(self):
    """Enter event loop until all idle callbacks have been called. This
    will update the display of windows but not process events caused by
    the user."""
    self.tk.call('update', 'idletasks')

def bindtags(self, tagList=None):
    """Set or get the list of bindtags for this widget.

    With no argument return the list of all bindtags associated with
    this widget. With a list of strings as argument the bindtags are
    set to this list. The bindtags determine in which order events are
    processed (see bind)."""
    if tagList is None:
        return self.tk.splitlist(
            self.tk.call('bindtags', self._w))
    else:
        self.tk.call('bindtags', self._w, tagList)
def _bind(self, what, sequence, func, add, needcleanup=1):
    """Internal function.

    Shared implementation of bind/bind_all/bind_class.  WHAT is the
    Tcl command prefix, e.g. ('bind', widget-or-tag).  Dispatch:
    - FUNC given as a string: pass the raw Tcl script through;
    - FUNC callable: register it and build a Tcl script that invokes
      it with the percent substitutions, honoring a "break" return;
    - only SEQUENCE given: return the current binding;
    - nothing given: return all bound sequences."""
    if type(func) is StringType:
        self.tk.call(what + (sequence, func))
    elif func:
        funcid = self._register(func, self._substitute,
                    needcleanup)
        # '+' prepended keeps any existing binding (add=True); the
        # generated script stops event propagation when the Python
        # callback returns the string "break".
        cmd = ('%sif {"[%s %s]" == "break"} break\n'
               %
               (add and '+' or '',
            funcid, self._subst_format_str))
        self.tk.call(what + (sequence, cmd))
        return funcid
    elif sequence:
        return self.tk.call(what + (sequence,))
    else:
        return self.tk.splitlist(self.tk.call(what))
def bind(self, sequence=None, func=None, add=None):
    """Bind to this widget at event SEQUENCE a call to function FUNC.

    SEQUENCE is a string of concatenated event
    patterns. An event pattern is of the form
    <MODIFIER-MODIFIER-TYPE-DETAIL> where MODIFIER is one
    of Control, Mod2, M2, Shift, Mod3, M3, Lock, Mod4, M4,
    Button1, B1, Mod5, M5 Button2, B2, Meta, M, Button3,
    B3, Alt, Button4, B4, Double, Button5, B5 Triple,
    Mod1, M1. TYPE is one of Activate, Enter, Map,
    ButtonPress, Button, Expose, Motion, ButtonRelease
    FocusIn, MouseWheel, Circulate, FocusOut, Property,
    Colormap, Gravity Reparent, Configure, KeyPress, Key,
    Unmap, Deactivate, KeyRelease Visibility, Destroy,
    Leave and DETAIL is the button number for ButtonPress,
    ButtonRelease and DETAIL is the Keysym for KeyPress and
    KeyRelease. Examples are
    <Control-Button-1> for pressing Control and mouse button 1 or
    <Alt-A> for pressing A and the Alt key (KeyPress can be omitted).
    An event pattern can also be a virtual event of the form
    <<AString>> where AString can be arbitrary. This
    event can be generated by event_generate.
    If events are concatenated they must appear shortly
    after each other.

    FUNC will be called if the event sequence occurs with an
    instance of Event as argument. If the return value of FUNC is
    "break" no further bound function is invoked.

    An additional boolean parameter ADD specifies whether FUNC will
    be called additionally to the other bound function or whether
    it will replace the previous function.

    Bind will return an identifier to allow deletion of the bound function with
    unbind without memory leak.

    If FUNC or SEQUENCE is omitted the bound function or list
    of bound events are returned."""
    return self._bind(('bind', self._w), sequence, func, add)

def unbind(self, sequence, funcid=None):
    """Unbind for this widget for event SEQUENCE the
    function identified with FUNCID."""
    self.tk.call('bind', self._w, sequence, '')
    if funcid:
        self.deletecommand(funcid)

def bind_all(self, sequence=None, func=None, add=None):
    """Bind to all widgets at an event SEQUENCE a call to function FUNC.

    An additional boolean parameter ADD specifies whether FUNC will
    be called additionally to the other bound function or whether
    it will replace the previous function. See bind for the return value."""
    # needcleanup=0: "all"-level bindings outlive any single widget.
    return self._bind(('bind', 'all'), sequence, func, add, 0)

def unbind_all(self, sequence):
    """Unbind for all widgets for event SEQUENCE all functions."""
    self.tk.call('bind', 'all' , sequence, '')

def bind_class(self, className, sequence=None, func=None, add=None):
    """Bind to widgets with bindtag CLASSNAME at event
    SEQUENCE a call of function FUNC. An additional
    boolean parameter ADD specifies whether FUNC will be
    called additionally to the other bound function or
    whether it will replace the previous function. See bind for
    the return value."""
    return self._bind(('bind', className), sequence, func, add, 0)

def unbind_class(self, className, sequence):
    """Unbind for all widgets with bindtag CLASSNAME for event SEQUENCE
    all functions."""
    self.tk.call('bind', className , sequence, '')
def mainloop(self, n=0):
    """Call the mainloop of Tk."""
    self.tk.mainloop(n)

def quit(self):
    """Quit the Tcl interpreter. All widgets will be destroyed."""
    self.tk.quit()

def _getints(self, string):
    """Internal function.

    Split a Tcl list STRING into a tuple of ints; returns None for
    an empty string."""
    if string:
        return tuple(map(getint, self.tk.splitlist(string)))

def _getdoubles(self, string):
    """Internal function.

    Split a Tcl list STRING into a tuple of floats; returns None for
    an empty string."""
    if string:
        return tuple(map(getdouble, self.tk.splitlist(string)))

def _getboolean(self, string):
    """Internal function.

    Convert a Tcl boolean STRING; returns None for an empty string."""
    if string:
        return self.tk.getboolean(string)

def _displayof(self, displayof):
    """Internal function.

    Build the ('-displayof', window) option tuple: DISPLAYOF truthy
    uses that window, None means "this widget", 0/falsy means omit
    the option."""
    if displayof:
        return ('-displayof', displayof)
    if displayof is None:
        return ('-displayof', self._w)
    return ()
@property
def _windowingsystem(self):
    """Internal function.

    Return Tk's windowing system ('x11', 'win32' or 'aqua'),
    cached on the root widget after the first query."""
    try:
        return self._root()._windowingsystem_cached
    except AttributeError:
        ws = self._root()._windowingsystem_cached = \
                    self.tk.call('tk', 'windowingsystem')
        return ws
def _options(self, cnf, kw = None):
    """Internal function.

    Flatten the configuration dict(s) CNF/KW into a Tcl option tuple
    of the form ('-key', value, '-key', value, ...)."""
    if kw:
        cnf = _cnfmerge((cnf, kw))
    else:
        cnf = _cnfmerge(cnf)
    res = ()
    for k, v in cnf.items():
        if v is not None:
            # A trailing underscore escapes Python keywords (e.g. in_).
            if k[-1] == '_': k = k[:-1]
            if hasattr(v, '__call__'):
                # Callables are registered as Tcl commands and passed
                # by name.
                v = self._register(v)
            elif isinstance(v, (tuple, list)):
                nv = []
                for item in v:
                    if not isinstance(item, (basestring, int)):
                        # Non-scalar member: leave v untouched (the
                        # for/else below is skipped by this break).
                        break
                    elif isinstance(item, int):
                        nv.append('%d' % item)
                    else:
                        # format it to proper Tcl code if it contains space
                        nv.append(_stringify(item))
                else:
                    v = ' '.join(nv)
            res = res + ('-'+k, v)
    return res
def nametowidget(self, name):
    """Return the Tkinter instance of a widget identified by
    its Tcl name NAME."""
    name = str(name).split('.')
    w = self
    # A leading '.' (empty first component) means an absolute path
    # starting at the root window.
    if not name[0]:
        w = w._root()
        name = name[1:]
    for n in name:
        if not n:
            break
        w = w.children[n]
    return w
_nametowidget = nametowidget
def _register(self, func, subst=None, needcleanup=1):
    """Internal function.

    Return a newly created Tcl function. If this
    function is called, the Python function FUNC will
    be executed. An optional function SUBST can
    be given which will be executed before FUNC."""
    f = CallWrapper(func, subst, self).__call__
    # Command name: object id plus, when available, the function name
    # for easier debugging in the Tcl command table.
    name = repr(id(f))
    try:
        func = func.im_func
    except AttributeError:
        pass
    try:
        name = name + func.__name__
    except AttributeError:
        pass
    self.tk.createcommand(name, f)
    if needcleanup:
        # Remember the command so destroy() can unregister it later.
        if self._tclCommands is None:
            self._tclCommands = []
        self._tclCommands.append(name)
    return name
register = _register

def _root(self):
    """Internal function.

    Walk up the master chain to the root widget."""
    w = self
    while w.master: w = w.master
    return w
# Percent substitutions requested from Tcl for event scripts; the order
# must match the tuple unpacking performed in _substitute.
_subst_format = ('%#', '%b', '%f', '%h', '%k',
         '%s', '%t', '%w', '%x', '%y',
         '%A', '%E', '%K', '%N', '%W', '%T', '%X', '%Y', '%D')
_subst_format_str = " ".join(_subst_format)
def _substitute(self, *args):
    """Internal function.

    Convert the raw percent-substitution strings received from Tcl
    into a single Event instance (returned as a 1-tuple so it can be
    passed straight through to the bound callback)."""
    if len(args) != len(self._subst_format): return args
    getboolean = self.tk.getboolean

    getint = int
    def getint_event(s):
        """Tk changed behavior in 8.4.2, returning "??" rather more often."""
        try:
            return int(s)
        except ValueError:
            return s

    nsign, b, f, h, k, s, t, w, x, y, A, E, K, N, W, T, X, Y, D = args
    # Missing: (a, c, d, m, o, v, B, R)
    e = Event()
    # serial field: valid for all events
    # number of button: ButtonPress and ButtonRelease events only
    # height field: Configure, ConfigureRequest, Create,
    # ResizeRequest, and Expose events only
    # keycode field: KeyPress and KeyRelease events only
    # time field: "valid for events that contain a time field"
    # width field: Configure, ConfigureRequest, Create, ResizeRequest,
    # and Expose events only
    # x field: "valid for events that contain a x field"
    # y field: "valid for events that contain a y field"
    # keysym as decimal: KeyPress and KeyRelease events only
    # x_root, y_root fields: ButtonPress, ButtonRelease, KeyPress,
    # KeyRelease,and Motion events
    e.serial = getint(nsign)
    e.num = getint_event(b)
    try: e.focus = getboolean(f)
    except TclError: pass
    e.height = getint_event(h)
    e.keycode = getint_event(k)
    e.state = getint_event(s)
    e.time = getint_event(t)
    e.width = getint_event(w)
    e.x = getint_event(x)
    e.y = getint_event(y)
    e.char = A
    try: e.send_event = getboolean(E)
    except TclError: pass
    e.keysym = K
    e.keysym_num = getint_event(N)
    e.type = T
    try:
        e.widget = self._nametowidget(W)
    except KeyError:
        # The window may already be gone; fall back to the raw name.
        e.widget = W
    e.x_root = getint_event(X)
    e.y_root = getint_event(Y)
    try:
        e.delta = getint(D)
    except ValueError:
        e.delta = 0
    return (e,)
def _report_exception(self):
"""Internal function."""
import sys
exc, val, tb = sys.exc_type, sys.exc_value, sys.exc_traceback
root = self._root()
root.report_callback_exception(exc, val, tb)
def _configure(self, cmd, cnf, kw):
    """Internal function.

    Shared implementation of configure():
    - no options: query Tcl and return a dict of all resources;
    - CNF is a string: return the description of that one resource;
    - otherwise: apply the given option/value pairs."""
    if kw:
        cnf = _cnfmerge((cnf, kw))
    elif cnf:
        cnf = _cnfmerge(cnf)
    if cnf is None:
        cnf = {}
        for x in self.tk.split(
            self.tk.call(_flatten((self._w, cmd)))):
            # x[0] is '-optionname'; strip the dash for the dict key.
            cnf[x[0][1:]] = (x[0][1:],) + x[1:]
        return cnf
    if type(cnf) is StringType:
        x = self.tk.split(
            self.tk.call(_flatten((self._w, cmd, '-'+cnf))))
        return (x[0][1:],) + x[1:]
    self.tk.call(_flatten((self._w, cmd)) + self._options(cnf))
    # These used to be defined in Widget:
    def configure(self, cnf=None, **kw):
        """Configure resources of a widget.

        The values for resources are specified as keyword
        arguments. To get an overview about
        the allowed keyword arguments call the method keys.
        """
        # Delegates to _configure, which handles query vs. set.
        return self._configure('configure', cnf, kw)
    config = configure
    def cget(self, key):
        """Return the resource value for a KEY given as string."""
        # Tk expects the option name with a leading dash.
        return self.tk.call(self._w, 'cget', '-' + key)
    # Allow dictionary-style reads: widget['option'].
    __getitem__ = cget
    def __setitem__(self, key, value):
        """Set resource KEY to VALUE (dictionary-style writes)."""
        self.configure({key: value})
    def __contains__(self, key):
        # Deliberately unsupported: without this, 'in' would fall back to
        # iteration via __getitem__ and give a confusing result.
        raise TypeError("Tkinter objects don't support 'in' tests.")
def keys(self):
"""Return a list of all resource names of this widget."""
return map(lambda x: x[0][1:],
self.tk.split(self.tk.call(self._w, 'configure')))
    def __str__(self):
        """Return the window path name of this widget."""
        # The path name (e.g. '.frame.button') is how Tcl identifies the
        # widget, so widgets can be passed directly into Tcl calls.
        return self._w
    # Pack methods that apply to the master
    # _noarg_ is a unique sentinel object (identity-compared below) so
    # callers may pass any real value -- including None -- as the flag.
    _noarg_ = ['_noarg_']
    def pack_propagate(self, flag=_noarg_):
        """Set or get the status for propagation of geometry information.
        A boolean argument specifies whether the geometry information
        of the slaves will determine the size of this widget. If no argument
        is given the current setting will be returned.
        """
        if flag is Misc._noarg_:
            # No argument: query the current setting.
            return self._getboolean(self.tk.call(
                'pack', 'propagate', self._w))
        else:
            self.tk.call('pack', 'propagate', self._w, flag)
    propagate = pack_propagate
def pack_slaves(self):
"""Return a list of all slaves of this widget
in its packing order."""
return map(self._nametowidget,
self.tk.splitlist(
self.tk.call('pack', 'slaves', self._w)))
slaves = pack_slaves
# Place method that applies to the master
def place_slaves(self):
"""Return a list of all slaves of this widget
in its packing order."""
return map(self._nametowidget,
self.tk.splitlist(
self.tk.call(
'place', 'slaves', self._w)))
    # Grid methods that apply to the master
    def grid_bbox(self, column=None, row=None, col2=None, row2=None):
        """Return a tuple of integer coordinates for the bounding
        box of this widget controlled by the geometry manager grid.
        If COLUMN, ROW is given the bounding box applies from
        the cell with row and column 0 to the specified
        cell. If COL2 and ROW2 are given the bounding box
        starts at that cell.
        The returned integers specify the offset of the upper left
        corner in the master widget and the width and height.
        """
        # Both cells of a pair must be supplied for it to take effect.
        args = ('grid', 'bbox', self._w)
        if column is not None and row is not None:
            args = args + (column, row)
        if col2 is not None and row2 is not None:
            args = args + (col2, row2)
        # _getints returns None for an empty Tcl result.
        return self._getints(self.tk.call(*args)) or None
    bbox = grid_bbox
    def _grid_configure(self, command, index, cnf, kw):
        """Internal function.

        Implement the query-or-set protocol shared by
        grid_columnconfigure and grid_rowconfigure for row/column INDEX.
        COMMAND is 'columnconfigure' or 'rowconfigure'.
        """
        if type(cnf) is StringType and not kw:
            # A bare option name: normalize a trailing '_' and a missing
            # leading '-' so it can be passed to Tk as a single query.
            if cnf[-1:] == '_':
                cnf = cnf[:-1]
            if cnf[:1] != '-':
                cnf = '-'+cnf
            options = (cnf,)
        else:
            options = self._options(cnf, kw)
        if not options:
            # No options at all: return every option/value as a dict,
            # converting numeric strings to int or float.
            res = self.tk.call('grid',
                       command, self._w, index)
            words = self.tk.splitlist(res)
            dict = {}
            for i in range(0, len(words), 2):
                key = words[i][1:]
                value = words[i+1]
                if not value:
                    value = None
                elif '.' in value:
                    value = getdouble(value)
                else:
                    value = getint(value)
                dict[key] = value
            return dict
        res = self.tk.call(
              ('grid', command, self._w, index)
              + options)
        if len(options) == 1:
            # Single-option query: convert Tk's string result to a number.
            if not res: return None
            # In Tk 7.5, -width can be a float
            if '.' in res: return getdouble(res)
            return getint(res)
    def grid_columnconfigure(self, index, cnf={}, **kw):
        """Configure column INDEX of a grid.
        Valid resources are minsize (minimum size of the column),
        weight (how much does additional space propagate to this column)
        and pad (how much space to let additionally)."""
        # Shared query/set logic lives in _grid_configure.
        return self._grid_configure('columnconfigure', index, cnf, kw)
    columnconfigure = grid_columnconfigure
    def grid_location(self, x, y):
        """Return a tuple of column and row which identify the cell
        at which the pixel at position X and Y inside the master
        widget is located."""
        # _getints returns None when Tcl reports an empty result.
        return self._getints(
            self.tk.call(
                'grid', 'location', self._w, x, y)) or None
    def grid_propagate(self, flag=_noarg_):
        """Set or get the status for propagation of geometry information.
        A boolean argument specifies whether the geometry information
        of the slaves will determine the size of this widget. If no argument
        is given, the current setting will be returned.
        """
        # _noarg_ is the identity sentinel declared on Misc; it lets
        # callers pass any real value (including None) explicitly.
        if flag is Misc._noarg_:
            return self._getboolean(self.tk.call(
                'grid', 'propagate', self._w))
        else:
            self.tk.call('grid', 'propagate', self._w, flag)
    def grid_rowconfigure(self, index, cnf={}, **kw):
        """Configure row INDEX of a grid.
        Valid resources are minsize (minimum size of the row),
        weight (how much does additional space propagate to this row)
        and pad (how much space to let additionally)."""
        # Shared query/set logic lives in _grid_configure.
        return self._grid_configure('rowconfigure', index, cnf, kw)
    rowconfigure = grid_rowconfigure
    def grid_size(self):
        """Return a tuple of the number of column and rows in the grid."""
        # None when the widget has no gridded slaves (empty Tcl result).
        return self._getints(
            self.tk.call('grid', 'size', self._w)) or None
    size = grid_size
def grid_slaves(self, row=None, column=None):
"""Return a list of all slaves of this widget
in its packing order."""
args = ()
if row is not None:
args = args + ('-row', row)
if column is not None:
args = args + ('-column', column)
return map(self._nametowidget,
self.tk.splitlist(self.tk.call(
('grid', 'slaves', self._w) + args)))
    # Support for the "event" command, new in Tk 4.2.
    # By Case Roole.
    def event_add(self, virtual, *sequences):
        """Bind a virtual event VIRTUAL (of the form <<Name>>)
        to an event SEQUENCE such that the virtual event is triggered
        whenever SEQUENCE occurs."""
        # Passing a tuple makes each element a separate Tcl argument.
        args = ('event', 'add', virtual) + sequences
        self.tk.call(args)
def event_delete(self, virtual, *sequences):
"""Unbind a virtual event VIRTUAL from SEQUENCE."""
args = ('event', 'delete', virtual) + sequences
self.tk.call(args)
def event_generate(self, sequence, **kw):
"""Generate an event SEQUENCE. Additional
keyword arguments specify parameter of the event
(e.g. x, y, rootx, rooty)."""
args = ('event', 'generate', self._w, sequence)
for k, v in kw.items():
args = args + ('-%s' % k, str(v))
self.tk.call(args)
    def event_info(self, virtual=None):
        """Return a list of all virtual events or the information
        about the SEQUENCE bound to the virtual event VIRTUAL."""
        # With virtual=None, Tk lists every defined virtual event.
        return self.tk.splitlist(
            self.tk.call('event', 'info', virtual))
    # Image related commands
    def image_names(self):
        """Return a list of all existing image names."""
        # NOTE(review): returns the raw Tcl result (string or tuple),
        # not necessarily a Python list -- confirm callers' expectations.
        return self.tk.call('image', 'names')
    def image_types(self):
        """Return a list of all available image types (e.g. photo bitmap)."""
        # Raw Tcl result, same caveat as image_names.
        return self.tk.call('image', 'types')
class CallWrapper:
    """Internal class. Stores function to call when some user
    defined Tcl function is called e.g. after an event occurred."""
    def __init__(self, func, subst, widget):
        """Store FUNC, SUBST and WIDGET as members."""
        self.func = func
        self.subst = subst
        self.widget = widget
    def __call__(self, *args):
        """Apply first function SUBST to arguments, than FUNC."""
        try:
            if self.subst:
                # SUBST converts the raw Tcl callback arguments (e.g. into
                # an Event instance) before FUNC sees them.
                args = self.subst(*args)
            return self.func(*args)
        except SystemExit, msg:
            # Let SystemExit propagate so the application can terminate.
            raise SystemExit, msg
        except:
            # Any other exception is reported via the root window instead
            # of propagating into the Tcl event loop.
            self.widget._report_exception()
class XView:
    """Mix-in class for querying and changing the horizontal position
    of a widget's window."""

    def xview(self, *args):
        """Query and change the horizontal position of the view."""
        result = self.tk.call(self._w, 'xview', *args)
        # With no arguments Tk reports the visible fraction pair.
        if not args:
            return self._getdoubles(result)

    def xview_moveto(self, fraction):
        """Adjusts the view in the window so that FRACTION of the
        total width of the canvas is off-screen to the left."""
        self.tk.call(self._w, 'xview', 'moveto', fraction)

    def xview_scroll(self, number, what):
        """Shift the x-view according to NUMBER which is measured in "units"
        or "pages" (WHAT)."""
        self.tk.call(self._w, 'xview', 'scroll', number, what)
class YView:
    """Mix-in class for querying and changing the vertical position
    of a widget's window."""

    def yview(self, *args):
        """Query and change the vertical position of the view."""
        result = self.tk.call(self._w, 'yview', *args)
        # With no arguments Tk reports the visible fraction pair.
        if not args:
            return self._getdoubles(result)

    def yview_moveto(self, fraction):
        """Adjusts the view in the window so that FRACTION of the
        total height of the canvas is off-screen to the top."""
        self.tk.call(self._w, 'yview', 'moveto', fraction)

    def yview_scroll(self, number, what):
        """Shift the y-view according to NUMBER which is measured in
        "units" or "pages" (WHAT)."""
        self.tk.call(self._w, 'yview', 'scroll', number, what)
class Wm:
    """Provides functions for the communication with the window manager."""
    # Every wm_* method forwards to the Tcl 'wm' subcommand of the same
    # name; passing None for an argument makes Tk report the current
    # value instead of changing it.
    def wm_aspect(self,
              minNumer=None, minDenom=None,
              maxNumer=None, maxDenom=None):
        """Instruct the window manager to set the aspect ratio (width/height)
        of this widget to be between MINNUMER/MINDENOM and MAXNUMER/MAXDENOM. Return a tuple
        of the actual values if no argument is given."""
        return self._getints(
            self.tk.call('wm', 'aspect', self._w,
                     minNumer, minDenom,
                     maxNumer, maxDenom))
    aspect = wm_aspect
    def wm_attributes(self, *args):
        """This subcommand returns or sets platform specific attributes
        The first form returns a list of the platform specific flags and
        their values. The second form returns the value for the specific
        option. The third form sets one or more of the values. The values
        are as follows:
        On Windows, -disabled gets or sets whether the window is in a
        disabled state. -toolwindow gets or sets the style of the window
        to toolwindow (as defined in the MSDN). -topmost gets or sets
        whether this is a topmost window (displays above all other
        windows).
        On Macintosh, XXXXX
        On Unix, there are currently no special attribute values.
        """
        args = ('wm', 'attributes', self._w) + args
        return self.tk.call(args)
    attributes=wm_attributes
    def wm_client(self, name=None):
        """Store NAME in WM_CLIENT_MACHINE property of this widget. Return
        current value."""
        return self.tk.call('wm', 'client', self._w, name)
    client = wm_client
    def wm_colormapwindows(self, *wlist):
        """Store list of window names (WLIST) into WM_COLORMAPWINDOWS property
        of this widget. This list contains windows whose colormaps differ from their
        parents. Return current list of widgets if WLIST is empty."""
        if len(wlist) > 1:
            wlist = (wlist,) # Tk needs a list of windows here
        args = ('wm', 'colormapwindows', self._w) + wlist
        return map(self._nametowidget, self.tk.call(args))
    colormapwindows = wm_colormapwindows
    def wm_command(self, value=None):
        """Store VALUE in WM_COMMAND property. It is the command
        which shall be used to invoke the application. Return current
        command if VALUE is None."""
        return self.tk.call('wm', 'command', self._w, value)
    command = wm_command
    def wm_deiconify(self):
        """Deiconify this widget. If it was never mapped it will not be mapped.
        On Windows it will raise this widget and give it the focus."""
        return self.tk.call('wm', 'deiconify', self._w)
    deiconify = wm_deiconify
    def wm_focusmodel(self, model=None):
        """Set focus model to MODEL. "active" means that this widget will claim
        the focus itself, "passive" means that the window manager shall give
        the focus. Return current focus model if MODEL is None."""
        return self.tk.call('wm', 'focusmodel', self._w, model)
    focusmodel = wm_focusmodel
    def wm_frame(self):
        """Return identifier for decorative frame of this widget if present."""
        return self.tk.call('wm', 'frame', self._w)
    frame = wm_frame
    def wm_geometry(self, newGeometry=None):
        """Set geometry to NEWGEOMETRY of the form =widthxheight+x+y. Return
        current value if None is given."""
        return self.tk.call('wm', 'geometry', self._w, newGeometry)
    geometry = wm_geometry
    def wm_grid(self,
         baseWidth=None, baseHeight=None,
         widthInc=None, heightInc=None):
        """Instruct the window manager that this widget shall only be
        resized on grid boundaries. WIDTHINC and HEIGHTINC are the width and
        height of a grid unit in pixels. BASEWIDTH and BASEHEIGHT are the
        number of grid units requested in Tk_GeometryRequest."""
        return self._getints(self.tk.call(
            'wm', 'grid', self._w,
            baseWidth, baseHeight, widthInc, heightInc))
    grid = wm_grid
    def wm_group(self, pathName=None):
        """Set the group leader widgets for related widgets to PATHNAME. Return
        the group leader of this widget if None is given."""
        return self.tk.call('wm', 'group', self._w, pathName)
    group = wm_group
    def wm_iconbitmap(self, bitmap=None, default=None):
        """Set bitmap for the iconified widget to BITMAP. Return
        the bitmap if None is given.

        Under Windows, the DEFAULT parameter can be used to set the icon
        for the widget and any descendents that don't have an icon set
        explicitly.  DEFAULT can be the relative path to a .ico file
        (example: root.iconbitmap(default='myicon.ico') ).  See Tk
        documentation for more information."""
        # -default and an explicit bitmap are mutually exclusive forms
        # of the Tcl command, hence the branch.
        if default:
            return self.tk.call('wm', 'iconbitmap', self._w, '-default', default)
        else:
            return self.tk.call('wm', 'iconbitmap', self._w, bitmap)
    iconbitmap = wm_iconbitmap
    def wm_iconify(self):
        """Display widget as icon."""
        return self.tk.call('wm', 'iconify', self._w)
    iconify = wm_iconify
    def wm_iconmask(self, bitmap=None):
        """Set mask for the icon bitmap of this widget. Return the
        mask if None is given."""
        return self.tk.call('wm', 'iconmask', self._w, bitmap)
    iconmask = wm_iconmask
    def wm_iconname(self, newName=None):
        """Set the name of the icon for this widget. Return the name if
        None is given."""
        return self.tk.call('wm', 'iconname', self._w, newName)
    iconname = wm_iconname
    def wm_iconposition(self, x=None, y=None):
        """Set the position of the icon of this widget to X and Y. Return
        a tuple of the current values of X and Y if None is given."""
        return self._getints(self.tk.call(
            'wm', 'iconposition', self._w, x, y))
    iconposition = wm_iconposition
    def wm_iconwindow(self, pathName=None):
        """Set widget PATHNAME to be displayed instead of icon. Return the current
        value if None is given."""
        return self.tk.call('wm', 'iconwindow', self._w, pathName)
    iconwindow = wm_iconwindow
    def wm_maxsize(self, width=None, height=None):
        """Set max WIDTH and HEIGHT for this widget. If the window is gridded
        the values are given in grid units. Return the current values if None
        is given."""
        return self._getints(self.tk.call(
            'wm', 'maxsize', self._w, width, height))
    maxsize = wm_maxsize
    def wm_minsize(self, width=None, height=None):
        """Set min WIDTH and HEIGHT for this widget. If the window is gridded
        the values are given in grid units. Return the current values if None
        is given."""
        return self._getints(self.tk.call(
            'wm', 'minsize', self._w, width, height))
    minsize = wm_minsize
    def wm_overrideredirect(self, boolean=None):
        """Instruct the window manager to ignore this widget
        if BOOLEAN is given with 1. Return the current value if None
        is given."""
        return self._getboolean(self.tk.call(
            'wm', 'overrideredirect', self._w, boolean))
    overrideredirect = wm_overrideredirect
    def wm_positionfrom(self, who=None):
        """Instruct the window manager that the position of this widget shall
        be defined by the user if WHO is "user", and by its own policy if WHO is
        "program"."""
        return self.tk.call('wm', 'positionfrom', self._w, who)
    positionfrom = wm_positionfrom
    def wm_protocol(self, name=None, func=None):
        """Bind function FUNC to command NAME for this widget.
        Return the function bound to NAME if None is given. NAME could be
        e.g. "WM_SAVE_YOURSELF" or "WM_DELETE_WINDOW"."""
        # Register callables as Tcl commands; anything else (e.g. an
        # already-registered command name or None) is passed through.
        if hasattr(func, '__call__'):
            command = self._register(func)
        else:
            command = func
        return self.tk.call(
            'wm', 'protocol', self._w, name, command)
    protocol = wm_protocol
    def wm_resizable(self, width=None, height=None):
        """Instruct the window manager whether this width can be resized
        in WIDTH or HEIGHT. Both values are boolean values."""
        return self.tk.call('wm', 'resizable', self._w, width, height)
    resizable = wm_resizable
    def wm_sizefrom(self, who=None):
        """Instruct the window manager that the size of this widget shall
        be defined by the user if WHO is "user", and by its own policy if WHO is
        "program"."""
        return self.tk.call('wm', 'sizefrom', self._w, who)
    sizefrom = wm_sizefrom
    def wm_state(self, newstate=None):
        """Query or set the state of this widget as one of normal, icon,
        iconic (see wm_iconwindow), withdrawn, or zoomed (Windows only)."""
        return self.tk.call('wm', 'state', self._w, newstate)
    state = wm_state
    def wm_title(self, string=None):
        """Set the title of this widget."""
        return self.tk.call('wm', 'title', self._w, string)
    title = wm_title
    def wm_transient(self, master=None):
        """Instruct the window manager that this widget is transient
        with regard to widget MASTER."""
        return self.tk.call('wm', 'transient', self._w, master)
    transient = wm_transient
    def wm_withdraw(self):
        """Withdraw this widget from the screen such that it is unmapped
        and forgotten by the window manager. Re-draw it with wm_deiconify."""
        return self.tk.call('wm', 'withdraw', self._w)
    withdraw = wm_withdraw
class Tk(Misc, Wm):
    """Toplevel widget of Tk which represents mostly the main window
    of an application. It has an associated Tcl interpreter."""
    # The root window's Tcl path name is always '.'.
    _w = '.'
    def __init__(self, screenName=None, baseName=None, className='Tk',
                 useTk=1, sync=0, use=None):
        """Return a new Toplevel widget on screen SCREENNAME. A new Tcl interpreter will
        be created. BASENAME will be used for the identification of the profile file (see
        readprofile).
        It is constructed from sys.argv[0] without extensions if None is given. CLASSNAME
        is the name of the widget class."""
        self.master = None
        self.children = {}
        self._tkloaded = 0
        # to avoid recursions in the getattr code in case of failure, we
        # ensure that self.tk is always _something_.
        self.tk = None
        if baseName is None:
            import sys, os
            baseName = os.path.basename(sys.argv[0])
            baseName, ext = os.path.splitext(baseName)
            if ext not in ('.py', '.pyc', '.pyo'):
                baseName = baseName + ext
        interactive = 0
        self.tk = _tkinter.create(screenName, baseName, className, interactive, wantobjects, useTk, sync, use)
        if useTk:
            self._loadtk()
        # NOTE(review): when baseName is supplied, 'sys' below relies on a
        # module-level import -- confirm the file imports sys at top level.
        if not sys.flags.ignore_environment:
            # Issue #16248: Honor the -E flag to avoid code injection.
            self.readprofile(baseName, className)
    def loadtk(self):
        # Idempotent: initialize the Tk subsystem only once.
        if not self._tkloaded:
            self.tk.loadtk()
            self._loadtk()
    def _loadtk(self):
        self._tkloaded = 1
        global _default_root
        # Version sanity checks
        tk_version = self.tk.getvar('tk_version')
        if tk_version != _tkinter.TK_VERSION:
            raise RuntimeError, \
            "tk.h version (%s) doesn't match libtk.a version (%s)" \
            % (_tkinter.TK_VERSION, tk_version)
        # Under unknown circumstances, tcl_version gets coerced to float
        tcl_version = str(self.tk.getvar('tcl_version'))
        if tcl_version != _tkinter.TCL_VERSION:
            raise RuntimeError, \
            "tcl.h version (%s) doesn't match libtcl.a version (%s)" \
            % (_tkinter.TCL_VERSION, tcl_version)
        if TkVersion < 4.0:
            raise RuntimeError, \
            "Tk 4.0 or higher is required; found Tk %s" \
            % str(TkVersion)
        # Create and register the tkerror and exit commands
        # We need to inline parts of _register here, _register
        # would register differently-named commands.
        if self._tclCommands is None:
            self._tclCommands = []
        self.tk.createcommand('tkerror', _tkerror)
        self.tk.createcommand('exit', _exit)
        self._tclCommands.append('tkerror')
        self._tclCommands.append('exit')
        if _support_default_root and not _default_root:
            _default_root = self
        self.protocol("WM_DELETE_WINDOW", self.destroy)
    def destroy(self):
        """Destroy this and all descendants widgets. This will
        end the application of this Tcl interpreter."""
        # children.values() is a list copy in Python 2, so it is safe even
        # though each child removes itself from the dict as it dies.
        for c in self.children.values(): c.destroy()
        self.tk.call('destroy', self._w)
        Misc.destroy(self)
        global _default_root
        if _support_default_root and _default_root is self:
            _default_root = None
    def readprofile(self, baseName, className):
        """Internal function. It reads BASENAME.tcl and CLASSNAME.tcl into
        the Tcl Interpreter and calls execfile on BASENAME.py and CLASSNAME.py if
        such a file exists in the home directory."""
        # NOTE: the per-user profile files found here are executed with
        # full privileges; they are trusted local configuration.
        import os
        if 'HOME' in os.environ: home = os.environ['HOME']
        else: home = os.curdir
        class_tcl = os.path.join(home, '.%s.tcl' % className)
        class_py = os.path.join(home, '.%s.py' % className)
        base_tcl = os.path.join(home, '.%s.tcl' % baseName)
        base_py = os.path.join(home, '.%s.py' % baseName)
        dir = {'self': self}
        exec 'from Tkinter import *' in dir
        if os.path.isfile(class_tcl):
            self.tk.call('source', class_tcl)
        if os.path.isfile(class_py):
            execfile(class_py, dir)
        if os.path.isfile(base_tcl):
            self.tk.call('source', base_tcl)
        if os.path.isfile(base_py):
            execfile(base_py, dir)
    def report_callback_exception(self, exc, val, tb):
        """Internal function. It reports exception on sys.stderr."""
        import traceback, sys
        sys.stderr.write("Exception in Tkinter callback\n")
        # Mirror what the interactive interpreter does so that
        # pdb.pm() works after a callback failure.
        sys.last_type = exc
        sys.last_value = val
        sys.last_traceback = tb
        traceback.print_exception(exc, val, tb)
    def __getattr__(self, attr):
        "Delegate attribute access to the interpreter object"
        return getattr(self.tk, attr)
# Ideally, the classes Pack, Place and Grid disappear, the
# pack/place/grid methods are defined on the Widget class, and
# everybody uses w.pack_whatever(...) instead of Pack.whatever(w,
# ...), with pack(), place() and grid() being short for
# pack_configure(), place_configure() and grid_columnconfigure(), and
# forget() being short for pack_forget(). As a practical matter, I'm
# afraid that there is too much code out there that may be using the
# Pack, Place or Grid class, so I leave them intact -- but only as
# backwards compatibility features. Also note that those methods that
# take a master as argument (e.g. pack_propagate) have been moved to
# the Misc class (which now incorporates all methods common between
# toplevel and interior widgets). Again, for compatibility, these are
# copied into the Pack, Place or Grid class.
# Convenience constructor: a Tcl-only interpreter -- useTk defaults to 0,
# so no Tk window is created unless explicitly requested.
def Tcl(screenName=None, baseName=None, className='Tk', useTk=0):
    return Tk(screenName, baseName, className, useTk)
class Pack:
    """Geometry manager Pack.

    Base class to use the methods pack_* in every widget."""
    # Kept for backwards compatibility; new code should call the pack_*
    # methods on the widget itself (see the comment above this class).
    def pack_configure(self, cnf={}, **kw):
        """Pack a widget in the parent widget. Use as options:
        after=widget - pack it after you have packed widget
        anchor=NSEW (or subset) - position widget according to
                                  given direction
        before=widget - pack it before you will pack widget
        expand=bool - expand widget if parent size grows
        fill=NONE or X or Y or BOTH - fill widget if widget grows
        in=master - use master to contain this widget
        in_=master - see 'in' option description
        ipadx=amount - add internal padding in x direction
        ipady=amount - add internal padding in y direction
        padx=amount - add padding in x direction
        pady=amount - add padding in y direction
        side=TOP or BOTTOM or LEFT or RIGHT -  where to add this widget.
        """
        self.tk.call(
              ('pack', 'configure', self._w)
              + self._options(cnf, kw))
    pack = configure = config = pack_configure
    def pack_forget(self):
        """Unmap this widget and do not use it for the packing order."""
        self.tk.call('pack', 'forget', self._w)
    forget = pack_forget
    def pack_info(self):
        """Return information about the packing options
        for this widget."""
        # Tk returns a flat '-option value ...' list; fold it into a dict,
        # converting window path names back to widget instances.
        words = self.tk.splitlist(
            self.tk.call('pack', 'info', self._w))
        dict = {}
        for i in range(0, len(words), 2):
            key = words[i][1:]
            value = words[i+1]
            if value[:1] == '.':
                value = self._nametowidget(value)
            dict[key] = value
        return dict
    info = pack_info
    propagate = pack_propagate = Misc.pack_propagate
    slaves = pack_slaves = Misc.pack_slaves
class Place:
    """Geometry manager Place.

    Base class to use the methods place_* in every widget."""
    # Kept for backwards compatibility; new code should call the place_*
    # methods on the widget itself.
    def place_configure(self, cnf={}, **kw):
        """Place a widget in the parent widget. Use as options:
        in=master - master relative to which the widget is placed
        in_=master - see 'in' option description
        x=amount - locate anchor of this widget at position x of master
        y=amount - locate anchor of this widget at position y of master
        relx=amount - locate anchor of this widget between 0.0 and 1.0
                      relative to width of master (1.0 is right edge)
        rely=amount - locate anchor of this widget between 0.0 and 1.0
                      relative to height of master (1.0 is bottom edge)
        anchor=NSEW (or subset) - position anchor according to given direction
        width=amount - width of this widget in pixel
        height=amount - height of this widget in pixel
        relwidth=amount - width of this widget between 0.0 and 1.0
                          relative to width of master (1.0 is the same width
                          as the master)
        relheight=amount - height of this widget between 0.0 and 1.0
                           relative to height of master (1.0 is the same
                           height as the master)
        bordermode="inside" or "outside" - whether to take border width of
                                           master widget into account
        """
        self.tk.call(
              ('place', 'configure', self._w)
              + self._options(cnf, kw))
    place = configure = config = place_configure
    def place_forget(self):
        """Unmap this widget."""
        self.tk.call('place', 'forget', self._w)
    forget = place_forget
    def place_info(self):
        """Return information about the placing options
        for this widget."""
        # Same '-option value' folding as pack_info.
        words = self.tk.splitlist(
            self.tk.call('place', 'info', self._w))
        dict = {}
        for i in range(0, len(words), 2):
            key = words[i][1:]
            value = words[i+1]
            if value[:1] == '.':
                value = self._nametowidget(value)
            dict[key] = value
        return dict
    info = place_info
    slaves = place_slaves = Misc.place_slaves
class Grid:
    """Geometry manager Grid.

    Base class to use the methods grid_* in every widget."""
    # Thanks to Masazumi Yoshikawa (yosikawa@isi.edu)
    # Kept for backwards compatibility; new code should call the grid_*
    # methods on the widget itself.
    def grid_configure(self, cnf={}, **kw):
        """Position a widget in the parent widget in a grid. Use as options:
        column=number - use cell identified with given column (starting with 0)
        columnspan=number - this widget will span several columns
        in=master - use master to contain this widget
        in_=master - see 'in' option description
        ipadx=amount - add internal padding in x direction
        ipady=amount - add internal padding in y direction
        padx=amount - add padding in x direction
        pady=amount - add padding in y direction
        row=number - use cell identified with given row (starting with 0)
        rowspan=number - this widget will span several rows
        sticky=NSEW - if cell is larger on which sides will this
                      widget stick to the cell boundary
        """
        self.tk.call(
              ('grid', 'configure', self._w)
              + self._options(cnf, kw))
    grid = configure = config = grid_configure
    bbox = grid_bbox = Misc.grid_bbox
    columnconfigure = grid_columnconfigure = Misc.grid_columnconfigure
    def grid_forget(self):
        """Unmap this widget."""
        self.tk.call('grid', 'forget', self._w)
    forget = grid_forget
    def grid_remove(self):
        """Unmap this widget but remember the grid options."""
        self.tk.call('grid', 'remove', self._w)
    def grid_info(self):
        """Return information about the options
        for positioning this widget in a grid."""
        # Same '-option value' folding as pack_info.
        words = self.tk.splitlist(
            self.tk.call('grid', 'info', self._w))
        dict = {}
        for i in range(0, len(words), 2):
            key = words[i][1:]
            value = words[i+1]
            if value[:1] == '.':
                value = self._nametowidget(value)
            dict[key] = value
        return dict
    info = grid_info
    location = grid_location = Misc.grid_location
    propagate = grid_propagate = Misc.grid_propagate
    rowconfigure = grid_rowconfigure = Misc.grid_rowconfigure
    size = grid_size = Misc.grid_size
    slaves = grid_slaves = Misc.grid_slaves
class BaseWidget(Misc):
    """Internal class."""
    def _setup(self, master, cnf):
        """Internal function. Sets up information about children."""
        if _support_default_root:
            global _default_root
            # Lazily create a root window so that a widget can be
            # constructed without an explicit master.
            if not master:
                if not _default_root:
                    _default_root = Tk()
                master = _default_root
        self.master = master
        self.tk = master.tk
        name = None
        if 'name' in cnf:
            name = cnf['name']
            del cnf['name']
        if not name:
            # Fall back to a unique name derived from the object id.
            name = repr(id(self))
        self._name = name
        if master._w=='.':
            self._w = '.' + name
        else:
            self._w = master._w + '.' + name
        self.children = {}
        # Replacing an existing widget of the same name destroys it first.
        if self._name in self.master.children:
            self.master.children[self._name].destroy()
        self.master.children[self._name] = self
    def __init__(self, master, widgetName, cnf={}, kw={}, extra=()):
        """Construct a widget with the parent widget MASTER, a name WIDGETNAME
        and appropriate options."""
        if kw:
            cnf = _cnfmerge((cnf, kw))
        self.widgetName = widgetName
        BaseWidget._setup(self, master, cnf)
        if self._tclCommands is None:
            self._tclCommands = []
        # Class-keyed options (e.g. {Pack: {...}}) are applied via the
        # geometry manager's configure after the widget is created.
        # cnf.keys() is a list copy in Python 2, so deleting while
        # iterating is safe here.
        classes = []
        for k in cnf.keys():
            if type(k) is ClassType:
                classes.append((k, cnf[k]))
                del cnf[k]
        self.tk.call(
            (widgetName, self._w) + extra + self._options(cnf))
        for k, v in classes:
            k.configure(self, v)
    def destroy(self):
        """Destroy this and all descendants widgets."""
        # values() is a list copy in Python 2, so children removing
        # themselves from the dict during iteration is safe.
        for c in self.children.values(): c.destroy()
        self.tk.call('destroy', self._w)
        if self._name in self.master.children:
            del self.master.children[self._name]
        Misc.destroy(self)
    def _do(self, name, args=()):
        # XXX Obsolete -- better use self.tk.call directly!
        return self.tk.call((self._w, name) + args)
class Widget(BaseWidget, Pack, Place, Grid):
    """Internal class.
    Base class for a widget which can be positioned with the geometry managers
    Pack, Place or Grid."""
    # Pure mix-in composition; no behavior of its own.
    pass
class Toplevel(BaseWidget, Wm):
    """Toplevel widget, e.g. for dialogs."""
    def __init__(self, master=None, cnf={}, **kw):
        """Construct a toplevel widget with the parent MASTER.
        Valid resource names: background, bd, bg, borderwidth, class,
        colormap, container, cursor, height, highlightbackground,
        highlightcolor, highlightthickness, menu, relief, screen, takefocus,
        use, visual, width."""
        if kw:
            cnf = _cnfmerge((cnf, kw))
        extra = ()
        # Some options are only valid at creation time and must be passed
        # as extra arguments to the 'toplevel' command, not via configure.
        for wmkey in ['screen', 'class_', 'class', 'visual',
                  'colormap']:
            if wmkey in cnf:
                val = cnf[wmkey]
                # TBD: a hack needed because some keys
                # are not valid as keyword arguments
                if wmkey[-1] == '_': opt = '-'+wmkey[:-1]
                else: opt = '-'+wmkey
                extra = extra + (opt, val)
                del cnf[wmkey]
        BaseWidget.__init__(self, master, 'toplevel', cnf, {}, extra)
        # Inherit icon name and title from the root window by default.
        root = self._root()
        self.iconname(root.iconname())
        self.title(root.title())
        self.protocol("WM_DELETE_WINDOW", self.destroy)
class Button(Widget):
    """Button widget."""
    def __init__(self, master=None, cnf={}, **kw):
        """Construct a button widget with the parent MASTER.

        STANDARD OPTIONS

            activebackground, activeforeground, anchor,
            background, bitmap, borderwidth, cursor,
            disabledforeground, font, foreground
            highlightbackground, highlightcolor,
            highlightthickness, image, justify,
            padx, pady, relief, repeatdelay,
            repeatinterval, takefocus, text,
            textvariable, underline, wraplength

        WIDGET-SPECIFIC OPTIONS

            command, compound, default, height,
            overrelief, state, width
        """
        Widget.__init__(self, master, 'button', cnf, kw)
    # The tkButton* helpers below invoke internal Tk procedures of the
    # same name.  NOTE(review): these procedures look like legacy Tk
    # internals -- confirm they still exist in the linked Tk version.
    def tkButtonEnter(self, *dummy):
        self.tk.call('tkButtonEnter', self._w)
    def tkButtonLeave(self, *dummy):
        self.tk.call('tkButtonLeave', self._w)
    def tkButtonDown(self, *dummy):
        self.tk.call('tkButtonDown', self._w)
    def tkButtonUp(self, *dummy):
        self.tk.call('tkButtonUp', self._w)
    def tkButtonInvoke(self, *dummy):
        self.tk.call('tkButtonInvoke', self._w)
    def flash(self):
        """Flash the button.

        This is accomplished by redisplaying
        the button several times, alternating between active and
        normal colors. At the end of the flash the button is left
        in the same normal/active state as when the command was
        invoked. This command is ignored if the button's state is
        disabled.
        """
        self.tk.call(self._w, 'flash')
    def invoke(self):
        """Invoke the command associated with the button.

        The return value is the return value from the command,
        or an empty string if there is no command associated with
        the button. This command is ignored if the button's state
        is disabled.
        """
        return self.tk.call(self._w, 'invoke')
# Indices:
# XXX I don't like these -- take them away
def AtEnd():
    """Return the Tk index string denoting the end position."""
    return 'end'
def AtInsert(*args):
    """Return the Tk 'insert' index, extended with any truthy submodifiers."""
    parts = ['insert']
    for arg in args:
        # Falsy entries (empty string, None) are skipped, as before.
        if arg:
            parts.append(arg)
    return ' '.join(parts)
def AtSelFirst():
    """Return the Tk index of the first selected character."""
    return 'sel.first'
def AtSelLast():
    """Return the Tk index of the last selected character."""
    return 'sel.last'
def At(x, y=None):
    """Return a Tk '@x' (or '@x,y') coordinate index string."""
    # Guard clause instead of if/else; formatting is unchanged.
    if y is None:
        return '@%r' % (x,)
    return '@%r,%r' % (x, y)
class Canvas(Widget, XView, YView):
"""Canvas widget to display graphical elements like lines or text."""
    def __init__(self, master=None, cnf={}, **kw):
        """Construct a canvas widget with the parent MASTER.

        Valid resource names: background, bd, bg, borderwidth, closeenough,
        confine, cursor, height, highlightbackground, highlightcolor,
        highlightthickness, insertbackground, insertborderwidth,
        insertofftime, insertontime, insertwidth, offset, relief,
        scrollregion, selectbackground, selectborderwidth, selectforeground,
        state, takefocus, width, xscrollcommand, xscrollincrement,
        yscrollcommand, yscrollincrement."""
        Widget.__init__(self, master, 'canvas', cnf, kw)
    def addtag(self, *args):
        """Internal function.  Forward ARGS to the canvas 'addtag'
        subcommand; used by all addtag_* convenience methods."""
        self.tk.call((self._w, 'addtag') + args)
    def addtag_above(self, newtag, tagOrId):
        """Add tag NEWTAG to all items above TAGORID."""
        self.addtag(newtag, 'above', tagOrId)
def addtag_all(self, newtag):
"""Add tag NEWTAG to all items."""
self.addtag(newtag, 'all')
def addtag_below(self, newtag, tagOrId):
"""Add tag NEWTAG to all items below TAGORID."""
self.addtag(newtag, 'below', tagOrId)
def addtag_closest(self, newtag, x, y, halo=None, start=None):
"""Add tag NEWTAG to item which is closest to pixel at X, Y.
If several match take the top-most.
All items closer than HALO are considered overlapping (all are
closests). If START is specified the next below this tag is taken."""
self.addtag(newtag, 'closest', x, y, halo, start)
def addtag_enclosed(self, newtag, x1, y1, x2, y2):
"""Add tag NEWTAG to all items in the rectangle defined
by X1,Y1,X2,Y2."""
self.addtag(newtag, 'enclosed', x1, y1, x2, y2)
def addtag_overlapping(self, newtag, x1, y1, x2, y2):
"""Add tag NEWTAG to all items which overlap the rectangle
defined by X1,Y1,X2,Y2."""
self.addtag(newtag, 'overlapping', x1, y1, x2, y2)
def addtag_withtag(self, newtag, tagOrId):
"""Add tag NEWTAG to all items with TAGORID."""
self.addtag(newtag, 'withtag', tagOrId)
def bbox(self, *args):
"""Return a tuple of X1,Y1,X2,Y2 coordinates for a rectangle
which encloses all items with tags specified as arguments."""
return self._getints(
self.tk.call((self._w, 'bbox') + args)) or None
def tag_unbind(self, tagOrId, sequence, funcid=None):
"""Unbind for all items with TAGORID for event SEQUENCE the
function identified with FUNCID."""
self.tk.call(self._w, 'bind', tagOrId, sequence, '')
if funcid:
self.deletecommand(funcid)
def tag_bind(self, tagOrId, sequence=None, func=None, add=None):
"""Bind to all items with TAGORID at event SEQUENCE a call to function FUNC.
An additional boolean parameter ADD specifies whether FUNC will be
called additionally to the other bound function or whether it will
replace the previous function. See bind for the return value."""
return self._bind((self._w, 'bind', tagOrId),
sequence, func, add)
def canvasx(self, screenx, gridspacing=None):
"""Return the canvas x coordinate of pixel position SCREENX rounded
to nearest multiple of GRIDSPACING units."""
return getdouble(self.tk.call(
self._w, 'canvasx', screenx, gridspacing))
def canvasy(self, screeny, gridspacing=None):
"""Return the canvas y coordinate of pixel position SCREENY rounded
to nearest multiple of GRIDSPACING units."""
return getdouble(self.tk.call(
self._w, 'canvasy', screeny, gridspacing))
def coords(self, *args):
"""Return a list of coordinates for the item given in ARGS."""
# XXX Should use _flatten on args
return map(getdouble,
self.tk.splitlist(
self.tk.call((self._w, 'coords') + args)))
def _create(self, itemType, args, kw): # Args: (val, val, ..., cnf={})
"""Internal function."""
args = _flatten(args)
cnf = args[-1]
if type(cnf) in (DictionaryType, TupleType):
args = args[:-1]
else:
cnf = {}
return getint(self.tk.call(
self._w, 'create', itemType,
*(args + self._options(cnf, kw))))
def create_arc(self, *args, **kw):
"""Create arc shaped region with coordinates x1,y1,x2,y2."""
return self._create('arc', args, kw)
def create_bitmap(self, *args, **kw):
"""Create bitmap with coordinates x1,y1."""
return self._create('bitmap', args, kw)
def create_image(self, *args, **kw):
"""Create image item with coordinates x1,y1."""
return self._create('image', args, kw)
def create_line(self, *args, **kw):
"""Create line with coordinates x1,y1,...,xn,yn."""
return self._create('line', args, kw)
def create_oval(self, *args, **kw):
"""Create oval with coordinates x1,y1,x2,y2."""
return self._create('oval', args, kw)
def create_polygon(self, *args, **kw):
"""Create polygon with coordinates x1,y1,...,xn,yn."""
return self._create('polygon', args, kw)
def create_rectangle(self, *args, **kw):
"""Create rectangle with coordinates x1,y1,x2,y2."""
return self._create('rectangle', args, kw)
def create_text(self, *args, **kw):
"""Create text with coordinates x1,y1."""
return self._create('text', args, kw)
def create_window(self, *args, **kw):
"""Create window with coordinates x1,y1,x2,y2."""
return self._create('window', args, kw)
def dchars(self, *args):
"""Delete characters of text items identified by tag or id in ARGS (possibly
several times) from FIRST to LAST character (including)."""
self.tk.call((self._w, 'dchars') + args)
def delete(self, *args):
"""Delete items identified by all tag or ids contained in ARGS."""
self.tk.call((self._w, 'delete') + args)
def dtag(self, *args):
"""Delete tag or id given as last arguments in ARGS from items
identified by first argument in ARGS."""
self.tk.call((self._w, 'dtag') + args)
def find(self, *args):
"""Internal function."""
return self._getints(
self.tk.call((self._w, 'find') + args)) or ()
def find_above(self, tagOrId):
"""Return items above TAGORID."""
return self.find('above', tagOrId)
def find_all(self):
"""Return all items."""
return self.find('all')
def find_below(self, tagOrId):
"""Return all items below TAGORID."""
return self.find('below', tagOrId)
def find_closest(self, x, y, halo=None, start=None):
"""Return item which is closest to pixel at X, Y.
If several match take the top-most.
All items closer than HALO are considered overlapping (all are
closests). If START is specified the next below this tag is taken."""
return self.find('closest', x, y, halo, start)
def find_enclosed(self, x1, y1, x2, y2):
"""Return all items in rectangle defined
by X1,Y1,X2,Y2."""
return self.find('enclosed', x1, y1, x2, y2)
def find_overlapping(self, x1, y1, x2, y2):
"""Return all items which overlap the rectangle
defined by X1,Y1,X2,Y2."""
return self.find('overlapping', x1, y1, x2, y2)
def find_withtag(self, tagOrId):
"""Return all items with TAGORID."""
return self.find('withtag', tagOrId)
def focus(self, *args):
"""Set focus to the first item specified in ARGS."""
return self.tk.call((self._w, 'focus') + args)
def gettags(self, *args):
"""Return tags associated with the first item specified in ARGS."""
return self.tk.splitlist(
self.tk.call((self._w, 'gettags') + args))
def icursor(self, *args):
"""Set cursor at position POS in the item identified by TAGORID.
In ARGS TAGORID must be first."""
self.tk.call((self._w, 'icursor') + args)
def index(self, *args):
"""Return position of cursor as integer in item specified in ARGS."""
return getint(self.tk.call((self._w, 'index') + args))
def insert(self, *args):
"""Insert TEXT in item TAGORID at position POS. ARGS must
be TAGORID POS TEXT."""
self.tk.call((self._w, 'insert') + args)
def itemcget(self, tagOrId, option):
"""Return the resource value for an OPTION for item TAGORID."""
return self.tk.call(
(self._w, 'itemcget') + (tagOrId, '-'+option))
def itemconfigure(self, tagOrId, cnf=None, **kw):
"""Configure resources of an item TAGORID.
The values for resources are specified as keyword
arguments. To get an overview about
the allowed keyword arguments call the method without arguments.
"""
return self._configure(('itemconfigure', tagOrId), cnf, kw)
itemconfig = itemconfigure
# lower, tkraise/lift hide Misc.lower, Misc.tkraise/lift,
# so the preferred name for them is tag_lower, tag_raise
# (similar to tag_bind, and similar to the Text widget);
# unfortunately can't delete the old ones yet (maybe in 1.6)
def tag_lower(self, *args):
"""Lower an item TAGORID given in ARGS
(optional below another item)."""
self.tk.call((self._w, 'lower') + args)
lower = tag_lower
def move(self, *args):
"""Move an item TAGORID given in ARGS."""
self.tk.call((self._w, 'move') + args)
def postscript(self, cnf={}, **kw):
"""Print the contents of the canvas to a postscript
file. Valid options: colormap, colormode, file, fontmap,
height, pageanchor, pageheight, pagewidth, pagex, pagey,
rotate, witdh, x, y."""
return self.tk.call((self._w, 'postscript') +
self._options(cnf, kw))
def tag_raise(self, *args):
"""Raise an item TAGORID given in ARGS
(optional above another item)."""
self.tk.call((self._w, 'raise') + args)
lift = tkraise = tag_raise
def scale(self, *args):
"""Scale item TAGORID with XORIGIN, YORIGIN, XSCALE, YSCALE."""
self.tk.call((self._w, 'scale') + args)
def scan_mark(self, x, y):
"""Remember the current X, Y coordinates."""
self.tk.call(self._w, 'scan', 'mark', x, y)
def scan_dragto(self, x, y, gain=10):
"""Adjust the view of the canvas to GAIN times the
difference between X and Y and the coordinates given in
scan_mark."""
self.tk.call(self._w, 'scan', 'dragto', x, y, gain)
def select_adjust(self, tagOrId, index):
"""Adjust the end of the selection near the cursor of an item TAGORID to index."""
self.tk.call(self._w, 'select', 'adjust', tagOrId, index)
def select_clear(self):
"""Clear the selection if it is in this widget."""
self.tk.call(self._w, 'select', 'clear')
def select_from(self, tagOrId, index):
"""Set the fixed end of a selection in item TAGORID to INDEX."""
self.tk.call(self._w, 'select', 'from', tagOrId, index)
def select_item(self):
"""Return the item which has the selection."""
return self.tk.call(self._w, 'select', 'item') or None
def select_to(self, tagOrId, index):
"""Set the variable end of a selection in item TAGORID to INDEX."""
self.tk.call(self._w, 'select', 'to', tagOrId, index)
def type(self, tagOrId):
"""Return the type of the item TAGORID."""
return self.tk.call(self._w, 'type', tagOrId) or None
class Checkbutton(Widget):
    """Checkbutton widget which is either in on- or off-state."""
    def __init__(self, master=None, cnf={}, **kw):
        """Construct a checkbutton widget with the parent MASTER.
        Valid resource names: activebackground, activeforeground, anchor,
        background, bd, bg, bitmap, borderwidth, command, cursor,
        disabledforeground, fg, font, foreground, height,
        highlightbackground, highlightcolor, highlightthickness, image,
        indicatoron, justify, offvalue, onvalue, padx, pady, relief,
        selectcolor, selectimage, state, takefocus, text, textvariable,
        underline, variable, width, wraplength."""
        Widget.__init__(self, master, 'checkbutton', cnf, kw)
    def deselect(self):
        """Put the button in off-state."""
        self.tk.call(self._w, 'deselect')
    def flash(self):
        """Flash the button."""
        self.tk.call(self._w, 'flash')
    def invoke(self):
        """Toggle the button and invoke a command if given as resource.
        Return the result of the invoked command."""
        return self.tk.call(self._w, 'invoke')
    def select(self):
        """Put the button in on-state."""
        self.tk.call(self._w, 'select')
    def toggle(self):
        """Toggle the button."""
        self.tk.call(self._w, 'toggle')
class Entry(Widget, XView):
    """Entry widget which allows to display simple text."""
    def __init__(self, master=None, cnf={}, **kw):
        """Construct an entry widget with the parent MASTER.
        Valid resource names: background, bd, bg, borderwidth, cursor,
        exportselection, fg, font, foreground, highlightbackground,
        highlightcolor, highlightthickness, insertbackground,
        insertborderwidth, insertofftime, insertontime, insertwidth,
        invalidcommand, invcmd, justify, relief, selectbackground,
        selectborderwidth, selectforeground, show, state, takefocus,
        textvariable, validate, validatecommand, vcmd, width,
        xscrollcommand."""
        Widget.__init__(self, master, 'entry', cnf, kw)
    def delete(self, first, last=None):
        """Delete text from FIRST to LAST (not included)."""
        self.tk.call(self._w, 'delete', first, last)
    def get(self):
        """Return the text."""
        return self.tk.call(self._w, 'get')
    def icursor(self, index):
        """Insert cursor at INDEX."""
        self.tk.call(self._w, 'icursor', index)
    def index(self, index):
        """Return position of cursor."""
        return getint(self.tk.call(
            self._w, 'index', index))
    def insert(self, index, string):
        """Insert STRING at INDEX."""
        self.tk.call(self._w, 'insert', index, string)
    def scan_mark(self, x):
        """Remember the current X coordinate (an entry scrolls
        only horizontally)."""
        self.tk.call(self._w, 'scan', 'mark', x)
    def scan_dragto(self, x):
        """Adjust the view of the entry to 10 times the
        difference between X and the coordinate given in
        scan_mark."""
        self.tk.call(self._w, 'scan', 'dragto', x)
    def selection_adjust(self, index):
        """Adjust the end of the selection near the cursor to INDEX."""
        self.tk.call(self._w, 'selection', 'adjust', index)
    select_adjust = selection_adjust
    def selection_clear(self):
        """Clear the selection if it is in this widget."""
        self.tk.call(self._w, 'selection', 'clear')
    select_clear = selection_clear
    def selection_from(self, index):
        """Set the fixed end of a selection to INDEX."""
        self.tk.call(self._w, 'selection', 'from', index)
    select_from = selection_from
    def selection_present(self):
        """Return True if there are characters selected in the entry, False
        otherwise."""
        return self.tk.getboolean(
            self.tk.call(self._w, 'selection', 'present'))
    select_present = selection_present
    def selection_range(self, start, end):
        """Set the selection from START to END (not included)."""
        self.tk.call(self._w, 'selection', 'range', start, end)
    select_range = selection_range
    def selection_to(self, index):
        """Set the variable end of a selection to INDEX."""
        self.tk.call(self._w, 'selection', 'to', index)
    select_to = selection_to
class Frame(Widget):
    """Frame widget which may contain other widgets and can have a 3D border."""
    def __init__(self, master=None, cnf={}, **kw):
        """Construct a frame widget with the parent MASTER.
        Valid resource names: background, bd, bg, borderwidth, class,
        colormap, container, cursor, height, highlightbackground,
        highlightcolor, highlightthickness, relief, takefocus, visual, width."""
        cnf = _cnfmerge((cnf, kw))
        extra = ()
        # The window class must go to Tk as a creation-time argument, not a
        # configure option; accept either spelling, preferring 'class_'.
        for key in ('class_', 'class'):
            if key in cnf:
                extra = ('-class', cnf.pop(key))
                break
        Widget.__init__(self, master, 'frame', cnf, {}, extra)
class Label(Widget):
    """Label widget which can display text and bitmaps."""
    # Label adds no methods of its own; all behaviour beyond construction
    # comes from the generic Widget/Misc machinery.
    def __init__(self, master=None, cnf={}, **kw):
        """Construct a label widget with the parent MASTER.
        STANDARD OPTIONS
            activebackground, activeforeground, anchor,
            background, bitmap, borderwidth, cursor,
            disabledforeground, font, foreground,
            highlightbackground, highlightcolor,
            highlightthickness, image, justify,
            padx, pady, relief, takefocus, text,
            textvariable, underline, wraplength
        WIDGET-SPECIFIC OPTIONS
            height, state, width
        """
        Widget.__init__(self, master, 'label', cnf, kw)
class Listbox(Widget, XView, YView):
    """Listbox widget which can display a list of strings."""
    def __init__(self, master=None, cnf={}, **kw):
        """Construct a listbox widget with the parent MASTER.
        Valid resource names: background, bd, bg, borderwidth, cursor,
        exportselection, fg, font, foreground, height, highlightbackground,
        highlightcolor, highlightthickness, relief, selectbackground,
        selectborderwidth, selectforeground, selectmode, setgrid, takefocus,
        width, xscrollcommand, yscrollcommand, listvariable."""
        Widget.__init__(self, master, 'listbox', cnf, kw)
    def activate(self, index):
        """Activate item identified by INDEX."""
        self.tk.call(self._w, 'activate', index)
    def bbox(self, *args):
        """Return a tuple of X1,Y1,X2,Y2 coordinates for a rectangle
        which encloses the item identified by index in ARGS.
        Returns None when Tk reports no bounding box."""
        return self._getints(
            self.tk.call((self._w, 'bbox') + args)) or None
    def curselection(self):
        """Return list of indices of currently selected item."""
        # XXX Ought to apply self._getints()...
        return self.tk.splitlist(self.tk.call(
            self._w, 'curselection'))
    def delete(self, first, last=None):
        """Delete items from FIRST to LAST (not included)."""
        self.tk.call(self._w, 'delete', first, last)
    def get(self, first, last=None):
        """Get list of items from FIRST to LAST (not included)."""
        # With LAST given Tk returns a Tcl list; without it, a single item.
        if last:
            return self.tk.splitlist(self.tk.call(
                self._w, 'get', first, last))
        else:
            return self.tk.call(self._w, 'get', first)
    def index(self, index):
        """Return index of item identified with INDEX."""
        i = self.tk.call(self._w, 'index', index)
        if i == 'none': return None
        return getint(i)
    def insert(self, index, *elements):
        """Insert ELEMENTS at INDEX."""
        self.tk.call((self._w, 'insert', index) + elements)
    def nearest(self, y):
        """Get index of item which is nearest to y coordinate Y."""
        return getint(self.tk.call(
            self._w, 'nearest', y))
    def scan_mark(self, x, y):
        """Remember the current X, Y coordinates."""
        self.tk.call(self._w, 'scan', 'mark', x, y)
    def scan_dragto(self, x, y):
        """Adjust the view of the listbox to 10 times the
        difference between X and Y and the coordinates given in
        scan_mark."""
        self.tk.call(self._w, 'scan', 'dragto', x, y)
    def see(self, index):
        """Scroll such that INDEX is visible."""
        self.tk.call(self._w, 'see', index)
    def selection_anchor(self, index):
        """Set the fixed end of the selection to INDEX."""
        self.tk.call(self._w, 'selection', 'anchor', index)
    select_anchor = selection_anchor
    def selection_clear(self, first, last=None):
        """Clear the selection from FIRST to LAST (not included)."""
        self.tk.call(self._w,
                 'selection', 'clear', first, last)
    select_clear = selection_clear
    def selection_includes(self, index):
        """Return 1 if INDEX is part of the selection."""
        return self.tk.getboolean(self.tk.call(
            self._w, 'selection', 'includes', index))
    select_includes = selection_includes
    def selection_set(self, first, last=None):
        """Set the selection from FIRST to LAST (not included) without
        changing the currently selected elements."""
        self.tk.call(self._w, 'selection', 'set', first, last)
    select_set = selection_set
    def size(self):
        """Return the number of elements in the listbox."""
        return getint(self.tk.call(self._w, 'size'))
    def itemcget(self, index, option):
        """Return the resource value for an ITEM and an OPTION."""
        return self.tk.call(
            (self._w, 'itemcget') + (index, '-'+option))
    def itemconfigure(self, index, cnf=None, **kw):
        """Configure resources of an ITEM.

        The values for resources are specified as keyword arguments.
        To get an overview about the allowed keyword arguments
        call the method without arguments.
        Valid resource names: background, bg, foreground, fg,
        selectbackground, selectforeground."""
        return self._configure(('itemconfigure', index), cnf, kw)
    itemconfig = itemconfigure
class Menu(Widget):
    """Menu widget which allows to display menu bars, pull-down menus and pop-up menus."""
    def __init__(self, master=None, cnf={}, **kw):
        """Construct menu widget with the parent MASTER.
        Valid resource names: activebackground, activeborderwidth,
        activeforeground, background, bd, bg, borderwidth, cursor,
        disabledforeground, fg, font, foreground, postcommand, relief,
        selectcolor, takefocus, tearoff, tearoffcommand, title, type."""
        Widget.__init__(self, master, 'menu', cnf, kw)
    def tk_bindForTraversal(self):
        # Intentionally a no-op: keyboard traversal is built in since Tk 4.0.
        pass # obsolete since Tk 4.0
    def tk_mbPost(self):
        self.tk.call('tk_mbPost', self._w)
    def tk_mbUnpost(self):
        self.tk.call('tk_mbUnpost')
    def tk_traverseToMenu(self, char):
        self.tk.call('tk_traverseToMenu', self._w, char)
    def tk_traverseWithinMenu(self, char):
        self.tk.call('tk_traverseWithinMenu', self._w, char)
    def tk_getMenuButtons(self):
        return self.tk.call('tk_getMenuButtons', self._w)
    def tk_nextMenu(self, count):
        self.tk.call('tk_nextMenu', count)
    def tk_nextMenuEntry(self, count):
        self.tk.call('tk_nextMenuEntry', count)
    def tk_invokeMenu(self):
        self.tk.call('tk_invokeMenu', self._w)
    def tk_firstMenu(self):
        self.tk.call('tk_firstMenu', self._w)
    def tk_mbButtonDown(self):
        self.tk.call('tk_mbButtonDown', self._w)
    def tk_popup(self, x, y, entry=""):
        """Post the menu at position X,Y with entry ENTRY."""
        self.tk.call('tk_popup', self._w, x, y, entry)
    def activate(self, index):
        """Activate entry at INDEX."""
        self.tk.call(self._w, 'activate', index)
    def add(self, itemType, cnf={}, **kw):
        """Internal function: add an entry of ITEMTYPE at the end of the menu."""
        self.tk.call((self._w, 'add', itemType) +
                 self._options(cnf, kw))
    def add_cascade(self, cnf={}, **kw):
        """Add hierarchical menu item."""
        self.add('cascade', cnf or kw)
    def add_checkbutton(self, cnf={}, **kw):
        """Add checkbutton menu item."""
        self.add('checkbutton', cnf or kw)
    def add_command(self, cnf={}, **kw):
        """Add command menu item."""
        self.add('command', cnf or kw)
    def add_radiobutton(self, cnf={}, **kw):
        """Add radio menu item."""
        self.add('radiobutton', cnf or kw)
    def add_separator(self, cnf={}, **kw):
        """Add separator."""
        self.add('separator', cnf or kw)
    def insert(self, index, itemType, cnf={}, **kw):
        """Internal function: insert an entry of ITEMTYPE at INDEX."""
        self.tk.call((self._w, 'insert', index, itemType) +
                 self._options(cnf, kw))
    def insert_cascade(self, index, cnf={}, **kw):
        """Add hierarchical menu item at INDEX."""
        self.insert(index, 'cascade', cnf or kw)
    def insert_checkbutton(self, index, cnf={}, **kw):
        """Add checkbutton menu item at INDEX."""
        self.insert(index, 'checkbutton', cnf or kw)
    def insert_command(self, index, cnf={}, **kw):
        """Add command menu item at INDEX."""
        self.insert(index, 'command', cnf or kw)
    def insert_radiobutton(self, index, cnf={}, **kw):
        """Add radio menu item at INDEX."""
        self.insert(index, 'radiobutton', cnf or kw)
    def insert_separator(self, index, cnf={}, **kw):
        """Add separator at INDEX."""
        self.insert(index, 'separator', cnf or kw)
    def delete(self, index1, index2=None):
        """Delete menu items between INDEX1 and INDEX2 (included)."""
        if index2 is None:
            index2 = index1
        num_index1, num_index2 = self.index(index1), self.index(index2)
        if (num_index1 is None) or (num_index2 is None):
            num_index1, num_index2 = 0, -1
        # Unregister each entry's Tcl command callback before deleting the
        # entries themselves, so the interpreter does not leak commands.
        for i in range(num_index1, num_index2 + 1):
            if 'command' in self.entryconfig(i):
                c = str(self.entrycget(i, 'command'))
                if c:
                    self.deletecommand(c)
        self.tk.call(self._w, 'delete', index1, index2)
    def entrycget(self, index, option):
        """Return the resource value of an menu item for OPTION at INDEX."""
        return self.tk.call(self._w, 'entrycget', index, '-' + option)
    def entryconfigure(self, index, cnf=None, **kw):
        """Configure a menu item at INDEX."""
        return self._configure(('entryconfigure', index), cnf, kw)
    entryconfig = entryconfigure
    def index(self, index):
        """Return the index of a menu item identified by INDEX."""
        i = self.tk.call(self._w, 'index', index)
        if i == 'none': return None
        return getint(i)
    def invoke(self, index):
        """Invoke a menu item identified by INDEX and execute
        the associated command."""
        return self.tk.call(self._w, 'invoke', index)
    def post(self, x, y):
        """Display a menu at position X,Y."""
        self.tk.call(self._w, 'post', x, y)
    def type(self, index):
        """Return the type of the menu item at INDEX."""
        return self.tk.call(self._w, 'type', index)
    def unpost(self):
        """Unmap a menu."""
        self.tk.call(self._w, 'unpost')
    def yposition(self, index):
        """Return the y-position of the topmost pixel of the menu item at INDEX."""
        return getint(self.tk.call(
            self._w, 'yposition', index))
class Menubutton(Widget):
    """Menubutton widget, obsolete since Tk8.0."""
    # No widget-specific methods: everything is handled by the generic
    # Widget machinery. Kept only for backward compatibility.
    def __init__(self, master=None, cnf={}, **kw):
        Widget.__init__(self, master, 'menubutton', cnf, kw)
class Message(Widget):
    """Message widget to display multiline text. Obsolete since Label does it too."""
    # No widget-specific methods: everything is handled by the generic
    # Widget machinery. Kept only for backward compatibility.
    def __init__(self, master=None, cnf={}, **kw):
        Widget.__init__(self, master, 'message', cnf, kw)
class Radiobutton(Widget):
    """Radiobutton widget which shows only one of several buttons in on-state."""
    def __init__(self, master=None, cnf={}, **kw):
        """Construct a radiobutton widget with the parent MASTER.
        Valid resource names: activebackground, activeforeground, anchor,
        background, bd, bg, bitmap, borderwidth, command, cursor,
        disabledforeground, fg, font, foreground, height,
        highlightbackground, highlightcolor, highlightthickness, image,
        indicatoron, justify, padx, pady, relief, selectcolor, selectimage,
        state, takefocus, text, textvariable, underline, value, variable,
        width, wraplength."""
        Widget.__init__(self, master, 'radiobutton', cnf, kw)
    def deselect(self):
        """Put the button in off-state."""
        self.tk.call(self._w, 'deselect')
    def flash(self):
        """Flash the button."""
        self.tk.call(self._w, 'flash')
    def invoke(self):
        """Toggle the button and invoke a command if given as resource.
        Return the result of the invoked command."""
        return self.tk.call(self._w, 'invoke')
    def select(self):
        """Put the button in on-state."""
        self.tk.call(self._w, 'select')
class Scale(Widget):
    """Scale widget which can display a numerical scale."""
    def __init__(self, master=None, cnf={}, **kw):
        """Construct a scale widget with the parent MASTER.
        Valid resource names: activebackground, background, bigincrement, bd,
        bg, borderwidth, command, cursor, digits, fg, font, foreground, from,
        highlightbackground, highlightcolor, highlightthickness, label,
        length, orient, relief, repeatdelay, repeatinterval, resolution,
        showvalue, sliderlength, sliderrelief, state, takefocus,
        tickinterval, to, troughcolor, variable, width."""
        Widget.__init__(self, master, 'scale', cnf, kw)
    def get(self):
        """Get the current value as integer or float."""
        value = self.tk.call(self._w, 'get')
        # Prefer an int when the Tcl result parses as one; otherwise a float.
        try:
            return getint(value)
        except ValueError:
            return getdouble(value)
    def set(self, value):
        """Set the value to VALUE."""
        self.tk.call(self._w, 'set', value)
    def coords(self, value=None):
        """Return a tuple (X,Y) of the point along the centerline of the
        trough that corresponds to VALUE or the current value if None is
        given."""
        return self._getints(self.tk.call(self._w, 'coords', value))
    def identify(self, x, y):
        """Return where the point X,Y lies. Valid return values are "slider",
        "trough1" and "trough2"."""
        return self.tk.call(self._w, 'identify', x, y)
class Scrollbar(Widget):
    """Scrollbar widget which displays a slider at a certain position."""
    def __init__(self, master=None, cnf={}, **kw):
        """Construct a scrollbar widget with the parent MASTER.
        Valid resource names: activebackground, activerelief,
        background, bd, bg, borderwidth, command, cursor,
        elementborderwidth, highlightbackground,
        highlightcolor, highlightthickness, jump, orient,
        relief, repeatdelay, repeatinterval, takefocus,
        troughcolor, width."""
        Widget.__init__(self, master, 'scrollbar', cnf, kw)
    def activate(self, index):
        """Display the element at INDEX with activebackground and activerelief.
        INDEX can be "arrow1","slider" or "arrow2"."""
        self.tk.call(self._w, 'activate', index)
    def delta(self, deltax, deltay):
        """Return the fractional change of the scrollbar setting if it
        would be moved by DELTAX or DELTAY pixels."""
        return getdouble(
            self.tk.call(self._w, 'delta', deltax, deltay))
    def fraction(self, x, y):
        """Return the fractional value which corresponds to a slider
        position of X,Y."""
        return getdouble(self.tk.call(self._w, 'fraction', x, y))
    def identify(self, x, y):
        """Return the element under position X,Y as one of
        "arrow1","slider","arrow2" or ""."""
        return self.tk.call(self._w, 'identify', x, y)
    def get(self):
        """Return the current fractional values (upper and lower end)
        of the slider position, as a tuple of floats."""
        return self._getdoubles(self.tk.call(self._w, 'get'))
    def set(self, *args):
        """Set the fractional values of the slider position (upper and
        lower ends as value between 0 and 1)."""
        self.tk.call((self._w, 'set') + args)
class Text(Widget, XView, YView):
"""Text widget which can display text in various forms."""
    def __init__(self, master=None, cnf={}, **kw):
        """Construct a text widget with the parent MASTER.
        STANDARD OPTIONS
            background, borderwidth, cursor,
            exportselection, font, foreground,
            highlightbackground, highlightcolor,
            highlightthickness, insertbackground,
            insertborderwidth, insertofftime,
            insertontime, insertwidth, padx, pady,
            relief, selectbackground,
            selectborderwidth, selectforeground,
            setgrid, takefocus,
            xscrollcommand, yscrollcommand,
        WIDGET-SPECIFIC OPTIONS
            autoseparators, height, maxundo,
            spacing1, spacing2, spacing3,
            state, tabs, undo, width, wrap,
        """
        # All option handling is delegated to the generic Widget initializer.
        Widget.__init__(self, master, 'text', cnf, kw)
    def bbox(self, *args):
        """Return a tuple of (x,y,width,height) which gives the bounding
        box of the visible part of the character at the index in ARGS.
        Returns None when Tk reports no bounding box (empty result)."""
        return self._getints(
            self.tk.call((self._w, 'bbox') + args)) or None
    def tk_textSelectTo(self, index):
        """Internal function: invoke the Tcl 'tk_textSelectTo' procedure."""
        self.tk.call('tk_textSelectTo', self._w, index)
    def tk_textBackspace(self):
        """Internal function: invoke the Tcl 'tk_textBackspace' procedure."""
        self.tk.call('tk_textBackspace', self._w)
    def tk_textIndexCloser(self, a, b, c):
        """Internal function: invoke the Tcl 'tk_textIndexCloser' procedure."""
        self.tk.call('tk_textIndexCloser', self._w, a, b, c)
    def tk_textResetAnchor(self, index):
        """Internal function: invoke the Tcl 'tk_textResetAnchor' procedure."""
        self.tk.call('tk_textResetAnchor', self._w, index)
    def compare(self, index1, op, index2):
        """Return whether between index INDEX1 and index INDEX2 the
        relation OP is satisfied. OP is one of <, <=, ==, >=, >, or !=."""
        # getboolean converts the Tcl '0'/'1' result into a Python boolean.
        return self.tk.getboolean(self.tk.call(
            self._w, 'compare', index1, op, index2))
    def debug(self, boolean=None):
        """Turn on the internal consistency checks of the B-Tree inside the text
        widget according to BOOLEAN.
        With BOOLEAN=None this acts as a query and returns the current setting."""
        return self.tk.getboolean(self.tk.call(
            self._w, 'debug', boolean))
    def delete(self, index1, index2=None):
        """Delete the characters between INDEX1 and INDEX2 (not included)."""
        # A None INDEX2 is passed straight through to Tk.
        self.tk.call(self._w, 'delete', index1, index2)
    def dlineinfo(self, index):
        """Return tuple (x,y,width,height,baseline) giving the bounding box
        and baseline position of the visible part of the line containing
        the character at INDEX."""
        # _getints parses the Tcl list result into a tuple of ints.
        return self._getints(self.tk.call(self._w, 'dlineinfo', index))
    def dump(self, index1, index2=None, command=None, **kw):
        """Return the contents of the widget between index1 and index2.
        The type of contents returned is filtered based on the keyword
        parameters; if 'all', 'image', 'mark', 'tag', 'text', or 'window' are
        given and true, then the corresponding items are returned. The result
        is a list of triples of the form (key, value, index). If none of the
        keywords are true then 'all' is used by default.
        If the 'command' argument is given, it is called once for each element
        of the list of triples, with the values of each triple serving as the
        arguments to the function. In this case the list is not returned."""
        args = []
        func_name = None
        result = None
        if not command:
            # Never call the dump command without the -command flag, since the
            # output could involve Tcl quoting and would be a pain to parse
            # right. Instead just set the command to build a list of triples
            # as if we had done the parsing.
            result = []
            def append_triple(key, value, index, result=result):
                result.append((key, value, index))
            command = append_triple
        try:
            if not isinstance(command, str):
                func_name = command = self._register(command)
            args += ["-command", command]
            for key in kw:
                if kw[key]: args.append("-" + key)
            args.append(index1)
            if index2:
                args.append(index2)
            self.tk.call(self._w, "dump", *args)
            return result
        finally:
            # Drop the temporary Tcl callback registered above, if any,
            # even when the dump call raised.
            if func_name:
                self.deletecommand(func_name)
## new in tk8.4
    def edit(self, *args):
        """Internal method
        This method controls the undo mechanism and
        the modified flag. The exact behavior of the
        command depends on the option argument that
        follows the edit argument. The following forms
        of the command are currently supported:
        edit_modified, edit_redo, edit_reset, edit_separator
        and edit_undo
        """
        # Dispatches to Tk's 'edit modified|redo|reset|separator|undo'.
        return self.tk.call(self._w, 'edit', *args)
    def edit_modified(self, arg=None):
        """Get or Set the modified flag
        If arg is not specified, returns the modified
        flag of the widget. The insert, delete, edit undo and
        edit redo commands or the user can set or clear the
        modified flag. If arg is specified, sets the
        modified flag of the widget to arg.
        """
        return self.edit("modified", arg)
    def edit_redo(self):
        """Redo the last undone edit.
        When the undo option is true, reapplies the last
        undone edits provided no other edits were done since
        then. Generates an error when the redo stack is empty.
        Does nothing when the undo option is false.
        """
        return self.edit("redo")
    def edit_reset(self):
        """Clear the undo and redo stacks.
        """
        return self.edit("reset")
    def edit_separator(self):
        """Insert a separator (boundary) on the undo stack.
        Does nothing when the undo option is false.
        """
        return self.edit("separator")
    def edit_undo(self):
        """Undo the last edit action
        If the undo option is true. An edit action is defined
        as all the insert and delete commands that are recorded
        on the undo stack in between two separators. Generates
        an error when the undo stack is empty. Does nothing
        when the undo option is false.
        """
        return self.edit("undo")
    def get(self, index1, index2=None):
        """Return the text from INDEX1 to INDEX2 (not included)."""
        # A None INDEX2 is passed straight through to Tk.
        return self.tk.call(self._w, 'get', index1, index2)
# (Image commands are new in 8.0)
def image_cget(self, index, option):
    """Return the value of OPTION of an embedded image at INDEX."""
    # Normalize the option name: ensure a leading '-' and drop a
    # trailing '_' (used on the Python side to dodge keywords).
    if option[:1] != "-":
        option = "-" + option
    if option[-1:] == "_":
        option = option[:-1]
    return self.tk.call(self._w, "image", "cget", index, option)

def image_configure(self, index, cnf=None, **kw):
    """Configure an embedded image at INDEX."""
    return self._configure(('image', 'configure', index), cnf, kw)

def image_create(self, index, cnf={}, **kw):
    """Create an embedded image at INDEX."""
    return self.tk.call(
        self._w, "image", "create", index,
        *self._options(cnf, kw))

def image_names(self):
    """Return all names of embedded images in this widget."""
    return self.tk.call(self._w, "image", "names")
def index(self, index):
    """Return the index in the form line.char for INDEX."""
    return str(self.tk.call(self._w, 'index', index))

def insert(self, index, chars, *args):
    """Insert CHARS before the characters at INDEX. An additional
    tag can be given in ARGS. Additional CHARS and tags can follow in ARGS."""
    self.tk.call((self._w, 'insert', index, chars) + args)
def mark_gravity(self, markName, direction=None):
    """Change the gravity of a mark MARKNAME to DIRECTION (LEFT or RIGHT).
    Return the current value if None is given for DIRECTION."""
    return self.tk.call(
        (self._w, 'mark', 'gravity', markName, direction))

def mark_names(self):
    """Return all mark names."""
    return self.tk.splitlist(self.tk.call(
        self._w, 'mark', 'names'))

def mark_set(self, markName, index):
    """Set mark MARKNAME before the character at INDEX."""
    self.tk.call(self._w, 'mark', 'set', markName, index)

def mark_unset(self, *markNames):
    """Delete all marks in MARKNAMES."""
    self.tk.call((self._w, 'mark', 'unset') + markNames)

def mark_next(self, index):
    """Return the name of the next mark after INDEX."""
    # Tcl returns '' when there is no following mark; map that to None.
    return self.tk.call(self._w, 'mark', 'next', index) or None

def mark_previous(self, index):
    """Return the name of the previous mark before INDEX."""
    # Tcl returns '' when there is no preceding mark; map that to None.
    return self.tk.call(self._w, 'mark', 'previous', index) or None
def scan_mark(self, x, y):
    """Remember the current X, Y coordinates."""
    self.tk.call(self._w, 'scan', 'mark', x, y)

def scan_dragto(self, x, y):
    """Adjust the view of the text to 10 times the
    difference between X and Y and the coordinates given in
    scan_mark."""
    self.tk.call(self._w, 'scan', 'dragto', x, y)
def search(self, pattern, index, stopindex=None,
           forwards=None, backwards=None, exact=None,
           regexp=None, nocase=None, count=None, elide=None):
    """Search PATTERN from INDEX up to STOPINDEX.

    Return the index of the first character of a match or an
    empty string.
    """
    # Translate every truthy keyword flag into the corresponding Tcl
    # switch, preserving the switch order the 'search' subcommand expects.
    args = [self._w, 'search']
    for enabled, switch in ((forwards, '-forwards'),
                            (backwards, '-backwards'),
                            (exact, '-exact'),
                            (regexp, '-regexp'),
                            (nocase, '-nocase'),
                            (elide, '-elide')):
        if enabled:
            args.append(switch)
    if count:
        args.append('-count')
        args.append(count)
    # A pattern starting with '-' would otherwise be parsed as a switch;
    # '--' terminates switch processing so the pattern is taken literally.
    if pattern and pattern[0] == '-':
        args.append('--')
    args.append(pattern)
    args.append(index)
    if stopindex:
        args.append(stopindex)
    return str(self.tk.call(tuple(args)))
def see(self, index):
    """Scroll such that the character at INDEX is visible."""
    self.tk.call(self._w, 'see', index)
def tag_add(self, tagName, index1, *args):
    """Add tag TAGNAME to all characters between INDEX1 and index2 in ARGS.
    Additional pairs of indices may follow in ARGS."""
    self.tk.call(
        (self._w, 'tag', 'add', tagName, index1) + args)

def tag_unbind(self, tagName, sequence, funcid=None):
    """Unbind for all characters with TAGNAME for event SEQUENCE the
    function identified with FUNCID."""
    # Clear the binding; the registered Tcl command is only deleted
    # when its id is supplied.
    self.tk.call(self._w, 'tag', 'bind', tagName, sequence, '')
    if funcid:
        self.deletecommand(funcid)

def tag_bind(self, tagName, sequence, func, add=None):
    """Bind to all characters with TAGNAME at event SEQUENCE a call to function FUNC.
    An additional boolean parameter ADD specifies whether FUNC will be
    called additionally to the other bound function or whether it will
    replace the previous function. See bind for the return value."""
    return self._bind((self._w, 'tag', 'bind', tagName),
                      sequence, func, add)

def tag_cget(self, tagName, option):
    """Return the value of OPTION for tag TAGNAME."""
    # Normalize the option name: ensure a leading '-' and drop a
    # trailing '_' (used on the Python side to dodge keywords).
    if option[:1] != '-':
        option = '-' + option
    if option[-1:] == '_':
        option = option[:-1]
    return self.tk.call(self._w, 'tag', 'cget', tagName, option)

def tag_configure(self, tagName, cnf=None, **kw):
    """Configure a tag TAGNAME."""
    return self._configure(('tag', 'configure', tagName), cnf, kw)

tag_config = tag_configure  # backwards-compatible alias

def tag_delete(self, *tagNames):
    """Delete all tags in TAGNAMES."""
    self.tk.call((self._w, 'tag', 'delete') + tagNames)

def tag_lower(self, tagName, belowThis=None):
    """Change the priority of tag TAGNAME such that it is lower
    than the priority of BELOWTHIS."""
    self.tk.call(self._w, 'tag', 'lower', tagName, belowThis)

def tag_names(self, index=None):
    """Return a list of all tag names."""
    return self.tk.splitlist(
        self.tk.call(self._w, 'tag', 'names', index))

def tag_nextrange(self, tagName, index1, index2=None):
    """Return a list of start and end index for the first sequence of
    characters between INDEX1 and INDEX2 which all have tag TAGNAME.
    The text is searched forward from INDEX1."""
    return self.tk.splitlist(self.tk.call(
        self._w, 'tag', 'nextrange', tagName, index1, index2))

def tag_prevrange(self, tagName, index1, index2=None):
    """Return a list of start and end index for the first sequence of
    characters between INDEX1 and INDEX2 which all have tag TAGNAME.
    The text is searched backwards from INDEX1."""
    return self.tk.splitlist(self.tk.call(
        self._w, 'tag', 'prevrange', tagName, index1, index2))

def tag_raise(self, tagName, aboveThis=None):
    """Change the priority of tag TAGNAME such that it is higher
    than the priority of ABOVETHIS."""
    self.tk.call(
        self._w, 'tag', 'raise', tagName, aboveThis)

def tag_ranges(self, tagName):
    """Return a list of ranges of text which have tag TAGNAME."""
    return self.tk.splitlist(self.tk.call(
        self._w, 'tag', 'ranges', tagName))

def tag_remove(self, tagName, index1, index2=None):
    """Remove tag TAGNAME from all characters between INDEX1 and INDEX2."""
    self.tk.call(
        self._w, 'tag', 'remove', tagName, index1, index2)
def window_cget(self, index, option):
    """Return the value of OPTION of an embedded window at INDEX."""
    # Normalize the option name (leading '-', no trailing '_').
    if option[:1] != '-':
        option = '-' + option
    if option[-1:] == '_':
        option = option[:-1]
    return self.tk.call(self._w, 'window', 'cget', index, option)

def window_configure(self, index, cnf=None, **kw):
    """Configure an embedded window at INDEX."""
    return self._configure(('window', 'configure', index), cnf, kw)

window_config = window_configure  # backwards-compatible alias

def window_create(self, index, cnf={}, **kw):
    """Create a window at INDEX."""
    self.tk.call(
        (self._w, 'window', 'create', index)
        + self._options(cnf, kw))

def window_names(self):
    """Return all names of embedded windows in this widget."""
    return self.tk.splitlist(
        self.tk.call(self._w, 'window', 'names'))
def yview_pickplace(self, *what):
    """Obsolete function, use see."""
    self.tk.call((self._w, 'yview', '-pickplace') + what)
class _setit:
"""Internal class. It wraps the command in the widget OptionMenu."""
def __init__(self, var, value, callback=None):
self.__value = value
self.__var = var
self.__callback = callback
def __call__(self, *args):
self.__var.set(self.__value)
if self.__callback:
self.__callback(self.__value, *args)
class OptionMenu(Menubutton):
    """OptionMenu which allows the user to select a value from a menu."""

    def __init__(self, master, variable, value, *values, **kwargs):
        """Construct an optionmenu widget with the parent MASTER, with
        the resource textvariable set to VARIABLE, the initially selected
        value VALUE, the other menu values VALUES and an additional
        keyword argument command."""
        kw = {"borderwidth": 2, "textvariable": variable,
              "indicatoron": 1, "relief": RAISED, "anchor": "c",
              "highlightthickness": 2}
        Widget.__init__(self, master, "menubutton", kw)
        self.widgetName = 'tk_optionMenu'
        # The menu is created as a child named "menu" so the widget
        # follows Tk's tk_optionMenu naming convention.
        menu = self.__menu = Menu(self, name="menu", tearoff=0)
        self.menuname = menu._w
        # 'command' is the only supported keyword
        callback = kwargs.get('command')
        if 'command' in kwargs:
            del kwargs['command']
        if kwargs:
            raise TclError, 'unknown option -'+kwargs.keys()[0]
        menu.add_command(label=value,
                 command=_setit(variable, value, callback))
        for v in values:
            menu.add_command(label=v,
                     command=_setit(variable, v, callback))
        self["menu"] = menu

    def __getitem__(self, name):
        # Expose the private menu through widget["menu"].
        if name == 'menu':
            return self.__menu
        return Widget.__getitem__(self, name)

    def destroy(self):
        """Destroy this widget and the associated menu."""
        Menubutton.destroy(self)
        # Drop the reference to the menu child to break the cycle.
        self.__menu = None
class Image:
    """Base class for images."""

    # Counter used to generate unique default image names.
    _last_id = 0

    def __init__(self, imgtype, name=None, cnf={}, master=None, **kw):
        self.name = None
        if not master:
            master = _default_root
            if not master:
                raise RuntimeError, 'Too early to create image'
        self.tk = master.tk
        if not name:
            Image._last_id += 1
            name = "pyimage%r" % (Image._last_id,) # tk itself would use image<x>
            # The following is needed for systems where id(x)
            # can return a negative number, such as Linux/m68k:
            if name[0] == '-': name = '_' + name[1:]
        if kw and cnf: cnf = _cnfmerge((cnf, kw))
        elif kw: cnf = kw
        options = ()
        for k, v in cnf.items():
            if hasattr(v, '__call__'):
                # Register Python callables as Tcl commands.
                v = self._register(v)
            options = options + ('-'+k, v)
        self.tk.call(('image', 'create', imgtype, name,) + options)
        self.name = name

    def __str__(self): return self.name

    def __del__(self):
        if self.name:
            try:
                self.tk.call('image', 'delete', self.name)
            except TclError:
                # May happen if the root was destroyed
                pass

    def __setitem__(self, key, value):
        self.tk.call(self.name, 'configure', '-'+key, value)

    def __getitem__(self, key):
        return self.tk.call(self.name, 'configure', '-'+key)

    def configure(self, **kw):
        """Configure the image."""
        res = ()
        for k, v in _cnfmerge(kw).items():
            if v is not None:
                # Strip trailing '_' (keyword-avoidance convention) and
                # register callables as Tcl commands.
                if k[-1] == '_': k = k[:-1]
                if hasattr(v, '__call__'):
                    v = self._register(v)
                res = res + ('-'+k, v)
        self.tk.call((self.name, 'config') + res)

    config = configure

    def height(self):
        """Return the height of the image."""
        return getint(
            self.tk.call('image', 'height', self.name))

    def type(self):
        """Return the type of the image, e.g. "photo" or "bitmap"."""
        return self.tk.call('image', 'type', self.name)

    def width(self):
        """Return the width of the image."""
        return getint(
            self.tk.call('image', 'width', self.name))
class PhotoImage(Image):
    """Widget which can display colored images in GIF, PPM/PGM format."""

    def __init__(self, name=None, cnf={}, master=None, **kw):
        """Create an image with NAME.

        Valid resource names: data, format, file, gamma, height, palette,
        width."""
        Image.__init__(self, 'photo', name, cnf, master, **kw)

    def blank(self):
        """Display a transparent image."""
        self.tk.call(self.name, 'blank')

    def cget(self, option):
        """Return the value of OPTION."""
        return self.tk.call(self.name, 'cget', '-' + option)

    # XXX config
    def __getitem__(self, key):
        return self.tk.call(self.name, 'cget', '-' + key)

    # XXX copy -from, -to, ...?
    def copy(self):
        """Return a new PhotoImage with the same image as this widget."""
        clone = PhotoImage()
        self.tk.call(clone, 'copy', self.name)
        return clone

    def zoom(self, x, y=''):
        """Return a new PhotoImage with the same image as this widget
        but zoom it with X and Y.  If Y is omitted it defaults to X."""
        if y == '':
            y = x
        clone = PhotoImage()
        self.tk.call(clone, 'copy', self.name, '-zoom', x, y)
        return clone

    def subsample(self, x, y=''):
        """Return a new PhotoImage based on the same image as this widget
        but use only every Xth or Yth pixel.  If Y is omitted it defaults
        to X."""
        if y == '':
            y = x
        clone = PhotoImage()
        self.tk.call(clone, 'copy', self.name, '-subsample', x, y)
        return clone

    def get(self, x, y):
        """Return the color (red, green, blue) of the pixel at X,Y."""
        return self.tk.call(self.name, 'get', x, y)

    def put(self, data, to=None):
        """Put row formatted colors to image starting from
        position TO, e.g. image.put("{red green} {blue yellow}", to=(4,6))"""
        args = (self.name, 'put', data)
        if to:
            # Accept both ('-to', x, y) and (x, y) forms.
            if to[0] == '-to':
                to = to[1:]
            args = args + ('-to',) + tuple(to)
        self.tk.call(args)

    # XXX read
    def write(self, filename, format=None, from_coords=None):
        """Write image to file FILENAME in FORMAT starting from
        position FROM_COORDS."""
        args = (self.name, 'write', filename)
        if format:
            args = args + ('-format', format)
        if from_coords:
            args = args + ('-from',) + tuple(from_coords)
        self.tk.call(args)
class BitmapImage(Image):
    """Widget which can display a bitmap."""

    def __init__(self, name=None, cnf={}, master=None, **kw):
        """Create a bitmap with NAME.

        Valid resource names: background, data, file, foreground, maskdata, maskfile."""
        Image.__init__(self, 'bitmap', name, cnf, master, **kw)
def image_names(): return _default_root.tk.call('image', 'names')  # all images known to the default root
def image_types(): return _default_root.tk.call('image', 'types')  # supported image types (e.g. photo, bitmap)
class Spinbox(Widget, XView):
    """spinbox widget."""

    def __init__(self, master=None, cnf={}, **kw):
        """Construct a spinbox widget with the parent MASTER.

        STANDARD OPTIONS

            activebackground, background, borderwidth,
            cursor, exportselection, font, foreground,
            highlightbackground, highlightcolor,
            highlightthickness, insertbackground,
            insertborderwidth, insertofftime,
            insertontime, insertwidth, justify, relief,
            repeatdelay, repeatinterval,
            selectbackground, selectborderwidth
            selectforeground, takefocus, textvariable
            xscrollcommand.

        WIDGET-SPECIFIC OPTIONS

            buttonbackground, buttoncursor,
            buttondownrelief, buttonuprelief,
            command, disabledbackground,
            disabledforeground, format, from,
            invalidcommand, increment,
            readonlybackground, state, to,
            validate, validatecommand values,
            width, wrap,
        """
        Widget.__init__(self, master, 'spinbox', cnf, kw)

    def bbox(self, index):
        """Return a tuple of X1,Y1,X2,Y2 coordinates for a
        rectangle which encloses the character given by index.

        The first two elements of the list give the x and y
        coordinates of the upper-left corner of the screen
        area covered by the character (in pixels relative
        to the widget) and the last two elements give the
        width and height of the character, in pixels. The
        bounding box may refer to a region outside the
        visible area of the window.
        """
        return self.tk.call(self._w, 'bbox', index)

    def delete(self, first, last=None):
        """Delete one or more elements of the spinbox.

        First is the index of the first character to delete,
        and last is the index of the character just after
        the last one to delete. If last isn't specified it
        defaults to first+1, i.e. a single character is
        deleted. This command returns an empty string.
        """
        return self.tk.call(self._w, 'delete', first, last)

    def get(self):
        """Returns the spinbox's string"""
        return self.tk.call(self._w, 'get')

    def icursor(self, index):
        """Alter the position of the insertion cursor.

        The insertion cursor will be displayed just before
        the character given by index. Returns an empty string
        """
        return self.tk.call(self._w, 'icursor', index)

    def identify(self, x, y):
        """Returns the name of the widget at position x, y

        Return value is one of: none, buttondown, buttonup, entry
        """
        return self.tk.call(self._w, 'identify', x, y)

    def index(self, index):
        """Returns the numerical index corresponding to index
        """
        return self.tk.call(self._w, 'index', index)

    def insert(self, index, s):
        """Insert string s at index

        Returns an empty string.
        """
        return self.tk.call(self._w, 'insert', index, s)

    def invoke(self, element):
        """Causes the specified element to be invoked

        The element could be buttondown or buttonup
        triggering the action associated with it.
        """
        return self.tk.call(self._w, 'invoke', element)

    def scan(self, *args):
        """Internal function."""
        # Shared dispatcher for the scan_* methods below.
        return self._getints(
            self.tk.call((self._w, 'scan') + args)) or ()

    def scan_mark(self, x):
        """Records x and the current view in the spinbox window;

        used in conjunction with later scan dragto commands.
        Typically this command is associated with a mouse button
        press in the widget. It returns an empty string.
        """
        return self.scan("mark", x)

    def scan_dragto(self, x):
        """Compute the difference between the given x argument
        and the x argument to the last scan mark command

        It then adjusts the view left or right by 10 times the
        difference in x-coordinates. This command is typically
        associated with mouse motion events in the widget, to
        produce the effect of dragging the spinbox at high speed
        through the window. The return value is an empty string.
        """
        return self.scan("dragto", x)

    def selection(self, *args):
        """Internal function."""
        # Shared dispatcher for the selection_* methods below.
        return self._getints(
            self.tk.call((self._w, 'selection') + args)) or ()

    def selection_adjust(self, index):
        """Locate the end of the selection nearest to the character
        given by index,

        Then adjust that end of the selection to be at index
        (i.e including but not going beyond index). The other
        end of the selection is made the anchor point for future
        select to commands. If the selection isn't currently in
        the spinbox, then a new selection is created to include
        the characters between index and the most recent selection
        anchor point, inclusive. Returns an empty string.
        """
        return self.selection("adjust", index)

    def selection_clear(self):
        """Clear the selection

        If the selection isn't in this widget then the
        command has no effect. Returns an empty string.
        """
        return self.selection("clear")

    def selection_element(self, element=None):
        """Sets or gets the currently selected element.

        If a spinbutton element is specified, it will be
        displayed depressed
        """
        return self.selection("element", element)
###########################################################################
class LabelFrame(Widget):
    """labelframe widget."""

    def __init__(self, master=None, cnf={}, **kw):
        """Construct a labelframe widget with the parent MASTER.

        STANDARD OPTIONS

            borderwidth, cursor, font, foreground,
            highlightbackground, highlightcolor,
            highlightthickness, padx, pady, relief,
            takefocus, text

        WIDGET-SPECIFIC OPTIONS

            background, class, colormap, container,
            height, labelanchor, labelwidget,
            visual, width
        """
        Widget.__init__(self, master, 'labelframe', cnf, kw)
########################################################################
class PanedWindow(Widget):
    """panedwindow widget."""

    def __init__(self, master=None, cnf={}, **kw):
        """Construct a panedwindow widget with the parent MASTER.

        STANDARD OPTIONS

            background, borderwidth, cursor, height,
            orient, relief, width

        WIDGET-SPECIFIC OPTIONS

            handlepad, handlesize, opaqueresize,
            sashcursor, sashpad, sashrelief,
            sashwidth, showhandle,
        """
        Widget.__init__(self, master, 'panedwindow', cnf, kw)

    def add(self, child, **kw):
        """Add a child widget to the panedwindow in a new pane.

        The child argument is the name of the child widget
        followed by pairs of arguments that specify how to
        manage the windows. The possible options and values
        are the ones accepted by the paneconfigure method.
        """
        self.tk.call((self._w, 'add', child) + self._options(kw))

    def remove(self, child):
        """Remove the pane containing child from the panedwindow

        All geometry management options for child will be forgotten.
        """
        self.tk.call(self._w, 'forget', child)

    forget = remove  # backwards-compatible alias

    def identify(self, x, y):
        """Identify the panedwindow component at point x, y

        If the point is over a sash or a sash handle, the result
        is a two element list containing the index of the sash or
        handle, and a word indicating whether it is over a sash
        or a handle, such as {0 sash} or {2 handle}. If the point
        is over any other part of the panedwindow, the result is
        an empty list.
        """
        return self.tk.call(self._w, 'identify', x, y)

    def proxy(self, *args):
        """Internal function."""
        # Shared dispatcher for the proxy_* methods below.
        return self._getints(
            self.tk.call((self._w, 'proxy') + args)) or ()

    def proxy_coord(self):
        """Return the x and y pair of the most recent proxy location
        """
        return self.proxy("coord")

    def proxy_forget(self):
        """Remove the proxy from the display.
        """
        return self.proxy("forget")

    def proxy_place(self, x, y):
        """Place the proxy at the given x and y coordinates.
        """
        return self.proxy("place", x, y)

    def sash(self, *args):
        """Internal function."""
        # Shared dispatcher for the sash_* methods below.
        return self._getints(
            self.tk.call((self._w, 'sash') + args)) or ()

    def sash_coord(self, index):
        """Return the current x and y pair for the sash given by index.

        Index must be an integer between 0 and 1 less than the
        number of panes in the panedwindow. The coordinates given are
        those of the top left corner of the region containing the sash.
        pathName sash dragto index x y This command computes the
        difference between the given coordinates and the coordinates
        given to the last sash coord command for the given sash. It then
        moves that sash the computed difference. The return value is the
        empty string.
        """
        return self.sash("coord", index)

    def sash_mark(self, index):
        """Records x and y for the sash given by index;

        Used in conjunction with later dragto commands to move the sash.
        """
        return self.sash("mark", index)

    def sash_place(self, index, x, y):
        """Place the sash given by index at the given coordinates
        """
        return self.sash("place", index, x, y)

    def panecget(self, child, option):
        """Query a management option for window.

        Option may be any value allowed by the paneconfigure subcommand
        """
        return self.tk.call(
            (self._w, 'panecget') + (child, '-'+option))

    def paneconfigure(self, tagOrId, cnf=None, **kw):
        """Query or modify the management options for window.

        If no option is specified, returns a list describing all
        of the available options for pathName.  If option is
        specified with no value, then the command returns a list
        describing the one named option (this list will be identical
        to the corresponding sublist of the value returned if no
        option is specified). If one or more option-value pairs are
        specified, then the command modifies the given widget
        option(s) to have the given value(s); in this case the
        command returns an empty string. The following options
        are supported:

        after window
            Insert the window after the window specified. window
            should be the name of a window already managed by pathName.
        before window
            Insert the window before the window specified. window
            should be the name of a window already managed by pathName.
        height size
            Specify a height for the window. The height will be the
            outer dimension of the window including its border, if
            any. If size is an empty string, or if -height is not
            specified, then the height requested internally by the
            window will be used initially; the height may later be
            adjusted by the movement of sashes in the panedwindow.
            Size may be any value accepted by Tk_GetPixels.
        minsize n
            Specifies that the size of the window cannot be made
            less than n. This constraint only affects the size of
            the widget in the paned dimension -- the x dimension
            for horizontal panedwindows, the y dimension for
            vertical panedwindows. May be any value accepted by
            Tk_GetPixels.
        padx n
            Specifies a non-negative value indicating how much
            extra space to leave on each side of the window in
            the X-direction. The value may have any of the forms
            accepted by Tk_GetPixels.
        pady n
            Specifies a non-negative value indicating how much
            extra space to leave on each side of the window in
            the Y-direction. The value may have any of the forms
            accepted by Tk_GetPixels.
        sticky style
            If a window's pane is larger than the requested
            dimensions of the window, this option may be used
            to position (or stretch) the window within its pane.
            Style is a string that contains zero or more of the
            characters n, s, e or w. The string can optionally
            contains spaces or commas, but they are ignored. Each
            letter refers to a side (north, south, east, or west)
            that the window will "stick" to. If both n and s
            (or e and w) are specified, the window will be
            stretched to fill the entire height (or width) of
            its cavity.
        width size
            Specify a width for the window. The width will be
            the outer dimension of the window including its
            border, if any. If size is an empty string, or
            if -width is not specified, then the width requested
            internally by the window will be used initially; the
            width may later be adjusted by the movement of sashes
            in the panedwindow. Size may be any value accepted by
            Tk_GetPixels.
        """
        # No arguments: return a dict describing every pane option.
        if cnf is None and not kw:
            cnf = {}
            for x in self.tk.split(
                self.tk.call(self._w,
                             'paneconfigure', tagOrId)):
                cnf[x[0][1:]] = (x[0][1:],) + x[1:]
            return cnf
        # Single option name: return that option's description tuple.
        if type(cnf) == StringType and not kw:
            x = self.tk.split(self.tk.call(
                self._w, 'paneconfigure', tagOrId, '-'+cnf))
            return (x[0][1:],) + x[1:]
        # Otherwise: apply the given option/value pairs.
        self.tk.call((self._w, 'paneconfigure', tagOrId) +
                     self._options(cnf, kw))

    paneconfig = paneconfigure

    def panes(self):
        """Returns an ordered list of the child panes."""
        return self.tk.call(self._w, 'panes')
######################################################################
# Extensions:
class Studbutton(Button):
    # Non-standard Tk extension widget; wires up the standard button
    # event handlers for the 'studbutton' widget class.
    def __init__(self, master=None, cnf={}, **kw):
        Widget.__init__(self, master, 'studbutton', cnf, kw)
        self.bind('<Any-Enter>', self.tkButtonEnter)
        self.bind('<Any-Leave>', self.tkButtonLeave)
        self.bind('<1>', self.tkButtonDown)
        self.bind('<ButtonRelease-1>', self.tkButtonUp)
class Tributton(Button):
    # Non-standard Tk extension widget; like Studbutton, but also hides
    # its foreground by matching it to the background color.
    def __init__(self, master=None, cnf={}, **kw):
        Widget.__init__(self, master, 'tributton', cnf, kw)
        self.bind('<Any-Enter>', self.tkButtonEnter)
        self.bind('<Any-Leave>', self.tkButtonLeave)
        self.bind('<1>', self.tkButtonDown)
        self.bind('<ButtonRelease-1>', self.tkButtonUp)
        self['fg'] = self['bg']
        self['activebackground'] = self['bg']
######################################################################
# Test:
def _test():
    # Manual smoke test: shows the Tcl/Tk version and two buttons.
    root = Tk()
    text = "This is Tcl/Tk version %s" % TclVersion
    if TclVersion >= 8.1:
        try:
            text = text + unicode("\nThis should be a cedilla: \347",
                                  "iso-8859-1")
        except NameError:
            pass # no unicode support
    label = Label(root, text=text)
    label.pack()
    test = Button(root, text="Click me!",
              command=lambda root=root: root.test.configure(
                  text="[%s]" % root.test['text']))
    test.pack()
    root.test = test
    quit = Button(root, text="QUIT", command=root.destroy)
    quit.pack()
    # The following three commands are needed so the window pops
    # up on top on Windows...
    root.iconify()
    root.update()
    root.deiconify()
    root.mainloop()

if __name__ == '__main__':
    _test()
| apache-2.0 |
synth3tk/the-blue-alliance | controllers/api/api_district_controller.py | 1 | 7325 | import json
import webapp2
from controllers.api.api_base_controller import ApiBaseController
from consts.district_type import DistrictType
from consts.event_type import EventType
from datetime import datetime
from database.event_query import DistrictEventsQuery
from google.appengine.ext import ndb
from database.team_query import DistrictTeamsQuery
from helpers.district_helper import DistrictHelper
from helpers.event_helper import EventHelper
from helpers.model_to_dict import ModelToDict
from models import team
from models.district_team import DistrictTeam
from models.event import Event
from models.event_team import EventTeam
from models.team import Team
class ApiDistrictControllerBase(ApiBaseController):
    """Common plumbing for all /district API endpoints."""

    def _set_district(self, district):
        # Cache both the abbreviation (e.g. 'ne') and its enum value.
        self.district_abbrev = district
        self.district = DistrictType.abbrevs[self.district_abbrev]

    @property
    def _validators(self):
        # Validate the district abbreviation supplied in the URL.
        return [("district_id_validator", self.district_abbrev)]
class ApiDistrictListController(ApiDistrictControllerBase):
    """Lists the districts that were active in a given year."""

    CACHE_KEY_FORMAT = "apiv2_district_list_controller_{}"  # year
    CACHE_VERSION = 3
    CACHE_HEADER_LENGTH = 60 * 60 * 24

    def __init__(self, *args, **kw):
        super(ApiDistrictListController, self).__init__(*args, **kw)
        self.year = int(self.request.route_kwargs["year"] or datetime.now().year)
        self._partial_cache_key = self.CACHE_KEY_FORMAT.format(self.year)

    @property
    def _validators(self):
        """No validators for this endpoint."""
        return []

    def _track_call(self, year=None):
        if year is None:
            year = datetime.now().year
        self._track_call_defer('district/list', year)

    def _render(self, year=None):
        """Return a JSON list of {key, name} dicts, one per district
        championship held in self.year."""
        cmp_event_keys = Event.query(
            Event.year == int(self.year),
            Event.event_type_enum == EventType.DISTRICT_CMP).fetch(None, keys_only=True)
        cmp_events = ndb.get_multi(cmp_event_keys)

        districts = [
            {
                "key": abbrev,
                "name": DistrictType.type_names[DistrictType.abbrevs[abbrev]],
            }
            for abbrev in (DistrictType.type_abbrevs[event.event_district_enum]
                           for event in cmp_events)
        ]
        return json.dumps(districts, ensure_ascii=True)
class ApiDistrictEventsController(ApiDistrictControllerBase):
    """Lists all events belonging to a district in a given year."""

    CACHE_KEY_FORMAT = "apiv2_district_events_controller_{}_{}"  # (district_short, year)
    CACHE_VERSION = 1
    CACHE_HEADER_LENGTH = 60 * 60 * 24

    def __init__(self, *args, **kw):
        super(ApiDistrictEventsController, self).__init__(*args, **kw)
        self.district_abbrev = self.request.route_kwargs["district_abbrev"]
        self.year = int(self.request.route_kwargs["year"] or datetime.now().year)
        self._partial_cache_key = self.CACHE_KEY_FORMAT.format(self.district_abbrev, self.year)

    def _track_call(self, district_abbrev, year=None):
        if year is None:
            year = datetime.now().year
        self._track_call_defer('district/events', '{}{}'.format(year, district_abbrev))

    def _render(self, district_abbrev, year=None):
        self._set_district(district_abbrev)
        # District keys are of the form '<year><abbrev>', e.g. '2014ne'.
        district_key = '{}{}'.format(self.year, self.district_abbrev)
        event_dicts = [ModelToDict.eventConverter(event)
                       for event in DistrictEventsQuery(district_key).fetch()]
        return json.dumps(event_dicts, ensure_ascii=True)
class ApiDistrictRankingsController(ApiDistrictControllerBase):
    """Serves the computed district ranking standings for a district/year."""

    CACHE_KEY_FORMAT = "apiv2_district_rankings_controller_{}_{}"  # (district_short, year)
    CACHE_VERSION = 2
    CACHE_HEADER_LENGTH = 61

    def __init__(self, *args, **kw):
        super(ApiDistrictRankingsController, self).__init__(*args, **kw)
        self.district_abbrev = self.request.route_kwargs["district_abbrev"]
        self.year = int(self.request.route_kwargs["year"] or datetime.now().year)
        self._partial_cache_key = self.CACHE_KEY_FORMAT.format(self.district_abbrev, self.year)

    def _track_call(self, district_abbrev, year=None):
        if year is None:
            year = datetime.now().year
        self._track_call_defer('district/rankings', '{}{}'.format(year, district_abbrev))

    def _render(self, district_abbrev, year=None):
        """Return the district rankings as a JSON list ordered by rank.

        Each entry carries the team's rank, key, per-event point
        breakdowns (with a 'district_cmp' flag), rookie bonus and total.
        """
        self._set_district(district_abbrev)

        # District points were first awarded in 2009.
        if self.year < 2009:
            return json.dumps([], ensure_ascii=True)

        event_keys = Event.query(Event.year == self.year, Event.event_district_enum == self.district).fetch(None, keys_only=True)
        if not event_keys:
            return json.dumps([], ensure_ascii=True)

        # Fetch events and teams asynchronously.  (A redundant synchronous
        # ndb.get_multi(event_keys) used to run here; its result was
        # immediately overwritten below, so it has been removed.)
        event_futures = ndb.get_multi_async(event_keys)
        event_team_keys_future = EventTeam.query(EventTeam.event.IN(event_keys)).fetch_async(None, keys_only=True)
        team_futures = ndb.get_multi_async(set([ndb.Key(Team, et_key.id().split('_')[1]) for et_key in event_team_keys_future.get_result()]))

        events = [event_future.get_result() for event_future in event_futures]
        EventHelper.sort_events(events)
        team_totals = DistrictHelper.calculate_rankings(events, team_futures, self.year)

        rankings = []
        for current_rank, (key, points) in enumerate(team_totals, start=1):
            event_points = {}
            for event, event_point_breakdown in points["event_points"]:
                # `event` is the Event model itself (its key_name is used as
                # the dict key), so there is no need to re-fetch it with
                # Event.get_by_id -- avoids one datastore RPC per entry.
                event_point_breakdown['district_cmp'] = \
                    event.event_type_enum == EventType.DISTRICT_CMP
                event_points[event.key_name] = event_point_breakdown
            rankings.append({
                "rank": current_rank,
                "team_key": key,
                "event_points": event_points,
                # Teams without a rookie bonus default to 0.
                "rookie_bonus": points.get("rookie_bonus", 0),
                "point_total": points["point_total"],
            })
        return json.dumps(rankings)
class ApiDistrictTeamsController(ApiDistrictControllerBase):
    """Lists all teams registered in a district for a given year."""

    CACHE_KEY_FORMAT = "apiv2_district_teams_controller_{}_{}"  # (district_short, year)
    CACHE_VERSION = 2
    CACHE_HEADER_LENGTH = 60 * 60 * 24

    def __init__(self, *args, **kw):
        super(ApiDistrictTeamsController, self).__init__(*args, **kw)
        self.district_abbrev = self.request.route_kwargs["district_abbrev"]
        self.year = int(self.request.route_kwargs["year"] or datetime.now().year)
        self._partial_cache_key = self.CACHE_KEY_FORMAT.format(self.district_abbrev, self.year)

    def _track_call(self, district_abbrev, year=None):
        if year is None:
            year = datetime.now().year
        self._track_call_defer('district/teams', '{}{}'.format(year, district_abbrev))

    def _render(self, district_abbrev, year=None):
        self._set_district(district_abbrev)
        # District keys are of the form '<year><abbrev>', e.g. '2014ne'.
        district_key = '{}{}'.format(self.year, self.district_abbrev)
        team_dicts = [ModelToDict.teamConverter(team)
                      for team in DistrictTeamsQuery(district_key).fetch()]
        return json.dumps(team_dicts, ensure_ascii=True)
| mit |
PostCenter/botlang | botlang/parser/s_expressions.py | 1 | 8935 | import ast as python_ast
from botlang.ast.ast import *
class SExpression(object):
    """
    Abstract base for parsed s-expressions (atoms and trees).

    https://en.wikipedia.org/wiki/S-expression
    """
    OPENING_PARENS = list('([{')
    CLOSING_PARENS = list(')]}')

    def to_ast(self):
        """Translate this s-expression into an AST node (subclass hook)."""
        raise NotImplementedError

    def accept(self, visitor):
        """Visitor-pattern dispatch (subclass hook)."""
        raise NotImplementedError

    def copy(self):
        """Return a deep copy of this s-expression (subclass hook)."""
        raise NotImplementedError

    def is_tree(self):
        # Overridden to return True by Tree.
        return False

    def is_atom(self):
        # Overridden to return True by Atom.
        return False
class Atom(SExpression):
    """A leaf s-expression: a single token plus the source location it was
    read from."""
    def __init__(self, token, source_reference):
        self.code = token
        self.source_reference = source_reference
    def __repr__(self):
        return 'Atom({})'.format(self.code)
    def accept(self, visitor):
        # Visitor-pattern dispatch.
        return visitor.visit_atom(self)
    def copy(self):
        return Atom(self.code, self.source_reference)
    @property
    def token(self):
        # Alias for the raw token text.
        return self.code
    def is_atom(self):
        return True
    def to_ast(self, quoted_parent=False):
        """Convert the token into an AST node, trying literal forms in
        order: boolean, integer, float, string, symbol.  Anything else
        becomes an identifier (or a symbol when inside a quoted parent)."""
        try:
            return self.as_boolean_value()
        except ValueError:
            pass
        try:
            return self.as_integer_value()
        except ValueError:
            pass
        try:
            return self.as_float_value()
        except ValueError:
            pass
        if self.is_string():
            return self.as_string_value()
        if self.is_symbol() or quoted_parent:
            return self.as_symbol_value(quoted_parent)
        return self.as_identifier()
    def is_boolean(self):
        # Scheme-style boolean literals.
        return self.code == '#t' or self.code == '#f'
    def is_integer(self):
        try:
            self.as_integer_value()
        except ValueError:
            return False
        else:
            return True
    def is_float(self):
        try:
            self.as_float_value()
        except ValueError:
            return False
        else:
            return True
    def is_number(self):
        return self.is_integer() or self.is_float()
    def is_identifier(self):
        # An identifier is any token that is none of the literal forms.
        return \
            not self.is_boolean() \
            and not self.is_number() \
            and not self.is_string() \
            and not self.is_symbol()
    def as_boolean_value(self):
        """Return a Val node for '#t'/'#f'; raise ValueError otherwise."""
        if self.code == '#t':
            return Val(True).add_code_reference(self)
        if self.code == '#f':
            return Val(False).add_code_reference(self)
        raise ValueError
    def as_integer_value(self):
        # int() raises ValueError when the token is not an integer literal.
        return Val(int(self.code)).add_code_reference(self)
    def as_float_value(self):
        # float() raises ValueError when the token is not a float literal.
        return Val(float(self.code)).add_code_reference(self)
    def as_quoted(self):
        return self.to_ast(quoted_parent=True)
    def as_string_value(self):
        # Escape raw newlines so literal_eval accepts multi-line strings.
        return Val(
            python_ast.literal_eval(self.code.replace('\n', '\\n'))
        ).add_code_reference(self)
    def as_symbol_value(self, quoted_parent):
        # Inside a quoted parent the whole token is the symbol; otherwise
        # drop the leading quote character.
        symbol = self.token if quoted_parent else self.token[1:]
        return Val(symbol).add_code_reference(self)
    def as_identifier(self):
        return Id(self.token).add_code_reference(self)
    def is_string(self):
        return self.code.startswith('"') and self.code.endswith('"')
    def is_symbol(self):
        return self.code.startswith("'")
class Tree(SExpression):
    """An interior s-expression node: a parenthesized list of child
    s-expressions, possibly quoted."""
    def __init__(self, children, code, source_reference, quoted=False):
        self.children = children
        self.code = code
        self.source_reference = source_reference
        self.quoted = quoted
    def __repr__(self):
        return 'Tree({})'.format(self.children)
    def accept(self, visitor):
        # Visitor-pattern dispatch.
        return visitor.visit_tree(self)
    def copy(self):
        """Deep copy: children are copied recursively."""
        return Tree(
            [child.copy() for child in self.children],
            self.code,
            self.source_reference,
            self.quoted
        )
    def is_tree(self):
        return True
    def as_quoted(self):
        # A quoted tree evaluates to a literal list of its quoted children.
        return ListVal([
            child.as_quoted() for child in self.children
        ]).add_code_reference(self)
    def to_ast(self):
        """Build the AST node for this form by dispatching on the first
        child's token; any unrecognized head is a function application."""
        if self.quoted or len(self.children) == 0:
            return self.as_quoted()
        first = self.children[0].code
        if first == 'if':
            return self.if_node()
        if first == 'cond':
            return self.cond_node()
        if first == 'and':
            return self.and_node()
        if first == 'or':
            return self.or_node()
        if first == 'define':
            return self.define_node()
        if first == 'local':
            return self.local_node()
        if first == 'begin':
            return self.begin_node()
        if first == 'fun' or first == 'function':
            return self.function_node(self.children)
        if first == 'bot-node':
            return self.bot_node()
        if first == 'node-result':
            return self.bot_result_node()
        if first == 'module':
            return self.module_definition_node()
        if first == 'provide':
            return self.module_export_node()
        if first == 'require':
            return self.module_import_node()
        if first == 'define-syntax-rule':
            return self.define_syntax_rule_node()
        return self.application_node()
    def module_definition_node(self):
        # (module <name> <body-expr>...)
        module_body = BodySequence(
            [s_expr.to_ast() for s_expr in self.children[2:]]
        ).add_code_reference(self)
        return ModuleDefinition(
            self.children[1].to_ast(),
            module_body
        ).add_code_reference(self)
    def module_export_node(self):
        # (provide <identifier>...)
        return ModuleFunctionExport(
            [identifier.to_ast() for identifier in self.children[1:]]
        ).add_code_reference(self)
    def module_import_node(self):
        # (require <module-name>)
        return ModuleImport(
            self.children[1].to_ast()
        ).add_code_reference(self)
    def if_node(self):
        # (if <condition> <then-expr> <else-expr>)
        return If(
            self.children[1].to_ast(),
            self.children[2].to_ast(),
            self.children[3].to_ast()
        ).add_code_reference(self)
    def cond_node(self):
        # (cond <clause>...)
        return Cond(
            [child.to_cond_clause_ast_node() for child in self.children[1:]]
        ).add_code_reference(self)
    def to_cond_clause_ast_node(self):
        # A clause is either (else <expr>) or (<predicate> <expr>).
        first = self.children[0].code
        if first == 'else':
            return CondElseClause(
                self.children[1].to_ast()
            ).add_code_reference(self)
        return CondPredicateClause(
            self.children[0].to_ast(),
            self.children[1].to_ast()
        ).add_code_reference(self)
    def and_node(self):
        # (and <expr> <expr>) -- binary, as parsed here.
        return And(
            self.children[1].to_ast(),
            self.children[2].to_ast()
        ).add_code_reference(self)
    def or_node(self):
        # (or <expr> <expr>) -- binary, as parsed here.
        return Or(
            self.children[1].to_ast(),
            self.children[2].to_ast()
        ).add_code_reference(self)
    def define_node(self):
        # (define <name> <expr>)
        return Definition(
            self.children[1].code,
            self.children[2].to_ast()
        ).add_code_reference(self)
    def local_node(self):
        # (local ((<name> <expr>)...) <body-expr>)
        return Local(
            [
                Definition(
                    d.children[0].code,
                    d.children[1].to_ast()
                ).add_code_reference(d)
                for d in self.children[1].children
            ],
            self.children[2].to_ast()
        ).add_code_reference(self)
    def begin_node(self):
        # (begin <expr>...)
        return BodySequence(
            [s_expr.to_ast() for s_expr in self.children[1:]]
        ).add_code_reference(self)
    def function_node(self, children):
        # (fun (<param>...) <body-expr>...)
        function_body = BodySequence(
            [s_expr.to_ast() for s_expr in children[2:]]
        ).add_code_reference(self)
        return Fun(
            [identifier.code for identifier in children[1].children],
            function_body
        ).add_code_reference(self)
    def bot_node(self):
        # (bot-node (<param>...) <body-expr>...)
        bot_node_body = BodySequence(
            [s_expr.to_ast() for s_expr in self.children[2:]]
        ).add_code_reference(self)
        return BotNode(
            [identifier.code for identifier in self.children[1].children],
            bot_node_body
        ).add_code_reference(self)
    def bot_result_node(self):
        # (node-result <e1> <e2> <e3>) -- three-expression result form.
        return BotResult(
            self.children[1].to_ast(),
            self.children[2].to_ast(),
            self.children[3].to_ast()
        ).add_code_reference(self)
    def application_node(self):
        # (<function-expr> <argument-expr>...)
        return App(
            self.children[0].to_ast(),
            [s_expr.to_ast() for s_expr in self.children[1:]]
        ).add_code_reference(self)
    def define_syntax_rule_node(self):
        # (define-syntax-rule (<name> <pattern-arg>...) <template>)
        # NOTE: the template (children[2]) is kept as an s-expression,
        # not converted to AST here.
        pattern = self.children[1].children
        pattern_node = SyntaxPattern(pattern[0], pattern[1:])
        return DefineSyntax(
            pattern_node.add_code_reference(pattern_node),
            self.children[2]
        ).add_code_reference(self)
| mit |
Zhongqilong/kbengine | kbe/src/lib/python/Tools/i18n/makelocalealias.py | 40 | 3124 | #!/usr/bin/env python3
"""
Convert the X11 locale.alias file into a mapping dictionary suitable
for locale.py.
Written by Marc-Andre Lemburg <mal@genix.com>, 2004-12-10.
"""
import locale
import sys
# Location of the alias file
LOCALE_ALIAS = '/usr/share/X11/locale/locale.alias'
def parse(filename):
    """Parse an X11 locale.alias file into a mapping dict.

    Returns a dict mapping a lower-cased, encoding-normalized locale name
    to its alias.  Comment lines, blank lines, one-letter locales (other
    than 'c') and UTF-8 locales are skipped; encodings are normalized by
    stripping '-' and '_'.
    """
    with open(filename, encoding='latin1') as f:
        lines = list(f)
    data = {}
    for line in lines:
        line = line.strip()
        if not line:
            continue
        if line[:1] == '#':
            continue
        # BUG FIX: the local name previously reused (and shadowed) the
        # imported `locale` module name; use `loc` instead.
        loc, alias = line.split()
        # Fix non-standard locale names, e.g. ks_IN@devanagari.UTF-8
        if '@' in alias:
            alias_lang, _, alias_mod = alias.partition('@')
            if '.' in alias_mod:
                alias_mod, _, alias_enc = alias_mod.partition('.')
                alias = alias_lang + '.' + alias_enc + '@' + alias_mod
        # Strip trailing ':'
        if loc[-1] == ':':
            loc = loc[:-1]
        # Lower-case locale
        loc = loc.lower()
        # Ignore one letter locale mappings (except for 'c')
        if len(loc) == 1 and loc != 'c':
            continue
        # Normalize encoding, if given
        if '.' in loc:
            lang, encoding = loc.split('.')[:2]
            encoding = encoding.replace('-', '')
            encoding = encoding.replace('_', '')
            loc = lang + '.' + encoding
            if encoding.lower() == 'utf8':
                # Ignore UTF-8 mappings - this encoding should be
                # available for all locales
                continue
        data[loc] = alias
    return data
def pprint(data):
    """Print *data* as sorted, aligned ``'key': 'value',`` lines suitable
    for pasting into the locale_alias dict literal."""
    for key in sorted(data):
        print(' %-40s%a,' % ('%a:' % key, data[key]))
def print_differences(data, olddata):
    """Report keys of *olddata* that were removed from, or updated in,
    *data*.  Additions are intentionally not reported."""
    for key, old_value in sorted(olddata.items()):
        if key not in data:
            print('# removed %a' % key)
        elif data[key] != old_value:
            print('# updated %a -> %a to %a' % (key, old_value, data[key]))
def optimize(data):
    """Drop entries of *data* that locale.normalize() already resolves
    correctly without them, returning the reduced table.

    Exits the process with status 1 if the resulting table fails check().
    """
    # Temporarily install *data* as the live alias table so that
    # locale.normalize() consults it during the loop below.
    locale_alias = locale.locale_alias
    locale.locale_alias = data.copy()
    for k, v in data.items():
        # Remove the entry; keep it only if normalize() then gets it wrong.
        del locale.locale_alias[k]
        if locale.normalize(k) != v:
            locale.locale_alias[k] = v
    newdata = locale.locale_alias
    errors = check(data)
    # Restore the locale module's original table before returning.
    locale.locale_alias = locale_alias
    if errors:
        sys.exit(1)
    return newdata
def check(data):
    """Verify that every alias in *data* is what locale.normalize()
    produces; print each mismatch to stderr and return the error count."""
    errors = 0
    for alias_name, expected in data.items():
        normalized = locale.normalize(alias_name)
        if normalized != expected:
            print('ERROR: %a -> %a != %a' % (alias_name, normalized, expected),
                  file=sys.stderr)
            errors += 1
    return errors
if __name__ == '__main__':
    # Start from the aliases already shipped with the locale module,
    # overlay the X11 alias file, drop redundant entries, report the
    # differences, then print the table as a Python dict literal ready
    # to paste into Lib/locale.py.
    data = locale.locale_alias.copy()
    data.update(parse(LOCALE_ALIAS))
    data = optimize(data)
    print_differences(data, locale.locale_alias)
    print()
    print('locale_alias = {')
    pprint(data)
    print('}')
| lgpl-3.0 |
cuilishen/cuilishenMissionPlanner | Lib/site-packages/numpy/numarray/alter_code1.py | 102 | 9390 | """
This module converts code written for numarray to run with numpy
Makes the following changes:
* Changes import statements
import numarray.package
--> import numpy.numarray.package as numarray_package
with all numarray.package in code changed to numarray_package
import numarray --> import numpy.numarray as numarray
import numarray.package as <yyy> --> import numpy.numarray.package as <yyy>
from numarray import <xxx> --> from numpy.numarray import <xxx>
from numarray.package import <xxx>
--> from numpy.numarray.package import <xxx>
package can be convolve, image, nd_image, mlab, linear_algebra, ma,
matrix, fft, random_array
* Makes search and replace changes to:
- .imaginary --> .imag
- .flat --> .ravel() (most of the time)
- .byteswapped() --> .byteswap(False)
- .byteswap() --> .byteswap(True)
- .info() --> numarray.info(self)
- .isaligned() --> .flags.aligned
- .isbyteswapped() --> (not .dtype.isnative)
- .typecode() --> .dtype.char
- .iscontiguous() --> .flags.contiguous
- .is_c_array() --> .flags.carray and .dtype.isnative
- .is_fortran_contiguous() --> .flags.fortran
- .is_f_array() --> .dtype.isnative and .flags.farray
- .itemsize() --> .itemsize
- .nelements() --> .size
- self.new(type) --> numarray.newobj(self, type)
- .repeat(r) --> .repeat(r, axis=0)
- .size() --> .size
- self.type() -- numarray.typefrom(self)
- .typecode() --> .dtype.char
- .stddev() --> .std()
- .togglebyteorder() --> numarray.togglebyteorder(self)
- .getshape() --> .shape
- .setshape(obj) --> .shape=obj
- .getflat() --> .ravel()
- .getreal() --> .real
- .setreal() --> .real =
- .getimag() --> .imag
- .setimag() --> .imag =
- .getimaginary() --> .imag
- .setimaginary() --> .imag
"""
__all__ = ['convertfile', 'convertall', 'converttree', 'convertsrc']
import sys
import os
import re
import glob
def changeimports(fstr, name, newname):
    """Rewrite the import statements in *fstr* that reference module *name*
    so they import *newname* instead, aliasing the new module back to the
    old name so existing code keeps working.

    Returns ``(new_source, 0)``; the second element is a historical flag
    that is never set.
    """
    import_plain = 'import %s' % name
    import_as = 'import %s as ' % name
    from_import = 'from %s import ' % name
    fromall = 0
    # Dotted module names get a flattened alias, e.g. a.b -> a_b.
    flat_name = name.replace('.', '_') if '.' in name else name
    # "import foo, name" -> "import foo, newname as name"
    fstr = re.sub(r'(import\s+[^,\n\r]+,\s*)(%s)' % name,
                  '\\1%s as %s' % (newname, name), fstr)
    # "import name as x" -> "import newname as x"
    fstr = fstr.replace(import_as, 'import %s as ' % newname)
    # "import name" -> "import newname as <flat name>"
    fstr = fstr.replace(import_plain, 'import %s as %s' % (newname, flat_name))
    if flat_name != name:
        fstr = fstr.replace(name, flat_name)
    # Rewrite "from name import x" one occurrence at a time, leaving
    # star-imports ("from name import *") untouched.
    old_len = len(from_import)
    new_len = len('from %s import ' % newname)
    pos = 0
    while True:
        hit = fstr.find(from_import, pos)
        if hit < 0:
            break
        pos = hit + old_len
        if fstr[pos] == '*':
            continue
        fstr = '%sfrom %s import %s' % (fstr[:hit], newname, fstr[pos:])
        pos += new_len - old_len
    return fstr, fromall
# Matches ".flat" uses that must be preserved: subscripting (".flat[")
# and assignment (".flat ="); used by replaceattr() below.
flatindex_re = re.compile('([.]flat(\s*?[[=]))')
def addimport(astr):
    """Insert "import numpy.numarray as numarray" on its own line just
    before the first line of *astr* that mentions an import."""
    first_import = astr.find('import')
    line_start = astr.rfind(os.linesep, 0, first_import)
    return "%s%s%s%s" % (astr[:line_start], os.linesep,
                         "import numpy.numarray as numarray",
                         astr[line_start:])
def replaceattr(astr):
    """Rewrite numarray attribute/method spellings to their numpy
    equivalents.  Plain ".flat" becomes ".ravel()", but subscripted or
    assigned uses (".flat[", ".flat =") are kept, since those stay valid."""
    # Order matters: e.g. ".byteswapped()" must run before ".byteswap()".
    renames = (
        (".imaginary", ".imag"),
        (".byteswapped()", ".byteswap(False)"),
        (".byteswap()", ".byteswap(True)"),
        (".isaligned()", ".flags.aligned"),
        (".iscontiguous()", ".flags.contiguous"),
        (".is_fortran_contiguous()", ".flags.fortran"),
        (".itemsize()", ".itemsize"),
        (".size()", ".size"),
        (".nelements()", ".size"),
        (".typecode()", ".dtype.char"),
        (".stddev()", ".std()"),
        (".getshape()", ".shape"),
        (".getflat()", ".ravel()"),
        (".getreal", ".real"),
        (".getimag", ".imag"),
        (".getimaginary", ".imag"),
    )
    for old, new in renames:
        astr = astr.replace(old, new)
    # Mask the valid uses of .flat, rewrite the rest, then unmask.
    masked = flatindex_re.sub(r"@@@@\2", astr)
    masked = masked.replace(".flat", ".ravel()")
    return masked.replace("@@@@", ".flat")
# Patterns for numarray method calls that must become helper-function
# calls or attribute expressions; group 1 is the receiver expression.
info_re = re.compile(r'(\S+)\s*[.]\s*info\s*[(]\s*[)]')
new_re = re.compile(r'(\S+)\s*[.]\s*new\s*[(]\s*(\S+)\s*[)]')
toggle_re = re.compile(r'(\S+)\s*[.]\s*togglebyteorder\s*[(]\s*[)]')
type_re = re.compile(r'(\S+)\s*[.]\s*type\s*[(]\s*[)]')
isbyte_re = re.compile(r'(\S+)\s*[.]\s*isbyteswapped\s*[(]\s*[)]')
iscarr_re = re.compile(r'(\S+)\s*[.]\s*is_c_array\s*[(]\s*[)]')
isfarr_re = re.compile(r'(\S+)\s*[.]\s*is_f_array\s*[(]\s*[)]')
repeat_re = re.compile(r'(\S+)\s*[.]\s*repeat\s*[(]\s*(\S+)\s*[)]')
setshape_re = re.compile(r'(\S+)\s*[.]\s*setshape\s*[(]\s*(\S+)\s*[)]')
setreal_re = re.compile(r'(\S+)\s*[.]\s*setreal\s*[(]\s*(\S+)\s*[)]')
setimag_re = re.compile(r'(\S+)\s*[.]\s*setimag\s*[(]\s*(\S+)\s*[)]')
setimaginary_re = re.compile(r'(\S+)\s*[.]\s*setimaginary\s*[(]\s*(\S+)\s*[)]')

def replaceother(astr):
    """Rewrite numarray method calls with no direct numpy attribute
    equivalent.  The first four rewrites need the numpy.numarray
    compatibility module, so an import is added when any of them fired:

      self.info()            -> numarray.info(self)
      self.new(type)         -> numarray.newobj(self, type)
      self.togglebyteorder() -> numarray.togglebyteorder(self)
      self.type()            -> numarray.typefrom(self)
    """
    astr, n_info = info_re.subn('numarray.info(\\1)', astr)
    astr, n_new = new_re.subn('numarray.newobj(\\1, \\2)', astr)
    astr, n_toggle = toggle_re.subn('numarray.togglebyteorder(\\1)', astr)
    astr, n_type = type_re.subn('numarray.typefrom(\\1)', astr)
    if n_info or n_new or n_toggle or n_type:
        astr = addimport(astr)
    astr = isbyte_re.sub('not \\1.dtype.isnative', astr)
    astr = iscarr_re.sub('\\1.dtype.isnative and \\1.flags.carray', astr)
    astr = isfarr_re.sub('\\1.dtype.isnative and \\1.flags.farray', astr)
    astr = repeat_re.sub('\\1.repeat(\\2, axis=0)', astr)
    astr = setshape_re.sub('\\1.shape = \\2', astr)
    astr = setreal_re.sub('\\1.real = \\2', astr)
    astr = setimag_re.sub('\\1.imag = \\2', astr)
    astr = setimaginary_re.sub('\\1.imag = \\2', astr)
    return astr
import datetime
def fromstr(filestr):
    """Convert numarray-based source text to numpy.numarray-based text.

    Returns ``(new_text, changed)`` where *changed* is 1 when any rewrite
    was applied; converted text gets a dated header comment prepended.
    """
    savestr = filestr[:]
    filestr, fromall = changeimports(filestr, 'numarray', 'numpy.numarray')
    base = 'numarray'
    newbase = 'numpy.numarray'
    # Rewrite imports of every known numarray subpackage as well
    # ('' covers the top-level package a second time, harmlessly).
    for sub in ['', 'convolve', 'image', 'nd_image', 'mlab', 'linear_algebra',
                'ma', 'matrix', 'fft', 'random_array']:
        if sub != '':
            sub = '.'+sub
        filestr, fromall = changeimports(filestr, base+sub, newbase+sub)
    filestr = replaceattr(filestr)
    filestr = replaceother(filestr)
    if savestr != filestr:
        # Stamp converted files so the automatic conversion is traceable.
        name = os.path.split(sys.argv[0])[-1]
        today = datetime.date.today().strftime('%b %d, %Y')
        filestr = '## Automatically adapted for '\
                  'numpy.numarray %s by %s\n\n%s' % (today, name, filestr)
        return filestr, 1
    return filestr, 0
def makenewfile(name, filestr):
    """Write *filestr* to the file *name*, overwriting existing content.

    Uses open() in a context manager instead of the Python-2-only file()
    builtin, so the handle is closed even if the write fails and the
    function also works on Python 3.
    """
    with open(name, 'w') as fid:
        fid.write(filestr)
def convertfile(filename, orig=1):
    """Convert the filename given from using Numarray to using NumPy

    Copies the file to filename.orig (when *orig* is true) and then
    over-writes the file with the updated code; untouched files are
    left as-is.
    """
    fid = open(filename)
    filestr = fid.read()
    fid.close()
    filestr, changed = fromstr(filestr)
    if changed:
        if orig:
            # Keep a backup with the extension replaced by ".orig".
            base, ext = os.path.splitext(filename)
            os.rename(filename, base+".orig")
        else:
            os.remove(filename)
        makenewfile(filename, filestr)
def fromargs(args):
    """Convert the file named by ``args[1]`` (an argv-style list) in place."""
    convertfile(args[1])
def convertall(direc=os.path.curdir, orig=1):
    """Convert all .py files in the given directory from numarray to
    numpy.numarray.

    For each converted file, a backup of <usesnumarray>.py is made as
    <usesnumarray>.py.orig. A new file named <usesnumarray>.py
    is then written with the updated code.
    """
    files = glob.glob(os.path.join(direc,'*.py'))
    for afile in files:
        if afile[-8:] == 'setup.py': continue  # skip build scripts
        convertfile(afile, orig)
# Matches the numarray C header include path that must be redirected.
header_re = re.compile(r'(numarray/libnumarray.h)')
def convertsrc(direc=os.path.curdir, ext=None, orig=1):
    """Replace numarray/libnumarray.h with numpy/libnumarray.h in all files
    in the directory with extension given by list ext (if ext is None, then
    all files are processed)."""
    if ext is None:
        files = glob.glob(os.path.join(direc,'*'))
    else:
        files = []
        for aext in ext:
            files.extend(glob.glob(os.path.join(direc,"*.%s" % aext)))
    for afile in files:
        fid = open(afile)
        fstr = fid.read()
        fid.close()
        fstr, n = header_re.subn(r'numpy/libnumarray.h',fstr)
        if n > 0:
            if orig:
                # Keep a backup with the extension replaced by ".orig".
                base, ext = os.path.splitext(afile)
                os.rename(afile, base+".orig")
            else:
                os.remove(afile)
            makenewfile(afile, fstr)
def _func(arg, dirname, fnames):
    # os.path.walk callback: convert the Python sources and C
    # headers/sources in each visited directory, without .orig backups.
    convertall(dirname, orig=0)
    convertsrc(dirname, ['h','c'], orig=0)
def converttree(direc=os.path.curdir):
    """Convert all .py files and C sources in the tree given

    NOTE: uses os.path.walk, which only exists on Python 2.
    """
    os.path.walk(direc, _func, None)
if __name__ == '__main__':
    # BUG FIX: the previous code called converttree(sys.argv), passing the
    # whole argv *list* where a directory path is expected -- os.path.walk
    # would then fail inside os.listdir().  Use the first command-line
    # argument as the tree root, defaulting to the current directory.
    converttree(sys.argv[1] if len(sys.argv) > 1 else os.path.curdir)
| gpl-3.0 |
CharlesShang/TFFRCNN | lib/roi_data_layer/minibatch.py | 5 | 8725 | # --------------------------------------------------------
# Fast R-CNN
# Copyright (c) 2015 Microsoft
# Licensed under The MIT License [see LICENSE for details]
# Written by Ross Girshick
# --------------------------------------------------------
"""Compute minibatch blobs for training a Fast R-CNN network."""
import numpy as np
import numpy.random as npr
import cv2
import os
# TODO: make fast_rcnn irrelevant
# >>>> obsolete, because it depends on sth outside of this project
from ..fast_rcnn.config import cfg
# <<<< obsolete
from ..utils.blob import prep_im_for_blob, im_list_to_blob
def get_minibatch(roidb, num_classes):
    """Given a roidb, construct a minibatch sampled from it.

    Returns a dict of network input blobs: always 'data' (the image blob);
    plus RPN ground-truth blobs when cfg.TRAIN.HAS_RPN, otherwise sampled
    RoI / label / bbox-target blobs for Fast R-CNN training.
    """
    num_images = len(roidb)
    # Sample random scales to use for each image in this batch
    random_scale_inds = npr.randint(0, high=len(cfg.TRAIN.SCALES),
                                    size=num_images)
    assert(cfg.TRAIN.BATCH_SIZE % num_images == 0), \
        'num_images ({}) must divide BATCH_SIZE ({})'. \
        format(num_images, cfg.TRAIN.BATCH_SIZE)
    # Python 2 integer division; exact by the assert above.
    rois_per_image = cfg.TRAIN.BATCH_SIZE / num_images
    fg_rois_per_image = np.round(cfg.TRAIN.FG_FRACTION * rois_per_image)
    # Get the input image blob, formatted for caffe
    im_blob, im_scales = _get_image_blob(roidb, random_scale_inds)
    blobs = {'data': im_blob}
    if cfg.TRAIN.HAS_RPN:
        assert len(im_scales) == 1, "Single batch only"
        assert len(roidb) == 1, "Single batch only"
        # gt boxes: (x1, y1, x2, y2, cls), scaled into the resized image.
        gt_inds = np.where(roidb[0]['gt_classes'] != 0)[0]
        gt_boxes = np.empty((len(gt_inds), 5), dtype=np.float32)
        gt_boxes[:, 0:4] = roidb[0]['boxes'][gt_inds, :] * im_scales[0]
        gt_boxes[:, 4] = roidb[0]['gt_classes'][gt_inds]
        blobs['gt_boxes'] = gt_boxes
        # Optional per-box "difficult" flags and don't-care regions;
        # default to empty/zero arrays when the roidb lacks them.
        blobs['gt_ishard'] = roidb[0]['gt_ishard'][gt_inds] \
            if 'gt_ishard' in roidb[0] else np.zeros(gt_inds.size, dtype=int)
        # blobs['gt_ishard'] = roidb[0]['gt_ishard'][gt_inds]
        blobs['dontcare_areas'] = roidb[0]['dontcare_areas'] * im_scales[0] \
            if 'dontcare_areas' in roidb[0] else np.zeros([0, 4], dtype=float)
        # im_info row: (im_blob.shape[1], im_blob.shape[2], scale) --
        # presumably (height, width) of the blob; confirm in im_list_to_blob.
        blobs['im_info'] = np.array(
            [[im_blob.shape[1], im_blob.shape[2], im_scales[0]]],
            dtype=np.float32)
        blobs['im_name'] = os.path.basename(roidb[0]['image'])
    else: # not using RPN
        # Now, build the region of interest and label blobs
        rois_blob = np.zeros((0, 5), dtype=np.float32)
        labels_blob = np.zeros((0), dtype=np.float32)
        bbox_targets_blob = np.zeros((0, 4 * num_classes), dtype=np.float32)
        bbox_inside_blob = np.zeros(bbox_targets_blob.shape, dtype=np.float32)
        # all_overlaps = []
        for im_i in xrange(num_images):
            labels, overlaps, im_rois, bbox_targets, bbox_inside_weights \
                = _sample_rois(roidb[im_i], fg_rois_per_image, rois_per_image,
                               num_classes)
            # Add to RoIs blob; column 0 is the image index within the batch.
            rois = _project_im_rois(im_rois, im_scales[im_i])
            batch_ind = im_i * np.ones((rois.shape[0], 1))
            rois_blob_this_image = np.hstack((batch_ind, rois))
            rois_blob = np.vstack((rois_blob, rois_blob_this_image))
            # Add to labels, bbox targets, and bbox loss blobs
            labels_blob = np.hstack((labels_blob, labels))
            bbox_targets_blob = np.vstack((bbox_targets_blob, bbox_targets))
            bbox_inside_blob = np.vstack((bbox_inside_blob, bbox_inside_weights))
            # all_overlaps = np.hstack((all_overlaps, overlaps))
        # For debug visualizations
        # _vis_minibatch(im_blob, rois_blob, labels_blob, all_overlaps)
        blobs['rois'] = rois_blob
        blobs['labels'] = labels_blob
        if cfg.TRAIN.BBOX_REG:
            blobs['bbox_targets'] = bbox_targets_blob
            blobs['bbox_inside_weights'] = bbox_inside_blob
            # Outside weights: 1 wherever an inside weight is set.
            blobs['bbox_outside_weights'] = \
                np.array(bbox_inside_blob > 0).astype(np.float32)
    return blobs
def _sample_rois(roidb, fg_rois_per_image, rois_per_image, num_classes):
    """Generate a random sample of RoIs comprising foreground and background
    examples.

    Returns (labels, overlaps, rois, bbox_targets, bbox_inside_weights)
    for the sampled subset; background RoIs get label 0.
    """
    # label = class RoI has max overlap with
    labels = roidb['max_classes']
    overlaps = roidb['max_overlaps']
    rois = roidb['boxes']
    # Select foreground RoIs as those with >= FG_THRESH overlap
    fg_inds = np.where(overlaps >= cfg.TRAIN.FG_THRESH)[0]
    # Guard against the case when an image has fewer than fg_rois_per_image
    # foreground RoIs
    fg_rois_per_this_image = np.minimum(fg_rois_per_image, fg_inds.size)
    # Sample foreground regions without replacement
    if fg_inds.size > 0:
        fg_inds = npr.choice(
            fg_inds, size=fg_rois_per_this_image, replace=False)
    # Select background RoIs as those within [BG_THRESH_LO, BG_THRESH_HI)
    bg_inds = np.where((overlaps < cfg.TRAIN.BG_THRESH_HI) &
                       (overlaps >= cfg.TRAIN.BG_THRESH_LO))[0]
    # Compute number of background RoIs to take from this image (guarding
    # against there being fewer than desired)
    bg_rois_per_this_image = rois_per_image - fg_rois_per_this_image
    bg_rois_per_this_image = np.minimum(bg_rois_per_this_image,
                                        bg_inds.size)
    # Sample background regions without replacement
    if bg_inds.size > 0:
        bg_inds = npr.choice(
            bg_inds, size=bg_rois_per_this_image, replace=False)
    # The indices that we're selecting (both fg and bg)
    keep_inds = np.append(fg_inds, bg_inds)
    # Select sampled values from various arrays:
    labels = labels[keep_inds]
    # Clamp labels for the background RoIs to 0
    labels[fg_rois_per_this_image:] = 0
    overlaps = overlaps[keep_inds]
    rois = rois[keep_inds]
    bbox_targets, bbox_inside_weights = _get_bbox_regression_labels(
        roidb['bbox_targets'][keep_inds, :], num_classes)
    return labels, overlaps, rois, bbox_targets, bbox_inside_weights
def _get_image_blob(roidb, scale_inds):
    """Builds an input blob from the images in the roidb at the specified
    scales.

    Returns (blob, im_scales): the stacked image blob and the per-image
    scale factors that were applied.
    """
    num_images = len(roidb)
    processed_ims = []
    im_scales = []
    for i in xrange(num_images):
        im = cv2.imread(roidb[i]['image'])
        # Mirror images that were marked as horizontally flipped.
        if roidb[i]['flipped']:
            im = im[:, ::-1, :]
        target_size = cfg.TRAIN.SCALES[scale_inds[i]]
        # Mean-subtract and rescale toward target_size, capped by MAX_SIZE
        # -- see utils.blob.prep_im_for_blob for the exact policy.
        im, im_scale = prep_im_for_blob(im, cfg.PIXEL_MEANS, target_size,
                                        cfg.TRAIN.MAX_SIZE)
        im_scales.append(im_scale)
        processed_ims.append(im)
    # Create a blob to hold the input images
    blob = im_list_to_blob(processed_ims)
    return blob, im_scales
def _project_im_rois(im_rois, im_scale_factor):
"""Project image RoIs into the rescaled training image."""
rois = im_rois * im_scale_factor
return rois
def _get_bbox_regression_labels(bbox_target_data, num_classes):
    """Bounding-box regression targets are stored in a compact form in the
    roidb.

    This function expands those targets into the 4-of-4*K representation used
    by the network (i.e. only one class has non-zero targets). The loss weights
    are similarly expanded.

    Returns:
        bbox_targets (ndarray): N x 4K blob of regression targets
        bbox_inside_weights (ndarray): N x 4K blob of loss weights
    """
    clss = bbox_target_data[:, 0]
    bbox_targets = np.zeros((clss.size, 4 * num_classes), dtype=np.float32)
    bbox_inside_weights = np.zeros(bbox_targets.shape, dtype=np.float32)
    inds = np.where(clss > 0)[0]
    for ind in inds:
        # clss comes from a float array; cast to int so the slice bounds
        # below are valid indices (modern numpy rejects float indices).
        cls = int(clss[ind])
        start = 4 * cls
        end = start + 4
        bbox_targets[ind, start:end] = bbox_target_data[ind, 1:]
        bbox_inside_weights[ind, start:end] = cfg.TRAIN.BBOX_INSIDE_WEIGHTS
    return bbox_targets, bbox_inside_weights
def _vis_minibatch(im_blob, rois_blob, labels_blob, overlaps):
    """Visualize a mini-batch for debugging."""
    # Debug-only helper (Python 2 print statement below); draws each
    # sampled RoI as a red rectangle over its de-normalized image.
    import matplotlib.pyplot as plt
    for i in xrange(rois_blob.shape[0]):
        rois = rois_blob[i, :]
        im_ind = rois[0]
        roi = rois[1:]
        # Undo mean subtraction and BGR channel order for display.
        im = im_blob[im_ind, :, :, :].transpose((1, 2, 0)).copy()
        im += cfg.PIXEL_MEANS
        im = im[:, :, (2, 1, 0)]
        im = im.astype(np.uint8)
        cls = labels_blob[i]
        plt.imshow(im)
        print 'class: ', cls, ' overlap: ', overlaps[i]
        plt.gca().add_patch(
            plt.Rectangle((roi[0], roi[1]), roi[2] - roi[0],
                          roi[3] - roi[1], fill=False,
                          edgecolor='r', linewidth=3)
        )
        plt.show()
| mit |
s40523133/2016fallcp_hw | plugin/liquid_tags/img.py | 320 | 2399 | """
Image Tag
---------
This implements a Liquid-style image tag for Pelican,
based on the octopress image tag [1]_
Syntax
------
{% img [class name(s)] [http[s]:/]/path/to/image [width [height]] [title text | "title text" ["alt text"]] %}
Examples
--------
{% img /images/ninja.png Ninja Attack! %}
{% img left half http://site.com/images/ninja.png Ninja Attack! %}
{% img left half http://site.com/images/ninja.png 150 150 "Ninja Attack!" "Ninja in attack posture" %}
Output
------
<img src="/images/ninja.png">
<img class="left half" src="http://site.com/images/ninja.png" title="Ninja Attack!" alt="Ninja Attack!">
<img class="left half" src="http://site.com/images/ninja.png" width="150" height="150" title="Ninja Attack!" alt="Ninja in attack posture">
[1] https://github.com/imathis/octopress/blob/master/plugins/image_tag.rb
"""
import re
from .mdx_liquid_tags import LiquidTags
import six
SYNTAX = '{% img [class name(s)] [http[s]:/]/path/to/image [width [height]] [title text | "title text" ["alt text"]] %}'
# Regular expression to match the entire syntax
ReImg = re.compile("""(?P<class>\S.*\s+)?(?P<src>(?:https?:\/\/|\/|\S+\/)\S+)(?:\s+(?P<width>\d+))?(?:\s+(?P<height>\d+))?(?P<title>\s+.+)?""")
# Regular expression to split the title and alt text
ReTitleAlt = re.compile("""(?:"|')(?P<title>[^"']+)?(?:"|')\s+(?:"|')(?P<alt>[^"']+)?(?:"|')""")
@LiquidTags.register('img')
def img(preprocessor, tag, markup):
    """Liquid-tag handler turning ``{% img ... %}`` markup into an HTML
    <img> element with optional class names, width/height, title and alt.

    Raises ValueError when the markup does not match the expected syntax.
    """
    attrs = None
    # Parse the markup string
    match = ReImg.search(markup)
    if match:
        # Keep only the named groups that actually matched something.
        attrs = dict([(key, val.strip())
                      for (key, val) in six.iteritems(match.groupdict()) if val])
    else:
        raise ValueError('Error processing input. '
                         'Expected syntax: {0}'.format(SYNTAX))
    # Check if alt text is present -- if so, split it from title
    if 'title' in attrs:
        match = ReTitleAlt.search(attrs['title'])
        if match:
            attrs.update(match.groupdict())
        # Default the alt text to the title when no alt was given.
        if not attrs.get('alt'):
            attrs['alt'] = attrs['title']
    # Return the formatted text
    return "<img {0}>".format(' '.join('{0}="{1}"'.format(key, val)
                                       for (key, val) in six.iteritems(attrs)))
#----------------------------------------------------------------------
# This import allows image tag to be a Pelican plugin
from .liquid_tags import register
| agpl-3.0 |
Flamacue/pretix | src/tests/presale/test_organizer_page.py | 2 | 2634 | from datetime import timedelta
import pytest
from django.utils.timezone import now
from pretix.base.models import Event, Organizer
@pytest.fixture
def env():
    # One organizer with a single live event starting 10 days from now.
    o = Organizer.objects.create(name='MRMCD e.V.', slug='mrmcd')
    event = Event.objects.create(
        organizer=o, name='MRMCD2015', slug='2015',
        date_from=now() + timedelta(days=10),
        live=True
    )
    return o, event
@pytest.mark.django_db
def test_organizer_page_shown(env, client):
    # The organizer page renders and shows the organizer's name.
    r = client.get('/mrmcd/')
    assert r.status_code == 200
    assert 'MRMCD e.V.' in r.rendered_content
@pytest.mark.django_db
def test_public_event_on_page(env, client):
    # A public upcoming event is listed on the organizer page.
    env[1].is_public = True
    env[1].save()
    r = client.get('/mrmcd/')
    assert 'MRMCD2015' in r.rendered_content
@pytest.mark.django_db
def test_non_public_event_not_on_page(env, client):
    # Non-public events must not leak onto the organizer page.
    env[1].is_public = False
    env[1].save()
    r = client.get('/mrmcd/')
    assert 'MRMCD2015' not in r.rendered_content
@pytest.mark.django_db
def test_running_event_on_current_page(env, client):
    # An event that is currently running counts as "upcoming" and is shown.
    env[1].date_from = now() - timedelta(days=2)
    env[1].date_to = now() + timedelta(days=2)
    env[1].is_public = True
    env[1].save()
    r = client.get('/mrmcd/')
    assert 'MRMCD2015' in r.rendered_content
@pytest.mark.django_db
def test_past_event_shown_on_archive_page(env, client):
    # The ?old=1 archive view lists events that have already ended.
    env[1].date_from = now() - timedelta(days=2)
    env[1].date_to = now() - timedelta(days=2)
    env[1].is_public = True
    env[1].save()
    r = client.get('/mrmcd/?old=1')
    assert 'MRMCD2015' in r.rendered_content
@pytest.mark.django_db
def test_event_not_shown_on_archive_page(env, client):
    # An upcoming event must not appear in the archive view.
    env[1].is_public = True
    env[1].save()
    r = client.get('/mrmcd/?old=1')
    assert 'MRMCD2015' not in r.rendered_content
@pytest.mark.django_db
def test_past_event_not_shown(env, client):
    # An ended event must not appear on the default (upcoming) page.
    env[1].date_from = now() - timedelta(days=2)
    env[1].date_to = now() - timedelta(days=2)
    env[1].is_public = True
    env[1].save()
    r = client.get('/mrmcd/')
    assert 'MRMCD2015' not in r.rendered_content
@pytest.mark.django_db
def test_empty_message(env, client):
    # With no public events, a friendly empty-state message is shown.
    env[1].is_public = False
    env[1].save()
    r = client.get('/mrmcd/')
    assert 'No public upcoming events found' in r.rendered_content
@pytest.mark.django_db
def test_different_organizer_not_shown(env, client):
    # Events belonging to another organizer must not appear on this page.
    o = Organizer.objects.create(name='CCC e.V.', slug='ccc')
    Event.objects.create(
        organizer=o, name='32C3', slug='32c3',
        date_from=now() + timedelta(days=10), is_public=True
    )
    r = client.get('/mrmcd/')
    assert '32C3' not in r.rendered_content
| apache-2.0 |
SantosDevelopers/sborganicos | venv/lib/python3.5/site-packages/django/core/checks/model_checks.py | 108 | 6273 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import inspect
import types
from itertools import chain
from django.apps import apps
from django.core.checks import Error, Tags, register
@register(Tags.models)
def check_all_models(app_configs=None, **kwargs):
    """Run Model.check() on every installed model (or only on the models of
    the given app configs) and collect the resulting check messages."""
    errors = []
    if app_configs is None:
        models = apps.get_models()
    else:
        models = chain.from_iterable(app_config.get_models() for app_config in app_configs)
    for model in models:
        if not inspect.ismethod(model.check):
            # A model that shadows `check` with something that is not the
            # classmethod would break the checks framework; report it
            # instead of calling it.
            errors.append(
                Error(
                    "The '%s.check()' class method is currently overridden by %r."
                    % (model.__name__, model.check),
                    obj=model,
                    id='models.E020'
                )
            )
        else:
            errors.extend(model.check(**kwargs))
    return errors
def _check_lazy_references(apps, ignore=None):
"""
Ensure all lazy (i.e. string) model references have been resolved.
Lazy references are used in various places throughout Django, primarily in
related fields and model signals. Identify those common cases and provide
more helpful error messages for them.
The ignore parameter is used by StateApps to exclude swappable models from
this check.
"""
pending_models = set(apps._pending_operations) - (ignore or set())
# Short circuit if there aren't any errors.
if not pending_models:
return []
from django.db.models import signals
model_signals = {
signal: name for name, signal in vars(signals).items()
if isinstance(signal, signals.ModelSignal)
}
def extract_operation(obj):
"""
Take a callable found in Apps._pending_operations and identify the
original callable passed to Apps.lazy_model_operation(). If that
callable was a partial, return the inner, non-partial function and
any arguments and keyword arguments that were supplied with it.
obj is a callback defined locally in Apps.lazy_model_operation() and
annotated there with a `func` attribute so as to imitate a partial.
"""
operation, args, keywords = obj, [], {}
while hasattr(operation, 'func'):
# The or clauses are redundant but work around a bug (#25945) in
# functools.partial in Python 3 <= 3.5.1 and Python 2 <= 2.7.11.
args.extend(getattr(operation, 'args', []) or [])
keywords.update(getattr(operation, 'keywords', {}) or {})
operation = operation.func
return operation, args, keywords
def app_model_error(model_key):
try:
apps.get_app_config(model_key[0])
model_error = "app '%s' doesn't provide model '%s'" % model_key
except LookupError:
model_error = "app '%s' isn't installed" % model_key[0]
return model_error
# Here are several functions which return CheckMessage instances for the
# most common usages of lazy operations throughout Django. These functions
# take the model that was being waited on as an (app_label, modelname)
# pair, the original lazy function, and its positional and keyword args as
# determined by extract_operation().
def field_error(model_key, func, args, keywords):
error_msg = (
"The field %(field)s was declared with a lazy reference "
"to '%(model)s', but %(model_error)s."
)
params = {
'model': '.'.join(model_key),
'field': keywords['field'],
'model_error': app_model_error(model_key),
}
return Error(error_msg % params, obj=keywords['field'], id='fields.E307')
def signal_connect_error(model_key, func, args, keywords):
error_msg = (
"%(receiver)s was connected to the '%(signal)s' signal with a "
"lazy reference to the sender '%(model)s', but %(model_error)s."
)
receiver = args[0]
# The receiver is either a function or an instance of class
# defining a `__call__` method.
if isinstance(receiver, types.FunctionType):
description = "The function '%s'" % receiver.__name__
elif isinstance(receiver, types.MethodType):
description = "Bound method '%s.%s'" % (receiver.__self__.__class__.__name__, receiver.__name__)
else:
description = "An instance of class '%s'" % receiver.__class__.__name__
signal_name = model_signals.get(func.__self__, 'unknown')
params = {
'model': '.'.join(model_key),
'receiver': description,
'signal': signal_name,
'model_error': app_model_error(model_key),
}
return Error(error_msg % params, obj=receiver.__module__, id='signals.E001')
def default_error(model_key, func, args, keywords):
error_msg = "%(op)s contains a lazy reference to %(model)s, but %(model_error)s."
params = {
'op': func,
'model': '.'.join(model_key),
'model_error': app_model_error(model_key),
}
return Error(error_msg % params, obj=func, id='models.E022')
# Maps common uses of lazy operations to corresponding error functions
# defined above. If a key maps to None, no error will be produced.
# default_error() will be used for usages that don't appear in this dict.
known_lazy = {
('django.db.models.fields.related', 'resolve_related_class'): field_error,
('django.db.models.fields.related', 'set_managed'): None,
('django.dispatch.dispatcher', 'connect'): signal_connect_error,
}
def build_error(model_key, func, args, keywords):
key = (func.__module__, func.__name__)
error_fn = known_lazy.get(key, default_error)
return error_fn(model_key, func, args, keywords) if error_fn else None
return sorted(filter(None, (
build_error(model_key, *extract_operation(func))
for model_key in pending_models
for func in apps._pending_operations[model_key]
)), key=lambda error: error.msg)
@register(Tags.models)
def check_lazy_references(app_configs=None, **kwargs):
return _check_lazy_references(apps)
| mit |
lowitty/server | libsDarwin/twisted/test/test_formmethod.py | 10 | 3650 | # Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Test cases for formmethod module.
"""
from twisted.trial import unittest
from twisted.python import formmethod
class ArgumentTests(unittest.TestCase):
def argTest(self, argKlass, testPairs, badValues, *args, **kwargs):
arg = argKlass("name", *args, **kwargs)
for val, result in testPairs:
self.assertEqual(arg.coerce(val), result)
for val in badValues:
self.assertRaises(formmethod.InputError, arg.coerce, val)
def test_argument(self):
"""
Test that corce correctly raises NotImplementedError.
"""
arg = formmethod.Argument("name")
self.assertRaises(NotImplementedError, arg.coerce, "")
def testString(self):
self.argTest(formmethod.String, [("a", "a"), (1, "1"), ("", "")], ())
self.argTest(formmethod.String, [("ab", "ab"), ("abc", "abc")], ("2", ""), min=2)
self.argTest(formmethod.String, [("ab", "ab"), ("a", "a")], ("223213", "345x"), max=3)
self.argTest(formmethod.String, [("ab", "ab"), ("add", "add")], ("223213", "x"), min=2, max=3)
def testInt(self):
self.argTest(formmethod.Integer, [("3", 3), ("-2", -2), ("", None)], ("q", "2.3"))
self.argTest(formmethod.Integer, [("3", 3), ("-2", -2)], ("q", "2.3", ""), allowNone=0)
def testFloat(self):
self.argTest(formmethod.Float, [("3", 3.0), ("-2.3", -2.3), ("", None)], ("q", "2.3z"))
self.argTest(formmethod.Float, [("3", 3.0), ("-2.3", -2.3)], ("q", "2.3z", ""),
allowNone=0)
def testChoice(self):
choices = [("a", "apple", "an apple"),
("b", "banana", "ook")]
self.argTest(formmethod.Choice, [("a", "apple"), ("b", "banana")],
("c", 1), choices=choices)
def testFlags(self):
flags = [("a", "apple", "an apple"),
("b", "banana", "ook")]
self.argTest(formmethod.Flags,
[(["a"], ["apple"]), (["b", "a"], ["banana", "apple"])],
(["a", "c"], ["fdfs"]),
flags=flags)
def testBoolean(self):
tests = [("yes", 1), ("", 0), ("False", 0), ("no", 0)]
self.argTest(formmethod.Boolean, tests, ())
def test_file(self):
"""
Test the correctness of the coerce function.
"""
arg = formmethod.File("name", allowNone=0)
self.assertEqual(arg.coerce("something"), "something")
self.assertRaises(formmethod.InputError, arg.coerce, None)
arg2 = formmethod.File("name")
self.assertEqual(arg2.coerce(None), None)
def testDate(self):
goodTests = {
("2002", "12", "21"): (2002, 12, 21),
("1996", "2", "29"): (1996, 2, 29),
("", "", ""): None,
}.items()
badTests = [("2002", "2", "29"), ("xx", "2", "3"),
("2002", "13", "1"), ("1999", "12","32"),
("2002", "1"), ("2002", "2", "3", "4")]
self.argTest(formmethod.Date, goodTests, badTests)
def testRangedInteger(self):
goodTests = {"0": 0, "12": 12, "3": 3}.items()
badTests = ["-1", "x", "13", "-2000", "3.4"]
self.argTest(formmethod.IntegerRange, goodTests, badTests, 0, 12)
def testVerifiedPassword(self):
goodTests = {("foo", "foo"): "foo", ("ab", "ab"): "ab"}.items()
badTests = [("ab", "a"), ("12345", "12345"), ("", ""), ("a", "a"), ("a",), ("a", "a", "a")]
self.argTest(formmethod.VerifiedPassword, goodTests, badTests, min=2, max=4)
| mit |
myyyy/wiki | learn/selenium/test_sensorcmd.py | 1 | 1183 | # -*- coding:utf-8 -*-
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.action_chains import ActionChains
import time
import sys
reload(sys)
sys.setdefaultencoding('utf-8')
driver = webdriver.Chrome()
driver.get("http://bk47.thunics.org:8888/login?next=/plat")
driver.set_window_size(1920, 1080)
loginname = driver.find_element_by_name("loginname")
password = driver.find_element_by_name("pwd")
loginname.clear()
loginname.send_keys("suyafei")
password.clear()
password.send_keys("111111")
password.send_keys(Keys.RETURN)
search_input = driver.find_element_by_css_selector(
"input[id=\"nav-search-input\"]")
search_input.send_keys(u'刘江')
search_input.send_keys(Keys.RETURN)
bridge = driver.find_element_by_partial_link_text('刘江黄河大桥5跨').click()
# actions = ActionChains(driver)
# action.moveToElement(())
# actions.move_by_offset(122, 888)
# time.sleep(5)
# 大桥详细页面
allinfo = driver.find_element_by_partial_link_text('欢迎你').click()
driver.find_element_by_partial_link_text('我的分组').click()
print bridge
assert "No results found." not in driver.page_source
# driver.close()
| mit |
drulang/foxleaf | server/server/settings.py | 1 | 2308 | """
Django settings for server project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = ')zps0569+9k07j8!mre^3gk(a7m0i5a9w6o!d!sz)=g^6_t#m-dh'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'cobalt',
'cobalt.templatetags',
'djorm_pgfulltext',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'server.urls'
WSGI_APPLICATION = 'server.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'cobalt',
'USER': 'dru',
'PASSWORD': 'temp1234',
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
STATIC_URL = '/static/'
AUTH_USER_MODEL = 'cobalt.User'
# Redis
REDIS_HOST = 'localhost'
REDIS_PORT = 6379
REDIS_COUNTS_DB = 0
MIXPANEL_TOKEN = 'eb94856532da9353ec1f1107301477b9'
| gpl-2.0 |
benlaurie/certificate-transparency | python/utilities/submit_chain/submit_chain.py | 26 | 4274 | #!/usr/bin/env python
"""Submits a chain to a list of logs."""
import base64
import hashlib
import sys
import json
import logging
import gflags
from ct.client import log_client
from ct.crypto import cert
from ct.crypto import error
from ct.crypto import pem
from ct.crypto import verify
from ct.proto import client_pb2
from ct.serialization import tls_message
FLAGS = gflags.FLAGS
gflags.DEFINE_string("log_list", None, "File containing the list of logs "
"to submit to (see certificate-transparency.org/known-logs"
" for the format description).")
gflags.DEFINE_string("chain", None, "Certificate chain to submit (PEM).")
gflags.DEFINE_string("log_scheme", "http", "Log scheme (http/https)")
gflags.DEFINE_string("output", None, "output file for sct_list")
gflags.MarkFlagAsRequired("log_list")
gflags.MarkFlagAsRequired("chain")
gflags.MarkFlagAsRequired("output")
def _read_ct_log_list(log_list_file):
"""Parses the log list JSON, returns a log url to key map."""
try:
log_list_json = json.loads(log_list_file)
log_url_to_key = {}
for log_info in log_list_json['logs']:
log_url_to_key[FLAGS.log_scheme + '://' + log_info['url']] = (
base64.decodestring(log_info['key']))
return log_url_to_key
except (OSError, IOError) as io_exception:
raise Exception('Could not read log list file %s: %s' %
(log_list_file, io_exception))
def _submit_to_single_log(log_url, full_chain):
"""Submits the chain to a single log specified by log_url."""
ct_client = log_client.LogClient(log_url, connection_timeout=10)
res = None
try:
res = ct_client.add_chain(full_chain)
except log_client.HTTPError as err:
logging.info('Skipping log %s because of error: %s\n', log_url, err)
return res
def _map_log_id_to_verifier(log_list):
"""Returns a map from log id to verifier object from the log_list."""
log_id_to_verifier = {}
for log_key in log_list.values():
key_info = verify.create_key_info_from_raw_key(log_key)
key_id = hashlib.sha256(log_key).digest()
log_id_to_verifier[key_id] = verify.LogVerifier(key_info)
return log_id_to_verifier
def _submit_to_all_logs(log_list, certs_chain):
"""Submits the chain to all logs in log_list and validates SCTs."""
log_id_to_verifier = _map_log_id_to_verifier(log_list)
chain_der = [c.to_der() for c in certs_chain]
raw_scts_for_cert = []
for log_url in log_list.keys():
res = _submit_to_single_log(log_url, chain_der)
if res:
raw_scts_for_cert.append(res)
else:
logging.info("No SCT from log %s", log_url)
validated_scts = []
for raw_sct in raw_scts_for_cert:
key_id = raw_sct.id.key_id
try:
log_id_to_verifier[key_id].verify_sct(raw_sct, certs_chain)
validated_scts.append(raw_sct)
except error.SignatureError as err:
logging.warning(
'Discarding SCT from log_id %s which does not validate: %s',
key_id.encode('hex'), err)
except KeyError as err:
logging.warning('Could not find CT log validator for log_id %s. '
'The log key for this log is probably misconfigured.',
key_id.encode('hex'))
scts_for_cert = [tls_message.encode(proto_sct)
for proto_sct in validated_scts
if proto_sct]
sct_list = client_pb2.SignedCertificateTimestampList()
sct_list.sct_list.extend(scts_for_cert)
return tls_message.encode(sct_list)
def run():
"""Submits the chain specified in the flags to all logs."""
logging.getLogger().setLevel(logging.INFO)
logging.info("Starting up.")
with open(FLAGS.log_list) as log_list_file:
log_url_to_key = _read_ct_log_list(log_list_file.read())
certs_chain = [c for c in cert.certs_from_pem_file(FLAGS.chain)]
logging.info("Chain is of length %d", len(certs_chain))
sct_list = _submit_to_all_logs(log_url_to_key, certs_chain)
with open(FLAGS.output, 'wb') as sct_list_file:
sct_list_file.write(sct_list)
if __name__ == "__main__":
sys.argv = FLAGS(sys.argv)
run()
| apache-2.0 |
kordano/samba-ldb-mdb | lib/dnspython/dns/rdtypes/IN/SRV.py | 100 | 3395 | # Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import struct
import dns.exception
import dns.rdata
import dns.name
class SRV(dns.rdata.Rdata):
"""SRV record
@ivar priority: the priority
@type priority: int
@ivar weight: the weight
@type weight: int
@ivar port: the port of the service
@type port: int
@ivar target: the target host
@type target: dns.name.Name object
@see: RFC 2782"""
__slots__ = ['priority', 'weight', 'port', 'target']
def __init__(self, rdclass, rdtype, priority, weight, port, target):
super(SRV, self).__init__(rdclass, rdtype)
self.priority = priority
self.weight = weight
self.port = port
self.target = target
def to_text(self, origin=None, relativize=True, **kw):
target = self.target.choose_relativity(origin, relativize)
return '%d %d %d %s' % (self.priority, self.weight, self.port,
target)
def from_text(cls, rdclass, rdtype, tok, origin = None, relativize = True):
priority = tok.get_uint16()
weight = tok.get_uint16()
port = tok.get_uint16()
target = tok.get_name(None)
target = target.choose_relativity(origin, relativize)
tok.get_eol()
return cls(rdclass, rdtype, priority, weight, port, target)
from_text = classmethod(from_text)
def to_wire(self, file, compress = None, origin = None):
three_ints = struct.pack("!HHH", self.priority, self.weight, self.port)
file.write(three_ints)
self.target.to_wire(file, compress, origin)
def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin = None):
(priority, weight, port) = struct.unpack('!HHH',
wire[current : current + 6])
current += 6
rdlen -= 6
(target, cused) = dns.name.from_wire(wire[: current + rdlen],
current)
if cused != rdlen:
raise dns.exception.FormError
if not origin is None:
target = target.relativize(origin)
return cls(rdclass, rdtype, priority, weight, port, target)
from_wire = classmethod(from_wire)
def choose_relativity(self, origin = None, relativize = True):
self.target = self.target.choose_relativity(origin, relativize)
def _cmp(self, other):
sp = struct.pack("!HHH", self.priority, self.weight, self.port)
op = struct.pack("!HHH", other.priority, other.weight, other.port)
v = cmp(sp, op)
if v == 0:
v = cmp(self.target, other.target)
return v
| gpl-3.0 |
googleapis/googleapis-gen | google/cloud/iot/v1/iot-v1-py/google/cloud/iot_v1/types/__init__.py | 1 | 3122 | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from .device_manager import (
BindDeviceToGatewayRequest,
BindDeviceToGatewayResponse,
CreateDeviceRegistryRequest,
CreateDeviceRequest,
DeleteDeviceRegistryRequest,
DeleteDeviceRequest,
GatewayListOptions,
GetDeviceRegistryRequest,
GetDeviceRequest,
ListDeviceConfigVersionsRequest,
ListDeviceConfigVersionsResponse,
ListDeviceRegistriesRequest,
ListDeviceRegistriesResponse,
ListDevicesRequest,
ListDevicesResponse,
ListDeviceStatesRequest,
ListDeviceStatesResponse,
ModifyCloudToDeviceConfigRequest,
SendCommandToDeviceRequest,
SendCommandToDeviceResponse,
UnbindDeviceFromGatewayRequest,
UnbindDeviceFromGatewayResponse,
UpdateDeviceRegistryRequest,
UpdateDeviceRequest,
)
from .resources import (
Device,
DeviceConfig,
DeviceCredential,
DeviceRegistry,
DeviceState,
EventNotificationConfig,
GatewayConfig,
HttpConfig,
MqttConfig,
PublicKeyCertificate,
PublicKeyCredential,
RegistryCredential,
StateNotificationConfig,
X509CertificateDetails,
GatewayAuthMethod,
GatewayType,
HttpState,
LogLevel,
MqttState,
PublicKeyCertificateFormat,
PublicKeyFormat,
)
__all__ = (
'BindDeviceToGatewayRequest',
'BindDeviceToGatewayResponse',
'CreateDeviceRegistryRequest',
'CreateDeviceRequest',
'DeleteDeviceRegistryRequest',
'DeleteDeviceRequest',
'GatewayListOptions',
'GetDeviceRegistryRequest',
'GetDeviceRequest',
'ListDeviceConfigVersionsRequest',
'ListDeviceConfigVersionsResponse',
'ListDeviceRegistriesRequest',
'ListDeviceRegistriesResponse',
'ListDevicesRequest',
'ListDevicesResponse',
'ListDeviceStatesRequest',
'ListDeviceStatesResponse',
'ModifyCloudToDeviceConfigRequest',
'SendCommandToDeviceRequest',
'SendCommandToDeviceResponse',
'UnbindDeviceFromGatewayRequest',
'UnbindDeviceFromGatewayResponse',
'UpdateDeviceRegistryRequest',
'UpdateDeviceRequest',
'Device',
'DeviceConfig',
'DeviceCredential',
'DeviceRegistry',
'DeviceState',
'EventNotificationConfig',
'GatewayConfig',
'HttpConfig',
'MqttConfig',
'PublicKeyCertificate',
'PublicKeyCredential',
'RegistryCredential',
'StateNotificationConfig',
'X509CertificateDetails',
'GatewayAuthMethod',
'GatewayType',
'HttpState',
'LogLevel',
'MqttState',
'PublicKeyCertificateFormat',
'PublicKeyFormat',
)
| apache-2.0 |
BRiAl/BRiAl | sage-brial/brial/simplebb.py | 1 | 2128 | from .PyPolyBoRi import *
from .interred import interred
def buchberger(l):
"calculates a (non minimal) Groebner basis"
l = interred(l)
#for making sure, that every polynomial has a different leading term
#needed for add_generator
if not l:
return []
g = GroebnerStrategy(l[0].ring())
for p in l:
g.add_generator(p)
while g.npairs() > 0:
g.clean_top_by_chain_criterion()
p = g.next_spoly()
p = g.nf(p)
if not p.is_zero():
g.add_generator(p)
return list(g)
def less_than_n_solutions(ideal, n):
l = interred(ideal)
if not l:
return False
g = GroebnerStrategy(l[0].ring())
all_monomials = Monomial([Variable(i) for i
in range(number_of_variables())]).divisors()
monomials_not_in_leading_ideal = all_monomials
for p in l:
g.add_generator(p)
while g.npairs() > 0:
monomials_not_in_leading_ideal = monomials_not_in_leading_ideal \
% g.reduction_strategy.minimal_leading_terms
if len(monomials_not_in_leading_ideal) < n:
return True
g.clean_top_by_chain_criterion()
p = g.next_spoly()
p = g.nf(p)
if not p.is_zero():
g.add_generator(p)
monomials_not_in_leading_ideal = monomials_not_in_leading_ideal \
% g.reduction_strategy.minimal_leading_terms
if len(monomials_not_in_leading_ideal) < n:
return True
else:
return False
def gauss(matrix):
"""Toy Gaussian elimination.
Example: gauss([[0,1],[1,1]]) """
from .gbcore import groebner_basis
def get_num(idx, vars):
if idx in [var.index() for var in vars.variables()]:
return 1
return 0
nrows = len(matrix)
ncols = len(matrix[0])
eqs = [sum([matrix[row][col] * Variable(col) for col in range(ncols)])
for row in range(nrows)]
result = groebner_basis(eqs)
result = result + [BooleConstant(0)] * (nrows - len(result))
return [[get_num(idx, elt.set().vars()) for idx in range(ncols)]
for elt in result]
return result
| gpl-2.0 |
LumPenPacK/NetworkExtractionFromImages | win_build/nefi2_win_amd64_msvc_2015/site-packages/networkx/linalg/tests/test_graphmatrix.py | 40 | 4292 | from nose import SkipTest
import networkx as nx
from networkx.generators.degree_seq import havel_hakimi_graph
class TestGraphMatrix(object):
numpy=1 # nosetests attribute, use nosetests -a 'not numpy' to skip test
@classmethod
def setupClass(cls):
global numpy
global assert_equal
global assert_almost_equal
try:
import numpy
import scipy
from numpy.testing import assert_equal,assert_almost_equal
except ImportError:
raise SkipTest('SciPy not available.')
def setUp(self):
deg=[3,2,2,1,0]
self.G=havel_hakimi_graph(deg)
self.OI=numpy.array([[-1, -1, -1, 0],
[1, 0, 0, -1],
[0, 1, 0, 1],
[0, 0, 1, 0],
[0, 0, 0, 0]])
self.A=numpy.array([[0, 1, 1, 1, 0],
[1, 0, 1, 0, 0],
[1, 1, 0, 0, 0],
[1, 0, 0, 0, 0],
[0, 0, 0, 0, 0]])
self.WG=nx.Graph( (u,v,{'weight':0.5,'other':0.3})
for (u,v) in self.G.edges_iter() )
self.WG.add_node(4)
self.WA=numpy.array([[0 , 0.5, 0.5, 0.5, 0],
[0.5, 0 , 0.5, 0 , 0],
[0.5, 0.5, 0 , 0 , 0],
[0.5, 0 , 0 , 0 , 0],
[0 , 0 , 0 , 0 , 0]])
self.MG=nx.MultiGraph(self.G)
self.MG2=self.MG.copy()
self.MG2.add_edge(0,1)
self.MG2A=numpy.array([[0, 2, 1, 1, 0],
[2, 0, 1, 0, 0],
[1, 1, 0, 0, 0],
[1, 0, 0, 0, 0],
[0, 0, 0, 0, 0]])
self.MGOI=numpy.array([[-1, -1, -1, -1, 0],
[1, 1, 0, 0, -1],
[0, 0, 1, 0, 1],
[0, 0, 0, 1, 0],
[0, 0, 0, 0, 0]])
def test_incidence_matrix(self):
"Conversion to incidence matrix"
assert_equal(nx.incidence_matrix(self.G,oriented=True).todense(),self.OI)
assert_equal(nx.incidence_matrix(self.G).todense(),numpy.abs(self.OI))
assert_equal(nx.incidence_matrix(self.MG,oriented=True).todense(),self.OI)
assert_equal(nx.incidence_matrix(self.MG).todense(),numpy.abs(self.OI))
assert_equal(nx.incidence_matrix(self.MG2,oriented=True).todense(),self.MGOI)
assert_equal(nx.incidence_matrix(self.MG2).todense(),numpy.abs(self.MGOI))
assert_equal(nx.incidence_matrix(self.WG,oriented=True).todense(),self.OI)
assert_equal(nx.incidence_matrix(self.WG).todense(),numpy.abs(self.OI))
assert_equal(nx.incidence_matrix(self.WG,oriented=True,
weight='weight').todense(),0.5*self.OI)
assert_equal(nx.incidence_matrix(self.WG,weight='weight').todense(),
numpy.abs(0.5*self.OI))
assert_equal(nx.incidence_matrix(self.WG,oriented=True,weight='other').todense(),
0.3*self.OI)
WMG=nx.MultiGraph(self.WG)
WMG.add_edge(0,1,attr_dict={'weight':0.5,'other':0.3})
assert_equal(nx.incidence_matrix(WMG,weight='weight').todense(),
numpy.abs(0.5*self.MGOI))
assert_equal(nx.incidence_matrix(WMG,weight='weight',oriented=True).todense(),
0.5*self.MGOI)
assert_equal(nx.incidence_matrix(WMG,weight='other',oriented=True).todense(),
0.3*self.MGOI)
def test_adjacency_matrix(self):
"Conversion to adjacency matrix"
assert_equal(nx.adj_matrix(self.G).todense(),self.A)
assert_equal(nx.adj_matrix(self.MG).todense(),self.A)
assert_equal(nx.adj_matrix(self.MG2).todense(),self.MG2A)
assert_equal(nx.adj_matrix(self.G,nodelist=[0,1]).todense(),self.A[:2,:2])
assert_equal(nx.adj_matrix(self.WG).todense(),self.WA)
assert_equal(nx.adj_matrix(self.WG,weight=None).todense(),self.A)
assert_equal(nx.adj_matrix(self.MG2,weight=None).todense(),self.MG2A)
assert_equal(nx.adj_matrix(self.WG,weight='other').todense(),0.6*self.WA)
| bsd-2-clause |
eoneil1942/voltdb-4.7fix | tools/lbd_lock_test/testrunner.py | 20 | 4334 | #!/usr/bin/env python
from subprocess import Popen, PIPE
import os, sys, datetime, fcntl, time
DURATION_IN_SECONDS = 240
def cmd_readlines(cmd):
"Run a shell command and get the output as a list of lines"
fd = os.popen(cmd)
retval = fd.readlines()
fd.close()
return retval
def killProcess(p):
"Kill all processes for this user named 'LBDLockPatternTest'"
# get all the java processes for this user
javaprocs = cmd_readlines("jps")
# split them into (pid, name) tuples
javaprocs = [line.split() for line in javaprocs]
# throw out any with no name
javaprocs = [t for t in javaprocs if len(t) > 1]
# find pids for processes with the right name
javaprocs = [int(t[0]) for t in javaprocs if t[1].startswith("LBDLockPatternTest")]
# kill all the running procs with the right name explicitly
# (only for this user usally)
for pid in javaprocs:
killcmd = "kill -9 " + str(pid)
os.system(killcmd)
# this seems to do nothing at all on many platforms :(
p.wait()
def blockUntilInput(f):
"Assuming f is non blocking, block until you can read a line from it"
while True:
try: f.readline(); return
except: time.sleep(0.1)
# make stdin non-blocking
fd = sys.stdin.fileno()
fl = fcntl.fcntl(fd, fcntl.F_GETFL)
fcntl.fcntl(fd, fcntl.F_SETFL, fl | os.O_NONBLOCK)
# compile the java code we need (assuming it's in the same folder)
print "Compiling Java Reprodcuer..."
output = os.system("javac LBDLockPatternTest.java")
if output == 0:
print "Success"
else:
print "Failed to compile reproducer."
print "Check the output of \"javac LBDLockPatternTest.java\" from your shell."
sys.exit(-1)
def runTest(i):
"""Start a subprocess that runs the java reproducer. If it hangs, let the user know and
leave the subprocess process running until the user presses a key. If it runs for
DURATION_IN_SECONDS seconds without hanging, kill the subprocess and repeat."""
print "\nBeginning run %d for %d seconds. Press ENTER or RETURN to end the test.\n" % (i, DURATION_IN_SECONDS)
p = Popen("java LBDLockPatternTest", shell=True, bufsize=0, stdout=PIPE)
# make the process's output non-blocking
fd = p.stdout.fileno()
fl = fcntl.fcntl(fd, fcntl.F_GETFL)
fcntl.fcntl(fd, fcntl.F_SETFL, fl | os.O_NONBLOCK)
# get the current time and init some variables
start = datetime.datetime.utcnow()
prevnow = start # the last time a progress time was printed
lastdotprinted = start # the last time a dot was successfully read
# true if there was a break in the dots
possiblyFailed = False
# while the java process isn't dead
while p.poll() == None:
now = datetime.datetime.utcnow()
# print a progress time out every 10 seconds
if possiblyFailed == False:
if (now - prevnow).seconds == 10:
prevnow = now
sys.stdout.write(" %d seconds " % ((now - start).seconds))
# if no dots read in 10 seconds, then we assume the java proc has hung
if (now - lastdotprinted).seconds > 20:
print("\nSorry, this platfrom may have reproduced the issue. If you do not see more dots, it's sadness time.")
possiblyFailed = True
# if all's gone well for DURATION_IN_SECONDS, we kill the proc and return true
if (now - start).seconds > DURATION_IN_SECONDS:
print("\nThis run (%d) did not reproduce the issue." % (i))
killProcess(p)
return True
# do a non-blocking input read to see if the user wants to stop
try:
sys.stdin.readline()
print("\nThis run (%d) interrupted by user." % (i))
killProcess(p)
sys.exit(-1)
except:
pass
# do a non-blocking java-output read to see if a dot has been printed
try:
c = p.stdout.read(1)
sys.stdout.write(c)
lastdotprinted = now
possiblyFailed = False
except:
time.sleep(0.1)
# before the function exits, make sure the process is gone
p.wait()
# repeat until failure or the user presses ENTER or RETURN
i = 1
while runTest(i):
i += 1
| agpl-3.0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.