repo_name stringlengths 5 100 | path stringlengths 4 375 | copies stringclasses 991 values | size stringlengths 4 7 | content stringlengths 666 1M | license stringclasses 15 values |
|---|---|---|---|---|---|
mcr/ietfdb | ietf/doc/feeds.py | 1 | 2790 | # Copyright The IETF Trust 2007, All Rights Reserved
import datetime, re
from django.conf import settings
from django.contrib.syndication.feeds import Feed, FeedDoesNotExist
from django.utils.feedgenerator import Atom1Feed
from django.core.urlresolvers import reverse as urlreverse
from django.template.defaultfilters import truncatewords_html, date as datefilter, linebreaks
from django.utils.html import strip_tags
from django.utils.text import truncate_words
from ietf.doc.models import *
from ietf.doc.utils import augment_events_with_revision
from ietf.idtracker.templatetags.ietf_filters import format_textarea
class DocumentChanges(Feed):
    # Atom feed listing the history/change events of a single IETF document.
    feed_type = Atom1Feed
    def get_object(self, bits):
        # `bits` holds the URL fragments after the feed prefix; exactly one
        # (a document alias name) is expected.
        if len(bits) != 1:
            raise Document.DoesNotExist
        return Document.objects.get(docalias__name=bits[0])
    def title(self, obj):
        return "Changes for %s" % obj.display_name()
    def link(self, obj):
        if obj is None:
            raise FeedDoesNotExist
        # Cache the history URL: the syndication framework calls link()
        # before rendering the items, and item_link() below reuses the value.
        if not hasattr(self, "cached_link"):
            self.cached_link = urlreverse("doc_history", kwargs=dict(name=obj.canonical_name()))
        return self.cached_link
    def subtitle(self, obj):
        return "History of change entries for %s." % obj.display_name()
    def items(self, obj):
        # Newest events first; id breaks ties for events in the same second.
        events = obj.docevent_set.all().order_by("-time","-id")
        augment_events_with_revision(obj, events)
        return events
    def item_title(self, item):
        # Strip markup from the description and cap it at 15 words.
        return u"[%s] %s [rev. %s]" % (item.by, truncate_words(strip_tags(item.desc), 15), item.rev)
    def item_description(self, item):
        return truncatewords_html(format_textarea(item.desc), 20)
    def item_pubdate(self, item):
        return item.time
    def item_author_name(self, item):
        return unicode(item.by)
    def item_link(self, item):
        # NOTE(review): relies on link() having populated self.cached_link
        # already (the feed framework guarantees that call order); calling
        # this directly would raise AttributeError.
        return self.cached_link + "#history-%s" % item.pk
class InLastCall(Feed):
    # Atom feed of the announcements for documents currently in IESG last call.
    title = "Documents in Last Call"
    subtitle = "Announcements for documents in last call."
    feed_type = Atom1Feed
    author_name = 'IESG Secretary'
    link = "/doc/iesg/last-call/"
    def items(self):
        # Drafts in the "lc" (last call) IESG state, annotated with the
        # event that announced the last call; drafts without such an event
        # are dropped, the rest are ordered by expiry date.
        lc_state = State.objects.get(type="draft-iesg", slug="lc")
        candidates = list(Document.objects.filter(type="draft", states=lc_state))
        for doc in candidates:
            doc.lc_event = doc.latest_event(LastCallDocEvent, type="sent_last_call")
        in_last_call = [doc for doc in candidates if doc.lc_event]
        in_last_call.sort(key=lambda doc: doc.lc_event.expires)
        return in_last_call
    def item_title(self, item):
        # "name (start - expiry)" with the expiry carrying the year.
        started = datefilter(item.lc_event.time, "F j")
        expires = datefilter(item.lc_event.expires, "F j, Y")
        return u"%s (%s - %s)" % (item.name, started, expires)
    def item_description(self, item):
        return linebreaks(item.lc_event.desc)
    def item_pubdate(self, item):
        return item.lc_event.time
| bsd-3-clause |
allenlavoie/tensorflow | tensorflow/contrib/learn/python/learn/estimators/kmeans.py | 15 | 11087 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Implementation of k-means clustering on top of `Estimator` API (deprecated).
This module is deprecated. Please use
@{tf.contrib.factorization.KMeansClustering} instead of
@{tf.contrib.learn.KMeansClustering}. It has a similar interface, but uses the
@{tf.estimator.Estimator} API instead of @{tf.contrib.learn.Estimator}.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import time
import numpy as np
from tensorflow.contrib.factorization.python.ops import clustering_ops
from tensorflow.python.training import training_util
from tensorflow.contrib.learn.python.learn.estimators import estimator
from tensorflow.contrib.learn.python.learn.estimators.model_fn import ModelFnOps
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import state_ops
from tensorflow.python.ops.control_flow_ops import with_dependencies
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.summary import summary
from tensorflow.python.training import session_run_hook
from tensorflow.python.training.session_run_hook import SessionRunArgs
from tensorflow.python.util.deprecation import deprecated
_USE_TF_CONTRIB_FACTORIZATION = (
'Please use tf.contrib.factorization.KMeansClustering instead of'
' tf.contrib.learn.KMeansClustering. It has a similar interface, but uses'
' the tf.estimator.Estimator API instead of tf.contrib.learn.Estimator.')
class _LossRelativeChangeHook(session_run_hook.SessionRunHook):
  """Session hook that requests a stop once the relative change in the
  k-means loss between successive steps falls below a tolerance."""

  def __init__(self, tolerance):
    """Initializes _LossRelativeChangeHook.

    Args:
      tolerance: A relative tolerance of change between iterations.
    """
    self._tolerance = tolerance
    self._prev_loss = None

  def begin(self):
    # Look up the loss tensor by its well-known name in the default graph.
    graph = ops.get_default_graph()
    self._loss_tensor = graph.get_tensor_by_name(
        KMeansClustering.LOSS_OP_NAME + ':0')
    assert self._loss_tensor is not None

  def before_run(self, run_context):
    del run_context  # unused
    # Fetch the loss alongside whatever the caller's session.run does.
    return SessionRunArgs(
        fetches={KMeansClustering.LOSS_OP_NAME: self._loss_tensor})

  def after_run(self, run_context, run_values):
    current = run_values.results[KMeansClustering.LOSS_OP_NAME]
    assert current is not None
    previous = self._prev_loss
    if previous is not None:
      # |delta| / (1 + |prev|): the +1 guards against division by ~zero.
      if abs(current - previous) / (1 + abs(previous)) < self._tolerance:
        run_context.request_stop()
    self._prev_loss = current
class _InitializeClustersHook(session_run_hook.SessionRunHook):
  """Runs cluster initialization (on the chief) or waits for it (elsewhere)."""

  def __init__(self, init_op, is_initialized_op, is_chief):
    self._init_op = init_op
    self._is_initialized_op = is_initialized_op
    self._is_chief = is_chief

  def after_create_session(self, session, _):
    # Both ops must live in the graph the session was built from.
    default_graph = ops.get_default_graph()
    assert self._init_op.graph == default_graph
    assert self._is_initialized_op.graph == self._init_op.graph
    # Poll until the clusters report as initialized; only the chief is
    # allowed to run the initialization op, workers just wait.
    while True:
      try:
        if session.run(self._is_initialized_op):
          return
        if self._is_chief:
          session.run(self._init_op)
        else:
          time.sleep(1)
      except RuntimeError as e:
        # Transient session errors are logged and retried.
        logging.info(e)
def _parse_tensor_or_dict(features):
  """Concatenates a dict of feature tensors (in key order) into one tensor.

  Non-dict inputs are passed through unchanged.
  """
  if not isinstance(features, dict):
    return features
  ordered_keys = sorted(features.keys())
  # Colocate the concat with the first feature tensor to avoid copies.
  with ops.colocate_with(features[ordered_keys[0]]):
    return array_ops.concat([features[k] for k in ordered_keys], 1)
def _kmeans_clustering_model_fn(features, labels, mode, params, config):
  """Model function for KMeansClustering estimator."""
  # K-means is unsupervised: the Estimator must never be fed labels.
  assert labels is None, labels
  # Build the full k-means training graph from clustering_ops; all
  # hyperparameters arrive through the Estimator `params` dict.
  (all_scores, model_predictions, losses,
   is_initialized, init_op, training_op) = clustering_ops.KMeans(
       _parse_tensor_or_dict(features),
       params.get('num_clusters'),
       initial_clusters=params.get('training_initial_clusters'),
       distance_metric=params.get('distance_metric'),
       use_mini_batch=params.get('use_mini_batch'),
       mini_batch_steps_per_iteration=params.get(
           'mini_batch_steps_per_iteration'),
       random_seed=params.get('random_seed'),
       kmeans_plus_plus_num_retries=params.get(
           'kmeans_plus_plus_num_retries')).training_graph()
  incr_step = state_ops.assign_add(training_util.get_global_step(), 1)
  # The loss is named so _LossRelativeChangeHook can find it by name.
  loss = math_ops.reduce_sum(losses, name=KMeansClustering.LOSS_OP_NAME)
  summary.scalar('loss/raw', loss)
  # Tie the global-step increment to each training step.
  training_op = with_dependencies([training_op, incr_step], loss)
  predictions = {
      KMeansClustering.ALL_SCORES: all_scores[0],
      KMeansClustering.CLUSTER_IDX: model_predictions[0],
  }
  eval_metric_ops = {KMeansClustering.SCORES: loss}
  # Cluster centers must be initialized before any step can run.
  training_hooks = [_InitializeClustersHook(
      init_op, is_initialized, config.is_chief)]
  relative_tolerance = params.get('relative_tolerance')
  if relative_tolerance is not None:
    training_hooks.append(_LossRelativeChangeHook(relative_tolerance))
  return ModelFnOps(
      mode=mode,
      predictions=predictions,
      eval_metric_ops=eval_metric_ops,
      loss=loss,
      train_op=training_op,
      training_hooks=training_hooks)
# TODO(agarwal,ands): support sharded input.
class KMeansClustering(estimator.Estimator):
  """An Estimator for K-Means clustering.
  THIS CLASS IS DEPRECATED. See
  [contrib/learn/README.md](https://www.tensorflow.org/code/tensorflow/contrib/learn/README.md)
  for general migration instructions.
  """
  # Distance metrics and initialization schemes re-exported from
  # clustering_ops so callers can reference them through this class.
  SQUARED_EUCLIDEAN_DISTANCE = clustering_ops.SQUARED_EUCLIDEAN_DISTANCE
  COSINE_DISTANCE = clustering_ops.COSINE_DISTANCE
  RANDOM_INIT = clustering_ops.RANDOM_INIT
  KMEANS_PLUS_PLUS_INIT = clustering_ops.KMEANS_PLUS_PLUS_INIT
  # Keys used in the predictions / eval-metrics dicts.
  SCORES = 'scores'
  CLUSTER_IDX = 'cluster_idx'
  CLUSTERS = 'clusters'
  ALL_SCORES = 'all_scores'
  # Name given to the loss op so hooks can locate it by name.
  LOSS_OP_NAME = 'kmeans_loss'
  @deprecated(None, _USE_TF_CONTRIB_FACTORIZATION)
  def __init__(self,
               num_clusters,
               model_dir=None,
               initial_clusters=RANDOM_INIT,
               distance_metric=SQUARED_EUCLIDEAN_DISTANCE,
               random_seed=0,
               use_mini_batch=True,
               mini_batch_steps_per_iteration=1,
               kmeans_plus_plus_num_retries=2,
               relative_tolerance=None,
               config=None):
    """Creates a model for running KMeans training and inference.
    Args:
      num_clusters: number of clusters to train.
      model_dir: the directory to save the model results and log files.
      initial_clusters: specifies how to initialize the clusters for training.
        See clustering_ops.kmeans for the possible values.
      distance_metric: the distance metric used for clustering.
        See clustering_ops.kmeans for the possible values.
      random_seed: Python integer. Seed for PRNG used to initialize centers.
      use_mini_batch: If true, use the mini-batch k-means algorithm. Else assume
        full batch.
      mini_batch_steps_per_iteration: number of steps after which the updated
        cluster centers are synced back to a master copy. See clustering_ops.py
        for more details.
      kmeans_plus_plus_num_retries: For each point that is sampled during
        kmeans++ initialization, this parameter specifies the number of
        additional points to draw from the current distribution before selecting
        the best. If a negative value is specified, a heuristic is used to
        sample O(log(num_to_sample)) additional points.
      relative_tolerance: A relative tolerance of change in the loss between
        iterations. Stops learning if the loss changes less than this amount.
        Note that this may not work correctly if use_mini_batch=True.
      config: See Estimator
    """
    # All hyperparameters are passed to _kmeans_clustering_model_fn via the
    # Estimator `params` dict.
    params = {}
    params['num_clusters'] = num_clusters
    params['training_initial_clusters'] = initial_clusters
    params['distance_metric'] = distance_metric
    params['random_seed'] = random_seed
    params['use_mini_batch'] = use_mini_batch
    params['mini_batch_steps_per_iteration'] = mini_batch_steps_per_iteration
    params['kmeans_plus_plus_num_retries'] = kmeans_plus_plus_num_retries
    params['relative_tolerance'] = relative_tolerance
    super(KMeansClustering, self).__init__(
        model_fn=_kmeans_clustering_model_fn,
        params=params,
        model_dir=model_dir,
        config=config)
  @deprecated(None, _USE_TF_CONTRIB_FACTORIZATION)
  def predict_cluster_idx(self, input_fn=None):
    """Yields predicted cluster indices."""
    key = KMeansClustering.CLUSTER_IDX
    results = super(KMeansClustering, self).predict(
        input_fn=input_fn, outputs=[key])
    for result in results:
      yield result[key]
  @deprecated(None, _USE_TF_CONTRIB_FACTORIZATION)
  def score(self, input_fn=None, steps=None):
    """Predict total sum of distances to nearest clusters.
    Note that this function is different from the corresponding one in sklearn
    which returns the negative of the sum of distances.
    Args:
      input_fn: see predict.
      steps: see predict.
    Returns:
      Total sum of distances to nearest clusters.
    """
    return np.sum(
        self.evaluate(
            input_fn=input_fn, steps=steps)[KMeansClustering.SCORES])
  @deprecated(None, _USE_TF_CONTRIB_FACTORIZATION)
  def transform(self, input_fn=None, as_iterable=False):
    """Transforms each element to distances to cluster centers.
    Note that this function is different from the corresponding one in sklearn.
    For SQUARED_EUCLIDEAN distance metric, sklearn transform returns the
    EUCLIDEAN distance, while this function returns the SQUARED_EUCLIDEAN
    distance.
    Args:
      input_fn: see predict.
      as_iterable: see predict
    Returns:
      Array with same number of rows as x, and num_clusters columns, containing
      distances to the cluster centers.
    """
    key = KMeansClustering.ALL_SCORES
    results = super(KMeansClustering, self).predict(
        input_fn=input_fn,
        outputs=[key],
        as_iterable=as_iterable)
    if not as_iterable:
      return results[key]
    else:
      return results
  @deprecated(None, _USE_TF_CONTRIB_FACTORIZATION)
  def clusters(self):
    """Returns cluster centers."""
    # Reads the trained centers straight out of the checkpointed variable.
    return super(KMeansClustering, self).get_variable_value(self.CLUSTERS)
| apache-2.0 |
40223220/2015ca | static/Brython3.1.1-20150328-091302/Lib/http/cookies.py | 735 | 20810 | #!/usr/bin/env python3
#
####
# Copyright 2000 by Timothy O'Malley <timo@alum.mit.edu>
#
# All Rights Reserved
#
# Permission to use, copy, modify, and distribute this software
# and its documentation for any purpose and without fee is hereby
# granted, provided that the above copyright notice appear in all
# copies and that both that copyright notice and this permission
# notice appear in supporting documentation, and that the name of
# Timothy O'Malley not be used in advertising or publicity
# pertaining to distribution of the software without specific, written
# prior permission.
#
# Timothy O'Malley DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS
# SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
# AND FITNESS, IN NO EVENT SHALL Timothy O'Malley BE LIABLE FOR
# ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
# WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
# PERFORMANCE OF THIS SOFTWARE.
#
####
#
# Id: Cookie.py,v 2.29 2000/08/23 05:28:49 timo Exp
# by Timothy O'Malley <timo@alum.mit.edu>
#
# Cookie.py is a Python module for the handling of HTTP
# cookies as a Python dictionary. See RFC 2109 for more
# information on cookies.
#
# The original idea to treat Cookies as a dictionary came from
# Dave Mitchell (davem@magnet.com) in 1995, when he released the
# first version of nscookie.py.
#
####
r"""
Here's a sample session to show how to use this module.
At the moment, this is the only documentation.
The Basics
----------
Importing is easy...
>>> from http import cookies
Most of the time you start by creating a cookie.
>>> C = cookies.SimpleCookie()
Once you've created your Cookie, you can add values just as if it were
a dictionary.
>>> C = cookies.SimpleCookie()
>>> C["fig"] = "newton"
>>> C["sugar"] = "wafer"
>>> C.output()
'Set-Cookie: fig=newton\r\nSet-Cookie: sugar=wafer'
Notice that the printable representation of a Cookie is the
appropriate format for a Set-Cookie: header. This is the
default behavior. You can change the header and printed
attributes by using the .output() function
>>> C = cookies.SimpleCookie()
>>> C["rocky"] = "road"
>>> C["rocky"]["path"] = "/cookie"
>>> print(C.output(header="Cookie:"))
Cookie: rocky=road; Path=/cookie
>>> print(C.output(attrs=[], header="Cookie:"))
Cookie: rocky=road
The load() method of a Cookie extracts cookies from a string. In a
CGI script, you would use this method to extract the cookies from the
HTTP_COOKIE environment variable.
>>> C = cookies.SimpleCookie()
>>> C.load("chips=ahoy; vienna=finger")
>>> C.output()
'Set-Cookie: chips=ahoy\r\nSet-Cookie: vienna=finger'
The load() method is darn-tootin smart about identifying cookies
within a string. Escaped quotation marks, nested semicolons, and other
such trickeries do not confuse it.
>>> C = cookies.SimpleCookie()
>>> C.load('keebler="E=everybody; L=\\"Loves\\"; fudge=\\012;";')
>>> print(C)
Set-Cookie: keebler="E=everybody; L=\"Loves\"; fudge=\012;"
Each element of the Cookie also supports all of the RFC 2109
Cookie attributes. Here's an example which sets the Path
attribute.
>>> C = cookies.SimpleCookie()
>>> C["oreo"] = "doublestuff"
>>> C["oreo"]["path"] = "/"
>>> print(C)
Set-Cookie: oreo=doublestuff; Path=/
Each dictionary element has a 'value' attribute, which gives you
back the value associated with the key.
>>> C = cookies.SimpleCookie()
>>> C["twix"] = "none for you"
>>> C["twix"].value
'none for you'
The SimpleCookie expects that all values should be standard strings.
Just to be sure, SimpleCookie invokes the str() builtin to convert
the value to a string, when the values are set dictionary-style.
>>> C = cookies.SimpleCookie()
>>> C["number"] = 7
>>> C["string"] = "seven"
>>> C["number"].value
'7'
>>> C["string"].value
'seven'
>>> C.output()
'Set-Cookie: number=7\r\nSet-Cookie: string=seven'
Finis.
"""
#
# Import our required modules
#
import re
import string
__all__ = ["CookieError", "BaseCookie", "SimpleCookie"]
_nulljoin = ''.join
_semispacejoin = '; '.join
_spacejoin = ' '.join
#
# Define an exception visible to External modules
#
class CookieError(Exception):
    """Raised for invalid cookie attribute names or illegal key values."""
    pass
# These quoting routines conform to the RFC2109 specification, which in
# turn references the character definitions from RFC2068. They provide
# a two-way quoting algorithm. Any non-text character is translated
# into a 4 character sequence: a forward-slash followed by the
# three-digit octal equivalent of the character. Any '\' or '"' is
# quoted with a preceding '\' slash.
#
# These are taken from RFC2068 and RFC2109.
# _LegalChars is the list of chars which don't require "'s
# _Translator hash-table for fast quoting
#
_LegalChars = string.ascii_letters + string.digits + "!#$%&'*+-.^_`|~:"
_Translator = {
'\000' : '\\000', '\001' : '\\001', '\002' : '\\002',
'\003' : '\\003', '\004' : '\\004', '\005' : '\\005',
'\006' : '\\006', '\007' : '\\007', '\010' : '\\010',
'\011' : '\\011', '\012' : '\\012', '\013' : '\\013',
'\014' : '\\014', '\015' : '\\015', '\016' : '\\016',
'\017' : '\\017', '\020' : '\\020', '\021' : '\\021',
'\022' : '\\022', '\023' : '\\023', '\024' : '\\024',
'\025' : '\\025', '\026' : '\\026', '\027' : '\\027',
'\030' : '\\030', '\031' : '\\031', '\032' : '\\032',
'\033' : '\\033', '\034' : '\\034', '\035' : '\\035',
'\036' : '\\036', '\037' : '\\037',
# Because of the way browsers really handle cookies (as opposed
# to what the RFC says) we also encode , and ;
',' : '\\054', ';' : '\\073',
'"' : '\\"', '\\' : '\\\\',
'\177' : '\\177', '\200' : '\\200', '\201' : '\\201',
'\202' : '\\202', '\203' : '\\203', '\204' : '\\204',
'\205' : '\\205', '\206' : '\\206', '\207' : '\\207',
'\210' : '\\210', '\211' : '\\211', '\212' : '\\212',
'\213' : '\\213', '\214' : '\\214', '\215' : '\\215',
'\216' : '\\216', '\217' : '\\217', '\220' : '\\220',
'\221' : '\\221', '\222' : '\\222', '\223' : '\\223',
'\224' : '\\224', '\225' : '\\225', '\226' : '\\226',
'\227' : '\\227', '\230' : '\\230', '\231' : '\\231',
'\232' : '\\232', '\233' : '\\233', '\234' : '\\234',
'\235' : '\\235', '\236' : '\\236', '\237' : '\\237',
'\240' : '\\240', '\241' : '\\241', '\242' : '\\242',
'\243' : '\\243', '\244' : '\\244', '\245' : '\\245',
'\246' : '\\246', '\247' : '\\247', '\250' : '\\250',
'\251' : '\\251', '\252' : '\\252', '\253' : '\\253',
'\254' : '\\254', '\255' : '\\255', '\256' : '\\256',
'\257' : '\\257', '\260' : '\\260', '\261' : '\\261',
'\262' : '\\262', '\263' : '\\263', '\264' : '\\264',
'\265' : '\\265', '\266' : '\\266', '\267' : '\\267',
'\270' : '\\270', '\271' : '\\271', '\272' : '\\272',
'\273' : '\\273', '\274' : '\\274', '\275' : '\\275',
'\276' : '\\276', '\277' : '\\277', '\300' : '\\300',
'\301' : '\\301', '\302' : '\\302', '\303' : '\\303',
'\304' : '\\304', '\305' : '\\305', '\306' : '\\306',
'\307' : '\\307', '\310' : '\\310', '\311' : '\\311',
'\312' : '\\312', '\313' : '\\313', '\314' : '\\314',
'\315' : '\\315', '\316' : '\\316', '\317' : '\\317',
'\320' : '\\320', '\321' : '\\321', '\322' : '\\322',
'\323' : '\\323', '\324' : '\\324', '\325' : '\\325',
'\326' : '\\326', '\327' : '\\327', '\330' : '\\330',
'\331' : '\\331', '\332' : '\\332', '\333' : '\\333',
'\334' : '\\334', '\335' : '\\335', '\336' : '\\336',
'\337' : '\\337', '\340' : '\\340', '\341' : '\\341',
'\342' : '\\342', '\343' : '\\343', '\344' : '\\344',
'\345' : '\\345', '\346' : '\\346', '\347' : '\\347',
'\350' : '\\350', '\351' : '\\351', '\352' : '\\352',
'\353' : '\\353', '\354' : '\\354', '\355' : '\\355',
'\356' : '\\356', '\357' : '\\357', '\360' : '\\360',
'\361' : '\\361', '\362' : '\\362', '\363' : '\\363',
'\364' : '\\364', '\365' : '\\365', '\366' : '\\366',
'\367' : '\\367', '\370' : '\\370', '\371' : '\\371',
'\372' : '\\372', '\373' : '\\373', '\374' : '\\374',
'\375' : '\\375', '\376' : '\\376', '\377' : '\\377'
}
def _quote(str, LegalChars=_LegalChars):
r"""Quote a string for use in a cookie header.
If the string does not need to be double-quoted, then just return the
string. Otherwise, surround the string in doublequotes and quote
(with a \) special characters.
"""
if all(c in LegalChars for c in str):
return str
else:
return '"' + _nulljoin(_Translator.get(s, s) for s in str) + '"'
_OctalPatt = re.compile(r"\\[0-3][0-7][0-7]")
_QuotePatt = re.compile(r"[\\].")
def _unquote(str):
# If there aren't any doublequotes,
# then there can't be any special characters. See RFC 2109.
if len(str) < 2:
return str
if str[0] != '"' or str[-1] != '"':
return str
# We have to assume that we must decode this string.
# Down to work.
# Remove the "s
str = str[1:-1]
# Check for special sequences. Examples:
# \012 --> \n
# \" --> "
#
i = 0
n = len(str)
res = []
while 0 <= i < n:
o_match = _OctalPatt.search(str, i)
q_match = _QuotePatt.search(str, i)
if not o_match and not q_match: # Neither matched
res.append(str[i:])
break
# else:
j = k = -1
if o_match:
j = o_match.start(0)
if q_match:
k = q_match.start(0)
if q_match and (not o_match or k < j): # QuotePatt matched
res.append(str[i:k])
res.append(str[k+1])
i = k + 2
else: # OctalPatt matched
res.append(str[i:j])
res.append(chr(int(str[j+1:j+4], 8)))
i = j + 4
return _nulljoin(res)
# The _getdate() routine is used to set the expiration time in the cookie's HTTP
# header. By default, _getdate() returns the current time in the appropriate
# "expires" format for a Set-Cookie header. The one optional argument is an
# offset from now, in seconds. For example, an offset of -3600 means "one hour
# ago". The offset may be a floating point number.
#
_weekdayname = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun']
_monthname = [None,
'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']
def _getdate(future=0, weekdayname=_weekdayname, monthname=_monthname):
from time import gmtime, time
now = time()
year, month, day, hh, mm, ss, wd, y, z = gmtime(now + future)
return "%s, %02d %3s %4d %02d:%02d:%02d GMT" % \
(weekdayname[wd], day, monthname[month], year, hh, mm, ss)
class Morsel(dict):
    """A class to hold ONE (key, value) pair.
    In a cookie, each such pair may have several attributes, so this class is
    used to keep the attributes associated with the appropriate key,value pair.
    This class also includes a coded_value attribute, which is used to hold
    the network representation of the value. This is most useful when Python
    objects are pickled for network transit.
    """
    # RFC 2109 lists these attributes as reserved:
    # path comment domain
    # max-age secure version
    #
    # For historical reasons, these attributes are also reserved:
    # expires
    #
    # This is an extension from Microsoft:
    # httponly
    #
    # This dictionary provides a mapping from the lowercase
    # variant on the left to the appropriate traditional
    # formatting on the right.
    _reserved = {
        "expires" : "expires",
        "path" : "Path",
        "comment" : "Comment",
        "domain" : "Domain",
        "max-age" : "Max-Age",
        "secure" : "secure",
        "httponly" : "httponly",
        "version" : "Version",
        }
    # Valueless attributes: rendered as a bare word, never as key=value.
    _flags = {'secure', 'httponly'}
    def __init__(self):
        # Set defaults
        self.key = self.value = self.coded_value = None
        # Set default attributes
        for key in self._reserved:
            dict.__setitem__(self, key, "")
    def __setitem__(self, K, V):
        # Only reserved attribute names may be stored via dict syntax;
        # the cookie key/value themselves go through set().
        K = K.lower()
        if not K in self._reserved:
            raise CookieError("Invalid Attribute %s" % K)
        dict.__setitem__(self, K, V)
    def isReservedKey(self, K):
        return K.lower() in self._reserved
    def set(self, key, val, coded_val, LegalChars=_LegalChars):
        # First we verify that the key isn't a reserved word
        # Second we make sure it only contains legal characters
        if key.lower() in self._reserved:
            raise CookieError("Attempt to set a reserved key: %s" % key)
        if any(c not in LegalChars for c in key):
            raise CookieError("Illegal key value: %s" % key)
        # It's a good key, so save it.
        self.key = key
        self.value = val
        self.coded_value = coded_val
    def output(self, attrs=None, header="Set-Cookie:"):
        # Render as a complete HTTP header line.
        return "%s %s" % (header, self.OutputString(attrs))
    __str__ = output
    def __repr__(self):
        return '<%s: %s=%s>' % (self.__class__.__name__,
                                self.key, repr(self.value))
    def js_output(self, attrs=None):
        # Print javascript
        return """
        <script type="text/javascript">
        <!-- begin hiding
        document.cookie = \"%s\";
        // end hiding -->
        </script>
        """ % (self.OutputString(attrs).replace('"', r'\"'))
    def OutputString(self, attrs=None):
        # Build up our result
        #
        result = []
        append = result.append
        # First, the key=value pair
        append("%s=%s" % (self.key, self.coded_value))
        # Now add any defined attributes
        if attrs is None:
            attrs = self._reserved
        items = sorted(self.items())
        for key, value in items:
            # Attributes left at their "" default are not emitted.
            if value == "":
                continue
            if key not in attrs:
                continue
            # Integer "expires" values are offsets in seconds from now.
            if key == "expires" and isinstance(value, int):
                append("%s=%s" % (self._reserved[key], _getdate(value)))
            elif key == "max-age" and isinstance(value, int):
                append("%s=%d" % (self._reserved[key], value))
            elif key == "secure":
                append(str(self._reserved[key]))
            elif key == "httponly":
                append(str(self._reserved[key]))
            else:
                append("%s=%s" % (self._reserved[key], value))
        # Return the result
        return _semispacejoin(result)
#
# Pattern for finding cookie
#
# This used to be strict parsing based on the RFC2109 and RFC2068
# specifications. I have since discovered that MSIE 3.0x doesn't
# follow the character rules outlined in those specs. As a
# result, the parsing rules here are less strict.
#
_LegalCharsPatt = r"[\w\d!#%&'~_`><@,:/\$\*\+\-\.\^\|\)\(\?\}\{\=]"
_CookiePattern = re.compile(r"""
(?x) # This is a verbose pattern
(?P<key> # Start of group 'key'
""" + _LegalCharsPatt + r"""+? # Any word of at least one letter
) # End of group 'key'
( # Optional group: there may not be a value.
\s*=\s* # Equal Sign
(?P<val> # Start of group 'val'
"(?:[^\\"]|\\.)*" # Any doublequoted string
| # or
\w{3},\s[\w\d\s-]{9,11}\s[\d:]{8}\sGMT # Special case for "expires" attr
| # or
""" + _LegalCharsPatt + r"""* # Any word or empty string
) # End of group 'val'
)? # End of optional value group
\s* # Any number of spaces.
(\s+|;|$) # Ending either at space, semicolon, or EOS.
""", re.ASCII) # May be removed if safe.
# At long last, here is the cookie class. Using this class is almost just like
# using a dictionary. See this module's docstring for example usage.
#
class BaseCookie(dict):
    """A container class for a set of Morsels."""
    def value_decode(self, val):
        """real_value, coded_value = value_decode(STRING)
        Called prior to setting a cookie's value from the network
        representation. The VALUE is the value read from HTTP
        header.
        Override this function to modify the behavior of cookies.
        """
        return val, val
    def value_encode(self, val):
        """real_value, coded_value = value_encode(VALUE)
        Called prior to setting a cookie's value from the dictionary
        representation. The VALUE is the value being assigned.
        Override this function to modify the behavior of cookies.
        """
        strval = str(val)
        return strval, strval
    def __init__(self, input=None):
        if input:
            self.load(input)
    def __set(self, key, real_value, coded_value):
        """Private method for setting a cookie's value"""
        # Reuse an existing Morsel for this key if present.
        M = self.get(key, Morsel())
        M.set(key, real_value, coded_value)
        dict.__setitem__(self, key, M)
    def __setitem__(self, key, value):
        """Dictionary style assignment."""
        rval, cval = self.value_encode(value)
        self.__set(key, rval, cval)
    def output(self, attrs=None, header="Set-Cookie:", sep="\015\012"):
        """Return a string suitable for HTTP."""
        # One header line per morsel, joined by CRLF by default.
        result = []
        items = sorted(self.items())
        for key, value in items:
            result.append(value.output(attrs, header))
        return sep.join(result)
    __str__ = output
    def __repr__(self):
        l = []
        items = sorted(self.items())
        for key, value in items:
            l.append('%s=%s' % (key, repr(value.value)))
        return '<%s: %s>' % (self.__class__.__name__, _spacejoin(l))
    def js_output(self, attrs=None):
        """Return a string suitable for JavaScript."""
        result = []
        items = sorted(self.items())
        for key, value in items:
            result.append(value.js_output(attrs))
        return _nulljoin(result)
    def load(self, rawdata):
        """Load cookies from a string (presumably HTTP_COOKIE) or
        from a dictionary. Loading cookies from a dictionary 'd'
        is equivalent to calling:
        map(Cookie.__setitem__, d.keys(), d.values())
        """
        if isinstance(rawdata, str):
            self.__parse_string(rawdata)
        else:
            # self.update() wouldn't call our custom __setitem__
            for key, value in rawdata.items():
                self[key] = value
        return
    def __parse_string(self, str, patt=_CookiePattern):
        # Scan the header left to right, one key[=value] element at a time.
        i = 0            # Our starting point
        n = len(str)     # Length of string
        M = None         # current morsel
        while 0 <= i < n:
            # Start looking for a cookie
            match = patt.search(str, i)
            if not match:
                # No more cookies
                break
            key, value = match.group("key"), match.group("val")
            i = match.end(0)
            # Parse the key, value in case it's metainfo
            if key[0] == "$":
                # We ignore attributes which pertain to the cookie
                # mechanism as a whole. See RFC 2109.
                # (Does anyone care?)
                # NOTE(review): $-attributes seen before any cookie key
                # (M is None) are silently dropped.
                if M:
                    M[key[1:]] = value
            elif key.lower() in Morsel._reserved:
                # A reserved attribute name attaches to the most recently
                # parsed morsel; flags ("secure"/"httponly") may be valueless.
                if M:
                    if value is None:
                        if key.lower() in Morsel._flags:
                            M[key] = True
                    else:
                        M[key] = _unquote(value)
            elif value is not None:
                # An ordinary key=value pair starts a new morsel.
                rval, cval = self.value_decode(value)
                self.__set(key, rval, cval)
                M = self[key]
class SimpleCookie(BaseCookie):
    """A BaseCookie subclass that stores plain-string cookie values.

    Values assigned dictionary-style are coerced with str() and quoted for
    the wire; values parsed from HTTP headers are unquoted back to plain
    strings.
    """
    def value_decode(self, val):
        # Network form -> (real value, coded value as received).
        return _unquote(val), val
    def value_encode(self, val):
        # Python value -> (real value, quoted network form).
        coerced = str(val)
        return coerced, _quote(coerced)
| gpl-3.0 |
xujun10110/Veil-Evasion | modules/payloads/native/hyperion.py | 9 | 2191 | """
Automates the running of the Hyperion crypter on an existing .exe
More information (Nullsecurity) - http://www.nullsecurity.net/papers/nullsec-bsides-slides.pdf
"""
import sys, time, subprocess
from modules.common import helpers
# the main config file
import settings
class Payload:
    """Wraps the external Hyperion PE crypter (run under wine): takes an
    existing .exe, crypts it, and returns the resulting executable bytes."""
    def __init__(self):
        # required options
        self.description = "Automates the running of the Hyperion crypter on an existing .exe"
        self.language = "native"
        self.rating = "Normal"
        self.extension = "exe"
        # options we require user interaction for- format is {OPTION : [Value, Description]]}
        self.required_options = {
                                "ORIGINAL_EXE" : ["", "The executable to run Hyperion on"]
                                }
    def generate(self):
        """Run Hyperion on ORIGINAL_EXE and return the crypted payload bytes,
        or "" if Hyperion failed to produce an output file."""
        # randomize the output file so we don't overwrite anything
        randName = helpers.randomString(5) + ".exe"
        outputFile = settings.TEMP_DIR + randName
        # the command to invoke hyperion. TODO: windows compatibility
        hyperionCommand = "wine hyperion.exe " + self.required_options["ORIGINAL_EXE"][0] + " " + outputFile
        print helpers.color("\n[*] Running Hyperion on " + self.required_options["ORIGINAL_EXE"][0] + "...")
        # be sure to set 'cwd' to the proper directory for hyperion so it properly runs
        p = subprocess.Popen(hyperionCommand, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=settings.VEIL_EVASION_PATH+"tools/hyperion/", shell=True)
        stdout, stderr = p.communicate()
        try:
            # read in the output .exe from /tmp/
            f = open(outputFile, 'rb')
            PayloadCode = f.read()
            f.close()
        except IOError:
            # Hyperion never wrote the output file; surface its stdout.
            print "\nError during Hyperion execution:\n" + helpers.color(stdout, warning=True)
            raw_input("\n[>] Press any key to return to the main menu.")
            return ""
        # cleanup the temporary output file. TODO: windows compatibility
        p = subprocess.Popen("rm " + outputFile, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
        stdout, stderr = p.communicate()
        return PayloadCode
| gpl-3.0 |
Arakmar/Sick-Beard | lib/requests/utils.py | 49 | 16960 | # -*- coding: utf-8 -*-
"""
requests.utils
~~~~~~~~~~~~~~
This module provides utility functions that are used within Requests
that are also useful for external consumption.
"""
import cgi
import codecs
import os
import platform
import re
import sys
import zlib
from netrc import netrc, NetrcParseError
from . import __version__
from .compat import parse_http_list as _parse_list_header
from .compat import quote, urlparse, basestring, bytes, str, OrderedDict
from .cookies import RequestsCookieJar, cookiejar_from_dict
_hush_pyflakes = (RequestsCookieJar,)
CERTIFI_BUNDLE_PATH = None
try:
# see if requests's own CA certificate bundle is installed
from . import certs
CERTIFI_BUNDLE_PATH = certs.where()
except ImportError:
pass
NETRC_FILES = ('.netrc', '_netrc')
# common paths for the OS's CA certificate bundle
POSSIBLE_CA_BUNDLE_PATHS = [
# Red Hat, CentOS, Fedora and friends (provided by the ca-certificates package):
'/etc/pki/tls/certs/ca-bundle.crt',
# Ubuntu, Debian, and friends (provided by the ca-certificates package):
'/etc/ssl/certs/ca-certificates.crt',
# FreeBSD (provided by the ca_root_nss package):
'/usr/local/share/certs/ca-root-nss.crt',
# openSUSE (provided by the ca-certificates package), the 'certs' directory is the
# preferred way but may not be supported by the SSL module, thus it has 'ca-bundle.pem'
# as a fallback (which is generated from pem files in the 'certs' directory):
'/etc/ssl/ca-bundle.pem',
]
def get_os_ca_bundle_path():
    """Try to pick an available CA certificate bundle provided by the OS."""
    # Probe the well-known locations in order; the first existing file wins.
    return next(
        (candidate for candidate in POSSIBLE_CA_BUNDLE_PATHS
         if os.path.exists(candidate)),
        None)
# if certifi is installed, use its CA bundle;
# otherwise, try and use the OS bundle
DEFAULT_CA_BUNDLE_PATH = CERTIFI_BUNDLE_PATH or get_os_ca_bundle_path()
def dict_to_sequence(d):
    """Returns an internal sequence dictionary update."""
    # Mappings are flattened to their (key, value) pairs; anything else
    # is assumed to already be a sequence and passes through unchanged.
    return d.items() if hasattr(d, 'items') else d
def get_netrc_auth(url):
    """Returns the Requests tuple auth for a given url from netrc."""
    try:
        # Pick the first netrc-style file that exists under the home dir.
        netrc_path = None
        for candidate in (os.path.expanduser('~/{0}'.format(f)) for f in NETRC_FILES):
            if netrc_path is None and os.path.exists(candidate):
                netrc_path = candidate

        # No netrc file at all: nothing to do.
        if netrc_path is None:
            return None

        # Host portion of the URL, minus any ':port' suffix.
        host = urlparse(url).netloc.split(':')[0]

        try:
            entry = netrc(netrc_path).authenticators(host)
            if entry:
                # Prefer the login name; fall back to the account field.
                login_index = 0 if entry[0] else 1
                return (entry[login_index], entry[2])
        except (NetrcParseError, IOError):
            # Parsing error or a permissions problem reading the file:
            # silently skip netrc auth.
            pass

    # AppEngine hackiness.
    except (ImportError, AttributeError):
        pass
def guess_filename(obj):
    """Tries to guess the filename of the given object."""
    # Pseudo-files (e.g. stdio wrappers) report names like '<stdin>';
    # only a non-empty name without angle brackets is usable.
    candidate = getattr(obj, 'name', None)
    if not candidate:
        return None
    if candidate[0] != '<' and candidate[-1] != '>':
        return candidate
    return None
def from_key_val_list(value):
    """Take an object and test to see if it can be represented as a
    dictionary. Unless it can not be represented as such, return an
    OrderedDict, e.g.,

    ::
        >>> from_key_val_list([('key', 'val')])
        OrderedDict([('key', 'val')])
        >>> from_key_val_list('string')
        ValueError: need more than 1 value to unpack
        >>> from_key_val_list({'key': 'val'})
        OrderedDict([('key', 'val')])
    """
    if value is None:
        return None

    # Scalars cannot be interpreted as key/value pairs.
    scalar_types = (str, bytes, bool, int)
    if isinstance(value, scalar_types):
        raise ValueError('cannot encode objects that are not 2-tuples')

    return OrderedDict(value)
def to_key_val_list(value):
    """Take an object and test to see if it can be represented as a
    dictionary. If it can be, return a list of tuples, e.g.,

    ::
        >>> to_key_val_list([('key', 'val')])
        [('key', 'val')]
        >>> to_key_val_list({'key': 'val'})
        [('key', 'val')]
        >>> to_key_val_list('string')
        ValueError: cannot encode objects that are not 2-tuples.
    """
    if value is None:
        return None

    if isinstance(value, (str, bytes, bool, int)):
        raise ValueError('cannot encode objects that are not 2-tuples')

    # Mappings are expanded to their item pairs before materializing.
    pairs = value.items() if isinstance(value, dict) else value
    return list(pairs)
# From mitsuhiko/werkzeug (used with permission).
def parse_list_header(value):
    """Parse lists as described by RFC 2068 Section 2.

    Splits comma-separated lists whose elements may be quoted-strings
    (a quoted-string can itself contain a comma). Surrounding quotes are
    removed after parsing. Works like :func:`parse_set_header` except
    that items may appear multiple times and case is preserved.

    >>> parse_list_header('token, "quoted value"')
    ['token', 'quoted value']

    To create a header from the :class:`list` again, use the
    :func:`dump_header` function.

    :param value: a string with a list header.
    :return: :class:`list`
    """
    return [unquote_header_value(item[1:-1]) if item[:1] == item[-1:] == '"' else item
            for item in _parse_list_header(value)]
# From mitsuhiko/werkzeug (used with permission).
def parse_dict_header(value):
    """Parse lists of key, value pairs as described by RFC 2068 Section 2
    and convert them into a python dict. A key with no value maps to `None`.

    >>> d = parse_dict_header('foo="is a fish", bar="as well"')
    >>> sorted(d.items())
    [('bar', 'as well'), ('foo', 'is a fish')]
    >>> parse_dict_header('key_without_value')
    {'key_without_value': None}

    To create a header from the :class:`dict` again, use the
    :func:`dump_header` function.

    :param value: a string with a dict header.
    :return: :class:`dict`
    """
    parsed = {}
    for item in _parse_list_header(value):
        # Bare token: record it with no value.
        if '=' not in item:
            parsed[item] = None
            continue
        name, raw = item.split('=', 1)
        if raw[:1] == raw[-1:] == '"':
            raw = unquote_header_value(raw[1:-1])
        parsed[name] = raw
    return parsed
# From mitsuhiko/werkzeug (used with permission).
def unquote_header_value(value, is_filename=False):
    r"""Unquotes a header value. (Reversal of :func:`quote_header_value`).

    This mimics what browsers actually do rather than strict RFC
    unquoting.

    :param value: the header value to unquote.
    """
    # Anything not wrapped in double quotes passes through untouched.
    if not (value and value[0] == value[-1] == '"'):
        return value

    # Strip only the outer quotes. Full RFC unquoting would break IE,
    # which uploads files with "C:\foo\bar.txt" as the literal filename.
    inner = value[1:-1]

    # For a filename that looks like a UNC path, keep the escapes
    # untouched so the leading double slash is not collapsed. See #458.
    if is_filename and inner[:2] == '\\\\':
        return inner
    return inner.replace('\\\\', '\\').replace('\\"', '"')
def header_expand(headers):
    """Returns an HTTP Header value string from a dictionary.

    Example expansion::

        {'text/x-dvi': {'q': '.8', 'mxb': '100000', 'mxt': '5.0'}, 'text/x-c': {}}
        # Accept: text/x-dvi; q=.8; mxb=100000; mxt=5.0, text/x-c

        (('text/x-dvi', {'q': '.8', 'mxb': '100000', 'mxt': '5.0'}), ('text/x-c', {}))
        # Accept: text/x-dvi; q=.8; mxb=100000; mxt=5.0, text/x-c
    """
    if isinstance(headers, dict):
        headers = list(headers.items())
    elif isinstance(headers, basestring):
        return headers
    elif isinstance(headers, str):
        # As discussed in https://github.com/kennethreitz/requests/issues/400
        # latin-1 is the most conservative encoding used on the web. Anyone
        # who needs more can encode to a byte-string before calling
        return headers.encode("latin-1")
    elif headers is None:
        return headers

    pieces = []
    for index, (value, params) in enumerate(headers):
        pieces.append(value)
        pieces.append('; ')
        if len(params):
            pieces.append('; '.join('%s=%s' % (p_k, p_v)
                                    for (p_k, p_v) in params.items()))
        # Separate entries with ', ' except after the last one.
        if index + 1 != len(headers):
            pieces.append(', ')

    # Remove trailing separators.
    if pieces[-1] in (', ', '; '):
        del pieces[-1]

    return ''.join(pieces)
def dict_from_cookiejar(cj):
    """Returns a key/value dictionary from a CookieJar.

    :param cj: CookieJar object to extract cookies from.
    """
    # Later cookies with the same name overwrite earlier ones.
    return dict((cookie.name, cookie.value) for cookie in cj)
def add_dict_to_cookiejar(cj, cookie_dict):
    """Returns a CookieJar from a key/value dictionary.

    :param cj: CookieJar to insert cookies into.
    :param cookie_dict: Dict of key/values to insert into CookieJar.
    """
    # Build a throwaway jar from the dict, then migrate its cookies over.
    for cookie in cookiejar_from_dict(cookie_dict):
        cj.set_cookie(cookie)
    return cj
def get_encodings_from_content(content):
    """Returns encodings from given content string.

    :param content: bytestring to extract encodings from.
    """
    # Case-insensitive scan for <meta ... charset=...> declarations.
    return re.findall(r'<meta.*?charset=["\']*(.+?)["\'>]', content, flags=re.I)
def _parse_content_type_header(header):
    """Split a Content-Type header into ``(content_type, params_dict)``.

    Local replacement for ``cgi.parse_header``: the ``cgi`` module is
    deprecated (PEP 594) and removed in Python 3.13. Parameter names are
    lower-cased and values are stripped of quotes/whitespace, matching
    ``cgi.parse_header``'s behavior for the charset use case.
    """
    tokens = header.split(';')
    content_type, raw_params = tokens[0].strip(), tokens[1:]
    params = {}
    to_strip = "\"' "
    for raw in raw_params:
        raw = raw.strip()
        if raw:
            key, value = raw, True
            eq = raw.find('=')
            if eq != -1:
                key = raw[:eq].strip(to_strip)
                value = raw[eq + 1:].strip(to_strip)
            params[key.lower()] = value
    return content_type, params


def get_encoding_from_headers(headers):
    """Returns encodings from given HTTP Header Dict.

    :param headers: dictionary to extract encoding from.
    :return: the declared charset, 'ISO-8859-1' for text/* content with
        no charset, or None when no encoding can be determined.
    """
    content_type = headers.get('content-type')

    if not content_type:
        return None

    content_type, params = _parse_content_type_header(content_type)

    if 'charset' in params:
        # Defensively strip any residual quoting around the charset name.
        return params['charset'].strip("'\"")

    if 'text' in content_type:
        # HTTP/1.1 default charset for text/* when none is declared.
        return 'ISO-8859-1'
def stream_decode_response_unicode(iterator, r):
    """Stream decodes a iterator.

    Yields unicode chunks decoded incrementally using the response's
    declared encoding; when no encoding is known, raw chunks are passed
    through unchanged.

    :param iterator: iterator over the raw (byte) chunks of the body.
    :param r: Response whose ``encoding`` attribute selects the decoder.
    """
    if r.encoding is None:
        for item in iterator:
            yield item
        return

    decoder = codecs.getincrementaldecoder(r.encoding)(errors='replace')
    for chunk in iterator:
        rv = decoder.decode(chunk)
        if rv:
            yield rv
    # Flush anything the decoder is still buffering. Use b'' (not '') so
    # the final flush also works on Python 3, where the incremental
    # decoder only accepts bytes; b'' is valid on Python 2 as well.
    rv = decoder.decode(b'', final=True)
    if rv:
        yield rv
def iter_slices(string, slice_length):
    """Iterate over slices of a string."""
    offset = 0
    total = len(string)
    # Slicing past the end is safe: the last slice is simply shorter.
    while offset < total:
        yield string[offset:offset + slice_length]
        offset += slice_length
def get_unicode_from_response(r):
    """Returns the requested content back in unicode.

    :param r: Response object to get unicode content from.

    Tried:
    1. charset from content-type
    2. every encodings from ``<meta ... charset=XXX>``
    3. fall back and replace all unicode characters
    """
    tried_encodings = []  # NOTE(review): appended to below but never read

    # Try charset from content-type
    encoding = get_encoding_from_headers(r.headers)

    if encoding:
        try:
            return str(r.content, encoding)
        except UnicodeError:
            tried_encodings.append(encoding)

    # Fall back:
    try:
        # When encoding is None, str(bytes, None, ...) raises TypeError,
        # so the raw content is returned unchanged.
        return str(r.content, encoding, errors='replace')
    except TypeError:
        return r.content
def stream_decompress(iterator, mode='gzip'):
    """
    Stream decodes an iterator over compressed data

    :param iterator: An iterator over compressed data
    :param mode: 'gzip' or 'deflate'
    :return: An iterator over decompressed data
    """
    if mode not in ['gzip', 'deflate']:
        raise ValueError('stream_decompress mode must be gzip or deflate')

    # 16 + MAX_WBITS tells zlib to expect a gzip header/trailer; a
    # negative wbits selects a raw deflate stream with no zlib header.
    zlib_mode = 16 + zlib.MAX_WBITS if mode == 'gzip' else -zlib.MAX_WBITS
    dec = zlib.decompressobj(zlib_mode)
    try:
        for chunk in iterator:
            rv = dec.decompress(chunk)
            if rv:
                yield rv
    except zlib.error:
        # If there was an error decompressing, just return the raw chunk.
        # (`chunk` is bound here because the error is raised by
        # dec.decompress() inside the loop.)
        yield chunk
        # Continue to return the rest of the raw data
        for chunk in iterator:
            yield chunk
    else:
        # Make sure everything has been returned from the decompression object
        buf = dec.decompress(bytes())
        rv = buf + dec.flush()
        if rv:
            yield rv
def stream_untransfer(gen, resp):
    # Undo whatever content-encoding the server applied, per the header.
    content_encoding = resp.headers.get('content-encoding', '')
    if 'gzip' in content_encoding:
        return stream_decompress(gen, mode='gzip')
    if 'deflate' in content_encoding:
        return stream_decompress(gen, mode='deflate')
    return gen
# The unreserved URI characters (RFC 3986)
UNRESERVED_SET = frozenset(
    "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"
    + "0123456789-._~")


def unquote_unreserved(uri):
    """Un-escape any percent-escape sequences in a URI that are unreserved
    characters. This leaves all reserved, illegal and non-ASCII bytes encoded.
    """
    try:
        pieces = uri.split('%')
        for idx in range(1, len(pieces)):
            hex_pair = pieces[idx][0:2]
            if len(hex_pair) == 2 and hex_pair.isalnum():
                decoded = chr(int(hex_pair, 16))
                # Only unescape characters that are safe to leave bare.
                if decoded in UNRESERVED_SET:
                    pieces[idx] = decoded + pieces[idx][2:]
                else:
                    pieces[idx] = '%' + pieces[idx]
            else:
                # Not a valid two-digit escape: restore the '%' literally.
                pieces[idx] = '%' + pieces[idx]
        return ''.join(pieces)
    except ValueError:
        # int('..', 16) failed on a malformed escape: leave the URI as-is.
        return uri


def requote_uri(uri):
    """Re-quote the given URI.

    This function passes the given URI through an unquote/quote cycle to
    ensure that it is fully and consistently quoted.
    """
    # Unescape the unreserved characters first, then re-escape anything
    # illegal while leaving reserved characters and '%' alone.
    return quote(unquote_unreserved(uri), safe="!#$%&'()*+,/:;=?@[]~")
def get_environ_proxies():
    """Return a dict of environment proxies."""
    proxy_keys = ('all', 'http', 'https', 'ftp', 'socks', 'no')

    def lookup(name):
        # Lower-case takes precedence over upper-case, as in the original.
        return os.environ.get(name) or os.environ.get(name.upper())

    found = {}
    for key in proxy_keys:
        value = lookup(key + '_proxy')
        if value:
            found[key] = value
    return found
def default_user_agent():
    """Return a string representing the default user agent."""
    implementation = platform.python_implementation()

    if implementation == 'CPython':
        implementation_version = platform.python_version()
    elif implementation == 'PyPy':
        info = sys.pypy_version_info
        implementation_version = '%s.%s.%s' % (info.major, info.minor, info.micro)
        # Pre-release PyPy builds append their release level (e.g. 'beta').
        if info.releaselevel != 'final':
            implementation_version += info.releaselevel
    elif implementation in ('Jython', 'IronPython'):
        implementation_version = platform.python_version()  # Complete Guess
    else:
        implementation_version = 'Unknown'

    return ' '.join([
        'python-requests/%s' % __version__,
        '%s/%s' % (implementation, implementation_version),
        '%s/%s' % (platform.system(), platform.release()),
    ])
def parse_header_links(value):
    """Return a dict of parsed link headers proxies.

    i.e. Link: <http:/.../front.jpeg>; rel=front; type="image/jpeg",<http://.../back.jpeg>; rel=back;type="image/jpeg"
    """
    links = []
    junk = " '\""

    for section in value.split(','):
        # Separate the <url> part from its parameters, if any.
        url, _, params = section.partition(';')

        link = {'url': url.strip("<> '\"")}

        for param in params.split(';'):
            try:
                key, raw = param.split('=')
            except ValueError:
                # Malformed or missing parameter: stop parsing this link.
                break
            link[key.strip(junk)] = raw.strip(junk)

        links.append(link)

    return links
| gpl-3.0 |
dongweiming/tola | tests/test_pytest.py | 3 | 1357 | # coding=utf-8
import pytest
@pytest.fixture  # builds the test environment; a fixture can do the setUp and tearDown work
def setup_math():
    """Fixture returning the stdlib math module."""
    import math
    return math
@pytest.fixture(scope='function')
def setup_function(request):
    """Function-scoped fixture demonstrating addfinalizer-based teardown."""
    def teardown_function():
        print("teardown_function called.")
    request.addfinalizer(teardown_function)  # this nested function does the cleanup work
    print('setup_function called.')
# Py.test does not need unittest-style boilerplate: any function or class
# whose name starts with "test" is collected automatically.
def test_func(setup_function):
    print('Test_Func called.')
def test_setup_math(setup_math):
    # Py.test does not need self.assertXXX methods; Python's built-in
    # `assert` statement is used directly.
    import time
    time.sleep(4)  # NOTE(review): 4s sleep slows the suite — presumably a slow-test demo; confirm
    assert setup_math.pow(2, 3) == 8.0
class TestClass(object):
    """Test classes are collected too, as long as the name starts with Test."""

    def test_in(self):
        assert 'h' in 'hello'

    def test_two(self, setup_math):
        # Fixtures are injected into methods the same way as into functions.
        assert setup_math.ceil(10) == 10.0
def raise_exit():
    # Helper used below to exercise pytest.raises.
    raise SystemExit(1)
def test_mytest():
    with pytest.raises(SystemExit):  # used to test the exception that gets raised
        raise_exit()
@pytest.mark.parametrize('test_input,expected', [
    ('1+3', 4),
    ('2*4', 8),
    ('1 == 2', False),
])  # parametrize attaches multiple sets of test inputs via a decorator
def test_eval(test_input, expected):
    assert eval(test_input) == expected
| apache-2.0 |
kenshay/ImageScript | ProgramData/SystemFiles/Python/Lib/encodings/cp862.py | 593 | 33626 | """ Python Character Mapping Codec generated from 'VENDORS/MICSFT/PC/CP862.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
    # Stateless one-shot codec: each direction is a single charmap lookup
    # against the module-level tables defined below (generated by gencodec.py).

    def encode(self,input,errors='strict'):
        """Encode a unicode string to CP862 bytes via encoding_map."""
        return codecs.charmap_encode(input,errors,encoding_map)

    def decode(self,input,errors='strict'):
        """Decode CP862 bytes to unicode via decoding_table."""
        return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
    def encode(self, input, final=False):
        # Charmap encoding is stateless, so `final` is not consulted;
        # only the encoded bytes (element [0]) are returned.
        return codecs.charmap_encode(input,self.errors,encoding_map)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
    def decode(self, input, final=False):
        # Charmap decoding is stateless, so `final` is not consulted;
        # only the decoded string (element [0]) is returned.
        return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
    # Inherits everything: Codec supplies encode(), StreamWriter the stream plumbing.
    pass
class StreamReader(Codec,codecs.StreamReader):
    # Inherits everything: Codec supplies decode(), StreamReader the stream plumbing.
    pass
### encodings module API
def getregentry():
    """Return the CodecInfo entry used by the encodings package registry."""
    return codecs.CodecInfo(
        name='cp862',
        encode=Codec().encode,
        decode=Codec().decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
### Decoding Map
decoding_map = codecs.make_identity_dict(range(256))
decoding_map.update({
0x0080: 0x05d0, # HEBREW LETTER ALEF
0x0081: 0x05d1, # HEBREW LETTER BET
0x0082: 0x05d2, # HEBREW LETTER GIMEL
0x0083: 0x05d3, # HEBREW LETTER DALET
0x0084: 0x05d4, # HEBREW LETTER HE
0x0085: 0x05d5, # HEBREW LETTER VAV
0x0086: 0x05d6, # HEBREW LETTER ZAYIN
0x0087: 0x05d7, # HEBREW LETTER HET
0x0088: 0x05d8, # HEBREW LETTER TET
0x0089: 0x05d9, # HEBREW LETTER YOD
0x008a: 0x05da, # HEBREW LETTER FINAL KAF
0x008b: 0x05db, # HEBREW LETTER KAF
0x008c: 0x05dc, # HEBREW LETTER LAMED
0x008d: 0x05dd, # HEBREW LETTER FINAL MEM
0x008e: 0x05de, # HEBREW LETTER MEM
0x008f: 0x05df, # HEBREW LETTER FINAL NUN
0x0090: 0x05e0, # HEBREW LETTER NUN
0x0091: 0x05e1, # HEBREW LETTER SAMEKH
0x0092: 0x05e2, # HEBREW LETTER AYIN
0x0093: 0x05e3, # HEBREW LETTER FINAL PE
0x0094: 0x05e4, # HEBREW LETTER PE
0x0095: 0x05e5, # HEBREW LETTER FINAL TSADI
0x0096: 0x05e6, # HEBREW LETTER TSADI
0x0097: 0x05e7, # HEBREW LETTER QOF
0x0098: 0x05e8, # HEBREW LETTER RESH
0x0099: 0x05e9, # HEBREW LETTER SHIN
0x009a: 0x05ea, # HEBREW LETTER TAV
0x009b: 0x00a2, # CENT SIGN
0x009c: 0x00a3, # POUND SIGN
0x009d: 0x00a5, # YEN SIGN
0x009e: 0x20a7, # PESETA SIGN
0x009f: 0x0192, # LATIN SMALL LETTER F WITH HOOK
0x00a0: 0x00e1, # LATIN SMALL LETTER A WITH ACUTE
0x00a1: 0x00ed, # LATIN SMALL LETTER I WITH ACUTE
0x00a2: 0x00f3, # LATIN SMALL LETTER O WITH ACUTE
0x00a3: 0x00fa, # LATIN SMALL LETTER U WITH ACUTE
0x00a4: 0x00f1, # LATIN SMALL LETTER N WITH TILDE
0x00a5: 0x00d1, # LATIN CAPITAL LETTER N WITH TILDE
0x00a6: 0x00aa, # FEMININE ORDINAL INDICATOR
0x00a7: 0x00ba, # MASCULINE ORDINAL INDICATOR
0x00a8: 0x00bf, # INVERTED QUESTION MARK
0x00a9: 0x2310, # REVERSED NOT SIGN
0x00aa: 0x00ac, # NOT SIGN
0x00ab: 0x00bd, # VULGAR FRACTION ONE HALF
0x00ac: 0x00bc, # VULGAR FRACTION ONE QUARTER
0x00ad: 0x00a1, # INVERTED EXCLAMATION MARK
0x00ae: 0x00ab, # LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
0x00af: 0x00bb, # RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
0x00b0: 0x2591, # LIGHT SHADE
0x00b1: 0x2592, # MEDIUM SHADE
0x00b2: 0x2593, # DARK SHADE
0x00b3: 0x2502, # BOX DRAWINGS LIGHT VERTICAL
0x00b4: 0x2524, # BOX DRAWINGS LIGHT VERTICAL AND LEFT
0x00b5: 0x2561, # BOX DRAWINGS VERTICAL SINGLE AND LEFT DOUBLE
0x00b6: 0x2562, # BOX DRAWINGS VERTICAL DOUBLE AND LEFT SINGLE
0x00b7: 0x2556, # BOX DRAWINGS DOWN DOUBLE AND LEFT SINGLE
0x00b8: 0x2555, # BOX DRAWINGS DOWN SINGLE AND LEFT DOUBLE
0x00b9: 0x2563, # BOX DRAWINGS DOUBLE VERTICAL AND LEFT
0x00ba: 0x2551, # BOX DRAWINGS DOUBLE VERTICAL
0x00bb: 0x2557, # BOX DRAWINGS DOUBLE DOWN AND LEFT
0x00bc: 0x255d, # BOX DRAWINGS DOUBLE UP AND LEFT
0x00bd: 0x255c, # BOX DRAWINGS UP DOUBLE AND LEFT SINGLE
0x00be: 0x255b, # BOX DRAWINGS UP SINGLE AND LEFT DOUBLE
0x00bf: 0x2510, # BOX DRAWINGS LIGHT DOWN AND LEFT
0x00c0: 0x2514, # BOX DRAWINGS LIGHT UP AND RIGHT
0x00c1: 0x2534, # BOX DRAWINGS LIGHT UP AND HORIZONTAL
0x00c2: 0x252c, # BOX DRAWINGS LIGHT DOWN AND HORIZONTAL
0x00c3: 0x251c, # BOX DRAWINGS LIGHT VERTICAL AND RIGHT
0x00c4: 0x2500, # BOX DRAWINGS LIGHT HORIZONTAL
0x00c5: 0x253c, # BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL
0x00c6: 0x255e, # BOX DRAWINGS VERTICAL SINGLE AND RIGHT DOUBLE
0x00c7: 0x255f, # BOX DRAWINGS VERTICAL DOUBLE AND RIGHT SINGLE
0x00c8: 0x255a, # BOX DRAWINGS DOUBLE UP AND RIGHT
0x00c9: 0x2554, # BOX DRAWINGS DOUBLE DOWN AND RIGHT
0x00ca: 0x2569, # BOX DRAWINGS DOUBLE UP AND HORIZONTAL
0x00cb: 0x2566, # BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL
0x00cc: 0x2560, # BOX DRAWINGS DOUBLE VERTICAL AND RIGHT
0x00cd: 0x2550, # BOX DRAWINGS DOUBLE HORIZONTAL
0x00ce: 0x256c, # BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL
0x00cf: 0x2567, # BOX DRAWINGS UP SINGLE AND HORIZONTAL DOUBLE
0x00d0: 0x2568, # BOX DRAWINGS UP DOUBLE AND HORIZONTAL SINGLE
0x00d1: 0x2564, # BOX DRAWINGS DOWN SINGLE AND HORIZONTAL DOUBLE
0x00d2: 0x2565, # BOX DRAWINGS DOWN DOUBLE AND HORIZONTAL SINGLE
0x00d3: 0x2559, # BOX DRAWINGS UP DOUBLE AND RIGHT SINGLE
0x00d4: 0x2558, # BOX DRAWINGS UP SINGLE AND RIGHT DOUBLE
0x00d5: 0x2552, # BOX DRAWINGS DOWN SINGLE AND RIGHT DOUBLE
0x00d6: 0x2553, # BOX DRAWINGS DOWN DOUBLE AND RIGHT SINGLE
0x00d7: 0x256b, # BOX DRAWINGS VERTICAL DOUBLE AND HORIZONTAL SINGLE
0x00d8: 0x256a, # BOX DRAWINGS VERTICAL SINGLE AND HORIZONTAL DOUBLE
0x00d9: 0x2518, # BOX DRAWINGS LIGHT UP AND LEFT
0x00da: 0x250c, # BOX DRAWINGS LIGHT DOWN AND RIGHT
0x00db: 0x2588, # FULL BLOCK
0x00dc: 0x2584, # LOWER HALF BLOCK
0x00dd: 0x258c, # LEFT HALF BLOCK
0x00de: 0x2590, # RIGHT HALF BLOCK
0x00df: 0x2580, # UPPER HALF BLOCK
0x00e0: 0x03b1, # GREEK SMALL LETTER ALPHA
0x00e1: 0x00df, # LATIN SMALL LETTER SHARP S (GERMAN)
0x00e2: 0x0393, # GREEK CAPITAL LETTER GAMMA
0x00e3: 0x03c0, # GREEK SMALL LETTER PI
0x00e4: 0x03a3, # GREEK CAPITAL LETTER SIGMA
0x00e5: 0x03c3, # GREEK SMALL LETTER SIGMA
0x00e6: 0x00b5, # MICRO SIGN
0x00e7: 0x03c4, # GREEK SMALL LETTER TAU
0x00e8: 0x03a6, # GREEK CAPITAL LETTER PHI
0x00e9: 0x0398, # GREEK CAPITAL LETTER THETA
0x00ea: 0x03a9, # GREEK CAPITAL LETTER OMEGA
0x00eb: 0x03b4, # GREEK SMALL LETTER DELTA
0x00ec: 0x221e, # INFINITY
0x00ed: 0x03c6, # GREEK SMALL LETTER PHI
0x00ee: 0x03b5, # GREEK SMALL LETTER EPSILON
0x00ef: 0x2229, # INTERSECTION
0x00f0: 0x2261, # IDENTICAL TO
0x00f1: 0x00b1, # PLUS-MINUS SIGN
0x00f2: 0x2265, # GREATER-THAN OR EQUAL TO
0x00f3: 0x2264, # LESS-THAN OR EQUAL TO
0x00f4: 0x2320, # TOP HALF INTEGRAL
0x00f5: 0x2321, # BOTTOM HALF INTEGRAL
0x00f6: 0x00f7, # DIVISION SIGN
0x00f7: 0x2248, # ALMOST EQUAL TO
0x00f8: 0x00b0, # DEGREE SIGN
0x00f9: 0x2219, # BULLET OPERATOR
0x00fa: 0x00b7, # MIDDLE DOT
0x00fb: 0x221a, # SQUARE ROOT
0x00fc: 0x207f, # SUPERSCRIPT LATIN SMALL LETTER N
0x00fd: 0x00b2, # SUPERSCRIPT TWO
0x00fe: 0x25a0, # BLACK SQUARE
0x00ff: 0x00a0, # NO-BREAK SPACE
})
### Decoding Table
decoding_table = (
u'\x00' # 0x0000 -> NULL
u'\x01' # 0x0001 -> START OF HEADING
u'\x02' # 0x0002 -> START OF TEXT
u'\x03' # 0x0003 -> END OF TEXT
u'\x04' # 0x0004 -> END OF TRANSMISSION
u'\x05' # 0x0005 -> ENQUIRY
u'\x06' # 0x0006 -> ACKNOWLEDGE
u'\x07' # 0x0007 -> BELL
u'\x08' # 0x0008 -> BACKSPACE
u'\t' # 0x0009 -> HORIZONTAL TABULATION
u'\n' # 0x000a -> LINE FEED
u'\x0b' # 0x000b -> VERTICAL TABULATION
u'\x0c' # 0x000c -> FORM FEED
u'\r' # 0x000d -> CARRIAGE RETURN
u'\x0e' # 0x000e -> SHIFT OUT
u'\x0f' # 0x000f -> SHIFT IN
u'\x10' # 0x0010 -> DATA LINK ESCAPE
u'\x11' # 0x0011 -> DEVICE CONTROL ONE
u'\x12' # 0x0012 -> DEVICE CONTROL TWO
u'\x13' # 0x0013 -> DEVICE CONTROL THREE
u'\x14' # 0x0014 -> DEVICE CONTROL FOUR
u'\x15' # 0x0015 -> NEGATIVE ACKNOWLEDGE
u'\x16' # 0x0016 -> SYNCHRONOUS IDLE
u'\x17' # 0x0017 -> END OF TRANSMISSION BLOCK
u'\x18' # 0x0018 -> CANCEL
u'\x19' # 0x0019 -> END OF MEDIUM
u'\x1a' # 0x001a -> SUBSTITUTE
u'\x1b' # 0x001b -> ESCAPE
u'\x1c' # 0x001c -> FILE SEPARATOR
u'\x1d' # 0x001d -> GROUP SEPARATOR
u'\x1e' # 0x001e -> RECORD SEPARATOR
u'\x1f' # 0x001f -> UNIT SEPARATOR
u' ' # 0x0020 -> SPACE
u'!' # 0x0021 -> EXCLAMATION MARK
u'"' # 0x0022 -> QUOTATION MARK
u'#' # 0x0023 -> NUMBER SIGN
u'$' # 0x0024 -> DOLLAR SIGN
u'%' # 0x0025 -> PERCENT SIGN
u'&' # 0x0026 -> AMPERSAND
u"'" # 0x0027 -> APOSTROPHE
u'(' # 0x0028 -> LEFT PARENTHESIS
u')' # 0x0029 -> RIGHT PARENTHESIS
u'*' # 0x002a -> ASTERISK
u'+' # 0x002b -> PLUS SIGN
u',' # 0x002c -> COMMA
u'-' # 0x002d -> HYPHEN-MINUS
u'.' # 0x002e -> FULL STOP
u'/' # 0x002f -> SOLIDUS
u'0' # 0x0030 -> DIGIT ZERO
u'1' # 0x0031 -> DIGIT ONE
u'2' # 0x0032 -> DIGIT TWO
u'3' # 0x0033 -> DIGIT THREE
u'4' # 0x0034 -> DIGIT FOUR
u'5' # 0x0035 -> DIGIT FIVE
u'6' # 0x0036 -> DIGIT SIX
u'7' # 0x0037 -> DIGIT SEVEN
u'8' # 0x0038 -> DIGIT EIGHT
u'9' # 0x0039 -> DIGIT NINE
u':' # 0x003a -> COLON
u';' # 0x003b -> SEMICOLON
u'<' # 0x003c -> LESS-THAN SIGN
u'=' # 0x003d -> EQUALS SIGN
u'>' # 0x003e -> GREATER-THAN SIGN
u'?' # 0x003f -> QUESTION MARK
u'@' # 0x0040 -> COMMERCIAL AT
u'A' # 0x0041 -> LATIN CAPITAL LETTER A
u'B' # 0x0042 -> LATIN CAPITAL LETTER B
u'C' # 0x0043 -> LATIN CAPITAL LETTER C
u'D' # 0x0044 -> LATIN CAPITAL LETTER D
u'E' # 0x0045 -> LATIN CAPITAL LETTER E
u'F' # 0x0046 -> LATIN CAPITAL LETTER F
u'G' # 0x0047 -> LATIN CAPITAL LETTER G
u'H' # 0x0048 -> LATIN CAPITAL LETTER H
u'I' # 0x0049 -> LATIN CAPITAL LETTER I
u'J' # 0x004a -> LATIN CAPITAL LETTER J
u'K' # 0x004b -> LATIN CAPITAL LETTER K
u'L' # 0x004c -> LATIN CAPITAL LETTER L
u'M' # 0x004d -> LATIN CAPITAL LETTER M
u'N' # 0x004e -> LATIN CAPITAL LETTER N
u'O' # 0x004f -> LATIN CAPITAL LETTER O
u'P' # 0x0050 -> LATIN CAPITAL LETTER P
u'Q' # 0x0051 -> LATIN CAPITAL LETTER Q
u'R' # 0x0052 -> LATIN CAPITAL LETTER R
u'S' # 0x0053 -> LATIN CAPITAL LETTER S
u'T' # 0x0054 -> LATIN CAPITAL LETTER T
u'U' # 0x0055 -> LATIN CAPITAL LETTER U
u'V' # 0x0056 -> LATIN CAPITAL LETTER V
u'W' # 0x0057 -> LATIN CAPITAL LETTER W
u'X' # 0x0058 -> LATIN CAPITAL LETTER X
u'Y' # 0x0059 -> LATIN CAPITAL LETTER Y
u'Z' # 0x005a -> LATIN CAPITAL LETTER Z
u'[' # 0x005b -> LEFT SQUARE BRACKET
u'\\' # 0x005c -> REVERSE SOLIDUS
u']' # 0x005d -> RIGHT SQUARE BRACKET
u'^' # 0x005e -> CIRCUMFLEX ACCENT
u'_' # 0x005f -> LOW LINE
u'`' # 0x0060 -> GRAVE ACCENT
u'a' # 0x0061 -> LATIN SMALL LETTER A
u'b' # 0x0062 -> LATIN SMALL LETTER B
u'c' # 0x0063 -> LATIN SMALL LETTER C
u'd' # 0x0064 -> LATIN SMALL LETTER D
u'e' # 0x0065 -> LATIN SMALL LETTER E
u'f' # 0x0066 -> LATIN SMALL LETTER F
u'g' # 0x0067 -> LATIN SMALL LETTER G
u'h' # 0x0068 -> LATIN SMALL LETTER H
u'i' # 0x0069 -> LATIN SMALL LETTER I
u'j' # 0x006a -> LATIN SMALL LETTER J
u'k' # 0x006b -> LATIN SMALL LETTER K
u'l' # 0x006c -> LATIN SMALL LETTER L
u'm' # 0x006d -> LATIN SMALL LETTER M
u'n' # 0x006e -> LATIN SMALL LETTER N
u'o' # 0x006f -> LATIN SMALL LETTER O
u'p' # 0x0070 -> LATIN SMALL LETTER P
u'q' # 0x0071 -> LATIN SMALL LETTER Q
u'r' # 0x0072 -> LATIN SMALL LETTER R
u's' # 0x0073 -> LATIN SMALL LETTER S
u't' # 0x0074 -> LATIN SMALL LETTER T
u'u' # 0x0075 -> LATIN SMALL LETTER U
u'v' # 0x0076 -> LATIN SMALL LETTER V
u'w' # 0x0077 -> LATIN SMALL LETTER W
u'x' # 0x0078 -> LATIN SMALL LETTER X
u'y' # 0x0079 -> LATIN SMALL LETTER Y
u'z' # 0x007a -> LATIN SMALL LETTER Z
u'{' # 0x007b -> LEFT CURLY BRACKET
u'|' # 0x007c -> VERTICAL LINE
u'}' # 0x007d -> RIGHT CURLY BRACKET
u'~' # 0x007e -> TILDE
u'\x7f' # 0x007f -> DELETE
u'\u05d0' # 0x0080 -> HEBREW LETTER ALEF
u'\u05d1' # 0x0081 -> HEBREW LETTER BET
u'\u05d2' # 0x0082 -> HEBREW LETTER GIMEL
u'\u05d3' # 0x0083 -> HEBREW LETTER DALET
u'\u05d4' # 0x0084 -> HEBREW LETTER HE
u'\u05d5' # 0x0085 -> HEBREW LETTER VAV
u'\u05d6' # 0x0086 -> HEBREW LETTER ZAYIN
u'\u05d7' # 0x0087 -> HEBREW LETTER HET
u'\u05d8' # 0x0088 -> HEBREW LETTER TET
u'\u05d9' # 0x0089 -> HEBREW LETTER YOD
u'\u05da' # 0x008a -> HEBREW LETTER FINAL KAF
u'\u05db' # 0x008b -> HEBREW LETTER KAF
u'\u05dc' # 0x008c -> HEBREW LETTER LAMED
u'\u05dd' # 0x008d -> HEBREW LETTER FINAL MEM
u'\u05de' # 0x008e -> HEBREW LETTER MEM
u'\u05df' # 0x008f -> HEBREW LETTER FINAL NUN
u'\u05e0' # 0x0090 -> HEBREW LETTER NUN
u'\u05e1' # 0x0091 -> HEBREW LETTER SAMEKH
u'\u05e2' # 0x0092 -> HEBREW LETTER AYIN
u'\u05e3' # 0x0093 -> HEBREW LETTER FINAL PE
u'\u05e4' # 0x0094 -> HEBREW LETTER PE
u'\u05e5' # 0x0095 -> HEBREW LETTER FINAL TSADI
u'\u05e6' # 0x0096 -> HEBREW LETTER TSADI
u'\u05e7' # 0x0097 -> HEBREW LETTER QOF
u'\u05e8' # 0x0098 -> HEBREW LETTER RESH
u'\u05e9' # 0x0099 -> HEBREW LETTER SHIN
u'\u05ea' # 0x009a -> HEBREW LETTER TAV
u'\xa2' # 0x009b -> CENT SIGN
u'\xa3' # 0x009c -> POUND SIGN
u'\xa5' # 0x009d -> YEN SIGN
u'\u20a7' # 0x009e -> PESETA SIGN
u'\u0192' # 0x009f -> LATIN SMALL LETTER F WITH HOOK
u'\xe1' # 0x00a0 -> LATIN SMALL LETTER A WITH ACUTE
u'\xed' # 0x00a1 -> LATIN SMALL LETTER I WITH ACUTE
u'\xf3' # 0x00a2 -> LATIN SMALL LETTER O WITH ACUTE
u'\xfa' # 0x00a3 -> LATIN SMALL LETTER U WITH ACUTE
u'\xf1' # 0x00a4 -> LATIN SMALL LETTER N WITH TILDE
u'\xd1' # 0x00a5 -> LATIN CAPITAL LETTER N WITH TILDE
u'\xaa' # 0x00a6 -> FEMININE ORDINAL INDICATOR
u'\xba' # 0x00a7 -> MASCULINE ORDINAL INDICATOR
u'\xbf' # 0x00a8 -> INVERTED QUESTION MARK
u'\u2310' # 0x00a9 -> REVERSED NOT SIGN
u'\xac' # 0x00aa -> NOT SIGN
u'\xbd' # 0x00ab -> VULGAR FRACTION ONE HALF
u'\xbc' # 0x00ac -> VULGAR FRACTION ONE QUARTER
u'\xa1' # 0x00ad -> INVERTED EXCLAMATION MARK
u'\xab' # 0x00ae -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\xbb' # 0x00af -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\u2591' # 0x00b0 -> LIGHT SHADE
u'\u2592' # 0x00b1 -> MEDIUM SHADE
u'\u2593' # 0x00b2 -> DARK SHADE
u'\u2502' # 0x00b3 -> BOX DRAWINGS LIGHT VERTICAL
u'\u2524' # 0x00b4 -> BOX DRAWINGS LIGHT VERTICAL AND LEFT
u'\u2561' # 0x00b5 -> BOX DRAWINGS VERTICAL SINGLE AND LEFT DOUBLE
u'\u2562' # 0x00b6 -> BOX DRAWINGS VERTICAL DOUBLE AND LEFT SINGLE
u'\u2556' # 0x00b7 -> BOX DRAWINGS DOWN DOUBLE AND LEFT SINGLE
u'\u2555' # 0x00b8 -> BOX DRAWINGS DOWN SINGLE AND LEFT DOUBLE
u'\u2563' # 0x00b9 -> BOX DRAWINGS DOUBLE VERTICAL AND LEFT
u'\u2551' # 0x00ba -> BOX DRAWINGS DOUBLE VERTICAL
u'\u2557' # 0x00bb -> BOX DRAWINGS DOUBLE DOWN AND LEFT
u'\u255d' # 0x00bc -> BOX DRAWINGS DOUBLE UP AND LEFT
u'\u255c' # 0x00bd -> BOX DRAWINGS UP DOUBLE AND LEFT SINGLE
u'\u255b' # 0x00be -> BOX DRAWINGS UP SINGLE AND LEFT DOUBLE
u'\u2510' # 0x00bf -> BOX DRAWINGS LIGHT DOWN AND LEFT
u'\u2514' # 0x00c0 -> BOX DRAWINGS LIGHT UP AND RIGHT
u'\u2534' # 0x00c1 -> BOX DRAWINGS LIGHT UP AND HORIZONTAL
u'\u252c' # 0x00c2 -> BOX DRAWINGS LIGHT DOWN AND HORIZONTAL
u'\u251c' # 0x00c3 -> BOX DRAWINGS LIGHT VERTICAL AND RIGHT
u'\u2500' # 0x00c4 -> BOX DRAWINGS LIGHT HORIZONTAL
u'\u253c' # 0x00c5 -> BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL
u'\u255e' # 0x00c6 -> BOX DRAWINGS VERTICAL SINGLE AND RIGHT DOUBLE
u'\u255f' # 0x00c7 -> BOX DRAWINGS VERTICAL DOUBLE AND RIGHT SINGLE
u'\u255a' # 0x00c8 -> BOX DRAWINGS DOUBLE UP AND RIGHT
u'\u2554' # 0x00c9 -> BOX DRAWINGS DOUBLE DOWN AND RIGHT
u'\u2569' # 0x00ca -> BOX DRAWINGS DOUBLE UP AND HORIZONTAL
u'\u2566' # 0x00cb -> BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL
u'\u2560' # 0x00cc -> BOX DRAWINGS DOUBLE VERTICAL AND RIGHT
u'\u2550' # 0x00cd -> BOX DRAWINGS DOUBLE HORIZONTAL
u'\u256c' # 0x00ce -> BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL
u'\u2567' # 0x00cf -> BOX DRAWINGS UP SINGLE AND HORIZONTAL DOUBLE
u'\u2568' # 0x00d0 -> BOX DRAWINGS UP DOUBLE AND HORIZONTAL SINGLE
u'\u2564' # 0x00d1 -> BOX DRAWINGS DOWN SINGLE AND HORIZONTAL DOUBLE
u'\u2565' # 0x00d2 -> BOX DRAWINGS DOWN DOUBLE AND HORIZONTAL SINGLE
u'\u2559' # 0x00d3 -> BOX DRAWINGS UP DOUBLE AND RIGHT SINGLE
u'\u2558' # 0x00d4 -> BOX DRAWINGS UP SINGLE AND RIGHT DOUBLE
u'\u2552' # 0x00d5 -> BOX DRAWINGS DOWN SINGLE AND RIGHT DOUBLE
u'\u2553' # 0x00d6 -> BOX DRAWINGS DOWN DOUBLE AND RIGHT SINGLE
u'\u256b' # 0x00d7 -> BOX DRAWINGS VERTICAL DOUBLE AND HORIZONTAL SINGLE
u'\u256a' # 0x00d8 -> BOX DRAWINGS VERTICAL SINGLE AND HORIZONTAL DOUBLE
u'\u2518' # 0x00d9 -> BOX DRAWINGS LIGHT UP AND LEFT
u'\u250c' # 0x00da -> BOX DRAWINGS LIGHT DOWN AND RIGHT
u'\u2588' # 0x00db -> FULL BLOCK
u'\u2584' # 0x00dc -> LOWER HALF BLOCK
u'\u258c' # 0x00dd -> LEFT HALF BLOCK
u'\u2590' # 0x00de -> RIGHT HALF BLOCK
u'\u2580' # 0x00df -> UPPER HALF BLOCK
u'\u03b1' # 0x00e0 -> GREEK SMALL LETTER ALPHA
u'\xdf' # 0x00e1 -> LATIN SMALL LETTER SHARP S (GERMAN)
u'\u0393' # 0x00e2 -> GREEK CAPITAL LETTER GAMMA
u'\u03c0' # 0x00e3 -> GREEK SMALL LETTER PI
u'\u03a3' # 0x00e4 -> GREEK CAPITAL LETTER SIGMA
u'\u03c3' # 0x00e5 -> GREEK SMALL LETTER SIGMA
u'\xb5' # 0x00e6 -> MICRO SIGN
u'\u03c4' # 0x00e7 -> GREEK SMALL LETTER TAU
u'\u03a6' # 0x00e8 -> GREEK CAPITAL LETTER PHI
u'\u0398' # 0x00e9 -> GREEK CAPITAL LETTER THETA
u'\u03a9' # 0x00ea -> GREEK CAPITAL LETTER OMEGA
u'\u03b4' # 0x00eb -> GREEK SMALL LETTER DELTA
u'\u221e' # 0x00ec -> INFINITY
u'\u03c6' # 0x00ed -> GREEK SMALL LETTER PHI
u'\u03b5' # 0x00ee -> GREEK SMALL LETTER EPSILON
u'\u2229' # 0x00ef -> INTERSECTION
u'\u2261' # 0x00f0 -> IDENTICAL TO
u'\xb1' # 0x00f1 -> PLUS-MINUS SIGN
u'\u2265' # 0x00f2 -> GREATER-THAN OR EQUAL TO
u'\u2264' # 0x00f3 -> LESS-THAN OR EQUAL TO
u'\u2320' # 0x00f4 -> TOP HALF INTEGRAL
u'\u2321' # 0x00f5 -> BOTTOM HALF INTEGRAL
u'\xf7' # 0x00f6 -> DIVISION SIGN
u'\u2248' # 0x00f7 -> ALMOST EQUAL TO
u'\xb0' # 0x00f8 -> DEGREE SIGN
u'\u2219' # 0x00f9 -> BULLET OPERATOR
u'\xb7' # 0x00fa -> MIDDLE DOT
u'\u221a' # 0x00fb -> SQUARE ROOT
u'\u207f' # 0x00fc -> SUPERSCRIPT LATIN SMALL LETTER N
u'\xb2' # 0x00fd -> SUPERSCRIPT TWO
u'\u25a0' # 0x00fe -> BLACK SQUARE
u'\xa0' # 0x00ff -> NO-BREAK SPACE
)
### Encoding Map

# Derive the encoding map by inverting the decoding table above instead of
# hand-maintaining a 250+ entry literal dict; the two mappings then can
# never drift apart.  U+FFFE is the gencodec.py marker for byte values with
# no Unicode mapping and is excluded, matching the literal map this
# replaces (for a fully defined, duplicate-free charmap the result is
# entry-for-entry identical).
# NOTE(review): assumes the string tuple that closes just above is bound to
# the conventional gencodec name `decoding_table` -- confirm before merge.
encoding_map = {
    ord(decoded): byte
    for byte, decoded in enumerate(decoding_table)
    if decoded != u'\ufffe'
}
| gpl-3.0 |
Deepthibr28/software-testing | tutorials/selenium/test_demo_selenium_webdriver_using_phantomjs.py | 1 | 1388 | import unittest
from selenium import webdriver
from selenium.common.exceptions import TimeoutException
from selenium.webdriver.support.ui import WebDriverWait # available since 2.4.0
from selenium.webdriver.support import expected_conditions as EC # available since 2.26.0
class TestSeleniumUsingPhantomJS(unittest.TestCase):
    """Smoke tests driving a headless PhantomJS browser through Selenium.

    NOTE(review): requires a working `phantomjs` binary on PATH and live
    network access to the sites under test -- confirm in CI before relying
    on these.
    """
    def setUp(self):
        # Fresh headless browser per test; closed again in tearDown().
        self.driver = webdriver.PhantomJS()
    def test_page_title_exists(self):
        """The Stack Overflow front page reports its expected title."""
        self.driver.get('http://stackoverflow.com/')
        # Use the unittest assertion instead of a bare `assert` (which is
        # stripped under python -O) so a failure also prints the actual
        # title.  The old commented-out out.txt debug dump was removed.
        self.assertIn("Stack Overflow", self.driver.title)
    def test_google_search_works(self):
        """Submitting a Google search updates the page title."""
        # go to the google home page
        self.driver.get("http://www.google.com")
        # find the element that's name attribute is q (the google search box)
        inputElement = self.driver.find_element_by_name("q")
        # type in the search
        inputElement.send_keys("Mazedur Rahman")
        # submit the form (although google automatically searches now without submitting)
        inputElement.submit()
        # we have to wait for the page to refresh, the last thing that seems to be updated is the title
        WebDriverWait(self.driver, 10).until(EC.title_contains("Mazedur Rahman"))
    def tearDown(self):
        self.driver.quit()
# Run the suite with the standard unittest runner when executed directly.
if __name__ == '__main__':
    unittest.main()
| mit |
tardis-sn/tardis | tardis/plasma/properties/general.py | 1 | 4003 | import logging
import numpy as np
import pandas as pd
from astropy import units as u
from tardis import constants as const
from tardis.plasma.properties.base import ProcessingPlasmaProperty
logger = logging.getLogger(__name__)
__all__ = [
"BetaRadiation",
"GElectron",
"NumberDensity",
"SelectedAtoms",
"ElectronTemperature",
"BetaElectron",
"LuminosityInner",
"TimeSimulation",
"ThermalGElectron",
]
class BetaRadiation(ProcessingPlasmaProperty):
    """
    Attributes
    ----------
    beta_rad : Numpy Array, dtype float
        Inverse radiation temperature, 1 / (k_B * T_rad), in CGS units.
    """

    outputs = ("beta_rad",)
    latex_name = (r"\beta_{\textrm{rad}}",)
    latex_formula = (r"\dfrac{1}{k_{B} T_{\textrm{rad}}}",)

    def __init__(self, plasma_parent):
        super(BetaRadiation, self).__init__(plasma_parent)
        # Cache Boltzmann's constant in CGS once per instance.
        self.k_B_cgs = const.k_B.cgs.value

    def calculate(self, t_rad):
        kT = self.k_B_cgs * t_rad
        return 1 / kT
class GElectron(ProcessingPlasmaProperty):
    """
    Attributes
    ----------
    g_electron : Numpy Array, dtype float
        ((2 pi m_e / beta_rad) / h^2) ** (3/2), evaluated in CGS units.
    """

    outputs = ("g_electron",)
    latex_name = (r"g_{\textrm{electron}}",)
    latex_formula = (
        r"\Big(\dfrac{2\pi m_{e}/\beta_{\textrm{rad}}}{h^2}\Big)^{3/2}",
    )

    def calculate(self, beta_rad):
        numerator = 2 * np.pi * const.m_e.cgs.value / beta_rad
        denominator = const.h.cgs.value ** 2
        return (numerator / denominator) ** 1.5
class ThermalGElectron(GElectron):
    """
    Attributes
    ----------
    thermal_g_electron : Numpy Array, dtype float
    """
    outputs = ("thermal_g_electron",)
    latex_name = (r"g_{\textrm{electron_thermal}}",)
    latex_formula = (
        r"\Big(\dfrac{2\pi m_{e}/\beta_{\textrm{electron}}}{h^2}\Big)^{3/2}",
    )
    def calculate(self, beta_electron):
        # Same formula as GElectron, but evaluated with the electron
        # (rather than radiation) inverse temperature.
        return super(ThermalGElectron, self).calculate(beta_electron)
class NumberDensity(ProcessingPlasmaProperty):
    """
    Attributes
    ----------
    number_density : Pandas DataFrame, dtype float
        Indexed by atomic number, columns corresponding to zones
    """

    outputs = ("number_density",)
    latex_name = ("N_{i}",)

    @staticmethod
    def calculate(atomic_mass, abundance, density):
        # Mass density contributed by each element, then divide row-wise
        # by the per-atom mass to obtain a number density.
        mass_density = abundance * density
        per_atom_mass = atomic_mass.loc[abundance.index]
        return mass_density.div(per_atom_mass, axis=0)
class SelectedAtoms(ProcessingPlasmaProperty):
    """
    Attributes
    ----------
    selected_atoms : Pandas Int64Index, dtype int
        Atomic numbers of elements required for particular simulation
    """
    outputs = ("selected_atoms",)
    def calculate(self, abundance):
        # The abundance frame is indexed by atomic number, so its index is
        # exactly the set of elements present in this simulation.
        return abundance.index
class ElectronTemperature(ProcessingPlasmaProperty):
    """
    Attributes
    ----------
    t_electron : Numpy Array, dtype float
    """
    outputs = ("t_electrons",)
    latex_name = (r"T_{\textrm{electron}}",)
    latex_formula = (r"\textrm{const.}\times T_{\textrm{rad}}",)
    def calculate(self, t_rad, link_t_rad_t_electron):
        # Electron temperature is modelled as a fixed multiple
        # (link_t_rad_t_electron) of the radiation temperature.
        return t_rad * link_t_rad_t_electron
class BetaElectron(ProcessingPlasmaProperty):
    """
    Attributes
    ----------
    beta_electron : Numpy Array, dtype float
        Inverse electron temperature, 1 / (k_B * T_electron), in CGS units.
    """

    outputs = ("beta_electron",)
    latex_name = (r"\beta_{\textrm{electron}}",)
    latex_formula = (r"\frac{1}{K_{B} T_{\textrm{electron}}}",)

    def __init__(self, plasma_parent):
        super(BetaElectron, self).__init__(plasma_parent)
        # Cache Boltzmann's constant in CGS once per instance.
        self.k_B_cgs = const.k_B.cgs.value

    def calculate(self, t_electrons):
        kT = self.k_B_cgs * t_electrons
        return 1 / kT
class LuminosityInner(ProcessingPlasmaProperty):
    """Bolometric luminosity of the inner boundary, L = 4 pi sigma r^2 T^4."""

    outputs = ("luminosity_inner",)

    @staticmethod
    def calculate(r_inner, t_inner):
        # Stefan-Boltzmann law on the innermost shell radius, converted
        # to erg/s.  Operand order is kept identical to the original.
        luminosity = (
            4 * np.pi * const.sigma_sb.cgs * r_inner[0] ** 2 * t_inner ** 4
        )
        return luminosity.to("erg/s")
class TimeSimulation(ProcessingPlasmaProperty):
    # Characteristic simulation time derived from the inner luminosity.
    outputs = ("time_simulation",)
    @staticmethod
    def calculate(luminosity_inner):
        # 1 erg divided by a luminosity yields a quantity with time units.
        return 1.0 * u.erg / luminosity_inner
| bsd-3-clause |
WarwickAnimeSoc/aniMango | site_info/migrations/0011_auto_20190616_1734.py | 1 | 1608 | # Generated by Django 2.2.2 on 2019-06-16 16:34
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    dependencies = [
        ('site_info', '0010_homealert'),
    ]
    operations = [
        migrations.AlterField(
            model_name='exec',
            name='academic_year',
            # (year, year) pairs for 1997..2019, generated instead of the
            # literal list makemigrations emitted -- the values are identical.
            field=models.IntegerField(choices=[(year, year) for year in range(1997, 2020)], default=2019, verbose_name='Academic year starting'),
        ),
        migrations.AlterField(
            model_name='exec',
            name='user',
            field=models.ForeignKey(limit_choices_to={'is_staff': True}, on_delete=django.db.models.deletion.PROTECT, to=settings.AUTH_USER_MODEL),
        ),
        migrations.AlterField(
            model_name='historyentry',
            name='academic_year',
            field=models.IntegerField(choices=[(year, year) for year in range(1997, 2020)], default=2019, verbose_name='Academic year starting'),
        ),
    ]
| mit |
nlandolfi/test-infra-1 | metrics/influxdb_test.py | 7 | 4440 | #!/usr/bin/env python
# Copyright 2017 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test influxdb.py."""
import BaseHTTPServer
import threading
import re
import unittest
import influxdb
class TestInfluxPoint(unittest.TestCase):
    """Unit tests for influxdb.Point parsing and line-protocol output."""

    def test_from_dict(self):
        """Point.from_dict builds a Point from a well-formed sample dict
        and returns malformed samples unchanged."""
        def check(sample, measurement, tags, fields, time):
            point = influxdb.Point.from_dict(sample)
            self.assertEqual(point.measurement, measurement)
            self.assertEqual(point.tags, tags)
            self.assertEqual(point.fields, fields)
            self.assertEqual(point.time, time)
        check(
            {
                'measurement': 'metric',
                'tags': {'style': 'stylish'},
                'fields': {'baseball': 'diamond', 'basketball': False},
                'time': 42
            },
            'metric',
            {'style': 'stylish'},
            {'baseball': 'diamond', 'basketball': False},
            42,
        )
        # 'time' is optional and defaults to None.
        check(
            {
                'measurement': 'metric',
                'tags': {},
                'fields': {'num': 2.7},
            },
            'metric',
            {},
            {'num': 2.7},
            None,
        )
        # Check that objects that don't meet the InfluxPoint spec are unchanged.
        sample = {
            'measurement': 'metric',
            'tags': {'tag': 'value'},
            'notfields': 'something',
        }
        self.assertEqual(influxdb.Point.from_dict(sample), sample)

    def test_serialize(self):
        """Point.serialize emits InfluxDB line protocol, escaping spaces,
        commas and equals signs in measurements, tags and field keys."""
        def check(measurement, tags, fields, time, expected):
            point = influxdb.Point(measurement, tags, fields, time)
            self.assertEqual(point.serialize(), expected)
        check(
            'metric',
            {'type': 'good'},
            {'big?': True, 'size': 20},
            42,
            'metric,type=good big?=True,size=20 42',
        )
        # BUG FIX: the original contained this exact call twice in a row
        # (copy-paste duplication); one invocation covers the escaping
        # behaviour.
        check(
            'measure with spaces',
            {'tag,with,comma': 'tagval=with=equals'},
            {',,': 20.2, 'string': 'yarn'},
            None,
            r'measure\ with\ spaces,tag\,with\,comma=tagval\=with\=equals \,\,=20.2,string="yarn"',
        )
class RequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
    """Fake InfluxDB write endpoint used by TestInfluxPusher.

    Records every ``id=<n>`` value POSTed to /write on the owning server's
    ``received`` set so the test can verify which points were delivered.
    """
    def do_POST(self): # pylint: disable=invalid-name
        # The pusher must target the /write endpoint (a query string may
        # follow, hence startswith rather than equality).
        if not self.path.startswith('/write'):
            raise ValueError(
                'path should start with \'/write\', but is \'%s\'' % self.path
            )
        # Read the full request body, then harvest all id=<digits> fields.
        body = self.rfile.read(int(self.headers.getheader('content-length')))
        new_ids = [int(match) for match in re.findall(r'id=(\d+)', body)]
        self.server.received = self.server.received.union(new_ids)
        # 201 Created is what InfluxDB-era write APIs returned on success.
        self.send_response(201)
class TestInfluxPusher(unittest.TestCase):
    """Pushes points at a local fake InfluxDB server and checks delivery."""
    def setUp(self):
        # Stand up a local HTTP server (see RequestHandler) on its own
        # thread to play the role of InfluxDB.
        self.port = 8000
        self.written = 0
        self.test_server = BaseHTTPServer.HTTPServer(
            ('', self.port),
            RequestHandler,
        )
        self.test_server.received = set()
        thread = threading.Thread(target=self.test_server.serve_forever)
        thread.start()
    def tearDown(self):
        self.test_server.shutdown()
        # Every point id the test claims to have written must have reached
        # the fake server.
        for num in xrange(self.written):
            self.assertIn(num, self.test_server.received)
    def test_push(self):
        # 110 points forces the pusher to split the upload into batches
        # (verified by tearDown checking all ids arrived).
        points = [influxdb.Point('metric', {}, {'id': num}, None)
                  for num in xrange(110)]
        pusher = influxdb.Pusher(
            'localhost:%d' % self.port,
            None,
            'username',
            'pass123',
        )
        pusher.push(points, 'mydb')
        self.written = 110
# Run the suite with the standard unittest runner when executed directly.
if __name__ == '__main__':
    unittest.main()
| apache-2.0 |
ciber96/mtasa-blue | vendor/google-breakpad/src/testing/gtest/test/gtest_shuffle_test.py | 184 | 12608 | #!/usr/bin/env python
#
# Copyright 2009 Google Inc. All Rights Reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Verifies that test shuffling works."""
__author__ = 'wan@google.com (Zhanyong Wan)'
import os
import gtest_test_utils
# Command to run the gtest_shuffle_test_ program.
COMMAND = gtest_test_utils.GetTestExecutablePath('gtest_shuffle_test_')
# The environment variables for test sharding.
TOTAL_SHARDS_ENV_VAR = 'GTEST_TOTAL_SHARDS'
SHARD_INDEX_ENV_VAR = 'GTEST_SHARD_INDEX'
# Filter used when building the FILTERED_* lists below.
TEST_FILTER = 'A*.A:A*.B:C*'
# Module-level caches of test names, filled once by CalculateTestLists()
# so the child gtest binary only runs once per configuration.
ALL_TESTS = []
ACTIVE_TESTS = []
FILTERED_TESTS = []
SHARDED_TESTS = []
SHUFFLED_ALL_TESTS = []
SHUFFLED_ACTIVE_TESTS = []
SHUFFLED_FILTERED_TESTS = []
SHUFFLED_SHARDED_TESTS = []
def AlsoRunDisabledTestsFlag():
  """Returns the flag that makes Google Test also run disabled tests."""
  return '--gtest_also_run_disabled_tests'
def FilterFlag(test_filter):
  """Returns a --gtest_filter flag selecting the given filter pattern."""
  return '--gtest_filter={0}'.format(test_filter)
def RepeatFlag(n):
  """Returns a --gtest_repeat flag requesting n iterations."""
  return '--gtest_repeat={0}'.format(n)
def ShuffleFlag():
  """Returns the flag that turns on test shuffling."""
  return '--gtest_shuffle'
def RandomSeedFlag(n):
  """Returns a --gtest_random_seed flag fixing the shuffle seed to n."""
  return '--gtest_random_seed={0}'.format(n)
def RunAndReturnOutput(extra_env, args):
  """Runs the test program and returns its output."""
  # Start from a copy of the current environment so the caller's extra
  # variables never leak back into os.environ.
  env = dict(os.environ)
  env.update(extra_env)
  proc = gtest_test_utils.Subprocess([COMMAND] + args, env=env,
                                     capture_stderr=False)
  return proc.output
def GetTestsForAllIterations(extra_env, args):
  """Runs the test program and returns a list of test lists.

  Args:
    extra_env: a map from environment variables to their values
    args: command line flags to pass to gtest_shuffle_test_

  Returns:
    A list where the i-th element is the list of tests run in the i-th
    test iteration.
  """
  test_iterations = []
  for line in RunAndReturnOutput(extra_env, args).split('\n'):
    if line.startswith('----'):
      # A '----' separator line starts a new iteration's test list.
      tests = []
      test_iterations.append(tests)
    elif line.strip():
      # NOTE(review): assumes the program's output always opens with a
      # '----' line; otherwise `tests` would be unbound here -- confirm
      # against the gtest_shuffle_test_ output format.
      tests.append(line.strip())  # 'TestCaseName.TestName'
  return test_iterations
def GetTestCases(tests):
  """Returns the test case names appearing in the given full test names.

  Args:
    tests: a list of full test names ('TestCaseName.TestName')

  Returns:
    The distinct test case names from 'tests', in order of first
    appearance (every repetition after the first is dropped).
  """
  seen = []
  for full_name in tests:
    case_name = full_name.split('.')[0]
    if case_name not in seen:
      seen.append(case_name)
  return seen
def CalculateTestLists():
  """Calculates the list of tests run under different flags."""
  # Each branch fills a module-level cache at most once, so the child
  # gtest binary is only executed the first time this is called.
  if not ALL_TESTS:
    ALL_TESTS.extend(
        GetTestsForAllIterations({}, [AlsoRunDisabledTestsFlag()])[0])
  if not ACTIVE_TESTS:
    ACTIVE_TESTS.extend(GetTestsForAllIterations({}, [])[0])
  if not FILTERED_TESTS:
    FILTERED_TESTS.extend(
        GetTestsForAllIterations({}, [FilterFlag(TEST_FILTER)])[0])
  if not SHARDED_TESTS:
    SHARDED_TESTS.extend(
        GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',
                                  SHARD_INDEX_ENV_VAR: '1'},
                                 [])[0])
  # Shuffled variants of the four lists above, all with random seed 1 so
  # the orderings are reproducible across runs.
  if not SHUFFLED_ALL_TESTS:
    SHUFFLED_ALL_TESTS.extend(GetTestsForAllIterations(
        {}, [AlsoRunDisabledTestsFlag(), ShuffleFlag(), RandomSeedFlag(1)])[0])
  if not SHUFFLED_ACTIVE_TESTS:
    SHUFFLED_ACTIVE_TESTS.extend(GetTestsForAllIterations(
        {}, [ShuffleFlag(), RandomSeedFlag(1)])[0])
  if not SHUFFLED_FILTERED_TESTS:
    SHUFFLED_FILTERED_TESTS.extend(GetTestsForAllIterations(
        {}, [ShuffleFlag(), RandomSeedFlag(1), FilterFlag(TEST_FILTER)])[0])
  if not SHUFFLED_SHARDED_TESTS:
    SHUFFLED_SHARDED_TESTS.extend(
        GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',
                                  SHARD_INDEX_ENV_VAR: '1'},
                                 [ShuffleFlag(), RandomSeedFlag(1)])[0])
class GTestShuffleUnitTest(gtest_test_utils.TestCase):
  """Tests test shuffling."""

  def setUp(self):
    # Populates the module-level test-list caches; the child binary only
    # runs on the first call.
    CalculateTestLists()

  def testShufflePreservesNumberOfTests(self):
    """Shuffling must neither add nor drop tests in any configuration."""
    self.assertEqual(len(ALL_TESTS), len(SHUFFLED_ALL_TESTS))
    self.assertEqual(len(ACTIVE_TESTS), len(SHUFFLED_ACTIVE_TESTS))
    self.assertEqual(len(FILTERED_TESTS), len(SHUFFLED_FILTERED_TESTS))
    self.assertEqual(len(SHARDED_TESTS), len(SHUFFLED_SHARDED_TESTS))

  def testShuffleChangesTestOrder(self):
    """A shuffled run must come back in a different order than unshuffled."""
    self.assert_(SHUFFLED_ALL_TESTS != ALL_TESTS, SHUFFLED_ALL_TESTS)
    self.assert_(SHUFFLED_ACTIVE_TESTS != ACTIVE_TESTS, SHUFFLED_ACTIVE_TESTS)
    self.assert_(SHUFFLED_FILTERED_TESTS != FILTERED_TESTS,
                 SHUFFLED_FILTERED_TESTS)
    self.assert_(SHUFFLED_SHARDED_TESTS != SHARDED_TESTS,
                 SHUFFLED_SHARDED_TESTS)

  def testShuffleChangesTestCaseOrder(self):
    """Shuffling must also reorder the test cases, not just tests."""
    self.assert_(GetTestCases(SHUFFLED_ALL_TESTS) != GetTestCases(ALL_TESTS),
                 GetTestCases(SHUFFLED_ALL_TESTS))
    self.assert_(
        GetTestCases(SHUFFLED_ACTIVE_TESTS) != GetTestCases(ACTIVE_TESTS),
        GetTestCases(SHUFFLED_ACTIVE_TESTS))
    self.assert_(
        GetTestCases(SHUFFLED_FILTERED_TESTS) != GetTestCases(FILTERED_TESTS),
        GetTestCases(SHUFFLED_FILTERED_TESTS))
    self.assert_(
        GetTestCases(SHUFFLED_SHARDED_TESTS) != GetTestCases(SHARDED_TESTS),
        GetTestCases(SHUFFLED_SHARDED_TESTS))

  def testShuffleDoesNotRepeatTest(self):
    """No test may appear more than once after shuffling."""
    for test in SHUFFLED_ALL_TESTS:
      self.assertEqual(1, SHUFFLED_ALL_TESTS.count(test),
                       '%s appears more than once' % (test,))
    for test in SHUFFLED_ACTIVE_TESTS:
      self.assertEqual(1, SHUFFLED_ACTIVE_TESTS.count(test),
                       '%s appears more than once' % (test,))
    for test in SHUFFLED_FILTERED_TESTS:
      self.assertEqual(1, SHUFFLED_FILTERED_TESTS.count(test),
                       '%s appears more than once' % (test,))
    for test in SHUFFLED_SHARDED_TESTS:
      self.assertEqual(1, SHUFFLED_SHARDED_TESTS.count(test),
                       '%s appears more than once' % (test,))

  def testShuffleDoesNotCreateNewTest(self):
    """Shuffled lists may only contain tests from the unshuffled lists."""
    for test in SHUFFLED_ALL_TESTS:
      self.assert_(test in ALL_TESTS, '%s is an invalid test' % (test,))
    for test in SHUFFLED_ACTIVE_TESTS:
      self.assert_(test in ACTIVE_TESTS, '%s is an invalid test' % (test,))
    for test in SHUFFLED_FILTERED_TESTS:
      self.assert_(test in FILTERED_TESTS, '%s is an invalid test' % (test,))
    for test in SHUFFLED_SHARDED_TESTS:
      self.assert_(test in SHARDED_TESTS, '%s is an invalid test' % (test,))

  def testShuffleIncludesAllTests(self):
    """Every original test must survive shuffling."""
    for test in ALL_TESTS:
      self.assert_(test in SHUFFLED_ALL_TESTS, '%s is missing' % (test,))
    for test in ACTIVE_TESTS:
      self.assert_(test in SHUFFLED_ACTIVE_TESTS, '%s is missing' % (test,))
    for test in FILTERED_TESTS:
      self.assert_(test in SHUFFLED_FILTERED_TESTS, '%s is missing' % (test,))
    for test in SHARDED_TESTS:
      self.assert_(test in SHUFFLED_SHARDED_TESTS, '%s is missing' % (test,))

  def testShuffleLeavesDeathTestsAtFront(self):
    """Death tests must still run before all non-death tests."""
    non_death_test_found = False
    for test in SHUFFLED_ACTIVE_TESTS:
      if 'DeathTest.' in test:
        self.assert_(not non_death_test_found,
                     '%s appears after a non-death test' % (test,))
      else:
        non_death_test_found = True

  def _VerifyTestCasesDoNotInterleave(self, tests):
    # Each test case name must form one contiguous run in `tests`; a
    # second run of the same case means the cases interleaved.
    test_cases = []
    for test in tests:
      [test_case, _] = test.split('.')
      if test_cases and test_cases[-1] != test_case:
        test_cases.append(test_case)
        self.assertEqual(1, test_cases.count(test_case),
                         'Test case %s is not grouped together in %s' %
                         (test_case, tests))

  def testShuffleDoesNotInterleaveTestCases(self):
    """Tests from the same case must stay adjacent after shuffling."""
    self._VerifyTestCasesDoNotInterleave(SHUFFLED_ALL_TESTS)
    self._VerifyTestCasesDoNotInterleave(SHUFFLED_ACTIVE_TESTS)
    self._VerifyTestCasesDoNotInterleave(SHUFFLED_FILTERED_TESTS)
    self._VerifyTestCasesDoNotInterleave(SHUFFLED_SHARDED_TESTS)

  def testShuffleRestoresOrderAfterEachIteration(self):
    # Get the test lists in all 3 iterations, using random seed 1, 2,
    # and 3 respectively. Google Test picks a different seed in each
    # iteration, and this test depends on the current implementation
    # picking successive numbers. This dependency is not ideal, but
    # makes the test much easier to write.
    [tests_in_iteration1, tests_in_iteration2, tests_in_iteration3] = (
        GetTestsForAllIterations(
            {}, [ShuffleFlag(), RandomSeedFlag(1), RepeatFlag(3)]))
    # Make sure running the tests with random seed 1 gets the same
    # order as in iteration 1 above.
    [tests_with_seed1] = GetTestsForAllIterations(
        {}, [ShuffleFlag(), RandomSeedFlag(1)])
    self.assertEqual(tests_in_iteration1, tests_with_seed1)
    # Make sure running the tests with random seed 2 gets the same
    # order as in iteration 2 above. Success means that Google Test
    # correctly restores the test order before re-shuffling at the
    # beginning of iteration 2.
    [tests_with_seed2] = GetTestsForAllIterations(
        {}, [ShuffleFlag(), RandomSeedFlag(2)])
    self.assertEqual(tests_in_iteration2, tests_with_seed2)
    # Make sure running the tests with random seed 3 gets the same
    # order as in iteration 3 above. Success means that Google Test
    # correctly restores the test order before re-shuffling at the
    # beginning of iteration 3.
    [tests_with_seed3] = GetTestsForAllIterations(
        {}, [ShuffleFlag(), RandomSeedFlag(3)])
    self.assertEqual(tests_in_iteration3, tests_with_seed3)

  def testShuffleGeneratesNewOrderInEachIteration(self):
    """Repeated shuffled iterations must each use a fresh ordering."""
    [tests_in_iteration1, tests_in_iteration2, tests_in_iteration3] = (
        GetTestsForAllIterations(
            {}, [ShuffleFlag(), RandomSeedFlag(1), RepeatFlag(3)]))
    self.assert_(tests_in_iteration1 != tests_in_iteration2,
                 tests_in_iteration1)
    self.assert_(tests_in_iteration1 != tests_in_iteration3,
                 tests_in_iteration1)
    self.assert_(tests_in_iteration2 != tests_in_iteration3,
                 tests_in_iteration2)

  def testShuffleShardedTestsPreservesPartition(self):
    # If we run M tests on N shards, the same M tests should be run in
    # total, regardless of the random seeds used by the shards.
    [tests1] = GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',
                                         SHARD_INDEX_ENV_VAR: '0'},
                                        [ShuffleFlag(), RandomSeedFlag(1)])
    [tests2] = GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',
                                         SHARD_INDEX_ENV_VAR: '1'},
                                        [ShuffleFlag(), RandomSeedFlag(20)])
    [tests3] = GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',
                                         SHARD_INDEX_ENV_VAR: '2'},
                                        [ShuffleFlag(), RandomSeedFlag(25)])
    sorted_sharded_tests = tests1 + tests2 + tests3
    sorted_sharded_tests.sort()
    sorted_active_tests = []
    sorted_active_tests.extend(ACTIVE_TESTS)
    sorted_active_tests.sort()
    self.assertEqual(sorted_active_tests, sorted_sharded_tests)
# Delegate to Google Test's python test runner when executed directly.
if __name__ == '__main__':
  gtest_test_utils.Main()
| gpl-3.0 |
Br3nda/plow | lib/python/plow/rndaemon/core.py | 1 | 21789 |
import threading
import subprocess
import logging
import time
import os
import traceback
import errno
from collections import namedtuple, deque
from itertools import chain
import psutil
import conf
import client
import utils
import rpc.ttypes as ttypes
from profile import SystemProfiler as _SystemProfiler
# Module-level logger for the rndaemon core.
logger = logging.getLogger(__name__)

# Public API of this module; the singleton instances are created at the
# bottom of the file.
__all__ = ['Profiler', 'ResourceMgr', 'ProcessMgr']

# Book-keeping record for one running task: the RunTaskCommand, the
# _ProcessThread executing it, and the cpu slots reserved for it.
_RunningProc = namedtuple("RunningProc", "processCmd pthread cpus")
#
# _ResourceManager
#
class _ResourceManager(object):
    """
    Tracks the bookable resources on this machine.

    Currently that is just CPU core slots (memory and GPUs may be added
    in the future). Slots are handed out with checkout() and returned
    with checkin(); all access to the pool is serialized by a lock.
    """

    def __init__(self):
        # Pool of free slot ids, one per physical core reported by Profiler.
        self.__slots = deque(xrange(Profiler.physicalCpus))
        self.__slots_all = tuple(self.__slots)
        self.__lock = threading.RLock()
        logger.info("Intializing resource manager with %d physical cores.",
            Profiler.physicalCpus)

    def checkout(self, numCores):
        """
        Reserve numCores slots and return their ids as a list.

        Raises ttypes.RndException when numCores < 1 or when there are
        not enough free slots to satisfy the request.
        """
        if numCores < 1:
            raise ttypes.RndException(1, "Cannot reserve 0 slots")
        with self.__lock:
            available = self.__slots
            logger.info("Open slots: %s", list(available))
            if numCores > len(available):
                raise ttypes.RndException(1, "No more open slots")
            taken = []
            while len(taken) < numCores:
                taken.append(available.pop())
            logger.info("Checked out CPUS: %s", taken)
        return taken

    def checkin(self, cores):
        """Return previously checked-out slot ids to the free pool."""
        with self.__lock:
            self.__slots.extend(cores)
            avail = len(self.__slots)
            total = Profiler.physicalCpus
            logger.info("Checked in CPUS: %s; Now available: %d / %d", cores, avail, total)

    def getSlots(self):
        """Return the ids of every physical slot on the machine."""
        return list(xrange(Profiler.physicalCpus))

    def getOpenSlots(self):
        """Return the ids of the slots that are currently free."""
        with self.__lock:
            return list(self.__slots)
#
# _ProcessManager
#
class _ProcessManager(object):
    """
    The ProcessManager keeps track of the running tasks. Each task
    is executed in a separate ProcessThread.

    A daemon sampler thread periodically refreshes per-task resource
    metrics, and a self-rescheduling Timer pings the plow server with
    the current task list and machine state.
    """

    # Seconds between metric-sampling passes over the running tasks.
    SAMPLE_INTERVAL_SEC = 10

    def __init__(self):
        # procId -> _RunningProc(processCmd, pthread, cpus)
        self.__threads = {}
        self.__lock = threading.RLock()
        self.__timer = None                    # the repeating ping Timer
        self.__isReboot = threading.Event()    # reboot-when-idle requested
        self.__isShutdown = threading.Event()  # daemon is shutting down
        self.__sampler = threading.Thread(target=self._processSampler)
        self.__sampler.daemon = True
        self.__sampler.start()
        # The first ping also schedules every subsequent ping.
        self.sendPing(True)

    @property
    def isReboot(self):
        # True once reboot() has been requested and not yet cleared.
        return self.__isReboot.is_set()

    def runProcess(self, processCmd, wait=-1):
        """
        Takes a RunTaskCommand object, reserves resources,
        and starts the process. Default mode is to return None

        Optionally, a wait time may be specified in float
        seconds, to wait until the job has fully started,
        before returning. If wait > -1, return a RunningTask object
        """
        # Reserve core slots first; raises if none are available.
        cpus = ResourceMgr.checkout(processCmd.cores)
        pthread = _ProcessThread(processCmd, cpus)
        with self.__lock:
            self.__threads[processCmd.procId] = _RunningProc(processCmd, pthread, cpus)
        pthread.start()
        logger.info("process thread started")
        if wait == -1:
            return
        task = pthread.getRunningTask(wait)
        return task

    def processFinished(self, processResult, cpus=None):
        """
        Callback for when a process has finished running.
        Receives the RunTaskResult object.
        Deallocates the resources.
        """
        with self.__lock:
            # Fall back to the recorded reservation when the caller did
            # not pass the cpu list explicitly.
            if cpus is None:
                cpus = self.__threads[processResult.procId].cpus
            ResourceMgr.checkin(cpus)
            try:
                del self.__threads[processResult.procId]
            except Exception, e:
                logger.warn("Process %s not found: %s", processResult.procId, e)

    def sendPing(self, isReboot=False, repeat=True):
        """
        Ping into the server with current task and resource states.
        If repeat is True, schedules another ping at an interval defined
        by the rndaemon config.
        """
        if self.__isShutdown.is_set():
            repeat = False

        # TODO: What is the purpose of the isReboot flag?
        # Using the internal flag to determine if we are in a
        # reboot state.
        # NOTE(review): the isReboot parameter is unconditionally
        # overwritten here, so callers cannot force it.
        isReboot = self.__isReboot.is_set()

        tasks = self.getRunningTasks()
        Profiler.sendPing(tasks, isReboot)

        # TODO: Maybe there needs to be a seperate thread for this check
        # but for now it is part of the ping loop.
        if isReboot and not tasks:
            # Idle and scheduled for reboot: attempt the reboot now.
            logger.info("Task queue is empty and daemon is scheduled for reboot")
            try:
                Profiler.reboot()
            except ttypes.RndException, e:
                # on next loop, the server will see that the system
                # is no longer in isReboot state
                logger.warn(e.why)
                self.__isReboot.clear()
            else:
                # just in case
                return

        if repeat:
            # Re-arm the ping timer; each ping schedules the next one.
            self.__timer = threading.Timer(conf.NETWORK_PING_INTERVAL, self.sendPing)
            self.__timer.daemon = True
            self.__timer.start()

    def killRunningTask(self, procId, reason):
        """
        Kill a currently running task by its procId.
        Raises ttypes.RndException if the task is unknown or if any of
        its pids survive the kill attempt.
        """
        logger.info("kill requested for procId %s, %s", procId, reason)

        with self.__lock:
            try:
                pthread = self.__threads[procId].pthread
            except KeyError:
                err = "Process %s not found" % procId
                logger.warn(err)
                # TODO: Raise a proper exception type? or
                # fail quietly?
                raise ttypes.RndException(1, err)

        # Kill outside the lock: killProcess may block for several seconds.
        _, not_killed = pthread.killProcess(reason=reason)

        if not_killed:
            err = "Failed to kill the following pids for prodId %s: %s" % \
                (procId, ','.join(not_killed))
            logger.warn(err)
            raise ttypes.RndException(1, err)

    def getRunningTasks(self):
        """ Get a list of all running task objects """
        with self.__lock:
            tasks = [t.pthread.getRunningTask() for t in self.__threads.itervalues()]
        return tasks

    def shutdown(self):
        """
        Gracefully shut down all running tasks so they can report back in
        """
        logger.debug("Shutdown requested for process manager.")
        self.__isShutdown.set()

        with self.__lock:
            threads = [proc.pthread for proc in self.__threads.itervalues()]
            for t in threads:
                t.shutdown()

        logger.debug("Asked %d tasks to quit and report. Waiting for them to complete", len(threads))
        # Give each task up to 10 seconds to finish reporting.
        for t in threads:
            if not t.wait(10):
                logger.warn("Thread failed to close down after waiting 10 seconds: %r", t)

        self.__threads.clear()
        del threads
        logger.debug("Done waiting on task shutdown")

    def reboot(self, now=False):
        """
        reboot (bool now=False)

        Reboot the system as soon as it becomes idle. That is,
        when no tasks are running.

        If now == True, reboot immediately, regardless of any
        in-progress render tasks.
        """
        # TODO: For now, assuming that even if they aren't root,
        # that they may have permission to reboot. This means a
        # reboot(now=False) will not raise an exception to the caller.
        #
        # if os.geteuid() != 0:
        #     err = "rndaemon not running as user with permission to reboot system"
        #     raise ttypes.RndException(1, err)

        self.__isReboot.set()

        if now:
            logger.info("*SYSTEM GOING DOWN FOR IMMEDIATE REBOOT*")

            # stop all of the tasks
            self.shutdown()

            with self.__lock:
                if self.__timer:
                    self.__timer.cancel()

                # The reboot could happen from the ping if the task
                # queue is empty.
                self.sendPing(repeat=False)

                # Otherwise, the reboot will happen here, regardless
                # of whether there are active tasks running.
                Profiler.reboot()
        else:
            logger.info("*Reboot scheduled at next idle event*")

    def _processSampler(self):
        """
        Loop that updates metrics on every running process
        at intervals.
        """
        while not self.__isShutdown.is_set():
            # Snapshot the thread list under the lock, then sample
            # outside it so slow psutil calls don't block the manager.
            with self.__lock:
                pthreads = [t.pthread for t in self.__threads.itervalues()]
            for pthread in pthreads:
                pthread.updateMetrics()
            time.sleep(self.SAMPLE_INTERVAL_SEC)
#
# RunningTask
#
class RunningTask(ttypes.RunningTask):
    """
    ttypes.RunningTask subclass whose __repr__ elides a long lastLog
    value, so task reprs stay readable in the daemon logs.
    """
    def __repr__(self):
        fields = self.__dict__.copy()
        tail = fields.get('lastLog')
        # Trim an over-long last log line down to 47 chars plus an ellipsis.
        if tail and len(tail) > 50:
            fields['lastLog'] = '%s...' % tail[:47]
        pairs = ('%s=%r' % (k, v) for k, v in fields.iteritems())
        return '%s(%s)' % (self.__class__.__name__, ', '.join(pairs))
#
# _ProcessThread
#
class _ProcessThread(threading.Thread):
"""
The _ProcessThread wraps a running task.
"""
_DO_DISK_IO = hasattr(psutil.Process, "get_io_counters")
def __init__(self, rtc, cpus=None):
threading.Thread.__init__(self)
self.daemon = True
self.__logfp = None
self.__cpus = cpus or set()
self.__rtc = rtc
self.__pptr = None
self.__logfp = None
self.__pid = -1
self.__killThread = None
self.__wasKilled = threading.Event()
self.__hasStarted = threading.Event()
self.__isShutdown = threading.Event()
self.__progress = 0.0
self.__lastLog = ""
self.__killReason = ""
self.__metrics = {
'rssMb': 0,
'maxRssMb': 0,
'cpuPercent': 0,
'diskIO': ttypes.DiskIO(-1,-1,-1,-1),
}
def __repr__(self):
return "<%s: (procId: %s, pid: %d)>" % (
self.__class__.__name__,
self.__rtc.procId,
self.__pid)
def shutdown(self):
"""
Instruct the process to shutdown gracefully.
Returns the same output as killProcess()
"""
logger.debug("Shutdown request received. Killing %r", self)
self.__isShutdown.set()
self.killProcess(block=False, reason="rndaemon shutdown request received")
def wait(self, timeout=None):
"""
Waits for the process to finish.
By default, blocks indefinitely. Specify a
timeout in float seconds to wait. If the timeout
value is exceeded, return False
Returns True if the task ended.
"""
self.join(timeout)
return not self.isAlive()
def getRunningTask(self, wait=-1):
"""
getRunningTask(float wait=-1) -> RunningTask
Returns a RunningTask instance representing
the current state of the task.
If wait > 0, then wait that many seconds for
the process to start. This is useful if you are
creating the process and then checking its running
task right away. Some information may not be
available until after the thread has gotten the
process running.
"""
if wait > 0:
self.__hasStarted.wait(wait)
rt = RunningTask()
rtc = self.__rtc
rt.jobId = rtc.jobId
rt.procId = rtc.procId
rt.taskId = rtc.taskId
rt.layerId = rtc.layerId
rt.pid = self.__pid
metrics = self.__metrics
rt.rssMb = metrics['rssMb']
rt.cpuPercent = metrics['cpuPercent']
if self._DO_DISK_IO:
rt.diskIO = metrics['diskIO']
rt.progress = self.__progress
rt.lastLog = self.__lastLog or None
return rt
def run(self):
"""
Run method called implicitely by start()
Fires up the process to do the actual task.
Logs output, and records resource metrics.
"""
rtc = self.__rtc
retcode = 1
try:
uid = self.__rtc.uid
cpus = self.__cpus
logger.info("Opening log file: %s", rtc.logFile)
self.__logfp = utils.ProcessLog(self.__rtc.logFile, uid=uid, buffering=1)
self.__logfp.writeLogHeader(rtc)
env = os.environ.copy()
env.update(rtc.env)
parser = None
if rtc.taskTypes:
parser = utils.ProcessLogParser.fromTaskTypes(rtc.taskTypes)
if not parser.progress:
parser = None
opts = {
'stdout': subprocess.PIPE,
'stderr': subprocess.STDOUT,
'uid': uid,
'cpus': cpus,
'env': env,
}
cmd, opts = Profiler.getSubprocessOpts(rtc.command, **opts)
logger.info("Running command: %s", rtc.command)
self.__logfp.write("[%s] Running process" % time.strftime("%Y-%m-%d %H:%M:%S"))
self.__logfp.flush()
p = subprocess.Popen(cmd, **opts)
self.__pptr = p
self.__pid = p.pid
self.__hasStarted.set()
logger.info("PID: %d", p.pid)
self.updateMetrics()
writeLog = self.__logfp.write
r_pipe = self.__pptr.stdout
for line in iter(r_pipe.readline, ""):
writeLog(line)
self.__lastLog = line
if parser:
prog = parser.parseProgress(line)
if prog is not None:
self.__progress = prog
if self.__isShutdown.is_set():
break
self.__logfp.write("[%s] Process finished" % time.strftime("%Y-%m-%d %H:%M:%S"))
self.__logfp.flush()
try:
retcode = p.wait()
except OSError, e:
if e.errno != errno.ECHILD:
if not self.__isShutdown.is_set():
raise
r_pipe.close()
logger.debug("Return code: %s", retcode)
except Exception, e:
if self.__isShutdown.is_set():
logger.debug("Thread detected shutdown request. Leaving gracefully.")
else:
logger.warn("Failed to execute command: %s", e)
logger.debug(traceback.format_exc())
finally:
self.__completed(retcode)
def updateMetrics(self):
"""
updateMetrics()
Resample information about the currently running
process tree, and update member attributes.
i.e. rss
"""
# logger.debug("updateMetrics(): %r", self)
rss_bytes = 0
cpu_perc = 0
do_disk_io = self._DO_DISK_IO
if do_disk_io:
disk_io = [0,0,0,0]
try:
root_pid = self.__pid
p = psutil.Process(root_pid)
for proc in chain([p], p.get_children(True)):
this_pid = proc.pid
if proc.status == psutil.STATUS_ZOMBIE:
continue
try:
rss_bytes += proc.get_memory_info().rss
except psutil.Error, e:
logger.debug("Error while getting memory data for pid %r: %s", this_pid, e)
try:
cpu_perc += proc.get_cpu_percent(None)
except psutil.Error, e:
logger.debug("Error while getting cpu data for pid %r: %s", this_pid, e)
if do_disk_io:
try:
counters = proc.get_io_counters()
except psutil.Error, e:
logger.debug("Error while getting disk io data for pid %r: %s", this_pid, e)
for i, val in enumerate(counters):
disk_io[i] += val
except psutil.NoSuchProcess, e:
return
cpu_perc_int = int(round(cpu_perc))
rssMb = rss_bytes / 1024 / 1024
metrics = self.__metrics
maxRss = max(rssMb, metrics['maxRssMb'])
disk_io_t = ttypes.DiskIO(*disk_io) if do_disk_io else None
metrics.update({
'rssMb': rssMb,
'maxRssMb': maxRss,
'cpuPercent': cpu_perc_int,
'diskIO': disk_io_t,
})
logger.debug("metrics: %r", metrics)
def killProcess(self, block=True, reason=''):
"""
killProcess(bool block=True, reason='') -> (list killed_pids, list not_killed)
Stop the entire process tree
Returns a tuple of two lists. The first list contains
the pids from the process tree that were successfully
stopped. The second list contains pids that were not
able to be stopped successfully.
By default the call blocks until the attempt to kill
has completed. Set block=False to issue the kill async.
If the reason for killing the process is passes as a string,
it will be added to the log footer.
"""
self.__killReason = reason
if block:
return self.__killProcess()
# guards against repeat calls to kill while one async
# call is already running
if self.__killThread and self.__killThread.isAlive():
return
t = threading.Thread(target=self.__killProcess)
t.start()
self.__killThread = t
return
def __killProcess(self):
pid = self.__pid
if pid == -1:
return
try:
p = psutil.Process(pid)
except psutil.NoSuchProcess:
return
children = p.get_children(recursive=True)
self.__wasKilled.set()
# kill the top parent
self.__killOneProcess(p)
# make sure each process in the tree is really dead
killed = []
not_killed = []
for child in children:
success = self.__killOneProcess(child)
if success:
killed.append(child.pid)
else:
not_killed.append(child.pid)
return killed, not_killed
def __killOneProcess(self, p):
"""
__killOneProcess(psutil.Process p) -> bool
Try and nicely stop a Process first, then kill it.
Return True if process was killed.
"""
try:
try:
p.wait(0.001)
except psutil.TimeoutExpired:
pass
if not p.is_running():
return True
pid = p.pid
logger.info("Asking nicely for pid %d (%s) to stop", pid, p.name)
p.terminate()
try:
p.wait(5)
except psutil.TimeoutExpired:
pass
if not p.is_running():
return True
logger.info("Killing pid %d (%s)", pid, p.name)
p.kill()
try:
p.wait(1)
except psutil.TimeoutExpired:
pass
if p.is_running():
logger.warn("Failed to properly kill pid %d (taskId: %s)", pid, self.__rtc.taskId)
return False
except psutil.NoSuchProcess:
pass
return True
def __completed(self, retcode):
logger.debug("Process completed: %r, (IsShutdown: %r)", self, self.__isShutdown.is_set())
result = ttypes.RunTaskResult()
result.maxRssMb = self.__metrics['maxRssMb']
result.procId = self.__rtc.procId
result.taskId = self.__rtc.taskId
result.jobId = self.__rtc.jobId
if self.__isShutdown.is_set():
result.exitStatus = 1
result.exitSignal = 86
logger.info("Task closing gracefully from shutdown request")
elif self.__wasKilled.is_set():
result.exitStatus = 1
result.exitSignal = retcode if retcode < 0 else -9
elif retcode < 0:
result.exitStatus = 1
result.exitSignal = retcode
else:
result.exitStatus = retcode
result.exitSignal = 0
logger.info("Process result %s", result)
if not conf.NETWORK_DISABLED:
while True:
try:
service, transport = client.getPlowConnection()
service.taskComplete(result)
transport.close()
break
except Exception, e:
logger.warn("Error talking to plow server, %s, sleeping for 30 seconds", e)
time.sleep(30)
ProcessMgr.processFinished(result, self.__cpus)
if self.__logfp is not None:
attrs = {
'DiskIO': self.__metrics['diskIO'],
'Cpus': len(self.__cpus),
}
if self.__killReason:
attrs['Reason Killed'] = self.__killReason
self.__logfp.writeLogFooterAndClose(result, attrs)
self.__logfp = None
#
# Singleton Instances
#
# Shared singleton instances used throughout the rndaemon. Creation order
# matters: _ResourceManager reads Profiler.physicalCpus in its __init__,
# and _ProcessManager's ping/sampler machinery uses Profiler and ResourceMgr.
Profiler = _SystemProfiler()
ResourceMgr = _ResourceManager()
ProcessMgr = _ProcessManager()
| apache-2.0 |
alex/sqlalchemy | test/orm/test_scoping.py | 33 | 3119 | from sqlalchemy.testing import assert_raises, assert_raises_message
import sqlalchemy as sa
from sqlalchemy import testing
from sqlalchemy.orm import scoped_session
from sqlalchemy import Integer, String, ForeignKey
from sqlalchemy.testing.schema import Table, Column
from sqlalchemy.orm import mapper, relationship, query
from sqlalchemy.testing import eq_
from sqlalchemy.testing import fixtures
class _ScopedTest(fixtures.MappedTest):
    """Adds another lookup bucket to emulate Session globals."""

    # Mappers are configured a single time for the whole class.
    run_setup_mappers = 'once'

    @classmethod
    def setup_class(cls):
        # NOTE(review): `_base` is not imported anywhere in this module;
        # if this class is ever used, this line raises NameError. Confirm
        # the intended import (historically a test-lib attribute-dict).
        cls.scoping = _base.adict()
        super(_ScopedTest, cls).setup_class()

    @classmethod
    def teardown_class(cls):
        cls.scoping.clear()
        super(_ScopedTest, cls).teardown_class()
class ScopedSessionTest(fixtures.MappedTest):
    """Tests for scoped_session(): the query_property() accessor and the
    errors raised when configuring a scope that is already in use."""

    @classmethod
    def define_tables(cls, metadata):
        # Two tables with a simple one-to-many FK relationship
        # (table2.someid -> table1.id).
        Table('table1', metadata,
            Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
            Column('data', String(30)))
        Table('table2', metadata,
            Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
            Column('someid', None, ForeignKey('table1.id')))

    def test_basic(self):
        table2, table1 = self.tables.table2, self.tables.table1

        Session = scoped_session(sa.orm.sessionmaker())

        class CustomQuery(query.Query):
            pass

        class SomeObject(fixtures.ComparableEntity):
            query = Session.query_property()
        class SomeOtherObject(fixtures.ComparableEntity):
            query = Session.query_property()
            # Same property but bound to a custom Query subclass.
            custom_query = Session.query_property(query_cls=CustomQuery)

        mapper(SomeObject, table1, properties={
            'options':relationship(SomeOtherObject)})
        mapper(SomeOtherObject, table2)

        # Persist one parent with one child, then drop the scoped session.
        s = SomeObject(id=1, data="hello")
        sso = SomeOtherObject()
        s.options.append(sso)
        Session.add(s)
        Session.commit()
        Session.refresh(sso)
        Session.remove()

        # The same data is reachable via Session.query() and via the
        # class-level query property.
        eq_(SomeObject(id=1, data="hello", options=[SomeOtherObject(someid=1)]),
            Session.query(SomeObject).one())
        eq_(SomeObject(id=1, data="hello", options=[SomeOtherObject(someid=1)]),
            SomeObject.query.one())
        eq_(SomeOtherObject(someid=1),
            SomeOtherObject.query.filter(
                SomeOtherObject.someid == sso.someid).one())
        # query_cls only affects the property it was passed to.
        assert isinstance(SomeOtherObject.query, query.Query)
        assert not isinstance(SomeOtherObject.query, CustomQuery)
        assert isinstance(SomeOtherObject.custom_query, query.Query)

    def test_config_errors(self):
        Session = scoped_session(sa.orm.sessionmaker())

        # Once a session exists in the scope, re-binding must fail loudly
        # and configure() must warn.
        s = Session()
        assert_raises_message(
            sa.exc.InvalidRequestError,
            "Scoped session is already present",
            Session, bind=testing.db
        )

        assert_raises_message(
            sa.exc.SAWarning,
            "At least one scoped session is already present. ",
            Session.configure, bind=testing.db
        )
| mit |
Semersterprojekt/Server | node_modules/node-gyp/gyp/pylib/gyp/mac_tool.py | 1569 | 23354 | #!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Utility functions to perform Xcode-style build steps.
These functions are executed via gyp-mac-tool when using the Makefile generator.
"""
import fcntl
import fnmatch
import glob
import json
import os
import plistlib
import re
import shutil
import string
import subprocess
import sys
import tempfile
def main(args):
  """Dispatch the tool command named in args via MacTool, exiting the
  interpreter with the handler's return code when one is produced."""
  code = MacTool().Dispatch(args)
  if code is not None:
    sys.exit(code)
class MacTool(object):
"""This class performs all the Mac tooling steps. The methods can either be
executed directly, or dispatched from an argument list."""
def Dispatch(self, args):
  """Look up and invoke the Exec* handler named after args[0].

  The remaining arguments are forwarded to the handler; its return
  value (an exit code, or None) is passed back to the caller.
  """
  if not args:
    raise Exception("Not enough arguments")
  handler_name = "Exec%s" % self._CommandifyName(args[0])
  handler = getattr(self, handler_name)
  return handler(*args[1:])
def _CommandifyName(self, name_string):
  """Turn a hyphenated tool name (e.g. 'copy-info-plist') into the
  CamelCase handler suffix ('CopyInfoPlist')."""
  camel = name_string.title()
  return camel.replace('-', '')
def ExecCopyBundleResource(self, source, dest, convert_to_binary):
  """Copy one resource into the bundle's Resources directory, compiling
  formats (XIB, storyboard, .strings) that need processing on the way."""
  ext = os.path.splitext(source)[1].lower()
  if os.path.isdir(source):
    # Directory resource: replace any existing copy wholesale.
    # (copytree preserves file attributes such as mtime, unlike the
    # single-file branch below.)
    if os.path.exists(dest):
      shutil.rmtree(dest)
    shutil.copytree(source, dest)
  elif ext in ('.xib', '.storyboard'):
    # Interface Builder documents are compiled with ibtool.
    return self._CopyXIBFile(source, dest)
  elif ext == '.strings':
    self._CopyStringsFile(source, dest, convert_to_binary)
  else:
    shutil.copy(source, dest)
def _CopyXIBFile(self, source, dest):
  """Compiles a XIB file with ibtool into a binary plist in the bundle.

  Returns ibtool's exit code. Uninteresting ibtool noise (the 'is
  clipping its content' notes) is filtered out of the forwarded output.
  """
  # ibtool sometimes crashes with relative paths. See crbug.com/314728.
  base = os.path.dirname(os.path.realpath(__file__))
  # NOTE(review): os.path.relpath() returns a non-empty (truthy) string
  # for absolute paths too, so these branches almost always run; this is
  # harmless because os.path.join discards `base` when the second
  # argument is absolute — presumably the intent was os.path.isabs().
  if os.path.relpath(source):
    source = os.path.join(base, source)
  if os.path.relpath(dest):
    dest = os.path.join(base, dest)
  args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices',
      '--output-format', 'human-readable-text', '--compile', dest, source]
  # Section headers look like '/* com.apple... */'; only emit one when a
  # line we actually print follows it.
  ibtool_section_re = re.compile(r'/\*.*\*/')
  ibtool_re = re.compile(r'.*note:.*is clipping its content')
  ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE)
  current_section_header = None
  for line in ibtoolout.stdout:
    if ibtool_section_re.match(line):
      current_section_header = line
    elif not ibtool_re.match(line):
      if current_section_header:
        sys.stdout.write(current_section_header)
        current_section_header = None
      sys.stdout.write(line)
  return ibtoolout.returncode
def _ConvertToBinary(self, dest):
  """Convert the plist at `dest` to binary1 format in place via plutil."""
  cmd = ['xcrun', 'plutil', '-convert', 'binary1', '-o', dest, dest]
  subprocess.check_call(cmd)
def _CopyStringsFile(self, source, dest, convert_to_binary):
  """Copies a .strings file using iconv to reconvert the input into UTF-16.

  Invalid plists are validated via CoreFoundation (macOS only) and
  silently skipped, matching Xcode's builtin-copyStrings behavior.
  convert_to_binary is the *string* 'True' when a binary plist is wanted.
  """
  input_code = self._DetectInputEncoding(source) or "UTF-8"

  # Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call
  # CFPropertyListCreateFromXMLData() behind the scenes; at least it prints
  #     CFPropertyListCreateFromXMLData(): Old-style plist parser: missing
  #     semicolon in dictionary.
  # on invalid files. Do the same kind of validation.
  import CoreFoundation
  s = open(source, 'rb').read()
  d = CoreFoundation.CFDataCreate(None, s, len(s))
  _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None)
  if error:
    # Invalid file: skip it, as Xcode does; no output is written.
    return

  fp = open(dest, 'wb')
  fp.write(s.decode(input_code).encode('UTF-16'))
  fp.close()

  if convert_to_binary == 'True':
    self._ConvertToBinary(dest)
def _DetectInputEncoding(self, file_name):
  """Reads the first few bytes from file_name and tries to guess the text
  encoding from a BOM. Returns None as a guess if it can't detect it."""
  fp = open(file_name, 'rb')
  try:
    header = fp.read(3)
  except Exception:
    # Bug fix: this was `except e:`, which references an undefined name
    # and would itself raise NameError instead of handling a read error.
    fp.close()
    return None
  fp.close()
  # Match against the UTF-16 BE/LE and UTF-8 byte-order marks. Byte
  # literals behave identically to the original str literals on
  # Python 2 and additionally work on Python 3.
  if header.startswith(b"\xFE\xFF"):
    return "UTF-16"
  elif header.startswith(b"\xFF\xFE"):
    return "UTF-16"
  elif header.startswith(b"\xEF\xBB\xBF"):
    return "UTF-8"
  else:
    return None
def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys):
  """Copies the |source| Info.plist to the destination directory |dest|,
  expanding ${VAR}-style environment references, injecting extra keys
  (JSON-encoded in keys[0]), and writing the companion PkgInfo file."""
  # Read the source Info.plist into memory.
  fd = open(source, 'r')
  lines = fd.read()
  fd.close()

  # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild).
  plist = plistlib.readPlistFromString(lines)
  if keys:
    plist = dict(plist.items() + json.loads(keys[0]).items())
  lines = plistlib.writePlistToString(plist)

  # Go through all the environment variables and replace them as variables in
  # the file.
  IDENT_RE = re.compile(r'[/\s]')
  for key in os.environ:
    if key.startswith('_'):
      continue
    evar = '${%s}' % key
    evalue = os.environ[key]
    lines = string.replace(lines, evar, evalue)

    # Xcode supports various suffices on environment variables, which are
    # all undocumented. :rfc1034identifier is used in the standard project
    # template these days, and :identifier was used earlier. They are used to
    # convert non-url characters into things that look like valid urls --
    # except that the replacement character for :identifier, '_' isn't valid
    # in a URL either -- oops, hence :rfc1034identifier was born.
    evar = '${%s:identifier}' % key
    evalue = IDENT_RE.sub('_', os.environ[key])
    lines = string.replace(lines, evar, evalue)

    evar = '${%s:rfc1034identifier}' % key
    evalue = IDENT_RE.sub('-', os.environ[key])
    lines = string.replace(lines, evar, evalue)

  # Remove any keys with values that haven't been replaced.
  # A leftover '<string>${' value line is dropped together with the
  # preceding <key> line.
  lines = lines.split('\n')
  for i in range(len(lines)):
    if lines[i].strip().startswith("<string>${"):
      lines[i] = None
      lines[i - 1] = None
  lines = '\n'.join(filter(lambda x: x is not None, lines))

  # Write out the file with variables replaced.
  fd = open(dest, 'w')
  fd.write(lines)
  fd.close()

  # Now write out PkgInfo file now that the Info.plist file has been
  # "compiled".
  self._WritePkgInfo(dest)

  if convert_to_binary == 'True':
    self._ConvertToBinary(dest)
def _WritePkgInfo(self, info_plist):
  """Write the PkgInfo file next to info_plist, derived from its data."""
  plist = plistlib.readPlist(info_plist)
  if not plist:
    return

  # PkgInfo is only generated for executable (APPL) bundle types.
  package_type = plist['CFBundlePackageType']
  if package_type != 'APPL':
    return

  # PkgInfo holds eight characters: the four-character bundle type
  # followed by the four-character bundle signature. A missing or
  # wrong-length signature is replaced by four '?' characters.
  signature = plist.get('CFBundleSignature', '????')
  if len(signature) != 4:
    signature = '?' * 4

  pkginfo_path = os.path.join(os.path.dirname(info_plist), 'PkgInfo')
  with open(pkginfo_path, 'w') as fp:
    fp.write('%s%s' % (package_type, signature))
def ExecFlock(self, lockfile, *cmd_list):
  """Hold an exclusive flock on lockfile while running cmd_list,
  emulating the most basic behavior of Linux's flock(1)."""
  # Errors simply propagate as exceptions.
  flags = os.O_RDONLY | os.O_NOCTTY | os.O_CREAT
  fd = os.open(lockfile, flags, 0o666)
  fcntl.flock(fd, fcntl.LOCK_EX)
  return subprocess.call(cmd_list)
def ExecFilterLibtool(self, *cmd_list):
  """Calls libtool and filters out '/path/to/libtool: file: foo.o has no
  symbols' (and the empty-table-of-contents warning). Returns libtool's
  exit code."""
  libtool_re = re.compile(r'^.*libtool: file: .* has no symbols$')
  libtool_re5 = re.compile(
      r'^.*libtool: warning for library: ' +
      r'.* the table of contents is empty ' +
      r'\(no object file members in the library define global symbols\)$')
  env = os.environ.copy()
  # Ref:
  # http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c
  # The problem with this flag is that it resets the file mtime on the file to
  # epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone.
  env['ZERO_AR_DATE'] = '1'
  libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env)
  _, err = libtoolout.communicate()
  # Forward only the stderr lines that don't match the noise patterns.
  for line in err.splitlines():
    if not libtool_re.match(line) and not libtool_re5.match(line):
      print >>sys.stderr, line
  # Unconditionally touch the output .a file on the command line if present
  # and the command succeeded. A bit hacky.
  # (Compensates for ZERO_AR_DATE zeroing the archive's mtime above.)
  if not libtoolout.returncode:
    for i in range(len(cmd_list) - 1):
      if cmd_list[i] == "-o" and cmd_list[i+1].endswith('.a'):
        os.utime(cmd_list[i+1], None)
        break
  return libtoolout.returncode
def ExecPackageFramework(self, framework, version):
  """Set up the Versions/Current and root-level symlinks inside a
  Something.framework bundle for the given Current version."""
  # The binary's name is the part of the bundle name before ".framework".
  binary = os.path.basename(framework).split('.')[0]

  CURRENT = 'Current'
  RESOURCES = 'Resources'
  VERSIONS = 'Versions'

  binary_path = os.path.join(framework, VERSIONS, version, binary)
  if not os.path.exists(binary_path):
    # Binary-less frameworks don't seem to contain symlinks (see e.g.
    # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle).
    return

  # The symlinks are relative, so create them from inside the framework
  # directory, then restore the previous working directory.
  previous_dir = os.getcwd()
  os.chdir(framework)

  # Point Versions/Current at the requested version, then refresh the
  # top-level binary and Resources links to go through it.
  self._Relink(version, os.path.join(VERSIONS, CURRENT))
  self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary)
  self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES)

  os.chdir(previous_dir)
def _Relink(self, dest, link):
  """Creates a symlink to |dest| named |link|. If |link| already exists,
  it is overwritten."""
  # lexists (not exists) so a dangling symlink is also replaced.
  if os.path.lexists(link):
    os.remove(link)
  os.symlink(dest, link)
def ExecCompileXcassets(self, keys, *inputs):
  """Compiles multiple .xcassets files into a single .car file.

  This invokes 'actool' to compile all the inputs .xcassets files. The
  |keys| arguments is a json-encoded dictionary of extra arguments to
  pass to 'actool' when the asset catalogs contains an application icon
  or a launch image.

  Note that 'actool' does not create the Assets.car file if the asset
  catalogs does not contains imageset.
  """
  command_line = [
      'xcrun', 'actool', '--output-format', 'human-readable-text',
      '--compress-pngs', '--notices', '--warnings', '--errors',
  ]
  # iOS vs macOS is inferred from the deployment-target env var that
  # Xcode sets; each platform gets its own target-device flags and
  # output folder.
  is_iphone_target = 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ
  if is_iphone_target:
    platform = os.environ['CONFIGURATION'].split('-')[-1]
    if platform not in ('iphoneos', 'iphonesimulator'):
      platform = 'iphonesimulator'
    command_line.extend([
        '--platform', platform, '--target-device', 'iphone',
        '--target-device', 'ipad', '--minimum-deployment-target',
        os.environ['IPHONEOS_DEPLOYMENT_TARGET'], '--compile',
        os.path.abspath(os.environ['CONTENTS_FOLDER_PATH']),
    ])
  else:
    command_line.extend([
        '--platform', 'macosx', '--target-device', 'mac',
        '--minimum-deployment-target', os.environ['MACOSX_DEPLOYMENT_TARGET'],
        '--compile',
        os.path.abspath(os.environ['UNLOCALIZED_RESOURCES_FOLDER_PATH']),
    ])
  if keys:
    # Each extra key becomes an actool flag; bools are bare flags,
    # lists repeat the flag once per value.
    keys = json.loads(keys)
    for key, value in keys.iteritems():
      arg_name = '--' + key
      if isinstance(value, bool):
        if value:
          command_line.append(arg_name)
      elif isinstance(value, list):
        for v in value:
          command_line.append(arg_name)
          command_line.append(str(v))
      else:
        command_line.append(arg_name)
        command_line.append(str(value))
  # Note: actool crashes if inputs path are relative, so use os.path.abspath
  # to get absolute path name for inputs.
  command_line.extend(map(os.path.abspath, inputs))
  subprocess.check_call(command_line)
def ExecMergeInfoPlist(self, output, *inputs):
  """Combine the given .plist files into a single plist written to output."""
  merged = {}
  for source in inputs:
    # _LoadPlistMaybeBinary handles both XML and binary plists.
    self._MergePlist(merged, self._LoadPlistMaybeBinary(source))
  plistlib.writePlist(merged, output)
def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning):
  """Code sign a bundle.

  This function tries to code sign an iOS bundle, following the same
  algorithm as Xcode:
    1. copy ResourceRules.plist from the user or the SDK into the bundle,
    2. pick the provisioning profile that best match the bundle identifier,
       and copy it into the bundle as embedded.mobileprovision,
    3. copy Entitlements.plist from user or SDK next to the bundle,
    4. code sign the bundle.
  """
  resource_rules_path = self._InstallResourceRules(resource_rules)
  substitutions, overrides = self._InstallProvisioningProfile(
      provisioning, self._GetCFBundleIdentifier())
  entitlements_path = self._InstallEntitlements(
      entitlements, substitutions, overrides)
  # Sign the built product in place; --force replaces any prior signature.
  subprocess.check_call([
      'codesign', '--force', '--sign', key, '--resource-rules',
      resource_rules_path, '--entitlements', entitlements_path,
      os.path.join(
          os.environ['TARGET_BUILD_DIR'],
          os.environ['FULL_PRODUCT_NAME'])])
def _InstallResourceRules(self, resource_rules):
  """Installs ResourceRules.plist from user or SDK into the bundle.

  Args:
    resource_rules: string, optional, path to the ResourceRules.plist
        file to use; falls back to "${SDKROOT}/ResourceRules.plist"
        when empty.
  Returns:
    Path to the copy of ResourceRules.plist inside the bundle.
  """
  # Fall back to the SDK's default rules file when none was supplied.
  source_path = resource_rules or os.path.join(
      os.environ['SDKROOT'], 'ResourceRules.plist')
  target_path = os.path.join(
      os.environ['BUILT_PRODUCTS_DIR'],
      os.environ['CONTENTS_FOLDER_PATH'],
      'ResourceRules.plist')
  shutil.copy2(source_path, target_path)
  return target_path
  def _InstallProvisioningProfile(self, profile, bundle_identifier):
    """Installs embedded.mobileprovision into the bundle.

    Args:
      profile: string, optional, short name of the .mobileprovision file
        to use, if empty or the file is missing, the best file installed
        will be used
      bundle_identifier: string, value of CFBundleIdentifier from Info.plist
    Returns:
      A tuple containing two dictionary: variables substitutions and values
      to overrides when generating the entitlements file.
    """
    # Pick the best-matching installed profile, then copy it into the
    # bundle under the canonical name used by Xcode.
    source_path, provisioning_data, team_id = self._FindProvisioningProfile(
        profile, bundle_identifier)
    target_path = os.path.join(
        os.environ['BUILT_PRODUCTS_DIR'],
        os.environ['CONTENTS_FOLDER_PATH'],
        'embedded.mobileprovision')
    shutil.copy2(source_path, target_path)
    # The trailing '.' makes AppIdentifierPrefix usable directly in
    # "$(AppIdentifierPrefix)$(CFBundleIdentifier)" style expansions.
    substitutions = self._GetSubstitutions(bundle_identifier, team_id + '.')
    return substitutions, provisioning_data['Entitlements']
  def _FindProvisioningProfile(self, profile, bundle_identifier):
    """Finds the .mobileprovision file to use for signing the bundle.

    Checks all the installed provisioning profiles (or if the user specified
    the PROVISIONING_PROFILE variable, only consult it) and select the most
    specific that correspond to the bundle identifier.

    Args:
      profile: string, optional, short name of the .mobileprovision file
        to use, if empty or the file is missing, the best file installed
        will be used
      bundle_identifier: string, value of CFBundleIdentifier from Info.plist
    Returns:
      A tuple of the path to the selected provisioning profile, the data of
      the embedded plist in the provisioning profile and the team identifier
      to use for code signing.
    Raises:
      SystemExit: if no .mobileprovision can be used to sign the bundle.
    """
    profiles_dir = os.path.join(
        os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles')
    if not os.path.isdir(profiles_dir):
      print >>sys.stderr, (
          'cannot find mobile provisioning for %s' % bundle_identifier)
      sys.exit(1)
    provisioning_profiles = None
    if profile:
      # The user named a specific profile; use it only if it exists.
      profile_path = os.path.join(profiles_dir, profile + '.mobileprovision')
      if os.path.exists(profile_path):
        provisioning_profiles = [profile_path]
    if not provisioning_profiles:
      # Fall back to scanning every installed profile.
      provisioning_profiles = glob.glob(
          os.path.join(profiles_dir, '*.mobileprovision'))
    # Map application-identifier pattern -> (path, plist data, team id)
    # for every profile whose pattern matches this bundle.
    valid_provisioning_profiles = {}
    for profile_path in provisioning_profiles:
      profile_data = self._LoadProvisioningProfile(profile_path)
      app_id_pattern = profile_data.get(
          'Entitlements', {}).get('application-identifier', '')
      for team_identifier in profile_data.get('TeamIdentifier', []):
        app_id = '%s.%s' % (team_identifier, bundle_identifier)
        if fnmatch.fnmatch(app_id, app_id_pattern):
          valid_provisioning_profiles[app_id_pattern] = (
              profile_path, profile_data, team_identifier)
    if not valid_provisioning_profiles:
      print >>sys.stderr, (
          'cannot find mobile provisioning for %s' % bundle_identifier)
      sys.exit(1)
    # If the user has multiple provisioning profiles installed that can be
    # used for ${bundle_identifier}, pick the most specific one (ie. the
    # provisioning profile whose pattern is the longest).
    selected_key = max(valid_provisioning_profiles, key=lambda v: len(v))
    return valid_provisioning_profiles[selected_key]
  def _LoadProvisioningProfile(self, profile_path):
    """Extracts the plist embedded in a provisioning profile.

    Args:
      profile_path: string, path to the .mobileprovision file
    Returns:
      Content of the plist embedded in the provisioning profile as a
      dictionary.
    """
    # 'security cms -D' decodes the profile's envelope and writes the
    # embedded plist to the temporary file, which is then parsed.
    with tempfile.NamedTemporaryFile() as temp:
      subprocess.check_call([
          'security', 'cms', '-D', '-i', profile_path, '-o', temp.name])
      return self._LoadPlistMaybeBinary(temp.name)
def _MergePlist(self, merged_plist, plist):
"""Merge |plist| into |merged_plist|."""
for key, value in plist.iteritems():
if isinstance(value, dict):
merged_value = merged_plist.get(key, {})
if isinstance(merged_value, dict):
self._MergePlist(merged_value, value)
merged_plist[key] = merged_value
else:
merged_plist[key] = value
else:
merged_plist[key] = value
def _LoadPlistMaybeBinary(self, plist_path):
"""Loads into a memory a plist possibly encoded in binary format.
This is a wrapper around plistlib.readPlist that tries to convert the
plist to the XML format if it can't be parsed (assuming that it is in
the binary format).
Args:
plist_path: string, path to a plist file, in XML or binary format
Returns:
Content of the plist as a dictionary.
"""
try:
# First, try to read the file using plistlib that only supports XML,
# and if an exception is raised, convert a temporary copy to XML and
# load that copy.
return plistlib.readPlist(plist_path)
except:
pass
with tempfile.NamedTemporaryFile() as temp:
shutil.copy2(plist_path, temp.name)
subprocess.check_call(['plutil', '-convert', 'xml1', temp.name])
return plistlib.readPlist(temp.name)
def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix):
"""Constructs a dictionary of variable substitutions for Entitlements.plist.
Args:
bundle_identifier: string, value of CFBundleIdentifier from Info.plist
app_identifier_prefix: string, value for AppIdentifierPrefix
Returns:
Dictionary of substitutions to apply when generating Entitlements.plist.
"""
return {
'CFBundleIdentifier': bundle_identifier,
'AppIdentifierPrefix': app_identifier_prefix,
}
def _GetCFBundleIdentifier(self):
"""Extracts CFBundleIdentifier value from Info.plist in the bundle.
Returns:
Value of CFBundleIdentifier in the Info.plist located in the bundle.
"""
info_plist_path = os.path.join(
os.environ['TARGET_BUILD_DIR'],
os.environ['INFOPLIST_PATH'])
info_plist_data = self._LoadPlistMaybeBinary(info_plist_path)
return info_plist_data['CFBundleIdentifier']
  def _InstallEntitlements(self, entitlements, substitutions, overrides):
    """Generates and install the ${BundleName}.xcent entitlements file.

    Expands variables "$(variable)" pattern in the source entitlements file,
    add extra entitlements defined in the .mobileprovision file and the copy
    the generated plist to "${BundlePath}.xcent".

    Args:
      entitlements: string, optional, path to the Entitlements.plist template
        to use, defaults to "${SDKROOT}/Entitlements.plist"
      substitutions: dictionary, variable substitutions
      overrides: dictionary, values to add to the entitlements
    Returns:
      Path to the generated entitlements file.
    """
    source_path = entitlements
    target_path = os.path.join(
        os.environ['BUILT_PRODUCTS_DIR'],
        os.environ['PRODUCT_NAME'] + '.xcent')
    if not source_path:
      # No template given: fall back to the SDK's default entitlements.
      source_path = os.path.join(
          os.environ['SDKROOT'],
          'Entitlements.plist')
    shutil.copy2(source_path, target_path)
    data = self._LoadPlistMaybeBinary(target_path)
    data = self._ExpandVariables(data, substitutions)
    if overrides:
      # Overrides only fill in keys that are absent from the template;
      # values already present in the template win.
      for key in overrides:
        if key not in data:
          data[key] = overrides[key]
    plistlib.writePlist(data, target_path)
    return target_path
def _ExpandVariables(self, data, substitutions):
"""Expands variables "$(variable)" in data.
Args:
data: object, can be either string, list or dictionary
substitutions: dictionary, variable substitutions to perform
Returns:
Copy of data where each references to "$(variable)" has been replaced
by the corresponding value found in substitutions, or left intact if
the key was not found.
"""
if isinstance(data, str):
for key, value in substitutions.iteritems():
data = data.replace('$(%s)' % key, value)
return data
if isinstance(data, list):
return [self._ExpandVariables(v, substitutions) for v in data]
if isinstance(data, dict):
return {k: self._ExpandVariables(data[k], substitutions) for k in data}
return data
if __name__ == '__main__':
  # Dispatch to main() with the command-line arguments (program name
  # stripped) and propagate its return code as the exit status.
  sys.exit(main(sys.argv[1:]))
| mit |
vitan/hue | desktop/core/ext-py/Django-1.6.10/django/template/loaders/eggs.py | 111 | 1205 | # Wrapper for loading templates from eggs via pkg_resources.resource_string.
from __future__ import unicode_literals
try:
from pkg_resources import resource_string
except ImportError:
resource_string = None
from django.conf import settings
from django.template.base import TemplateDoesNotExist
from django.template.loader import BaseLoader
from django.utils import six
class Loader(BaseLoader):
    # Usable only when setuptools' pkg_resources could be imported.
    is_usable = resource_string is not None
    def load_template_source(self, template_name, template_dirs=None):
        """
        Loads templates from Python eggs via pkg_resource.resource_string.

        For every installed app, it tries to get the resource
        (app, template_name).
        """
        if resource_string is None:
            raise TemplateDoesNotExist(template_name)
        resource_name = 'templates/' + template_name
        for app in settings.INSTALLED_APPS:
            try:
                contents = resource_string(app, resource_name)
            except Exception:
                # This app's egg doesn't provide the template; keep looking.
                continue
            if six.PY2:
                contents = contents.decode(settings.FILE_CHARSET)
            return (contents, 'egg:%s:%s' % (app, resource_name))
        raise TemplateDoesNotExist(template_name)
| apache-2.0 |
surajssd/kuma | kuma/wiki/middleware.py | 10 | 2315 | from django.shortcuts import render
from django.http import HttpResponseRedirect
from jingo.helpers import urlparams
from .exceptions import ReadOnlyException
from .jobs import DocumentZoneURLRemapsJob
class ReadOnlyMiddleware(object):
    """
    Renders a 403.html page with a flag for a specific message.
    """
    def process_exception(self, request, exception):
        # Only handle the read-only case; let other exceptions propagate.
        if not isinstance(exception, ReadOnlyException):
            return None
        return render(request, '403.html',
                      {'reason': exception.args[0]}, status=403)
class DocumentZoneMiddleware(object):
    """
    For document zones with specified URL roots, this middleware modifies the
    incoming path_info to point at the internal wiki path
    """
    def process_request(self, request):
        # https://bugzil.la/1189222
        # Don't redirect POST $subscribe requests to GET zone url
        if request.method == 'POST' and '$subscribe' in request.path:
            return None
        # (original_path, new_path) pairs for the request's locale.
        remaps = DocumentZoneURLRemapsJob().get(request.locale)
        for original_path, new_path in remaps:
            if (
                request.path_info == original_path or
                request.path_info.startswith(u''.join([original_path, '/']))
            ):
                # Is this a request for the "original" wiki path? Redirect to
                # new URL root, if so.
                new_path = request.path_info.replace(original_path,
                                                     new_path,
                                                     1)
                # Prefix with the locale and re-attach the query string,
                # dropping any 'lang' parameter.
                new_path = '/%s%s' % (request.locale, new_path)
                query = request.GET.copy()
                if 'lang' in query:
                    query.pop('lang')
                new_path = urlparams(new_path, query_dict=query)
                return HttpResponseRedirect(new_path)
            elif request.path_info.startswith(new_path):
                # Is this a request for the relocated wiki path? If so, rewrite
                # the path as a request for the proper wiki view.
                request.path_info = request.path_info.replace(new_path,
                                                              original_path,
                                                              1)
                break
| mpl-2.0 |
the9ull/OpenBazaar-Server | dht/protocol.py | 4 | 7274 | """
Copyright (c) 2014 Brian Muller
Copyright (c) 2015 OpenBazaar
"""
import random
from twisted.internet import defer
from zope.interface import implements
import nacl.signing
from dht.node import Node
from dht.routing import RoutingTable
from dht.utils import digest
from log import Logger
from rpcudp import RPCProtocol
from interfaces import MessageProcessor
from protos import objects
from protos.message import PING, STUN, STORE, DELETE, FIND_NODE, FIND_VALUE, HOLE_PUNCH
class KademliaProtocol(RPCProtocol):
    """Kademlia DHT message processor built on RPCProtocol.

    The rpc_* methods handle incoming requests; the call* methods issue the
    corresponding outgoing RPCs and feed every reply through
    handleCallResponse to keep the routing table up to date.
    """
    implements(MessageProcessor)
    def __init__(self, sourceNode, storage, ksize):
        # ksize: Kademlia bucket size ("k"); also used as the neighbourhood
        # size when deciding whether to replicate values to a new node.
        self.ksize = ksize
        self.router = RoutingTable(self, ksize, sourceNode)
        self.storage = storage
        self.sourceNode = sourceNode
        self.multiplexer = None
        self.log = Logger(system=self)
        self.handled_commands = [PING, STUN, STORE, DELETE, FIND_NODE, FIND_VALUE, HOLE_PUNCH]
        RPCProtocol.__init__(self, sourceNode.getProto(), self.router)
    def connect_multiplexer(self, multiplexer):
        # Late binding of the connection multiplexer (set after construction).
        self.multiplexer = multiplexer
    def getRefreshIDs(self):
        """
        Get ids to search for to keep old buckets up to date.
        """
        ids = []
        for bucket in self.router.getLonelyBuckets():
            ids.append(random.randint(*bucket.range))
        return ids
    def rpc_stun(self, sender):
        # Echo the sender's externally visible address back to it.
        self.addToRouter(sender)
        return [sender.ip, str(sender.port)]
    def rpc_ping(self, sender):
        # Reply with this node's serialized protobuf description.
        self.addToRouter(sender)
        return [self.sourceNode.getProto().SerializeToString()]
    def rpc_store(self, sender, keyword, key, value):
        """Store (key, value) under keyword, with basic size sanity checks."""
        self.addToRouter(sender)
        self.log.debug("got a store request from %s, storing value" % str(sender))
        if len(keyword) == 20 and len(key) <= 33 and len(value) <= 1800:
            self.storage[keyword] = (key, value)
            return ["True"]
        else:
            return ["False"]
    def rpc_delete(self, sender, keyword, key, signature):
        """Delete a stored entry if the provided signature verifies.

        Returns ["True"] on success and ["False"] on any failure (missing
        value, bad signature, unparsable pointer).
        """
        self.addToRouter(sender)
        value = self.storage.getSpecific(keyword, key)
        if value is not None:
            # Try to delete a message from the dht
            if keyword == digest(sender.id):
                try:
                    verify_key = nacl.signing.VerifyKey(sender.signed_pubkey[64:])
                    verify_key.verify(key, signature)
                    self.storage.delete(keyword, key)
                    return ["True"]
                except Exception:
                    return ["False"]
            # Or try to delete a pointer
            else:
                try:
                    # The stored value is a serialized Node protobuf; verify
                    # against the public key embedded in it.
                    node = objects.Node()
                    node.ParseFromString(value)
                    pubkey = node.signedPublicKey[64:]
                    try:
                        verify_key = nacl.signing.VerifyKey(pubkey)
                        verify_key.verify(signature + key)
                        self.storage.delete(keyword, key)
                        return ["True"]
                    except Exception:
                        return ["False"]
                except Exception:
                    pass
        return ["False"]
    def rpc_find_node(self, sender, key):
        """Return the serialized neighbours of |key| from the routing table."""
        self.log.info("finding neighbors of %s in local table" % key.encode('hex'))
        self.addToRouter(sender)
        node = Node(key)
        nodeList = self.router.findNeighbors(node, exclude=sender)
        ret = []
        for n in nodeList:
            ret.append(n.getProto().SerializeToString())
        return ret
    def rpc_find_value(self, sender, key):
        """Return the stored value for |key|, or fall back to find_node."""
        self.addToRouter(sender)
        # "value" marker lets the caller distinguish values from node lists.
        ret = ["value"]
        value = self.storage.get(key, None)
        if value is None:
            return self.rpc_find_node(sender, key)
        ret.extend(value)
        return ret
    def callFindNode(self, nodeToAsk, nodeToFind):
        address = (nodeToAsk.ip, nodeToAsk.port)
        d = self.find_node(address, nodeToFind.id)
        return d.addCallback(self.handleCallResponse, nodeToAsk)
    def callFindValue(self, nodeToAsk, nodeToFind):
        address = (nodeToAsk.ip, nodeToAsk.port)
        d = self.find_value(address, nodeToFind.id)
        return d.addCallback(self.handleCallResponse, nodeToAsk)
    def callPing(self, nodeToAsk):
        address = (nodeToAsk.ip, nodeToAsk.port)
        d = self.ping(address)
        return d.addCallback(self.handleCallResponse, nodeToAsk)
    def callStore(self, nodeToAsk, keyword, key, value):
        address = (nodeToAsk.ip, nodeToAsk.port)
        d = self.store(address, keyword, key, value)
        return d.addCallback(self.handleCallResponse, nodeToAsk)
    def callDelete(self, nodeToAsk, keyword, key, signature):
        address = (nodeToAsk.ip, nodeToAsk.port)
        d = self.delete(address, keyword, key, signature)
        return d.addCallback(self.handleCallResponse, nodeToAsk)
    def transferKeyValues(self, node):
        """
        Given a new node, send it all the keys/values it should be storing.

        @param node: A new node that just joined (or that we just found out
        about).

        Process:
        For each key in storage, get k closest nodes. If newnode is closer
        than the furtherst in that list, and the node for this server
        is closer than the closest in that list, then store the key/value
        on the new node (per section 2.5 of the paper)
        """
        ds = []
        for keyword in self.storage.iterkeys():
            keynode = Node(keyword)
            neighbors = self.router.findNeighbors(keynode, exclude=node)
            if len(neighbors) > 0:
                newNodeClose = node.distanceTo(keynode) < neighbors[-1].distanceTo(keynode)
                thisNodeClosest = self.sourceNode.distanceTo(keynode) < neighbors[0].distanceTo(keynode)
            # NOTE: when neighbors is empty the first disjunct short-circuits,
            # so the flags above are never read unbound.
            if len(neighbors) == 0 \
                    or (newNodeClose and thisNodeClosest) \
                    or (thisNodeClosest and len(neighbors) < self.ksize):
                for k, v in self.storage.iteritems(keyword):
                    ds.append(self.callStore(node, keyword, k, v))
        return defer.gatherResults(ds)
    def handleCallResponse(self, result, node):
        """
        If we get a response, add the node to the routing table. If
        we get no response, make sure it's removed from the routing table.
        """
        if result[0]:
            if self.router.isNewNode(node):
                self.transferKeyValues(node)
            self.log.info("got response from %s, adding to router" % node)
            self.router.addContact(node)
        else:
            self.log.debug("no response from %s, removing from router" % node)
            self.router.removeContact(node)
        return result
    def addToRouter(self, node):
        """
        Called by rpc_ functions when a node sends them a request.
        We add the node to our router and transfer our stored values
        if they are new and within our neighborhood.
        """
        if self.router.isNewNode(node):
            self.log.debug("Found a new node, transferring key/values")
            self.transferKeyValues(node)
        self.router.addContact(node)
    def __iter__(self):
        # Iterating the protocol yields the message types it handles.
        return iter(self.handled_commands)
| mit |
spencerahill/aospy-obj-lib | aospy_user/calcs/mse_budget.py | 1 | 3948 | """Functions relating to the moist static energy budget."""
from aospy.utils.vertcoord import int_dp_g
from indiff.advec import Upwind
from indiff.deriv import LatCenDeriv, LonCenDeriv
from .. import LAT_STR, LON_STR, PLEVEL_STR
from .advection import (horiz_advec, vert_advec, horiz_advec_upwind,
zonal_advec_upwind, merid_advec_upwind,
total_advec_upwind)
from .transport import (field_horiz_flux_divg, field_vert_flux_divg,
field_total_advec, field_horiz_advec_divg_sum,
field_times_horiz_divg)
from .thermo import mse
from .toa_sfc_fluxes import column_energy
def mse_merid_deriv_eta(temp, hght, sphum):
    """Meridional derivative of MSE on pressure coordinates."""
    mse_ = mse(temp, hght, sphum)
    return LatCenDeriv(mse_, LAT_STR).deriv()
def mse_zonal_deriv_eta(temp, hght, sphum):
    """Zonal derivative of MSE on pressure coordinates."""
    mse_ = mse(temp, hght, sphum)
    return LonCenDeriv(mse_, LON_STR).deriv()
def mse_horiz_flux_divg(temp, hght, sphum, u, v, radius):
    """Horizontal flux convergence of moist static energy."""
    mse_ = mse(temp, hght, sphum)
    return field_horiz_flux_divg(mse_, u, v, radius)
def mse_horiz_advec(temp, hght, sphum, u, v, radius):
    """Horizontal advection of moist static energy."""
    mse_ = mse(temp, hght, sphum)
    return horiz_advec(mse_, u, v, radius)
def mse_times_horiz_divg(temp, hght, sphum, u, v, radius, dp):
    """Horizontal divergence times moist static energy."""
    mse_ = mse(temp, hght, sphum)
    return field_times_horiz_divg(mse_, u, v, radius, dp)
def mse_horiz_advec_divg_sum(T, z, q, u, v, rad, dp):
    """Sum of horizontal advection and divergence terms for MSE."""
    mse_ = mse(T, z, q)
    return field_horiz_advec_divg_sum(mse_, u, v, rad, dp)
def mse_vert_flux_divg(T, z, q, omega, p):
    """Vertical divergence times moist static energy."""
    mse_ = mse(T, z, q)
    return field_vert_flux_divg(mse_, omega, p)
def mse_vert_advec(temp, hght, sphum, omega, p):
    """Vertical advection of moist static energy."""
    mse_ = mse(temp, hght, sphum)
    return vert_advec(mse_, omega, p)
def mse_total_advec(temp, hght, sphum, u, v, omega, p, radius):
    """Total (horizontal plus vertical) advection of moist static energy."""
    return field_total_advec(mse(temp, hght, sphum), u, v, omega, p, radius)
def mse_zonal_advec_upwind(temp, z, q, u, radius, order=2):
    """Zonal advection of moist static energy using upwind scheme."""
    mse_ = mse(temp, z, q)
    return zonal_advec_upwind(mse_, u, radius, order=order)
def mse_merid_advec_upwind(temp, z, q, v, radius, order=2):
    """Meridional advection of moist static energy using upwind scheme."""
    mse_ = mse(temp, z, q)
    return merid_advec_upwind(mse_, v, radius, order=order)
def mse_horiz_advec_upwind(temp, hght, sphum, u, v, radius, order=2):
    """Horizontal moist static energy advection using upwind scheme."""
    mse_ = mse(temp, hght, sphum)
    return horiz_advec_upwind(mse_, u, v, radius, order=order)
def mse_vert_advec_upwind(temp, hght, sphum, omega, p, order=2):
    """Upwind vertical advection of moist static energy."""
    mse_ = mse(temp, hght, sphum)
    upwind = Upwind(omega, mse_, PLEVEL_STR, coord=p, order=order,
                    fill_edge=True)
    return upwind.advec()
def mse_total_advec_upwind(temp, hght, sphum, u, v, omega, p, radius):
    """Total advection of moist static energy using upwind scheme."""
    mse_ = mse(temp, hght, sphum)
    return total_advec_upwind(mse_, u, v, omega, p, radius)
def mse_budget_advec_residual(temp, hght, sphum, ucomp, vcomp, omega,
                              p, dp, radius, swdn_toa, swup_toa, olr, swup_sfc,
                              swdn_sfc, lwup_sfc, lwdn_sfc, shflx, evap):
    """Residual in vertically integrated MSE budget.

    Computed as net column energy input minus the vertical integral of
    the total MSE advection.
    """
    advec = field_total_advec(mse(temp, hght, sphum), ucomp, vcomp, omega,
                              p, radius)
    column_transport = int_dp_g(advec, dp)
    net_energy_input = column_energy(swdn_toa, swup_toa, olr, swup_sfc,
                                     swdn_sfc, lwup_sfc, lwdn_sfc, shflx,
                                     evap)
    return net_energy_input - column_transport
| apache-2.0 |
YosubShin/morphous-cassandra | pylib/cqlshlib/test/test_cqlsh_invocation.py | 160 | 1941 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# to configure behavior, define $CQL_TEST_HOST to the destination address
# for Thrift connections, and $CQL_TEST_PORT to the associated port.
from .basecase import BaseTestCase
class TestCqlshInvocation(BaseTestCase):
    """Placeholder suite for cqlsh invocation behaviour.

    Every test below is an unimplemented stub (body is ``pass``); the
    method names document the intended coverage: interpreter selection,
    color handling, connection configuration, history, encoding, and
    signal/EOF handling.
    """
    def setUp(self):
        pass
    def tearDown(self):
        pass
    def test_normal_run(self):
        pass
    def test_python_interpreter_location(self):
        pass
    def test_color_capability_detection(self):
        pass
    def test_colored_output(self):
        pass
    def test_color_cmdline_option(self):
        pass
    def test_debug_option(self):
        pass
    def test_connection_args(self):
        pass
    def test_connection_config(self):
        pass
    def test_connection_envvars(self):
        pass
    def test_command_history(self):
        pass
    def test_missing_dependencies(self):
        pass
    def test_completekey_config(self):
        pass
    def test_ctrl_c(self):
        pass
    def test_eof(self):
        pass
    def test_output_encoding_detection(self):
        pass
    def test_output_encoding(self):
        pass
    def test_retries(self):
        pass
| apache-2.0 |
stefpiatek/mdt-flask-app | config.py | 1 | 1447 | import os
from secret_info import POSTGRES_CONNECTION, SECRET_KEY
# Absolute path of the directory containing this config module.
basedir = os.path.abspath(os.path.dirname(__file__))
# Shared date formatting: strftime pattern plus the human-readable hint
# shown alongside date inputs.
date_style = {'format': '%d-%b-%Y',
              'help': 'DD-MMM-YYYY'}
class Config:
    """Base configuration shared by all environments."""
    WTF_CSRF_ENABLED = True
    # Prefer the environment variable; fall back to the secret_info value.
    SECRET_KEY = os.environ.get('SECRET_KEY') or SECRET_KEY
    SQLALCHEMY_TRACK_MODIFICATIONS = True
    @staticmethod
    def init_app(app):
        # Hook for environment-specific app initialisation; no-op by default.
        pass
class DevelopmentConfig(Config):
    """Local development: debug toolbar enabled, mdt_dev database."""
    DEBUG = True
    TESTING = False
    DEBUG_TB_INTERCEPT_REDIRECTS = False
    # Fix: this must be a tuple, not a bare string. ('127.0.0.1') is just a
    # string, and substring membership would make the toolbar host whitelist
    # over-permissive (e.g. '127.0.0' would match).
    DEBUG_TB_HOSTS = ('127.0.0.1',)
    SQLALCHEMY_DATABASE_URI = (os.environ.get('TEST_DATABASE_URL') or
                               POSTGRES_CONNECTION + 'mdt_dev')
class TestingConfig(Config):
    """Test runs: login and CSRF disabled, mdt_test database."""
    DEBUG = False
    TESTING = True
    # for ease of testing, disable login, should probably create fixture
    # with logged in user
    LOGIN_DISABLED = True
    WTF_CSRF_ENABLED = False
    WTF_CSRF_METHODS = []
    # Port used by the live test server.
    TEST_SERVER_PORT = 5001
    SQLALCHEMY_DATABASE_URI = (os.environ.get('TEST_DATABASE_URL') or
                               POSTGRES_CONNECTION + 'mdt_test')
class ProductionConfig(Config):
    """Production: debugging and testing off, mdt_db database."""
    DEBUG = False
    TESTING = False
    # NOTE(review): this reads TEST_DATABASE_URL, the same variable as the
    # dev/test configs -- looks like a copy-paste; confirm whether production
    # should use a dedicated DATABASE_URL instead.
    SQLALCHEMY_DATABASE_URI = (os.environ.get('TEST_DATABASE_URL') or
                               POSTGRES_CONNECTION + 'mdt_db')
# Mapping from configuration name to its class; 'default' is used when no
# explicit name is supplied by the app factory.
config = {
    'development': DevelopmentConfig,
    'testing': TestingConfig,
    'production': ProductionConfig,
    'default': DevelopmentConfig
}
| mit |
chrhartm/SORN | common/sorn_stats.py | 2 | 74077 | from __future__ import division
from pylab import *
import utils
utils.backup(__file__)
from stats import AbstractStat
from stats import HistoryStat
from stats import _getvar
from common.sources import TrialSource
from utils.lstsq_reg import lstsq_reg
import cPickle as pickle
import gzip
def load_source(name,c):
    """Load a pickled source object from the log directory.

    Tries the plain "<name>.pickle" filename first; on IOError falls back
    to the cluster-style filename that embeds the varied parameter. If the
    unpickled object is a TrialSource wrapper, the underlying source is
    returned instead.

    Parameters:
        name: basename of the pickle file (without extension)
        c: parameter container providing logfilepath (and, on the cluster,
           c.cluster.vary_param / c.cluster.current_param)
    """
    try:
        filename = c.logfilepath+name+".pickle"
        sourcefile = gzip.open(filename,"r")
    except IOError: # Cluster
        filename = (c.logfilepath + name + "_%s_%.3f.pickle"
                    % (c.cluster.vary_param, c.cluster.current_param))
        sourcefile = gzip.open(filename,"r")
    try:
        source = pickle.load(sourcefile)
    finally:
        # Fix: the gzip handle used to leak; close it deterministically.
        sourcefile.close()
    if isinstance(source,TrialSource):
        source = source.source
    return source
class CounterStat(AbstractStat):
    """Counts simulation steps under the key 'num_steps' ('reduce' mode).

    The counter is (re)set in start() and incremented once per add() call.
    """
    def __init__(self):
        self.name = 'num_steps'
        self.collection = "reduce"
    def start(self,c,obj):
        c[self.name] = 0.0 # Everything needs to be a float :-/
    def add(self,c,obj):
        c[self.name] += 1
    def report(self,c,obj):
        return array(c[self.name]) # And an array :-/
# By making CounterStat a little longer we can make ClearCounterStat a
# lot shorter
class ClearCounterStat(CounterStat):
    """A CounterStat that is reset by clear() instead of start().

    The tuple assignment below swaps the bound clear/start methods, so
    clear() runs CounterStat.start (which zeroes the counter) and start()
    runs the inherited clear. This reuses the parent's reset logic while
    restarting the count on every clear.
    """
    def __init__(self):
        self.name = 'counter'
        self.collection = "ignore"
        (self.clear,self.start) = (self.start,self.clear)
class PopulationVariance(AbstractStat):
    """Histogram over the number of simultaneously active excitatory units.

    Bin n counts the steps on which exactly n units were active.
    """
    def __init__(self):
        self.name = 'pop_var'
        self.collection = 'reduce'
    def clear(self,c,obj):
        # One bin per possible activity count, 0..N_e inclusive.
        c.pop_var = zeros(obj.c.N_e + 1)
    def add(self,c,obj):
        c.pop_var[sum(obj.x)] += 1.0
    def report(self,c,obj):
        return c.pop_var
class ActivityStat(AbstractStat):
    """
    Gathers the state of the network at each step

    If the parameter only_last is set, only the first and last steps are
    collected
    """
    def __init__(self):
        self.name = 'activity'
        self.collection = 'gather'
    def clear(self,c,sorn):
        # With only_last, the buffer holds two halves of size only_last:
        # a subsampled trace of the whole run and the final steps verbatim.
        if sorn.c.stats.has_key('only_last'):
            c.activity = zeros(sorn.c.stats.only_last\
                               +sorn.c.stats.only_last)
        else:
            c.activity = zeros(sorn.c.N_steps)
        self.step = 0
    def add(self,c,sorn):
        # Stored quantity: fraction of active excitatory units this step.
        if sorn.c.stats.has_key('only_last'):
            # Within the final only_last steps: store into the second half.
            new_step = self.step - (sorn.c.N_steps\
                                    -sorn.c.stats.only_last)
            if new_step >= 0:
                c.activity[new_step+sorn.c.stats.only_last] \
                        = sum(sorn.x)/sorn.c.N_e
            # Otherwise: subsample the run at stride N_steps//only_last
            # into the first half of the buffer.
            elif self.step % (sorn.c.N_steps\
                              //sorn.c.stats.only_last) == 0:
                c.activity[self.step//(sorn.c.N_steps\
                           //sorn.c.stats.only_last)] = sum(sorn.x)/sorn.c.N_e
        else:
            c.activity[self.step] = sum(sorn.x)/sorn.c.N_e
        self.step += 1
    def report(self,c,sorn):
        return c.activity
class InputIndexStat(AbstractStat):
    """
    Gathers the index of the input at each step

    Uses the same buffer layout as ActivityStat: with only_last set, the
    first half is a subsampled trace of the whole run and the second half
    holds the final only_last steps verbatim.
    """
    def __init__(self):
        self.name = 'InputIndex'
        self.collection = 'gather'
    def clear(self,c,sorn):
        if sorn.c.stats.has_key('only_last'):
            c.inputindex = zeros(sorn.c.stats.only_last\
                                 +sorn.c.stats.only_last)
        else:
            c.inputindex = zeros(sorn.c.N_steps)
        self.step = 0
    def add(self,c,sorn):
        if sorn.c.stats.has_key('only_last'):
            # Within the final only_last steps: store into the second half.
            new_step = self.step - (sorn.c.N_steps\
                                    -sorn.c.stats.only_last)
            if new_step >= 0:
                c.inputindex[new_step+sorn.c.stats.only_last] \
                        = sorn.source.global_index()
            # Otherwise subsample at a fixed stride into the first half.
            elif self.step % (sorn.c.N_steps\
                              //sorn.c.stats.only_last) == 0:
                c.inputindex[self.step//(sorn.c.N_steps\
                             //sorn.c.stats.only_last)] = sorn.source.global_index()
        else:
            c.inputindex[self.step] = sorn.source.global_index()
        self.step += 1
    def report(self,c,sorn):
        return c.inputindex
class WordListStat(AbstractStat):
    # OLD! use pickle of source instead!
    """Reports the word list from the sorn config (deprecated)."""
    def __init__(self):
        self.name = 'WordList'
        self.collection = 'gather'
    def report(self,c,sorn):
        return sorn.c.words
class InputUnitsStat(AbstractStat):
    """Reports a 0/1 mask over excitatory units that receive input.

    A unit counts as an input unit when its row of W_eu contains at least
    one synapse.
    """
    def __init__(self):
        self.name = 'InputUnits'
        self.collection = 'gather'
    def report(self,c,sorn):
        input_units = where(sum(sorn.W_eu.get_synapses(),1)>0)[0]
        # Direct index assignment instead of the previous O(N*M)
        # per-element membership test; dtype int matches the old
        # "bool array + 0" cast.
        mask = zeros(sorn.c.N_e, dtype=int)
        mask[input_units] = 1
        return mask
class NormLastStat(AbstractStat):
    '''
    This is a helper Stat that computes the normalized last spikes
    and input indices

    Results are cached on c (norm_last_input_spikes, norm_last_input_index,
    maxindex, N_comparison) for use by the Spont* stats; the reported value
    is just N_comparison.
    '''
    def __init__(self):
        self.name = 'NormLast'
        self.collection = 'gather'
    def report(self,c,sorn):
        steps_plastic = sorn.c.steps_plastic
        steps_noplastic_train = sorn.c.steps_noplastic_train
        steps_noplastic_test = sorn.c.steps_noplastic_test
        plastic_train = steps_plastic+steps_noplastic_train
        # Spikes/indices from the non-plastic training phase only.
        input_spikes = c.spikes[:,steps_plastic:plastic_train]
        input_index = c.inputindex[steps_plastic:plastic_train]
        # Filter out empty states
        input_spikes = input_spikes[:,input_index != -1]
        input_index = input_index[input_index != -1]
        if sorn.c.stats.has_key('only_last'):
            N_comparison = sorn.c.stats.only_last
        else:
            N_comparison = 2500
        assert(N_comparison > 0)
        assert(N_comparison <= steps_noplastic_test \
               and N_comparison <= steps_noplastic_train)
        maxindex = int(max(input_index))
        # Only use spikes that occured at the end of learning and spont
        last_input_spikes = input_spikes[:,-N_comparison:]
        last_input_index = input_index[-N_comparison:]
        # Get the minimal occurence of an index in the last steps
        min_letter_count = inf
        for i in range(maxindex+1):
            tmp = sum(last_input_index == i)
            if min_letter_count > tmp:
                min_letter_count = tmp
        # For each index, take the same number of states from the
        # end phase of learning to avoid a bias in comparing states
        norm_last_input_spikes = np.zeros((shape(last_input_spikes)[0],\
                                           min_letter_count*(maxindex+1)))
        norm_last_input_index = np.zeros(min_letter_count*(maxindex+1))
        for i in range(maxindex+1):
            indices = find(last_input_index == i)
            norm_last_input_spikes[:,min_letter_count*i\
                                   : min_letter_count*(i+1)]\
                    = last_input_spikes[:, indices[-min_letter_count:]]
            norm_last_input_index[min_letter_count*i\
                                  : min_letter_count*(i+1)]\
                    = last_input_index[indices[-min_letter_count:]]
        # Shuffle to avoid argmin-problem of selecting only first match
        indices = arange(shape(norm_last_input_index)[0])
        shuffle(indices)
        norm_last_input_index = norm_last_input_index[indices]
        norm_last_input_spikes = norm_last_input_spikes[:,indices]
        # Cache for downstream stats (SpontPatternStat etc.).
        c.norm_last_input_index = norm_last_input_index
        c.norm_last_input_spikes = norm_last_input_spikes
        c.maxindex = maxindex
        c.N_comparison = N_comparison
        to_return = array([float(N_comparison)])
        return to_return
class SpontPatternStat(AbstractStat):
    """
    Computes the frequency of each pattern in the spontaneous activity

    Returns a (2, maxindex+adding) array: row 0 counts how often each input
    index was the closest match to a spontaneous state, row 1 counts
    occurrences of whole (forward and reversed) word patterns. Also caches
    c.similar_input for SpontTransitionStat / SpontIndexStat.
    """
    def __init__(self):
        self.name = 'SpontPattern'
        self.collection = 'gather'
    def report(self,c,sorn):
        source_plastic = load_source("source_plastic",sorn.c)
        steps_noplastic_test = sorn.c.steps_noplastic_test
        spont_spikes = c.spikes[:,-steps_noplastic_test:]
        # Normalized evoked states cached by NormLastStat.
        norm_last_input_index = c.norm_last_input_index
        norm_last_input_spikes = c.norm_last_input_spikes
        maxindex = c.maxindex
        N_comparison = c.N_comparison
        last_spont_spikes = spont_spikes[:,-N_comparison:]
        # Remove silent periods from spontspikes
        last_spont_spikes = last_spont_spikes[:,sum(last_spont_spikes,0)>0]
        N_comp_spont = shape(last_spont_spikes)[1]
        # Find for each spontaneous state the evoked state with the
        # smallest hamming distance and store the corresponding index
        similar_input = zeros(N_comp_spont)
        for i in xrange(N_comp_spont):
            most_similar = argmin(sum(abs(norm_last_input_spikes.T\
                                          -last_spont_spikes[:,i]),axis=1))
            similar_input[i] = norm_last_input_index[most_similar]
        # Count the number of spontaneous states for each index and plot
        index = range(maxindex+1)
        # 'gatherv' needs an extra trailing slot used as an end marker.
        if self.collection == 'gatherv':
            adding = 2
        else:
            adding = 1
        pattern_freqs = zeros((2,maxindex+adding))
        # Unused leftover from plotting code.
        barcolor = []
        for i in index:
            pattern_freqs[0,i] = sum(similar_input==index[i])
        # Compare patterns
        # Forward patterns ([0,1,2,3],[4,5,6,7],...)
        patterns = array([arange(len(w))+source_plastic.glob_ind[i] \
                          for (i,w) in enumerate(source_plastic.words)])
        rev_patterns = array([x[::-1] for x in patterns])
        maxlen = max([len(x) for x in patterns])
        # Also get the reversed patterns
        if maxlen>1: # Single letters can't be reversed
            allpatterns = array(patterns.tolist()+rev_patterns.tolist())
        else:
            allpatterns = array(patterns.tolist())
        for (i,p) in enumerate(allpatterns):
            patternlen = len(p)
            for j in xrange(N_comp_spont-maxlen):
                if all(similar_input[j:j+patternlen] == p):
                    pattern_freqs[1,i] += 1
        # Marker for end of freqs
        if self.collection == 'gatherv':
            pattern_freqs[:,-1] = -1
        # Cache the per-step matches for the other Spont* stats.
        c.similar_input = similar_input
        return(pattern_freqs)
class SpontTransitionStat(AbstractStat):
    """Transition counts between inferred input indices (column = from,
    row = to), based on c.similar_input from SpontPatternStat."""
    def __init__(self):
        self.name = 'SpontTransition'
        self.collection = 'gather'
    def report(self,c,sorn):
        inferred = c.similar_input  # from SpontPatternStat
        n_states = c.maxindex + 1
        transitions = np.zeros((n_states, n_states))
        for (previous, current) in zip(inferred[:-1], inferred[1:]):
            transitions[current, previous] += 1
        return transitions
class SpontIndexStat(AbstractStat):
    """Reports the per-step closest-input indices computed by
    SpontPatternStat (c.similar_input)."""
    def __init__(self):
        self.name = 'SpontIndex'
        self.collection = 'gather'
    def report (self,c,sorn):
        return c.similar_input
class BayesStat(AbstractStat):
    """
    Trains a regularized least-squares readout on network states to
    predict the word ending ('A...' vs 'B...') and reports, per input
    condition (one entry per value of sorn.c.frac_A): mean output
    drive, its standard deviation, and decision frequencies.

    Side effect: stores pred_pos, Xtest, raw_predictions, inputi_test,
    letters_for_frac, classifier and noinput_units on c for reuse by
    later stats (SpontBayesStat, OutputDistStat, TrialBayesStat, ...).

    pred_pos: int
        Number of steps before the final letter (M/N) at which the
        prediction is read out.
    """
    def __init__(self,pred_pos = 0):
        self.name = 'Bayes'
        self.collection = 'gather'
        self.pred_pos = pred_pos # steps before M/N
    def clear(self,c,sorn):
        pass
        # If raw_prediction is input to M/N neurons, this is needed
        #~ self.M_neurons = where(sorn.W_eu.W[:,
                            #~ sorn.source.source.lookup['M']]==1)[0]
        #~ self.N_neurons = where(sorn.W_eu.W[:,
                            #~ sorn.source.source.lookup['N']]==1)[0]
    def report(self,c,sorn):
        ### Prepare spike train matrices for training and testing
        # Separate training and test data according to steps
        source_plastic = load_source("source_plastic",sorn.c)
        steps_plastic = sorn.c.steps_plastic
        N_train_steps = sorn.c.steps_noplastic_train
        N_inputtrain_steps = steps_plastic + N_train_steps
        N_test_steps = sorn.c.steps_noplastic_test
        # Discard the first steps after plasticity is switched off
        burnin = 3000
        # Transpose because this is the way they are in test_bayes.py
        Xtrain = c.spikes[:,steps_plastic+burnin:N_inputtrain_steps].T
        Xtest = c.spikes[:,N_inputtrain_steps:].T
        assert(shape(Xtest)[0] == N_test_steps)
        inputi_train = c.inputindex[steps_plastic+burnin
                                                  :N_inputtrain_steps]
        assert(shape(Xtrain)[0] == shape(inputi_train)[0])
        inputi_test = c.inputindex[N_inputtrain_steps:]
        assert(shape(inputi_test)[0]== N_test_steps)
        N_fracs = len(sorn.c.frac_A)
        # Filter out empty states (inputindex == -1)
        if isinstance(sorn.source,TrialSource): # if TrialSource
            source = sorn.source.source
        else:
            source = sorn.source
        Xtrain = Xtrain[inputi_train != -1,:]
        inputi_train = inputi_train[inputi_train != -1]
        Xtest = Xtest[inputi_test != -1,:]
        inputi_test = inputi_test[inputi_test != -1]
        # Following snipplet modified from sorn_stats spont_stat
        # Get the minimal occurence of an index in the last steps
        maxindex = int(max(inputi_train))
        min_letter_count = inf
        for i in range(maxindex+1):
            tmp = sum(inputi_train == i)
            if min_letter_count > tmp:
                min_letter_count = tmp
        # For each index, take the same number of states from the
        # end phase of learning to avoid a bias in comparing states
        norm_Xtrain = np.zeros((min_letter_count*(maxindex+1),
                                                   shape(Xtrain)[1]))
        norm_inputi_train = np.zeros(min_letter_count*(maxindex+1))
        for i in range(maxindex+1):
            indices = find(inputi_train == i)
            norm_Xtrain[min_letter_count*i
                        : min_letter_count*(i+1), :]\
                        = Xtrain[indices[-min_letter_count:],:]
            norm_inputi_train[min_letter_count*i
                              : min_letter_count*(i+1)]\
                              = inputi_train[indices[-min_letter_count:]]
        Xtrain = norm_Xtrain
        inputi_train = norm_inputi_train
        # Optionally restrict the readout to units without direct input
        noinput_units = where(sum(sorn.W_eu.W,1)==0)[0]
        if sorn.c.stats.bayes_noinput:
            Xtrain_noinput = Xtrain[:,noinput_units]
            Xtest_noinput = Xtest[:,noinput_units]
        else:
            Xtrain_noinput = Xtrain
            Xtest_noinput = Xtest
        assert(source_plastic.words[0][0]=="A" and
               source_plastic.words[1][0]=="B")
        A_index = source_plastic.glob_ind[0] # start of first word
        B_index = source_plastic.glob_ind[1] # start of second word
        # position from which to predict end of word
        pred_pos = len(source_plastic.words[0])-1-self.pred_pos
        assert(pred_pos>=0
               and pred_pos <= source_plastic.global_range())
        # R: one-hot targets (row 0: A-word, row 1: B-word)
        R = np.zeros((2,shape(inputi_train)[0]))
        R[0,:] = inputi_train == A_index+pred_pos
        R[1,:] = inputi_train == B_index+pred_pos
        if sorn.c.stats.relevant_readout:
            # Train only on the states at the prediction position
            Xtrain_relevant = Xtrain_noinput[((inputi_train ==
                              A_index+pred_pos) +
                              (inputi_train == B_index+pred_pos))>0,:]
            R_relevant = R[:,((inputi_train == A_index+pred_pos) +
                              (inputi_train == B_index+pred_pos))>0]
            classifier = lstsq_reg(Xtrain_relevant,R_relevant.T,
                                   sorn.c.stats.lstsq_mue)
        else:
            classifier = lstsq_reg(Xtrain_noinput,R.T,
                                   sorn.c.stats.lstsq_mue)
        #~ # No real difference between LogReg, BayesRidge and my thing
        #~ # If you do this, comment out raw_predictions further down
        #~ from sklearn import linear_model
        #~ clf0 = linear_model.LogisticRegression(C=1)#BayesianRidge()
        #~ clf1 = linear_model.LogisticRegression(C=1)#BayesianRidge()
        #~ clf0.fit(Xtrain_noinput,R.T[:,0])
        #~ clf1.fit(Xtrain_noinput,R.T[:,1])
        #~ raw_predictions = vstack((clf0.predict_proba(Xtest_noinput)[:,1]
                           #~ ,clf1.predict_proba(Xtest_noinput)[:,1])).T
        # predict
        #~ raw_predictions = Xtest.dot(classifier)
        #~ # comment this out if you use sklearn
        raw_predictions = Xtest_noinput.dot(classifier)
        #~ # Historical stuff
        #~ # Raw predictions = total synaptic input to M/N neurons
        #~ raw_predictions[1:,0] = sum((sorn.W_ee*Xtest[:-1].T)[
                                #~ self.M_neurons],0)
        #~ raw_predictions[1:,1] = sum((sorn.W_ee*Xtest[:-1].T)[
                                #~ self.N_neurons],0)
        #~ # Raw predictions = total activation of M/N neurons
        #~ raw_predictions[:,0] = sum(Xtest.T[self.M_neurons],0)
        #~ raw_predictions[:,1] = sum(Xtest.T[self.N_neurons],0)
        #~ # for testing: sum(raw_predictions[indices,0])>indices+-1,2,3
        letters_for_frac = ['B']
        # Because alphabet is sorted alphabetically, this list will
        # have the letters corresponding to the list frac_A
        for l in source.alphabet:
            if not ((l=='A') or (l=='B') or (l=='M') or (l=='N')
                    or (l=='X') or (l=='_')):
                letters_for_frac.append(l)
        letters_for_frac.append('A')
        # Accumulate per-condition statistics over all words
        output_drive = np.zeros((N_fracs,2))
        output_std = np.zeros((N_fracs,2))
        decisions = np.zeros((N_fracs,2))
        denom = np.zeros(N_fracs)
        for (s_word,s_index) in zip(source.words,source.glob_ind):
            # Map word to its frac_A condition via its first letter
            i = ''.join(letters_for_frac).find(s_word[0])
            indices = find(inputi_test==s_index+pred_pos)
            # A predicted
            output_drive[i,0] += mean(raw_predictions[indices,0])
            # B predicted
            output_drive[i,1] += mean(raw_predictions[indices,1])
            decisions[i,0] += mean(raw_predictions[indices,0]>\
                                   raw_predictions[indices,1])
            decisions[i,1] += mean(raw_predictions[indices,1]>=\
                                   raw_predictions[indices,0])
            output_std[i,0] += std(raw_predictions[indices,0])
            output_std[i,1] += std(raw_predictions[indices,1])
            denom[i] += 1
        # Some words occur more than once
        output_drive[:,0] /= denom
        output_drive[:,1] /= denom
        output_std[:,0] /= denom
        output_std[:,1] /= denom
        decisions[:,0] /= denom
        decisions[:,1] /= denom
        # for other stats (e.g. SpontBayesStat)
        c.pred_pos = pred_pos
        c.Xtest = Xtest
        c.raw_predictions = raw_predictions
        c.inputi_test = inputi_test
        c.letters_for_frac = letters_for_frac
        c.classifier = classifier
        c.noinput_units = noinput_units
        to_return = hstack((output_drive,output_std,decisions))
        return to_return
class AttractorDynamicsStat(AbstractStat):
    """
    This stat tracks the distance between output gains during the
    input presentation to determine whether the decision is based on
    attractor dynamics.

    Reuses the registered BayesStat instance: its pred_pos is swept
    over every position in the word, its report() is rerun, and the
    original pred_pos is restored at the end.
    """
    def __init__(self):
        self.name = 'AttractorDynamics'
        self.collection = 'gather'
    def report(self,c,sorn):
        # Read stuff in
        letters_for_frac = c.letters_for_frac
        if isinstance(sorn.source,TrialSource): # if TrialSource
            source = sorn.source.source
        else:
            source = sorn.source
        word_length = min([len(x) for x in source.words])
        N_words = len(source.words)
        N_fracs = len(sorn.c.frac_A)
        # Locate the BayesStat instance whose readout we reuse
        bayes_stat = None
        for stat in sorn.stats.methods:
            # Bugfix: compare with == instead of "is" -- string identity
            # depends on interning and can silently fail to match
            if stat.name == 'Bayes':
                bayes_stat = stat
                break
        assert(bayes_stat is not None)
        # Remember the original prediction position to restore it later
        pred_pos_old = bayes_stat.pred_pos
        #output_dist = np.zeros((word_length-1,N_fracs))
        output_dist = np.zeros((word_length,N_fracs))
        # Minimal number of trials over all input indices
        min_trials = inf
        for i in range(int(max(c.inputi_test))+1):
            tmp = sum(c.inputi_test == i)
            if min_trials > tmp:
                min_trials = tmp
        decisions = np.zeros((N_words,word_length,min_trials),\
                             dtype=np.bool)
        seq_count = np.zeros((N_words,4))
        for (p,pp) in enumerate(arange(0,word_length)):
            # Rerun the Bayes readout for each position in the word
            bayes_stat.pred_pos = pp
            bayes_stat.report(c,sorn)
            pred_pos = c.pred_pos
            raw_predictions = c.raw_predictions
            inputi_test = c.inputi_test
            #~ summed = abs(raw_predictions[:,0])+abs(raw_predictions[:,1])
            #~ summed[summed<1e-10] = 1 # if predicted 0, leave at 0
            #~ raw_predictions[:,0] /= summed
            #~ raw_predictions[:,1] /= summed
            denom = np.zeros((N_fracs))
            for (w,(s_word,s_index)) in enumerate(zip(source.words,
                                                  source.glob_ind)):
                # Map word to its frac_A condition via its first letter
                i = ''.join(letters_for_frac).find(s_word[0])
                indices = find(inputi_test==s_index+pred_pos)
                # |gain_A - gain_B| at this position
                tmp = abs(raw_predictions[indices,0]-
                          raw_predictions[indices,1])
                output_dist[p,i] += mean(tmp)
                decisions[w,p,:] = raw_predictions[
                                      indices[-min_trials:],0]>\
                                   raw_predictions[indices[-min_trials:],1]
                denom[i] += 1
            output_dist[p,:] /= denom
        for i in range(N_words):
            # Full-length 1s to be expected
            seq_count[i,0] = ((sum(decisions[i])/(1.*min_trials*
                                word_length))**(word_length))*min_trials
            # Actual 1-series
            seq_count[i,1] = sum(sum(decisions[i],0)==word_length)
            # Same for 0-series
            seq_count[i,2] = ((1-(sum(decisions[i])/(1.*min_trials*
                                word_length)))**(word_length))*min_trials
            seq_count[i,3] = sum(sum(decisions[i],0)==0)
        # Restore the BayesStat state for stats that run afterwards
        bayes_stat.pred_pos = pred_pos_old
        bayes_stat.report(c,sorn)
        return output_dist
class OutputDistStat(AbstractStat):
    """
    This stat reports the distance between output gains as an indicator
    for whether the decision is based on chance or on attractor dynamics
    """
    def __init__(self):
        self.name = 'OutputDist'
        self.collection = 'gather'
    def report(self,c,sorn):
        # Read stuff in (all produced earlier by BayesStat.report)
        letters_for_frac = c.letters_for_frac
        raw_predictions = c.raw_predictions
        inputi_test = c.inputi_test
        pred_pos = c.pred_pos
        if isinstance(sorn.source,TrialSource): # if TrialSource
            source = sorn.source.source
        else:
            source = sorn.source
        N_fracs = len(sorn.c.frac_A)
        # Normalize the two gains to relative magnitudes.
        # NOTE(review): this divides c.raw_predictions in place, so any
        # stat running after this one sees the normalized values --
        # confirm that this ordering is intended.
        summed = abs(raw_predictions[:,0])+abs(raw_predictions[:,1])
        summed[summed<1e-10] = 1 # if predicted 0, leave at 0
        raw_predictions[:,0] /= summed
        raw_predictions[:,1] /= summed
        # Per-condition mean and std of |gain_A - gain_B|
        output_dist = np.zeros((N_fracs))
        output_std = np.zeros((N_fracs))
        denom = np.zeros((N_fracs))
        for (s_word,s_index) in zip(source.words,source.glob_ind):
            # Map word to its frac_A condition via its first letter
            i = ''.join(letters_for_frac).find(s_word[0])
            indices = find(inputi_test==s_index+pred_pos)
            tmp = abs(raw_predictions[indices,0]-
                      raw_predictions[indices,1])
            output_dist[i] += mean(tmp)
            output_std[i] += std(tmp)
            denom[i] += 1
        # Average over words that share a condition
        output_dist /= denom
        output_std /= denom
        to_return = vstack((output_dist,output_std))
        return to_return
class TrialBayesStat(AbstractStat):
    """
    This stat looks at the interaction of spontaneous activity before
    stimulus onset with the final prediction
    index: int
        Word index (global) for which prediction is done
    """
    def __init__(self):
        self.name = 'TrialBayes'
        self.collection = 'gather'
    def report(self,c,sorn):
        # Read stuff in (classifier, pred_pos etc. come from BayesStat)
        STA_window = 50
        pred_pos = c.pred_pos
        classifier_old = c.classifier
        noinput_units = c.noinput_units
        steps_plastic = sorn.c.steps_plastic
        N_train_steps = sorn.c.steps_noplastic_train
        N_inputtrain_steps = steps_plastic + N_train_steps
        N_test_steps = sorn.c.steps_noplastic_test
        # Transpose because this is the way they are in test_bayes.py
        # Use all neurons because we're predicting from spont activity
        Xtest = c.spikes[:,N_inputtrain_steps:].T
        inputi_test = c.inputindex[N_inputtrain_steps:]
        N_exc = shape(Xtest)[1]
        if isinstance(sorn.source,TrialSource): # if TrialSource
            source = sorn.source.source
        else:
            raise NotImplementedError
        # select middle word
        index = source.glob_ind[1+(shape(source.glob_ind)[0]-3)//2]
        forward_pred = sorn.c.stats.forward_pred
        # Trial onsets with a full STA window before and the prediction
        # position still inside the recording
        start_indices = find(inputi_test==index)
        # * is element-wise AND
        start_indices = start_indices[(start_indices>STA_window) *
         ((start_indices+pred_pos+forward_pred)<shape(inputi_test)[0])]
        N_samples = shape(start_indices)[0]
        pred_indices = find(inputi_test==(index+pred_pos))
        pred_indices = pred_indices[(pred_indices>=start_indices[0])*
                    ((pred_indices+forward_pred)<shape(inputi_test)[0])]
        assert(N_samples == shape(pred_indices)[0])
        # Readout predictions at the prediction position
        if sorn.c.stats.bayes_noinput:
            raw_predictions = Xtest[:,noinput_units].dot(classifier_old)
        else:
            raw_predictions = Xtest.dot(classifier_old)
        predictions = raw_predictions[pred_indices,:]
        # Two different baselines
        #~ test_base = ones((shape(Xtest)[0],1))
        test_base = Xtest.copy()
        shuffle(test_base) # without shuffle, identical predictions
        test_base = hstack((test_base,ones((shape(Xtest)[0],1))))
        # Add bias term to exclude effects of varability
        N_exc += 1
        Xtest = hstack((Xtest,ones((shape(Xtest)[0],1))))
        # Divide into train and test set (first half / second half)
        predictions_train = predictions[:N_samples//2]
        predictions_test = predictions[N_samples//2:]
        train_A = predictions_train[:,0]>predictions_train[:,1]
        train_B = train_A==False
        train_A = find(train_A==True)
        train_B = find(train_B==True)
        # This case is filtered out during plotting
        if not(shape(train_A)[0]>0 and shape(train_B)[0]>0):
            return np.ones((2,STA_window))*-1
        agreement_lstsq = np.zeros(STA_window)
        agreement_base = np.zeros(STA_window)
        # This maps 0/1 spikes to -1/1 spikes for later * comparison
        predtrain_lstsq = (predictions_train[:,0]>\
                           predictions_train[:,1])*2-1
        predtest_lstsq = (predictions_test[:,0]>\
                          predictions_test[:,1])*2-1
        # Prediction with spontaneous activity: for each offset i before
        # onset, fit on the train half and score agreement on test half
        for i in range(-STA_window,0):
            classifier_lstsq = lstsq_reg(Xtest[\
                    start_indices[:N_samples//2]+i+forward_pred,:],\
                    predtrain_lstsq,sorn.c.stats.lstsq_mue)
            predictions_lstsq = (Xtest[start_indices[N_samples//2:]+i\
                                +forward_pred,:]).dot(classifier_lstsq)
            # this is where the -1/1 comes in
            agreement_lstsq[i] = sum((predictions_lstsq*predtest_lstsq)\
                                 >0)/(1.*N_samples//2)
        # Baseline prediction (loop is unnecessary and for similarity)
        for i in range(-STA_window,0):
            classifier_base = lstsq_reg(test_base[\
                    start_indices[:N_samples//2]+i+forward_pred,:],\
                    predtrain_lstsq,sorn.c.stats.lstsq_mue)
            predictions_base = (test_base[start_indices[N_samples//2:]+i\
                               +forward_pred,:]).dot(classifier_base)
            agreement_base[i] = sum((predictions_base*predtest_lstsq)\
                                >0)/(1.*N_samples//2)
        # STA - not used
        trials = np.zeros((N_samples,STA_window,N_exc))
        for i in range(N_samples):
            trials[i,:,:] = Xtest[start_indices[i]-STA_window\
                            +forward_pred:start_indices[i]+forward_pred,:]
        STA_A = mean(trials[train_A,:,:],0)
        STA_B = mean(trials[train_B,:,:],0)
        N_test = N_samples-N_samples//2
        overlap_A = np.zeros((N_test,STA_window,N_exc))
        overlap_B = np.zeros((N_test,STA_window,N_exc))
        for i in range(N_samples//2,N_samples):
            overlap_A[i-N_samples//2] = trials[i]*STA_A
            overlap_B[i-N_samples//2] = trials[i]*STA_B
        agreement = np.zeros(STA_window)
        pred_gain_A = predictions_test[:,0]>predictions_test[:,1]
        for i in range(STA_window):
            pred_STA_A = sum(overlap_A[:,i,:],1)>sum(overlap_B[:,i,:],1)
            agreement[i] = sum(pred_gain_A == pred_STA_A)
        agreement /= float(shape(pred_gain_A)[0])
        return vstack((agreement_base, agreement_lstsq))
class SpontBayesStat(AbstractStat):
    """
    Relates the activation of A/B input units at cue presentation to
    the readout gains at the prediction position.  Consumes the values
    BayesStat.report stored on c (pred_pos, inputi_test,
    raw_predictions, Xtest, letters_for_frac).
    """
    def __init__(self):
        self.name = 'SpontBayes'
        self.collection = 'gather'
    def report(self,c,sorn):
        # Read stuff in (produced by BayesStat.report)
        pred_pos = c.pred_pos
        inputi_test = c.inputi_test
        raw_predictions = c.raw_predictions
        Xtest = c.Xtest
        # Filter out empty states
        if isinstance(sorn.source,TrialSource): # if TrialSource
            source = sorn.source.source
        else:
            source = sorn.source
        Xtest = Xtest[inputi_test != -1,:]
        inputi_test = inputi_test[inputi_test != -1]
        letters_for_frac = c.letters_for_frac
        # Results will first be saved in dict for simplicity and later
        # subsampled to an array
        cue_act = {}
        pred_gain = {}
        minlen = inf
        for (s_word,s_index) in zip(source.words,source.glob_ind):
            # Map word to its frac_A condition via its first letter
            i = ''.join(letters_for_frac).find(s_word[0])
            # Indices that point to the presentation of the cue relative
            # to the readout
            cue_indices = find(inputi_test==s_index)
            pred_indices = cue_indices+pred_pos
            pred_indices = pred_indices[pred_indices
                                        <shape(inputi_test)[0]]
            # Get x-states at cue_indices and figure out the number of
            # active input units for A and B
            tmp_cue = Xtest[cue_indices]
            tmp_cue = vstack((
                        sum(tmp_cue[:,1==sorn.W_eu.W[:,
                            source.lookup['A']]],1),
                        sum(tmp_cue[:,1==sorn.W_eu.W[:,
                            source.lookup['B']]],1))).T
            tmp_gain = raw_predictions[pred_indices,:]
            # has_key: this module targets Python 2
            if cue_act.has_key(i):
                cue_act[i] = np.append(cue_act[i],tmp_cue,axis=0)
                pred_gain[i] = np.append(pred_gain[i],tmp_gain,axis=0)
            else:
                cue_act[i] = tmp_cue
                pred_gain[i] = tmp_gain
            if shape(cue_act[i])[0]<minlen:
                minlen = shape(cue_act[i])[0]
        # TODO super ugly - try to make prettier
        minlen = 18 # hack for cluster - otherwise variable minlen
        # subsample to make suitable for array
        # (columns: cue act A, cue act B, gain A, gain B)
        n_conditions = max(cue_act.keys())+1
        to_return = np.zeros((n_conditions,minlen,4))
        for i in range(n_conditions):
            to_return[i,:,:2] = cue_act[i][-minlen:]
            to_return[i,:,2:] = pred_gain[i][-minlen:]
        return to_return
class EvokedPredStat(AbstractStat):
    """
    This stat predicts evoked activity from spontaneous activity
    traintimes is an interval of training data
    testtimes is an interval of testing data

    Returns correlations[word, t, 0/1]: the pearson correlation (or
    0/1 match fraction if sorn.c.stats.match) between predicted and
    actual states t steps after word onset, for the spontaneous-state
    predictor (0) and a shuffled baseline predictor (1).
    """
    def __init__(self,traintimes,testtimes,traintest):
        self.name = 'EvokedPred'
        self.collection = 'gather'
        self.traintimes = traintimes
        self.testtimes = testtimes
        self.traintest = traintest
    def report(self,c,sorn):
        # Read data
        traintimes = self.traintimes
        testtimes = self.testtimes
        Xtrain = c.spikes[:,traintimes[0]:traintimes[1]].T
        Xtest = c.spikes[:,testtimes[0]:testtimes[1]].T
        inputi_train = c.inputindex[traintimes[0]:traintimes[1]]
        inputi_test = c.inputindex[testtimes[0]:testtimes[1]]
        # Determine word length
        source = load_source("source_%s"%self.traintest,sorn.c)
        N_words = len(source.words)
        max_word_length = int(max([len(x) for x in source.words]))
        max_spont_length = int(sorn.c['wait_min_%s'%self.traintest]
                               +sorn.c['wait_var_%s'%self.traintest])
        pred_window = max_word_length + max_spont_length+max_word_length
        correlations = zeros((N_words,pred_window,2))
        import scipy.stats as stats
        # Convert 0/1 spike trains to -1/1 spike trains if needed
        if sorn.c.stats.match:
            Xtrain *= 2
            Xtrain -= 1
            Xtest *= 2
            Xtest -= 1
        word_length = 0
        for (w,word) in enumerate(source.words):
            # word_length accumulates to the global onset index of word w
            word_starts_train = find(inputi_train==(word_length))
            word_starts_train = word_starts_train[(word_starts_train>0)\
                         *(word_starts_train<(shape(Xtrain)[0]-pred_window))]
            word_starts_test = find(inputi_test==(word_length))
            word_starts_test = word_starts_test[word_starts_test<\
                                        (shape(Xtest)[0]-pred_window)]
            bias_train = ones((shape(word_starts_train)[0],1))
            bias_test = ones((shape(word_starts_test)[0],1))
            # Baseline: shuffled pre-onset states + bias
            base_train = Xtrain[word_starts_train-1,:].copy()
            base_test = Xtest[word_starts_test-1,:].copy()
            shuffle(base_train)
            shuffle(base_test)
            base_train = hstack((bias_train,base_train))
            base_test = hstack((bias_test,base_test))
            # Predictor: actual pre-onset (spontaneous) states + bias
            sp_train = hstack((bias_train,Xtrain[word_starts_train-1,:]))
            sp_test = hstack((bias_test,Xtest[word_starts_test-1,:]))
            #~ sp_train = bias_train <-- this is a STA!
            #~ sp_test = bias_test
            for t in range(pred_window):
                # First do a least-squares fit
                Xt_train = Xtrain[word_starts_train+t,:]
                Xt_test = Xtest[word_starts_test+t,:]
                # regularize with mue to avoid problems when #samples <
                # #neurons
                classifier = lstsq_reg(sp_train,Xt_train,
                                       sorn.c.stats.lstsq_mue)
                classifier_base = lstsq_reg(base_train,Xt_train,
                                            sorn.c.stats.lstsq_mue)
                Xt_pred = sp_test.dot(classifier)
                # Bugfix: the baseline prediction must use the baseline
                # classifier; before, classifier_base was computed but
                # never used and the baseline reused `classifier`
                base_pred = base_test.dot(classifier_base)
                # Baseline = STA
                #~ base = mean(Xt_train,0)
                #~ base_pred = array([base,]*shape(Xt_test)[0])
                # Don't use this because the paper uses correlation
                # Don't use this because of lower bound for zeros
                # instead of pearsonr - lower bound = 1-h.ip
                # -> spont pred always better
                def match(x,y):
                    assert(shape(x) == shape(y))
                    x = x>0
                    y = y>0
                    return sum(x==y)/(1.0*shape(x)[0])
                if not sorn.c.stats.match:
                    correlations[w,t,0] = stats.pearsonr(
                            Xt_pred.flatten(),Xt_test.flatten())[0]
                    correlations[w,t,1] = stats.pearsonr(
                            base_pred.flatten(),Xt_test.flatten())[0]
                else:
                    correlations[w,t,0] = match(Xt_pred.flatten(),
                                                Xt_test.flatten())
                    correlations[w,t,1] = match(base_pred.flatten(),
                                                Xt_test.flatten())
            word_length += len(word)
        # Correlations are sorted like the words:
        # A B C D E ... B = 0*A C = 0.1*A, D=0.2*A ...
        return correlations
class SpikesStat(AbstractStat):
    """
    Records the spike raster (excitatory by default, inhibitory when
    inhibitory=True).  If sorn.c.stats.only_last is set, a uniformly
    subsampled prefix plus the final only_last steps are stored
    (2*only_last columns) instead of the full run.
    """
    def __init__(self,inhibitory = False):
        if inhibitory:
            self.name = 'SpikesInh'
            self.sattr = 'spikes_inh'
        else:
            self.name = 'Spikes'
            self.sattr = 'spikes'
        self.collection = 'gather'
        self.inh = inhibitory
    def clear(self,c,sorn):
        if self.inh:
            self.neurons = sorn.c.N_i
        else:
            self.neurons = sorn.c.N_e
        if sorn.c.stats.has_key('only_last'):
            # only_last subsampled steps + only_last final steps
            steps = sorn.c.stats.only_last+sorn.c.stats.only_last
            c[self.sattr] = zeros((self.neurons,steps))
        else:
            c[self.sattr] = zeros((self.neurons,sorn.c.N_steps))
        self.step = 0
    def add(self,c,sorn):
        if self.inh:
            spikes = sorn.y
        else:
            spikes = sorn.x
        if sorn.c.stats.has_key('only_last'):
            new_step = self.step - (sorn.c.N_steps\
                                    -sorn.c.stats.only_last)
            if new_step >= 0:
                # Final phase: record every step
                c[self.sattr][:,new_step+sorn.c.stats.only_last] \
                        = spikes
            elif self.step % (sorn.c.N_steps\
                              //sorn.c.stats.only_last) == 0:
                # Earlier phase: record every N_steps//only_last-th step
                c[self.sattr][:,self.step//(sorn.c.N_steps\
                              //sorn.c.stats.only_last)] = spikes
        else:
            c[self.sattr][:,self.step] = spikes
        self.step += 1
    def report(self,c,sorn):
        if sorn.c.stats.save_spikes:
            return c[self.sattr]
        else:
            # Keep the report format uniform without saving the raster
            return zeros(0)
class CondProbStat(AbstractStat):
    """
    Conditional spike-probability matrix: entry (i,j) is the frequency
    of a spike in neuron i one step after a spike in neuron j.
    """
    def __init__(self):
        self.name='CondProb'
        self.collection='gather'
    def clear(self,c,sorn):
        pass
    def add(self,c,sorn):
        pass
    def report(self,c,sorn):
        # Use the test phase: more diverse data than during training
        n_steps = sorn.c.steps_noplastic_test
        test_spikes = c.spikes[:,-n_steps:]
        n_neurons = shape(test_spikes)[0]
        cond = np.zeros((n_neurons,n_neurons))
        # For every step, add the predecessor pattern to the rows of
        # all neurons that spike now
        t = 1
        while t < n_steps:
            cond[test_spikes[:,t]==1,:] += test_spikes[:,t-1]
            t += 1
        # Normalize column j by the total spike count of neuron j
        totals = sum(test_spikes,1)
        cond /= totals[None,:]
        return cond
class BalancedStat(AbstractStat):
    """
    Records excitatory drive, inhibitory drive and thresholds of the
    excitatory population to assess how balanced the network operates.
    """
    def __init__(self):
        self.name='Balanced'
        self.collection='gather'
    def clear(self,c,sorn):
        self.N_e = sorn.c.N_e
        self.step = 0
        # rows [0,N_e): exc. drive, [N_e,2N_e): inh. drive,
        # [2N_e,3N_e): thresholds
        c.balanced = zeros((self.N_e*3,sorn.c.N_steps))
    def add(self,c,sorn):
        n = self.N_e
        t = self.step
        c.balanced[:n,t] = sorn.W_ee*sorn.x
        c.balanced[n:2*n,t] = sorn.W_ei*sorn.y
        c.balanced[2*n:,t] = sorn.T_e
        self.step = t+1
    def report(self,c,sorn):
        return c.balanced
class RateStat(AbstractStat):
    """
    This stat returns a matrix of firing rates of each presynaptic
    neuron
    """
    def __init__(self):
        self.name = 'Rate'
        self.collection='gather'
    def clear(self,c,sorn):
        pass
    def add(self,c,sorn):
        pass
    def report(self,c,sorn):
        # Same interval as CondProbStat
        n_steps = sorn.c.steps_noplastic_test
        test_spikes = c.spikes[:,-n_steps:]
        n_neurons = shape(test_spikes)[0]
        rates = mean(test_spikes,1)
        # Repeat the rate vector once per neuron
        return array([rates for _ in xrange(n_neurons)])
class InputStat(AbstractStat):
    """Records the external input drive W_eu*u at every step."""
    def __init__(self):
        self.name = 'Input'
        self.collection = 'gather'
    def clear(self,c,sorn):
        self.step = 0
        c.inputs = zeros((sorn.c.N_e,sorn.c.N_steps))
    def add(self,c,sorn):
        c.inputs[:,self.step] = sorn.W_eu*sorn.u
        self.step = self.step+1
    def report(self,c,sorn):
        return c.inputs
class FullEndWeightStat(AbstractStat):
    """
    Final weight matrix over the whole network, assembled from the
    four E/I weight blocks; the I-I quadrant is zero.
    """
    def __init__(self):
        self.name = 'FullEndWeight'
        self.collection = 'gather'
    def clear(self,c,sorn):
        pass
    def add(self,c,sorn):
        pass
    def report(self,c,sorn):
        left = np.vstack((sorn.W_ee.get_synapses(),
                          sorn.W_ie.get_synapses()))
        right = np.vstack((sorn.W_ei.get_synapses(),
                           np.zeros((sorn.c.N_i,sorn.c.N_i))))
        return np.array(hstack((left,right)))
class EndWeightStat(AbstractStat):
    """Final excitatory-excitatory weight matrix (dense)."""
    def __init__(self):
        self.name = 'endweight'
        self.collection = 'gather'
    def clear(self,c,sorn):
        pass
    def add(self,c,sorn):
        pass
    def report(self,c,sorn):
        if sorn.c.W_ee.use_sparse:
            # Densify the sparse weight matrix
            return np.array(sorn.W_ee.W.todense())
        # Mask out entries without a realized synapse
        return sorn.W_ee.W*(sorn.W_ee.M==1)
class ISIsStat(AbstractStat):
    """
    Histograms the inter-spike intervals (0..99 steps) of all neurons
    that receive no direct input.

    interval: [start,stop] pair of simulation steps over which ISIs
        are collected; defaults to the whole run.
    """
    def __init__(self,interval=None):
        self.name = 'ISIs'
        self.collection = 'gather'
        # Bugfix: avoid a mutable default argument ([]); the same list
        # object would be shared between all instances of this stat
        if interval is None:
            interval = []
        self.interval = interval
    def clear(self,c,sorn):
        # Restrict to neurons without input connections
        self.mask = sum(sorn.W_eu.get_synapses(),1)==0
        self.N_noinput = sum(self.mask)
        # ISIs[n,d]: count of ISIs of length d for masked neuron n
        self.ISIs = zeros((self.N_noinput,100))
        # isis[n]: steps since neuron n last spiked
        self.isis = zeros(self.N_noinput)
        self.step = 0
        if self.interval == []:
            self.interval = [0,sorn.c.N_steps]
    def add(self,c,sorn):
        if ((self.step > self.interval[0] and
             self.step < self.interval[1]) and
             ((not sorn.c.stats.has_key('only_last')) \
               or (self.step > sorn.c.stats.only_last))):
            spikes = sorn.x[self.mask]
            self.isis[spikes==0] += 1
            # Bugfix: pair each spiking neuron with ITS OWN interval.
            # Before, the interval list was filtered (<100) BEFORE
            # zipping with the unfiltered index list, so after one
            # interval >= 100 all later pairs were misaligned.
            spiked = where(spikes==1)[0]
            for (n,d) in zip(spiked,self.isis[spikes==1].astype(int)):
                if d < 100:
                    self.ISIs[n,d] += 1
            self.isis[spikes==1] = 0
        self.step += 1
    def report(self,c,sorn):
        return self.ISIs
class SynapseFractionStat(AbstractStat):
    """Fraction of realized E-E synapses (reduced over nodes)."""
    def __init__(self):
        self.name = 'SynapseFraction'
        self.collection = 'reduce'
    def report(self,c,sorn):
        total = sorn.c.N_e*sorn.c.N_e
        if sorn.c.W_ee.use_sparse:
            # Count strictly positive entries of the sparse matrix
            n_syn = sum((sorn.W_ee.W.data>0)+0.0)
            return array(n_syn/total)
        return array(sum(sorn.W_ee.M)/total)
class ConnectionFractionStat(AbstractStat):
    """
    Tracks the fraction of realized E-E connections over time.

    If sorn.c.stats.only_last is set, a uniformly subsampled prefix
    plus the final only_last steps are recorded (2*only_last entries);
    otherwise one entry per simulation step.
    """
    def __init__(self):
        self.name = 'ConnectionFraction'
        self.collection = 'gather'
    def _fraction(self,sorn):
        # Fraction of nonzero E-E synapses (deduplicated from the
        # three identical expressions in add()).
        # NOTE(review): if the file does not use
        # "from __future__ import division", these integer sums divide
        # with truncation in Python 2 -- preserved as in the original;
        # confirm intended (SynapseFractionStat adds +0.0 instead).
        if sorn.c.W_ee.use_sparse:
            return sum((sorn.W_ee.W.data>0)+0)/(sorn.c.N_e*sorn.c.N_e)
        else:
            return sum(sorn.W_ee.M)/(sorn.c.N_e*sorn.c.N_e)
    def clear(self,c,sorn):
        self.step = 0
        if sorn.c.stats.has_key('only_last'):
            self.cf = zeros(sorn.c.stats.only_last\
                            +sorn.c.stats.only_last)
        else:
            self.cf = zeros(sorn.c.N_steps)
    def add(self,c,sorn):
        if sorn.c.stats.has_key('only_last'):
            new_step = self.step \
                       - (sorn.c.N_steps-sorn.c.stats.only_last)
            if new_step >= 0:
                # Final phase: record every step
                self.cf[new_step+sorn.c.stats.only_last] = \
                        self._fraction(sorn)
            elif self.step%(sorn.c.N_steps\
                            //sorn.c.stats.only_last) == 0:
                # Earlier phase: subsample uniformly
                self.cf[self.step//(sorn.c.N_steps\
                        //sorn.c.stats.only_last)] = self._fraction(sorn)
        else:
            self.cf[self.step] = self._fraction(sorn)
        self.step += 1
    def report(self,c,sorn):
        return self.cf
class WeightLifetimeStat(AbstractStat):
    """
    Tracks the age (in recorded steps) of every E-E synapse and
    collects the lifetimes of synapses at the moment they disappear.
    """
    def __init__(self):
        self.name = 'WeightLifetime'
        self.collection = 'gather'
    def clear(self,c,sorn):
        if sorn.c.W_ee.use_sparse:
            self.last_M_ee = np.array(sorn.W_ee.W.todense())>0
        else:
            self.last_M_ee = sorn.W_ee.M.copy()
        # lifetimes[i,j]: age of synapse (i,j); 0 = nonexistent
        self.lifetimes = zeros((sorn.c.N_e,sorn.c.N_e))
        self.diedat = np.zeros((1,0))
    def add(self,c,sorn):
        if sorn.c.W_ee.use_sparse:
            new_M_ee = np.array(sorn.W_ee.W.todense())>0
        else:
            new_M_ee = sorn.W_ee.M
        # Difference -1 marks synapses present before but gone now;
        # record their lifetime at the moment of death
        self.diedat = append(self.diedat, \
                   self.lifetimes[(new_M_ee+0-self.last_M_ee+0)==-1])
        # remove dead synapses
        self.lifetimes *= new_M_ee+0
        #increase lifetime of existing ones
        self.lifetimes += (self.lifetimes>0)+0
        #add new ones
        self.lifetimes += ((new_M_ee+0-self.last_M_ee+0)==1)+0
        self.last_M_ee = new_M_ee.copy()
    def report(self,c,sorn):
        # Pad with -1 so every node returns an equally sized array
        padding = (-1)*np.ones(2*sorn.c.N_steps\
                   +shape(self.last_M_ee)[0]**2-self.diedat.size)
        return np.append(self.diedat,padding)
class WeightChangeStat(AbstractStat):
    """
    Records weights plus their absolute and relative change between
    two fixed steps (self.start and self.end) of the simulation.
    """
    def __init__(self):
        self.name = 'WeightChange'
        self.collection = 'gather'
    def clear(self,c,sorn):
        self.step = 0
        # Steps between which the weight change is measured
        self.start = 2999
        self.end = 5999
        self.save_W_ee = []
        self.abschange = []
        self.relchange = []
        self.weights = []
    def add(self,c,sorn):
        # Snapshot the weights at the start step
        if(self.step == self.start):
            if sorn.c.W_ee.use_sparse:
                self.save_W_ee = np.array(sorn.W_ee.W.todense())
            else:
                self.save_W_ee = sorn.W_ee.W.copy()
        # Compare against the snapshot at the end step
        if(self.step == self.end):
            if sorn.c.W_ee.use_sparse:
                diff = np.array(sorn.W_ee.W.todense())-self.save_W_ee
            else:
                diff = sorn.W_ee.W-self.save_W_ee
            self.weights = self.save_W_ee[diff!=0]
            self.abschange = (diff[diff!=0])
            seterr(divide='ignore')
            # Some weights become 0 and thereby elicit division by 0
            # and try except RuntimeWarning didn't work
            self.relchange = self.abschange/self.weights*100
            seterr(divide='warn')
            # append zeros to always have the same size
            tmp_zeros = np.zeros(shape(self.save_W_ee)[0]**2\
                                 -self.weights.size)
            self.weights = np.append(self.weights,tmp_zeros)
            self.abschange = np.append(self.abschange,tmp_zeros)
            self.relchange = np.append(self.relchange,tmp_zeros)
        self.step += 1
    def report(self,c,sorn):
        stacked = np.vstack((self.weights, self.abschange,\
                             self.relchange))
        return stacked
class WeightChangeRumpelStat(AbstractStat):
    """
    Tracks weight changes of persistently present synapses between
    periodic imaging snapshots (chronic-imaging style analysis).
    """
    def __init__(self):
        self.name = 'WeightChangeRumpel'
        self.collection = 'gather'
    def clear(self,c,sorn):
        self.step = 0
        self.interval = 0
        # First snapshot step and spacing of subsequent snapshots
        self.start = 50001
        self.started = False
        self.imaging_interval = 50000
        self.N_intervals = (sorn.c.N_steps-self.start)\
                           //self.imaging_interval+1
        self.save_W_ees = np.zeros((self.N_intervals,sorn.c.N_e,\
                                    sorn.c.N_e))
        self.constant_weights = []
        self.abschange = []
        self.relchange = []
        self.weights = []
    def add(self,c,sorn):
        # Periodic snapshots once recording has started
        if(self.step%self.imaging_interval == 0 and self.started):
            self.save_W_ees[self.interval,:,:] \
                = sorn.W_ee.get_synapses()
            # Keep only synapses present in every snapshot so far
            self.constant_weights *= (self.save_W_ees[self.interval,\
                                                      :,:]>0)
            self.interval += 1
        # First snapshot initializes the persistence mask
        if(self.step == self.start):
            self.save_W_ees[self.interval,:,:] \
                = sorn.W_ee.get_synapses()
            self.constant_weights \
                = (self.save_W_ees[self.interval,:,:].copy()>0)
            self.interval = 1
            self.started = True
        self.step += 1
    def report(self,c,sorn):
        # Bugfix: removed a leftover "import pdb; pdb.set_trace()"
        # debugger breakpoint that halted every report() call.
        # compute diffs and multiply with const
        diffs = self.save_W_ees[1:,:,:] - self.save_W_ees[:-1,:,:]
        diffs *= self.constant_weights
        self.abschange = (diffs[diffs!=0])
        self.weights = self.save_W_ees[:-1,:,:][diffs!=0]
        self.relchange = self.abschange/self.weights*100
        # append zeros to always have the same size
        tmp_zeros = np.zeros((self.N_intervals-1)\
                    *shape(self.save_W_ees)[1]**2-self.weights.size)
        self.weights = np.append(self.weights,tmp_zeros)
        self.abschange = np.append(self.abschange,tmp_zeros)
        self.relchange = np.append(self.relchange,tmp_zeros)
        stacked = np.vstack((self.weights, self.abschange,\
                             self.relchange))
        return stacked
class SmallWorldStat(AbstractStat):
    """
    Computes gamma = C/C_rand, lambda = L/L_rand and the small-world
    index S_w = gamma/lambda of the final binary E-E graph, where C is
    the clustering coefficient, L the characteristic path length, and
    the _rand values are averages over random graphs of equal density.
    """
    def __init__(self):
        self.name = 'smallworld'
        self.collection = 'gather'
    def clear(self,c,sorn):
        pass
    def add(self,c,sorn):
        pass
    def report(self,c,sorn):
        if sorn.c.stats.rand_networks <= 0:
            # Comparison to random networks disabled
            return np.array([])
        if sorn.c.W_ee.use_sparse:
            weights = np.array(sorn.W_ee.W.todense())
        else:
            weights = sorn.W_ee.W*(sorn.W_ee.M==1)
        # NOTE(review): "weights>0.0+0.0" parses as weights>(0.0+0.0);
        # the float conversion only happens on the next line
        tmp = weights>0.0+0.0
        binary_connections = tmp+0.0
        def all_pairs_shortest_path(graph_matrix):
            # adapted Floyd-Warshall Algorithm (O(N^3), pure Python)
            N = shape(graph_matrix)[0]
            distances = graph_matrix.copy()
            #Set missing connections to max length
            distances[distances==0] += N*N
            for k in range(N):
                for i in range(N):
                    for j in range(N):
                        if i==j:
                            distances[i,j] = 0
                        else:
                            distances[i,j] = min(distances[i,j],
                                                 distances[i,k]
                                                 +distances[k,j])
            return distances
        def characteristic_path_length(graph_matrix):
            N = shape(graph_matrix)[0]
            distances = all_pairs_shortest_path(graph_matrix.T)
            if any(distances == N*N):
                print 'Disconnected elements in char. path len calc.'
            # ignore disconnected elements
            distances[distances==N*N] = 0
            average_length = sum(distances[distances>0]*1.0)\
                             /sum(graph_matrix[distances>0]*1.0)
            return average_length
        def cluster_coefficient(graph_matrix):
            # From Fagiolo, 2007 and Gerhard, 2011
            N = shape(graph_matrix)[0]
            in_degree = sum(graph_matrix,1)
            out_degree = sum(graph_matrix,0)
            k = in_degree+out_degree
            A = graph_matrix
            A_T = A.transpose()
            A_A_T = A + A_T
            A_2 = np.dot(A,A)
            nominator = np.dot(A_A_T,np.dot(A_A_T,A_A_T))
            single_coeff = np.zeros(N)
            for i in range(N):
                single_coeff[i] = nominator[i,i]/(2.0*(k[i]*(k[i]-1)\
                                  -2.0*(A_2[i,i])))
                if(np.isnan(single_coeff[i])):
                    # if total degree <= 1, the formula divides by 0
                    single_coeff[i] = 0
            return 1.0*sum(single_coeff)/(N*1.0)
        L = characteristic_path_length(binary_connections)
        C = cluster_coefficient(binary_connections)
        # Average over some random networks
        N = shape(binary_connections)[0]
        edge_density = sum(binary_connections)/(1.0*N*N-N)
        num_rand = sorn.c.stats.rand_networks
        L_rand = np.zeros(num_rand)
        C_rand = np.zeros(num_rand)
        delete_diagonal = np.ones((N,N))
        for i in range(N):
            delete_diagonal[i,i] = 0
        for i in range(num_rand):
            sys.stdout.write('\rRand Graph No.%3i of %3i'%(i+1,\
                             num_rand))
            sys.stdout.flush()
            # Erdos-Renyi-style graph with the same edge density
            tmp = np.random.rand(N,N)<edge_density
            rand_graph = tmp*delete_diagonal
            L_rand[i] = characteristic_path_length(rand_graph)
            C_rand[i] = cluster_coefficient(rand_graph)
        sys.stdout.write('\rAll %i Graphs Done             '%num_rand)
        sys.stdout.flush()
        L_r = sum(L_rand)*1.0/(num_rand*1.0)
        C_r = sum(C_rand)*1.0/(num_rand*1.0)
        gamma = C/C_r
        lam = L/L_r
        S_w = gamma/lam
        return np.array([gamma, lam, S_w])
class ParamTrackerStat(AbstractStat):
    """Reports the current value of the cluster-varied parameter."""
    def __init__(self):
        self.name = 'paramtracker'
        self.collection = 'gather'
    def clear(self,c,sorn):
        pass
    def add(self,c,sorn):
        pass
    def report(self,c,sorn):
        # Walk the dotted parameter path through the config tree
        node = sorn.c
        for key in sorn.c.cluster.vary_param.split('.'):
            node = node[key]
        return np.array([node*1.0])
class InputWeightStat(AbstractStat):
    """Periodic snapshots of the input weight matrix W_eu."""
    def __init__(self):
        self.name = 'InputWeight'
        self.collection = 'gather'
    def clear(self,c,sorn):
        self.step = 0
        self.weights = np.zeros((sorn.c.N_e,sorn.c.N_u_e,\
                                 sorn.c.stats.only_last*2))
    def add(self,c,sorn):
        # Record one snapshot every N_steps//only_last steps
        stride = sorn.c.N_steps//sorn.c.stats.only_last
        if self.step % stride == 0:
            self.weights[:,:,self.step//stride] = \
                    sorn.W_eu.get_synapses()
        self.step += 1
    def report(self,c,sorn):
        return self.weights
class SVDStat(AbstractStat):
    """Track the singular value decomposition of W_ee every `nth` step.

    Stores singular values in c.SVD_singulars and the (sign-corrected)
    left/right singular vectors in c.SVD_U / c.SVD_V.
    """
    def __init__(self,nth = 200):
        self.name = 'SVD'
        self.collection = 'gather'
        # Record an SVD every `nth` plastic step.
        self.nth = nth
    def clear(self,c,sorn):
        self.step = 0
        # Quick hack - there must be a prettier solution
        # (add one extra slot unless steps_plastic divides evenly by nth)
        if sorn.c.steps_plastic % self.nth == 0:
            add1 = 0
        else:
            add1 = 1
        c.SVD_singulars = np.zeros((sorn.c.steps_plastic//self.nth+add1
                                    ,sorn.c.N_e))
        c.SVD_U = np.zeros((sorn.c.steps_plastic//self.nth+add1,
                            sorn.c.N_e,sorn.c.N_e))
        c.SVD_V = np.zeros((sorn.c.steps_plastic//self.nth+add1,
                            sorn.c.N_e,sorn.c.N_e))
    def add(self,c,sorn):
        # Only record during the plastic phase, every nth step.
        if self.step < sorn.c.steps_plastic and self.step%self.nth == 0:
            # Time intensive!
            synapses = sorn.W_ee.get_synapses()
            U,s,V = linalg.svd(synapses)
            c.SVD_singulars[self.step//self.nth,:] = s
            step = self.step//self.nth
            c.SVD_U[step] = U
            # this returns the real V
            # see http://docs.scipy.org/doc/numpy/reference/generated/numpy.linalg.svd.html
            c.SVD_V[step] = V.T
            # Resolve sign ambiguity
            # from http://www.models.life.ku.dk/signflipsvd
            # http://prod.sandia.gov/techlib/access-control.cgi/2007/076422.pdf
            for i in range(sorn.c.N_e):
                # s_left/s_right: signed projection energy of each singular
                # vector onto the data; flip the vector if negative.
                tmp = synapses.T.dot(c.SVD_U[step,:,i])
                tmp = np.squeeze(asarray(tmp))
                s_left = sum(sign(tmp)*tmp**2)
                tmp = synapses.T.dot(c.SVD_V[step,:,i])
                tmp = np.squeeze(asarray(tmp))
                s_right = sum(sign(tmp)*tmp**2)
                # If the two disagree, flip only the weaker side's sign.
                if s_right*s_left < 0:
                    if s_left < s_right:
                        s_left = -s_left
                    else:
                        s_right = -s_right
                c.SVD_U[step,:,i] *= sign(s_left)
                c.SVD_V[step,:,i] *= sign(s_right)
        self.step += 1
    def report(self,c,sorn):
        #~ figure() # combine same submatrices!
        #~ imshow(c.SVD_U[-1][:,0].dot(c.SVD_V[-1][:,0].T)\
        #~         *c.SVD_singulars[-1,0], interpolation='none')
        return c.SVD_singulars
class SVDStat_U(AbstractStat):
    """For each left singular vector, find the most similar input index.

    Requires SVDStat to have filled c.SVD_U, and the norm_last_input_*
    fields to be present in c (presumably filled by an input-tracking
    stat -- TODO confirm against the stat collection setup).
    """
    def __init__(self):
        self.name = 'SVD_U'
        self.collection = 'gather'
    def report(self,c,sorn):
        rec_steps = shape(c.SVD_U)[0]
        similar_input = zeros((rec_steps,sorn.c.N_e))
        # Input indices are assumed to be 0..N_indices-1.
        N_indices = max(c.norm_last_input_index)+1
        indices = [where(c.norm_last_input_index==i)[0] for i in
                   range(int(N_indices))]
        for s in xrange(rec_steps):
            for i in xrange(sorn.c.N_e):
                # U transforms back to "spike space"
                # Check for best similarities
                # Convolution works best:
                #~ overlaps = c.norm_last_input_spikes.T.dot(
                #~                                         c.SVD_U[s,:,i])
                #~ index_overlap = np.zeros(N_indices)
                #~ for j in range(int(N_indices)):
                #~     index_overlap[j] = mean(overlaps[indices[j]])
                #~ similar_input[s,i] = argmax(index_overlap)
                # No big difference to this, but probably more robust
                max_overlap = argmax(c.norm_last_input_spikes.T.dot(
                                                        c.SVD_U[s,:,i]))
                similar_input[s,i] = c.norm_last_input_index[
                                                        max_overlap]
        c.SVD_U_sim = similar_input # for debugging
        return similar_input
class SVDStat_V(AbstractStat):
    """For each right singular vector, find the most similar input index.

    Mirror of SVDStat_U but for c.SVD_V (the input-side vectors).
    """
    def __init__(self):
        self.name = 'SVD_V'
        self.collection = 'gather'
    def report(self,c,sorn):
        rec_steps = shape(c.SVD_V)[0]
        similar_input = zeros((rec_steps,sorn.c.N_e))
        N_indices = max(c.norm_last_input_index)+1
        indices = [where(c.norm_last_input_index==i)[0] for i in
                   range(int(N_indices))]
        for s in xrange(rec_steps):
            for i in xrange(sorn.c.N_e):
                # V transforms input by taking product
                # Do same here and look which spike vector works best
                #~ overlaps = c.norm_last_input_spikes.T.dot(
                #~                                         c.SVD_V[s,:,i])
                #~ index_overlap = np.zeros(N_indices)
                #~ for j in range(int(N_indices)):
                #~     index_overlap[j] = mean(overlaps[indices[j]])
                #~ similar_input[s,i] = argmax(index_overlap)
                # No big difference to this, but probably more robust
                max_overlap = argmax(c.norm_last_input_spikes.T.dot(
                                        c.SVD_V[s,:,i])) # euclidean norm w/o sqrt
                similar_input[s,i] = c.norm_last_input_index[
                                                        max_overlap]
        '''
        # For testing purposes command line
        !i = 30
        !similar_input[:,i]
        !c.SVD_U_sim[:,i]
        !figure()
        !plot(c.SVD_V[-1,:,i])
        !max_overlap = argmax(c.norm_last_input_spikes.T.dot(c.SVD_V[s,:,i]))
        !plot(c.norm_last_input_spikes[:,max_overlap])
        !figure()
        !plot(c.SVD_U[-1,:,i])
        !max_overlap = argmax(c.norm_last_input_spikes.T.dot(c.SVD_U[s,:,i]))
        !plot(c.norm_last_input_spikes[:,max_overlap])
        !show()
        '''
        return similar_input
class MeanActivityStat(AbstractStat):
    """
    This stat returns the mean activity for each inputindex.

    Activity is accumulated only for steps in (start, stop). If LFP is
    True, a local-field-potential-like quantity (summed synaptic input)
    is accumulated instead of the spike count; because the recurrent
    input for a step can only be computed at the *next* step, the value
    is buffered in self.tmp and added one step late.
    """
    def __init__(self,start,stop,N_indices,LFP=False):
        self._start = start
        self._stop = stop
        self._N_indices = N_indices
        self.name = 'meanactivity'
        self.collection = 'gather'
        self.LFP = LFP
        # tmp < 0 marks "no buffered value yet" (skip the first sample).
        self.tmp = -1
    def clear(self,c,sorn):
        self.means = zeros(self._N_indices)
        self.counter = zeros(self._N_indices)
        self.step = 0
        self.index = None
    def add(self,c,sorn):
        if self.step > self._start and self.step < self._stop\
           and self.step>0:
            # for proper assignment, blank(-1)->0, 0->1...
            self.index = sorn.source.global_index()+1
        # NOTE(review): this block is intentionally outside the window
        # check above -- self.index keeps its last value once set.
        if self.index is not None:
            if self.tmp >= 0:
                self.counter[self.index] += 1.
            if self.LFP:
                # save input at current step, but can only compute
                # input for next step!
                if self.tmp >= 0:
                    self.means[self.index] += self.tmp+sum(sorn.W_eu
                                                           *sorn.u)
                self.tmp = sum(sorn.W_ee*sorn.x)
            else:
                if self.tmp >= 0:
                    self.means[self.index] += sum(sorn.x)
                self.tmp = 0 # dummy value never used
        #~ # +1 due to -1 for blank trials
        #~ self.index = sorn.source.global_index()+1
        self.step += 1
    def report(self,c,sorn):
        # Mean per input index; indices never seen divide by zero -> nan.
        return self.means/self.counter
class MeanPatternStat(AbstractStat):
    """
    This stat returns the mean activity for each inputindex
    """
    def __init__(self,start,stop,N_indices):
        self._start = start
        self._stop = stop
        self._N_indices = N_indices
        self.name = 'meanpattern'
        self.collection = 'gather'
    def clear(self,c,sorn):
        self.means = zeros((self._N_indices,sorn.c.N_e))
        self.counter = zeros(self._N_indices)
        self.step = 0
        self.index = None
    def add(self,c,sorn):
        # Accumulate only inside the (start, stop) window.
        within = self._start < self.step < self._stop and self.step > 0
        if within:
            # Shift by one so blank trials (index -1) land in slot 0.
            self.index = sorn.source.global_index()+1
            if self.index is not None:
                self.counter[self.index] += 1.
                self.means[self.index] += sorn.x
        self.step += 1
    def report(self,c,sorn):
        # Per-index mean pattern (broadcast divide over neurons).
        return self.means/self.counter[:,None]
class PatternProbabilityStat(AbstractStat):
    """
    This stat estimates the probability distribution of patterns
    for different time intervals
    Intervals: List of 2-entry lists
        [[start1,stop1],...,[startn,stopn]]
    zero_correction: Bool
        Correct estimates by adding one observation to each pattern
    subset: 1-D array
        List of neuron indices that create the pattern
    """
    def __init__(self,intervals,subset,zero_correction=True):
        self.N_intervals = len(intervals)
        self.intervals = intervals
        self.zero_correction = zero_correction
        self.N_nodes = len(subset)
        self.subset = subset
        self.name = 'patternprobability'
        self.collection = 'gather'
        # Powers of two, most significant bit first, so a binary spike
        # pattern dotted with this array yields a unique integer id.
        self.conversion_array = [2**i for i in
                                 range(self.N_nodes-1,-1,-1)]
        self.convert = lambda pattern: np.dot(pattern,
                                              self.conversion_array)
    def clear(self,c,sorn):
        self.patterns = zeros((self.N_intervals,2**self.N_nodes))
        self.step = 0
    def add(self,c,sorn):
        for (interval_index,(start,stop)) in enumerate(self.intervals):
            if start < self.step < stop:
                # Encode the current spiking pattern as a binary number
                # and count one observation of it for this interval.
                pattern_id = self.convert(sorn.x[self.subset])
                self.patterns[interval_index,pattern_id] += 1
        self.step += 1
    def report(self,c,sorn):
        if self.zero_correction:
            self.patterns += 1
        # Normalize each interval's counts to probabilities.
        self.patterns /= self.patterns.sum(1)[:,None]
        return self.patterns
class WeeFailureStat(AbstractStat):
    """Track the fraction of W_ee synapses that have failed at each step.

    Writes a per-step failure fraction into c.weefail.
    """
    def __init__(self):
        self.name = 'weefail'
        self.collection = 'gather'
    def clear(self,c,sorn):
        c.weefail = zeros(sorn.c.N_steps)
        self.step = 0
    def add(self,c,sorn):
        if sorn.c.W_ee.use_sparse:
            # Sparse storage: total entries minus unmasked (working) ones.
            N_weights = sorn.W_ee.W.data.shape[0]
            N_fail = N_weights-sum(sorn.W_ee.mask)
        else:
            # Dense storage: count positive weights before/after masking.
            N_weights = sum(sorn.W_ee.get_synapses()>0)
            N_fail = N_weights-sum(sorn.W_ee.masked>0)
        # BUG FIX: N_fail/N_weights was integer division under Python 2
        # (this module uses xrange), which truncated the failure
        # fraction to 0; force float division.
        c.weefail[self.step] = N_fail/float(N_weights)
        self.step += 1
    def report(self,c,sorn):
        return c.weefail
class WeeFailureFuncStat(AbstractStat):
    """Sample the synaptic failure function of W_ee on a grid over [0,1]."""
    def __init__(self):
        self.name = 'weefailfunc'
        self.collection = 'gather'
    def clear(self,c,sorn):
        # 1000 evenly spaced sample points in [0, 1].
        grid = np.linspace(0,1,1000)
        self.x = grid
        self.y = sorn.W_ee.fail_f(grid)
    def add(self,c,sorn):
        # The function does not change during the run; nothing to do.
        pass
    def report(self,c,sorn):
        return np.array([self.x,self.y])
# From Philip
class XClassifierStat(AbstractStat):
    """Train and test linear classifiers that predict the input U from
    the network state (spikes X and/or thresholded rates R) at various
    temporal offsets."""
    def __init__(self,steps=None, classify_x=True, \
                 classify_r=False,detailed=False,**args):
        '''Steps is a list with the step sizes over which to predict.
        e.g.
        - a step of +1 means predict the next state
        - a step of 0 means identify the current state
        - a step of -1 means identify the previous state
        '''
        if steps is None:
            steps = [0]
        self.steps = steps
        self.classify_x = classify_x
        self.classify_r = classify_r
        self.detailed = detailed
    @property
    def name(self):
        # One stat name per enabled classifier variant.
        ans = []
        if self.classify_x:
            ans.append('xclassifier')
        if self.classify_r:
            ans.append('rclassifier')
        return ans
    def build_classifier(self,inp,out,offset):
        # Use the input to build a classifier of the output with an
        # offset
        # Least-squares fit of (input + bias column) -> output, with the
        # two time series shifted against each other by `offset` steps.
        N = inp.shape[0]
        inp_aug = hstack([inp, ones((N,1))])
        (ib,ie) = (max(-offset,0),min(N-offset,N))
        (ob,oe) = (max(+offset,0),min(N+offset,N))
        try:
            ans = linalg.lstsq(inp_aug[ib:ie,:],out[ob:oe,:])[0]
        except LinAlgError:
            # Degenerate fit: fall back to an all-zero classifier.
            ans = zeros( (inp.shape[1]+1,out.shape[1]) )
        return ans
    def use_classifier(self,inp,classifier,offset,correct):
        # Returns per-class (num correct, num total) over the test set.
        N = inp.shape[0]
        L = classifier.shape[1]
        inp_aug = hstack([inp, ones((N,1))])
        (ib,ie) = (max(-offset,0),min(N-offset,N))
        (ob,oe) = (max(+offset,0),min(N+offset,N))
        ind = argmax(inp_aug[ib:ie,:].dot(classifier),1)
        actual = argmax(correct,1)[ob:oe]
        num = zeros(L)
        den = zeros(L)
        for l in range(L):
            l_ind = actual==l
            num[l] = sum(actual[l_ind]==ind[l_ind])
            den[l] = sum(l_ind)
        return (num,den)
    def report(self,_,sorn):
        c = sorn.c
        #Disable plasticity when measuring network
        sorn.update = False
        #Don't track statistics when measuring either
        self.parent.disable = True
        #Build classifiers
        Nr = c.test_num_train
        Nt = c.test_num_test
        #~ (Xr,Rr,Ur) = sorn.simulation(Nr)
        dic = sorn.simulation(Nr,['X','R_x','U'])
        Xr = dic['X']
        Rr = dic['R_x']
        Ur = dic['U']
        #~ (Xt,Rt,Ut) = sorn.simulation(Nt)
        dic = sorn.simulation(Nt,['X','R_x','U'])
        Xt = dic['X']
        Rt = dic['R_x']
        Ut = dic['U']
        L = Ur.shape[1]
        # Binarize the rate variables (threshold at 0).
        Rr = (Rr >= 0.0)+0
        Rt = (Rt >= 0.0)+0
        r = []
        x = []
        detail_r=[]
        detail_x=[]
        for step in self.steps:
            if self.classify_x:
                classifier = self.build_classifier(Xr,Ur,step)
                (num,den) = self.use_classifier(Xt,classifier,step,Ut)
                ans = sum(num)/sum(den)
                x.append(ans)
                if self.detailed:
                    # Per-class accuracy; epsilon guards empty classes.
                    detail_x.append(num/(den+1e-20))
            if self.classify_r:
                classifier = self.build_classifier(Rr,Ur,step)
                (num,den) = self.use_classifier(Rt,classifier,step,Ut)
                ans = sum(num)/sum(den)
                r.append(ans)
                if self.detailed:
                    detail_r.append(num/(den+1e-20))
        ans = []
        if self.classify_x:
            ans.append( ('xclassifier', 'reduce', array(x)) )
            if self.detailed:
                ans.append( ('x_detail_classifier%d'%L,'reduce',\
                             array(detail_x)) )
        if self.classify_r:
            ans.append( ('rclassifier', 'reduce', array(r)) )
            if self.detailed:
                ans.append( ('r_detail_classifier%d'%L,'reduce',\
                             array(detail_r)) )
        # Re-enable plasticity and stats tracking.
        sorn.update = True
        self.parent.disable = False
        return ans
# From Philip
class XTotalsStat(AbstractStat):
    """Accumulate the total number of spikes of each excitatory neuron."""
    def __init__(self):
        self.name = 'x_tot'
        self.collection = 'gather'
    def clear(self,c,obj):
        c.x_tot = zeros(obj.c.N_e)
    def add(self,c,obj):
        # Running per-neuron sum of the excitatory spike vector.
        c.x_tot += obj.x
    def report(self,c,obj):
        return c.x_tot
# From Philip
class YTotalsStat(AbstractStat):
    """Accumulate the total number of spikes of each inhibitory neuron."""
    def __init__(self):
        self.name = 'y_tot'
        self.collection = 'gather'
    def clear(self,c,obj):
        c.y_tot = zeros(obj.c.N_i)
    def add(self,c,obj):
        # Running per-neuron sum of the inhibitory spike vector.
        c.y_tot += obj.y
    def report(self,c,obj):
        return c.y_tot
# From Philip
class SynapticDistributionStat(AbstractStat):
    """Gather the distribution of existing synaptic strengths in W_ee."""
    def __init__(self,collection='gatherv'):
        self.name = 'synaptic_strength'
        self.collection = collection
    def report(self,_,sorn):
        # Transposing first keeps each neuron's synapses contiguous in
        # memory before boolean-masking out the non-existent ones.
        weights = sorn.W_ee.T
        connection_mask = sorn.M_ee.T
        return weights[connection_mask]
# From Philip
class SuccessiveStat(AbstractStat):
    """Joint histogram of successive population spike counts.

    Entry [i, j] counts how often a step with i active excitatory
    neurons was followed by a step with j active neurons.
    """
    def __init__(self):
        self.name = 'successive'
        self.collection = 'reduce'
    def clear(self,c,sorn):
        n = sorn.c.N_e
        # Counts range from 0 to N_e inclusive, hence N_e+1 bins.
        c.successive = zeros( (n+1,n+1) )
        c.successive_prev = sum(sorn.x)
    def add(self, c, sorn):
        count_now = sum(sorn.x)
        c.successive[c.successive_prev,count_now] += 1.0
        c.successive_prev = count_now
    def report(self,c,sorn):
        return c.successive
# From Philip
class RClassifierStat(AbstractStat):
    """Train three linear decoders of the input U and report their
    test accuracy: (0) lstsq on thresholded rates, (1) pseudo-inverse
    on thresholded rates, (2) lstsq on previous-step spikes."""
    def __init__(self,select=None):
        # select[i] toggles classifier variant i (see class docstring).
        if select is None:
            select = [True,True,True]
        self.name = 'classifier'
        self.collection = 'reduce'
        self.select = select
    def report(self,_,sorn):
        c = sorn.c
        # Freeze plasticity and stat tracking while measuring.
        sorn.update = False
        self.parent.disable = True
        #Build classifiers
        N = c.test_num_train
        #~ (X,R,U) = sorn.simulation(N)
        dic = sorn.simulation(N,['X','R_x','U'])
        X = dic['X']
        R = dic['R_x']
        U = dic['U']
        # Binarize rates and append a bias column.
        R = hstack([R>=0,ones((N,1))])
        if self.select[0]:
            classifier0 = linalg.lstsq(R,U)[0]
        if self.select[1]:
            classifier1 = dot(linalg.pinv(R),U)
        if self.select[2]:
            # Predict next-step input from current spikes (offset of 1).
            X_aug = hstack([X, ones((N,1))])
            classifier2 = linalg.lstsq(X_aug[:-1,:],U[1:,:])[0]
        #Now test classifiers
        N = c.test_num_test
        #~ (X,R,U) = sorn.simulation(N)
        dic = sorn.simulation(N,['X','R_x','U'])
        X = dic['X']
        R = dic['R_x']
        U = dic['U']
        R = hstack([R>=0,ones((N,1))])
        if self.select[0]:
            ind0 = argmax(dot(R,classifier0),1)
        if self.select[1]:
            ind1 = argmax(dot(R,classifier1),1)
        if self.select[2]:
            X_aug = hstack([X, ones((N,1))])
            ind2 = argmax(dot(X_aug[:-1,:],classifier2),1)
        actual = argmax(U,1)
        ans = []
        if self.select[0]:
            ans.append(mean(actual==ind0))
        if self.select[1]:
            ans.append(mean(actual==ind1))
        if self.select[2]:
            # Classifier 2 predicts one step ahead -> compare shifted.
            ans.append(mean(actual[1:]==ind2))
        # Restore plasticity and stat tracking.
        sorn.update = True
        self.parent.disable = False
        return array(ans)
class WeightHistoryStat(HistoryStat):
    # Specialization of HistoryStat for synapse containers: record a
    # *copy* of the dense weight matrix (get_synapses()) every
    # record_every_nth calls, instead of a reference to the live object.
    def add(self,c,obj):
        if not (c.history[self.counter] % self.record_every_nth):
            c.history[self.name].append(np.copy(
                _getvar(obj,self.var).get_synapses()))
        c.history[self.counter] += 1
| mit |
motion2015/edx-platform | cms/djangoapps/contentstore/management/commands/git_export.py | 164 | 2816 | """
This command exports a course from CMS to a git repository.
It takes as arguments the course id to export (i.e MITx/999/2020 ) and
the repository to commit to. It takes username as an option for identifying
the commit, as well as a directory path to place the git repository.
By default it will use settings.GIT_REPO_EXPORT_DIR/repo_name as the cloned
directory. It is branch aware, but will reset all local changes to the
repository before attempting to export the XML, add, and commit changes if
any have taken place.
This functionality is also available as an export view in studio if the giturl
attribute is set and the FEATURE['ENABLE_EXPORT_GIT'] is set.
"""
import logging
from optparse import make_option
from django.core.management.base import BaseCommand, CommandError
from django.utils.translation import ugettext as _
import contentstore.git_export_utils as git_export_utils
from opaque_keys.edx.locations import SlashSeparatedCourseKey
from opaque_keys import InvalidKeyError
from contentstore.git_export_utils import GitExportError
from opaque_keys.edx.keys import CourseKey
log = logging.getLogger(__name__)
class Command(BaseCommand):
    """
    Take a course from studio and export it to a git repository.
    """

    option_list = BaseCommand.option_list + (
        make_option('--username', '-u', dest='user',
                    help=('Specify a username from LMS/Studio to be used '
                          'as the commit author.')),
        make_option('--repo_dir', '-r', dest='repo',
                    help='Specify existing git repo directory.'),
    )

    help = _('Take the specified course and attempt to '
             'export it to a git repository\n. Course directory '
             'must already be a git repository. Usage: '
             ' git_export <course_loc> <git_url>')

    def handle(self, *args, **options):
        """
        Checks arguments and runs export function if they are good
        """

        if len(args) != 2:
            raise CommandError('This script requires exactly two arguments: '
                               'course_loc and git_url')

        # Rethrow GitExportError as CommandError for SystemExit
        try:
            # Prefer the modern opaque-key format, falling back to the
            # deprecated slash-separated course id.
            course_key = CourseKey.from_string(args[0])
        except InvalidKeyError:
            try:
                course_key = SlashSeparatedCourseKey.from_deprecated_string(args[0])
            except InvalidKeyError:
                raise CommandError(unicode(GitExportError.BAD_COURSE))

        try:
            git_export_utils.export_to_git(
                course_key,
                args[1],
                options.get('user', ''),
                # BUG FIX: the --repo_dir option is stored under
                # dest='repo'; it was previously read as 'rdir', which
                # always returned None and silently ignored the flag.
                options.get('repo', None)
            )
        except git_export_utils.GitExportError as ex:
            raise CommandError(unicode(ex.message))
| agpl-3.0 |
crmccreary/openerp_server | openerp/pychart/afm/Helvetica_Bold.py | 15 | 1512 | # -*- coding: utf-8 -*-
# AFM font Helvetica-Bold (path: /usr/share/fonts/afms/adobe/phvb8a.afm).
# Derived from Ghostscript distribution.
# Go to www.cs.wisc.edu/~ghost to get the Ghostcript source code.
import dir
# Register Helvetica-Bold in the shared AFM font directory: the tuple holds
# the per-character advance widths (in 1/1000 em) for character codes 0-255.
dir.afm["Helvetica-Bold"] = (500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 278, 333, 474, 556, 556, 889, 722, 278, 333, 333, 389, 584, 278, 333, 278, 278, 556, 556, 556, 556, 556, 556, 556, 556, 556, 556, 333, 333, 584, 584, 584, 611, 975, 722, 722, 722, 722, 667, 611, 778, 722, 278, 556, 722, 611, 833, 722, 778, 667, 778, 722, 667, 611, 722, 667, 944, 667, 667, 611, 333, 278, 333, 584, 556, 278, 556, 611, 556, 611, 556, 333, 611, 611, 278, 278, 556, 278, 889, 611, 611, 611, 611, 389, 556, 333, 611, 556, 778, 556, 556, 500, 389, 280, 389, 584, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 333, 556, 556, 167, 556, 556, 556, 556, 238, 500, 556, 333, 333, 611, 611, 500, 556, 556, 556, 278, 500, 556, 350, 278, 500, 500, 556, 1000, 1000, 500, 611, 500, 333, 333, 333, 333, 333, 333, 333, 333, 500, 333, 333, 500, 333, 333, 333, 1000, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 1000, 500, 370, 500, 500, 500, 500, 611, 778, 1000, 365, 500, 500, 500, 500, 500, 889, 500, 500, 500, 278, 500, 500, 278, 611, 944, 611, )
| agpl-3.0 |
jallohm/django | tests/db_typecasts/tests.py | 206 | 2412 | # Unit tests for typecast functions in django.db.backends.util
import datetime
import unittest
from django.db.backends import utils as typecasts
from django.utils import six
# Maps each typecast function name in django.db.backends.utils to a list of
# (input string, expected parsed value) pairs; empty string and None must
# both map to None for every function.
TEST_CASES = {
    'typecast_date': (
        ('', None),
        (None, None),
        ('2005-08-11', datetime.date(2005, 8, 11)),
        ('1990-01-01', datetime.date(1990, 1, 1)),
    ),
    'typecast_time': (
        ('', None),
        (None, None),
        ('0:00:00', datetime.time(0, 0)),
        ('0:30:00', datetime.time(0, 30)),
        ('8:50:00', datetime.time(8, 50)),
        ('08:50:00', datetime.time(8, 50)),
        ('12:00:00', datetime.time(12, 00)),
        ('12:30:00', datetime.time(12, 30)),
        ('13:00:00', datetime.time(13, 00)),
        ('23:59:00', datetime.time(23, 59)),
        ('00:00:12', datetime.time(0, 0, 12)),
        ('00:00:12.5', datetime.time(0, 0, 12, 500000)),
        ('7:22:13.312', datetime.time(7, 22, 13, 312000)),
    ),
    'typecast_timestamp': (
        ('', None),
        (None, None),
        ('2005-08-11 0:00:00', datetime.datetime(2005, 8, 11)),
        ('2005-08-11 0:30:00', datetime.datetime(2005, 8, 11, 0, 30)),
        ('2005-08-11 8:50:30', datetime.datetime(2005, 8, 11, 8, 50, 30)),
        ('2005-08-11 8:50:30.123', datetime.datetime(2005, 8, 11, 8, 50, 30, 123000)),
        ('2005-08-11 8:50:30.9', datetime.datetime(2005, 8, 11, 8, 50, 30, 900000)),
        # Timezone offsets in the input are parsed but discarded (naive result).
        ('2005-08-11 8:50:30.312-05', datetime.datetime(2005, 8, 11, 8, 50, 30, 312000)),
        ('2005-08-11 8:50:30.312+02', datetime.datetime(2005, 8, 11, 8, 50, 30, 312000)),
        # ticket 14453
        ('2010-10-12 15:29:22.063202', datetime.datetime(2010, 10, 12, 15, 29, 22, 63202)),
        ('2010-10-12 15:29:22.063202-03', datetime.datetime(2010, 10, 12, 15, 29, 22, 63202)),
        ('2010-10-12 15:29:22.063202+04', datetime.datetime(2010, 10, 12, 15, 29, 22, 63202)),
        # Fractional seconds beyond microseconds are truncated.
        ('2010-10-12 15:29:22.0632021', datetime.datetime(2010, 10, 12, 15, 29, 22, 63202)),
        ('2010-10-12 15:29:22.0632029', datetime.datetime(2010, 10, 12, 15, 29, 22, 63202)),
    ),
}
class DBTypeCasts(unittest.TestCase):
    def test_typeCasts(self):
        # Run every (input, expected) pair through its typecast function.
        for func_name, cases in six.iteritems(TEST_CASES):
            typecast = getattr(typecasts, func_name)
            for inpt, expected in cases:
                got = typecast(inpt)
                self.assertEqual(
                    got, expected,
                    "In %s: %r doesn't match %r. Got %r instead." % (
                        func_name, inpt, expected, got))
frishberg/django | django/contrib/gis/geos/linestring.py | 136 | 6019 | from django.contrib.gis.geos import prototypes as capi
from django.contrib.gis.geos.coordseq import GEOSCoordSeq
from django.contrib.gis.geos.error import GEOSException
from django.contrib.gis.geos.geometry import GEOSGeometry, LinearGeometryMixin
from django.contrib.gis.geos.point import Point
from django.contrib.gis.shortcuts import numpy
from django.utils.six.moves import range
class LineString(LinearGeometryMixin, GEOSGeometry):
    """A GEOS LineString geometry backed by a coordinate sequence."""
    _init_func = capi.create_linestring
    _minlength = 2  # GEOS requires at least two points for a linestring
    has_cs = True
    def __init__(self, *args, **kwargs):
        """
        Initializes on the given sequence -- may take lists, tuples, NumPy arrays
        of X,Y pairs, or Point objects. If Point objects are used, ownership is
        _not_ transferred to the LineString object.
        Examples:
         ls = LineString((1, 1), (2, 2))
         ls = LineString([(1, 1), (2, 2)])
         ls = LineString(array([(1, 1), (2, 2)]))
         ls = LineString(Point(1, 1), Point(2, 2))
        """
        # If only one argument provided, set the coords array appropriately
        if len(args) == 1:
            coords = args[0]
        else:
            coords = args
        if not (isinstance(coords, (tuple, list)) or numpy and isinstance(coords, numpy.ndarray)):
            raise TypeError('Invalid initialization input for LineStrings.')
        # If SRID was passed in with the keyword arguments
        srid = kwargs.get('srid')
        ncoords = len(coords)
        if not ncoords:
            # Empty input builds an empty linestring.
            super(LineString, self).__init__(self._init_func(None), srid=srid)
            return
        if ncoords < self._minlength:
            raise ValueError(
                '%s requires at least %d points, got %s.' % (
                    self.__class__.__name__,
                    self._minlength,
                    ncoords,
                )
            )
        if isinstance(coords, (tuple, list)):
            # Getting the number of coords and the number of dimensions -- which
            # must stay the same, e.g., no LineString((1, 2), (1, 2, 3)).
            ndim = None
            # Incrementing through each of the coordinates and verifying
            for coord in coords:
                if not isinstance(coord, (tuple, list, Point)):
                    raise TypeError('Each coordinate should be a sequence (list or tuple)')
                if ndim is None:
                    ndim = len(coord)
                    self._checkdim(ndim)
                elif len(coord) != ndim:
                    raise TypeError('Dimension mismatch.')
            numpy_coords = False
        else:
            shape = coords.shape # Using numpy's shape.
            if len(shape) != 2:
                raise TypeError('Too many dimensions.')
            self._checkdim(shape[1])
            ndim = shape[1]
            numpy_coords = True
        # Creating a coordinate sequence object because it is easier to
        # set the points using GEOSCoordSeq.__setitem__().
        cs = GEOSCoordSeq(capi.create_cs(ncoords, ndim), z=bool(ndim == 3))
        for i in range(ncoords):
            if numpy_coords:
                cs[i] = coords[i, :]
            elif isinstance(coords[i], Point):
                cs[i] = coords[i].tuple
            else:
                cs[i] = coords[i]
        # Calling the base geometry initialization with the returned pointer
        # from the function.
        super(LineString, self).__init__(self._init_func(cs.ptr), srid=srid)
    def __iter__(self):
        "Allows iteration over this LineString."
        for i in range(len(self)):
            yield self[i]
    def __len__(self):
        "Returns the number of points in this LineString."
        return len(self._cs)
    def _get_single_external(self, index):
        return self._cs[index]
    _get_single_internal = _get_single_external
    def _set_list(self, length, items):
        # Rebuild the underlying GEOS geometry from a full list of points.
        ndim = self._cs.dims
        hasz = self._cs.hasz # I don't understand why these are different
        # create a new coordinate sequence and populate accordingly
        cs = GEOSCoordSeq(capi.create_cs(length, ndim), z=hasz)
        for i, c in enumerate(items):
            cs[i] = c
        ptr = self._init_func(cs.ptr)
        if ptr:
            # Swap in the new geometry and release the old one.
            capi.destroy_geom(self.ptr)
            self.ptr = ptr
            self._post_init(self.srid)
        else:
            # can this happen?
            raise GEOSException('Geometry resulting from slice deletion was invalid.')
    def _set_single(self, index, value):
        self._checkindex(index)
        self._cs[index] = value
    def _checkdim(self, dim):
        # Only 2D and 3D coordinates are supported.
        if dim not in (2, 3):
            raise TypeError('Dimension mismatch.')
    # #### Sequence Properties ####
    @property
    def tuple(self):
        "Returns a tuple version of the geometry from the coordinate sequence."
        return self._cs.tuple
    coords = tuple
    def _listarr(self, func):
        """
        Internal routine that returns a sequence (list) corresponding with
        the given function. Will return a numpy array if possible.
        """
        lst = [func(i) for i in range(len(self))]
        if numpy:
            return numpy.array(lst) # ARRRR!
        else:
            return lst
    @property
    def array(self):
        "Returns a numpy array for the LineString."
        return self._listarr(self._cs.__getitem__)
    @property
    def x(self):
        "Returns a list or numpy array of the X variable."
        return self._listarr(self._cs.getX)
    @property
    def y(self):
        "Returns a list or numpy array of the Y variable."
        return self._listarr(self._cs.getY)
    @property
    def z(self):
        "Returns a list or numpy array of the Z variable."
        if not self.hasz:
            return None
        else:
            return self._listarr(self._cs.getZ)
# LinearRings are LineStrings used within Polygons.
class LinearRing(LineString):
    # A closed LineString: GEOS requires at least 4 points (first == last).
    _minlength = 4
    _init_func = capi.create_linearring
| bsd-3-clause |
kawalpemilu/kawalpemilu2014 | internal-backend/http-server/gyp/test/generator-output/gyptest-symlink.py | 216 | 1292 | #!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies building a target when the --generator-output= option is used to put
the build configuration files in a separate directory tree referenced by a
symlink.
"""
import TestGyp
import os
# Android doesn't support --generator-output.
test = TestGyp.TestGyp(formats=['!android'])

if not hasattr(os, 'symlink'):
  test.skip_test('Missing os.symlink -- skipping test.\n')

# Keep the source tree read-only except where the build must write,
# to prove nothing is generated outside the symlinked output dir.
test.writable(test.workpath('src'), False)

test.writable(test.workpath('src/subdir2/deeper/build'), True)

test.subdir(test.workpath('build'))
test.subdir(test.workpath('build/deeper'))
# Point a symlink at the real build directory; gyp output goes through it.
test.symlink('build/deeper', test.workpath('symlink'))

test.writable(test.workpath('build/deeper'), True)

test.run_gyp('deeper.gyp',
             '-Dset_symroot=2',
             '--generator-output=' + test.workpath('symlink'),
             chdir='src/subdir2/deeper')

chdir = 'symlink'
test.build('deeper.gyp', test.ALL, chdir=chdir)

# Xcode places built products relative to the source directory instead.
if test.format == 'xcode':
  chdir = 'src/subdir2/deeper'
test.run_built_executable('deeper',
                          chdir=chdir,
                          stdout="Hello from deeper.c\n")

test.pass_test()
| agpl-3.0 |
ArneBab/pypyjs | website/demo/home/rfk/repos/pypy/lib-python/2.7/idlelib/TreeWidget.py | 89 | 15231 | # XXX TO DO:
# - popup menu
# - support partial or total redisplay
# - key bindings (instead of quick-n-dirty bindings on Canvas):
# - up/down arrow keys to move focus around
# - ditto for page up/down, home/end
# - left/right arrows to expand/collapse & move out/in
# - more doc strings
# - add icons for "file", "module", "class", "method"; better "python" icon
# - callback for selection???
# - multiple-item selection
# - tooltips
# - redo geometry without magic numbers
# - keep track of object ids to allow more careful cleaning
# - optimize tree redraw after expand of subnode
import os
from Tkinter import *
import imp
from idlelib import ZoomHeight
from idlelib.configHandler import idleConf
ICONDIR = "Icons"

# Look for Icons subdirectory in the same directory as this module
try:
    # __file__ is undefined when run interactively or frozen -> NameError.
    _icondir = os.path.join(os.path.dirname(__file__), ICONDIR)
except NameError:
    _icondir = ICONDIR
if os.path.isdir(_icondir):
    ICONDIR = _icondir
elif not os.path.isdir(ICONDIR):
    raise RuntimeError, "can't find icon directory (%r)" % (ICONDIR,)
def listicons(icondir=ICONDIR):
    """Utility to display the available icons."""
    root = Tk()
    import glob
    # Sorted list of all .gif icon files in the icon directory.
    files = glob.glob(os.path.join(icondir, "*.gif"))
    files.sort()
    images = []
    row = column = 0
    for path in files:
        name = os.path.splitext(os.path.basename(path))[0]
        image = PhotoImage(file=path, master=root)
        images.append(image)
        # Icon on one grid row, its name directly underneath.
        icon_label = Label(root, image=image, bd=1, relief="raised")
        icon_label.grid(row=row, column=column)
        name_label = Label(root, text=name)
        name_label.grid(row=row+1, column=column)
        # Wrap to a new pair of rows after 10 columns.
        column = column + 1
        if column >= 10:
            row = row + 2
            column = 0
    # Keep references so Tk does not garbage-collect the images.
    root.images = images
class TreeNode:
def __init__(self, canvas, parent, item):
self.canvas = canvas
self.parent = parent
self.item = item
self.state = 'collapsed'
self.selected = False
self.children = []
self.x = self.y = None
self.iconimages = {} # cache of PhotoImage instances for icons
def destroy(self):
for c in self.children[:]:
self.children.remove(c)
c.destroy()
self.parent = None
def geticonimage(self, name):
try:
return self.iconimages[name]
except KeyError:
pass
file, ext = os.path.splitext(name)
ext = ext or ".gif"
fullname = os.path.join(ICONDIR, file + ext)
image = PhotoImage(master=self.canvas, file=fullname)
self.iconimages[name] = image
return image
def select(self, event=None):
if self.selected:
return
self.deselectall()
self.selected = True
self.canvas.delete(self.image_id)
self.drawicon()
self.drawtext()
def deselect(self, event=None):
if not self.selected:
return
self.selected = False
self.canvas.delete(self.image_id)
self.drawicon()
self.drawtext()
def deselectall(self):
if self.parent:
self.parent.deselectall()
else:
self.deselecttree()
def deselecttree(self):
if self.selected:
self.deselect()
for child in self.children:
child.deselecttree()
def flip(self, event=None):
if self.state == 'expanded':
self.collapse()
else:
self.expand()
self.item.OnDoubleClick()
return "break"
def expand(self, event=None):
if not self.item._IsExpandable():
return
if self.state != 'expanded':
self.state = 'expanded'
self.update()
self.view()
def collapse(self, event=None):
if self.state != 'collapsed':
self.state = 'collapsed'
self.update()
def view(self):
top = self.y - 2
bottom = self.lastvisiblechild().y + 17
height = bottom - top
visible_top = self.canvas.canvasy(0)
visible_height = self.canvas.winfo_height()
visible_bottom = self.canvas.canvasy(visible_height)
if visible_top <= top and bottom <= visible_bottom:
return
x0, y0, x1, y1 = self.canvas._getints(self.canvas['scrollregion'])
if top >= visible_top and height <= visible_height:
fraction = top + height - visible_height
else:
fraction = top
fraction = float(fraction) / y1
self.canvas.yview_moveto(fraction)
def lastvisiblechild(self):
if self.children and self.state == 'expanded':
return self.children[-1].lastvisiblechild()
else:
return self
def update(self):
if self.parent:
self.parent.update()
else:
oldcursor = self.canvas['cursor']
self.canvas['cursor'] = "watch"
self.canvas.update()
self.canvas.delete(ALL) # XXX could be more subtle
self.draw(7, 2)
x0, y0, x1, y1 = self.canvas.bbox(ALL)
self.canvas.configure(scrollregion=(0, 0, x1, y1))
self.canvas['cursor'] = oldcursor
def draw(self, x, y):
# XXX This hard-codes too many geometry constants!
self.x, self.y = x, y
self.drawicon()
self.drawtext()
if self.state != 'expanded':
return y+17
# draw children
if not self.children:
sublist = self.item._GetSubList()
if not sublist:
# _IsExpandable() was mistaken; that's allowed
return y+17
for item in sublist:
child = self.__class__(self.canvas, self, item)
self.children.append(child)
cx = x+20
cy = y+17
cylast = 0
for child in self.children:
cylast = cy
self.canvas.create_line(x+9, cy+7, cx, cy+7, fill="gray50")
cy = child.draw(cx, cy)
if child.item._IsExpandable():
if child.state == 'expanded':
iconname = "minusnode"
callback = child.collapse
else:
iconname = "plusnode"
callback = child.expand
image = self.geticonimage(iconname)
id = self.canvas.create_image(x+9, cylast+7, image=image)
# XXX This leaks bindings until canvas is deleted:
self.canvas.tag_bind(id, "<1>", callback)
self.canvas.tag_bind(id, "<Double-1>", lambda x: None)
id = self.canvas.create_line(x+9, y+10, x+9, cylast+7,
##stipple="gray50", # XXX Seems broken in Tk 8.0.x
fill="gray50")
self.canvas.tag_lower(id) # XXX .lower(id) before Python 1.5.2
return cy
def drawicon(self):
if self.selected:
imagename = (self.item.GetSelectedIconName() or
self.item.GetIconName() or
"openfolder")
else:
imagename = self.item.GetIconName() or "folder"
image = self.geticonimage(imagename)
id = self.canvas.create_image(self.x, self.y, anchor="nw", image=image)
self.image_id = id
self.canvas.tag_bind(id, "<1>", self.select)
self.canvas.tag_bind(id, "<Double-1>", self.flip)
    def drawtext(self):
        """Draw the optional label text plus the (editable) text Label."""
        textx = self.x+20-1
        texty = self.y-1
        labeltext = self.item.GetLabelText()
        if labeltext:
            id = self.canvas.create_text(textx, texty, anchor="nw",
                                         text=labeltext)
            self.canvas.tag_bind(id, "<1>", self.select)
            self.canvas.tag_bind(id, "<Double-1>", self.flip)
            x0, y0, x1, y1 = self.canvas.bbox(id)
            # Start the main text after the label, at least at column 200.
            textx = max(x1, 200) + 10
        text = self.item.GetText() or "<no text>"
        try:
            self.entry
        except AttributeError:
            pass
        else:
            # An in-place edit is still open; commit it before redrawing.
            self.edit_finish()
        try:
            label = self.label
        except AttributeError:
            # First draw: create the Label widget holding the text.
            # padding carefully selected (on Windows) to match Entry widget:
            self.label = Label(self.canvas, text=text, bd=0, padx=2, pady=2)
        theme = idleConf.GetOption('main','Theme','name')
        if self.selected:
            self.label.configure(idleConf.GetHighlight(theme, 'hilite'))
        else:
            self.label.configure(idleConf.GetHighlight(theme, 'normal'))
        id = self.canvas.create_window(textx, texty,
                                      anchor="nw", window=self.label)
        self.label.bind("<1>", self.select_or_edit)
        self.label.bind("<Double-1>", self.flip)
        self.text_id = id
def select_or_edit(self, event=None):
if self.selected and self.item.IsEditable():
self.edit(event)
else:
self.select(event)
def edit(self, event=None):
self.entry = Entry(self.label, bd=0, highlightthickness=1, width=0)
self.entry.insert(0, self.label['text'])
self.entry.selection_range(0, END)
self.entry.pack(ipadx=5)
self.entry.focus_set()
self.entry.bind("<Return>", self.edit_finish)
self.entry.bind("<Escape>", self.edit_cancel)
    def edit_finish(self, event=None):
        """Commit an in-place edit: apply the new text and redraw."""
        try:
            # Claim the entry and drop the attribute first, so a second
            # call (or a re-entrant drawtext) becomes a no-op.
            entry = self.entry
            del self.entry
        except AttributeError:
            return
        text = entry.get()
        entry.destroy()
        if text and text != self.item.GetText():
            self.item.SetText(text)
        # Re-read the text: the item may have rejected or altered it.
        text = self.item.GetText()
        self.label['text'] = text
        self.drawtext()
        self.canvas.focus_set()
def edit_cancel(self, event=None):
try:
entry = self.entry
del self.entry
except AttributeError:
return
entry.destroy()
self.drawtext()
self.canvas.focus_set()
class TreeItem:
"""Abstract class representing tree items.
Methods should typically be overridden, otherwise a default action
is used.
"""
def __init__(self):
"""Constructor. Do whatever you need to do."""
def GetText(self):
"""Return text string to display."""
def GetLabelText(self):
"""Return label text string to display in front of text (if any)."""
expandable = None
def _IsExpandable(self):
"""Do not override! Called by TreeNode."""
if self.expandable is None:
self.expandable = self.IsExpandable()
return self.expandable
def IsExpandable(self):
"""Return whether there are subitems."""
return 1
def _GetSubList(self):
"""Do not override! Called by TreeNode."""
if not self.IsExpandable():
return []
sublist = self.GetSubList()
if not sublist:
self.expandable = 0
return sublist
def IsEditable(self):
"""Return whether the item's text may be edited."""
def SetText(self, text):
"""Change the item's text (if it is editable)."""
def GetIconName(self):
"""Return name of icon to be displayed normally."""
def GetSelectedIconName(self):
"""Return name of icon to be displayed when selected."""
def GetSubList(self):
"""Return list of items forming sublist."""
def OnDoubleClick(self):
"""Called on a double-click on the item."""
# Example application
class FileTreeItem(TreeItem):

    """Example TreeItem subclass -- browse the file system."""

    def __init__(self, path):
        self.path = path

    def GetText(self):
        # A path like "C:/" has an empty basename; show the path itself.
        return os.path.basename(self.path) or self.path

    def IsEditable(self):
        # Only items with a non-empty basename can be renamed.
        return os.path.basename(self.path) != ""

    def SetText(self, text):
        parent = os.path.dirname(self.path)
        candidate = os.path.join(parent, text)
        # Refuse names that would move the file out of its directory.
        if os.path.dirname(candidate) != parent:
            return
        try:
            os.rename(self.path, candidate)
            self.path = candidate
        except os.error:
            pass

    def GetIconName(self):
        if self.IsExpandable():
            return None
        return "python"  # XXX wish there was a "file" icon

    def IsExpandable(self):
        return os.path.isdir(self.path)

    def GetSubList(self):
        try:
            names = os.listdir(self.path)
        except os.error:
            return []
        names.sort(key=os.path.normcase)
        return [FileTreeItem(os.path.join(self.path, name))
                for name in names]
# A canvas widget with scroll bars and some useful bindings
class ScrolledCanvas:
    """A Canvas in a Frame with vertical and horizontal scroll bars.

    Also installs keyboard scrolling bindings (Prior/Next/Up/Down) and
    an Alt-2 shortcut that delegates to ZoomHeight.
    """

    def __init__(self, master, **opts):
        # Default to scrolling one 17-px unit (a tree row) at a time.
        if 'yscrollincrement' not in opts:
            opts['yscrollincrement'] = 17
        self.master = master
        self.frame = Frame(master)
        self.frame.rowconfigure(0, weight=1)
        self.frame.columnconfigure(0, weight=1)
        self.canvas = Canvas(self.frame, **opts)
        self.canvas.grid(row=0, column=0, sticky="nsew")
        self.vbar = Scrollbar(self.frame, name="vbar")
        self.vbar.grid(row=0, column=1, sticky="nse")
        self.hbar = Scrollbar(self.frame, name="hbar", orient="horizontal")
        self.hbar.grid(row=1, column=0, sticky="ews")
        # Cross-wire the canvas and the two scrollbars.
        self.canvas['yscrollcommand'] = self.vbar.set
        self.vbar['command'] = self.canvas.yview
        self.canvas['xscrollcommand'] = self.hbar.set
        self.hbar['command'] = self.canvas.xview
        # Keyboard scrolling; handlers return "break" to stop further
        # event processing.
        self.canvas.bind("<Key-Prior>", self.page_up)
        self.canvas.bind("<Key-Next>", self.page_down)
        self.canvas.bind("<Key-Up>", self.unit_up)
        self.canvas.bind("<Key-Down>", self.unit_down)
        #if isinstance(master, Toplevel) or isinstance(master, Tk):
        self.canvas.bind("<Alt-Key-2>", self.zoom_height)
        self.canvas.focus_set()

    def page_up(self, event):
        """Scroll up one page."""
        self.canvas.yview_scroll(-1, "page")
        return "break"

    def page_down(self, event):
        """Scroll down one page."""
        self.canvas.yview_scroll(1, "page")
        return "break"

    def unit_up(self, event):
        """Scroll up one unit (one row)."""
        self.canvas.yview_scroll(-1, "unit")
        return "break"

    def unit_down(self, event):
        """Scroll down one unit (one row)."""
        self.canvas.yview_scroll(1, "unit")
        return "break"

    def zoom_height(self, event):
        """Delegate to ZoomHeight to adjust the master window's height."""
        ZoomHeight.zoom_height(self.master)
        return "break"
# Testing functions
def test():
    """Manual demo: browse a directory tree inside a ScrolledCanvas.

    Requires a running IDLE shell (uses PyShell.root as the Tk root).
    """
    from idlelib import PyShell
    root = Toplevel(PyShell.root)
    root.configure(bd=0, bg="yellow")
    root.focus_set()
    sc = ScrolledCanvas(root, bg="white", highlightthickness=0, takefocus=1)
    sc.frame.pack(expand=1, fill="both")
    # NOTE(review): hard-coded Windows path; edit when testing elsewhere.
    item = FileTreeItem("C:/windows/desktop")
    node = TreeNode(sc.canvas, None, item)
    node.expand()
def test2():
    """Manual demo: browse the current directory on a bare Canvas."""
    # test w/o scrolling canvas
    root = Tk()
    root.configure(bd=0)
    canvas = Canvas(root, bg="white", highlightthickness=0)
    canvas.pack(expand=1, fill="both")
    item = FileTreeItem(os.curdir)
    node = TreeNode(canvas, None, item)
    node.update()
    canvas.focus_set()
# Manual-test entry point; test() needs a running IDLE (PyShell.root).
if __name__ == '__main__':
    test()
| mit |
javierTerry/odoo | addons/base_import_module/controllers/main.py | 354 | 1518 | # -*- coding: utf-8 -*-
import functools
import openerp
from openerp.http import Controller, route, request, Response
def webservice(f):
    """Decorator turning uncaught endpoint exceptions into HTTP 500.

    Any exception raised by the wrapped handler is converted into a
    plain-text ``Response`` carrying the error message instead of
    propagating up the HTTP stack.
    """
    @functools.wraps(f)
    def wrap(*args, **kw):
        try:
            return f(*args, **kw)
        except Exception as e:
            # `except Exception, e` was Python-2-only syntax; `as e`
            # is valid on Python 2.6+ and Python 3 with identical
            # behavior.
            return Response(response=str(e), status=500)
    return wrap
class ImportModule(Controller):
    """HTTP endpoints for importing a module from an uploaded zip file."""

    def check_user(self, uid=None):
        """Raise AccessError unless uid is in base.group_erp_manager."""
        if uid is None:
            uid = request.uid
        users = request.registry['res.users']
        if not users.has_group(request.cr, uid, 'base.group_erp_manager'):
            raise openerp.exceptions.AccessError("Only administrators can upload a module")

    @route('/base_import_module/login', type='http', auth='none', methods=['POST'])
    @webservice
    def login(self, login, password, db=None):
        """Authenticate and verify the user is allowed to import modules."""
        if db and db != request.db:
            raise Exception("Could not select database '%s'" % db)
        uid = request.session.authenticate(request.db, login, password)
        if not uid:
            return Response(response="Wrong login/password", status=401)
        self.check_user(uid)
        return "ok"

    @route('/base_import_module/upload', type='http', auth='user', methods=['POST'])
    @webservice
    def upload(self, mod_file=None, force='', **kw):
        """Import the uploaded module zip; force='1' enables overwrite."""
        self.check_user()
        force = (force == '1')
        modules = request.registry['ir.module.module']
        return modules.import_zipfile(request.cr, request.uid, mod_file,
                                      force=force,
                                      context=request.context)[0]
| agpl-3.0 |
paprka/-tg-station | tools/expand_filedir_paths.py | 166 | 3839 | #!/usr/bin/env python
import re, os, sys, fnmatch
# Regex pattern to extract the directory path in a #define FILE_DIR
filedir_pattern = re.compile(r'^#define\s*FILE_DIR\s*"(.*?)"')

# Regex pattern to extract any single quoted piece of text.  This can also
# match single quoted strings inside of double quotes, which is part of a
# regular text string and should not be replaced.  The replacement function
# however will ignore any match that doesn't appear to be a filename, so
# these extra matches should not be a problem.
rename_pattern = re.compile(r"'(.+?)'")

# Only filenames matching this pattern will have their resources renamed
source_pattern = re.compile(r"^.*?\.(dm|dmm)$")
# Open the .dme file and return a list of all FILE_DIR paths in it
def read_filedirs(filename):
result = []
dme_file = file(filename, "rt")
# Read each line from the file and check for regex pattern match
for row in dme_file:
match = filedir_pattern.match(row)
if match:
result.append(match.group(1))
dme_file.close()
return result
# Search through a list of directories, and build a dictionary which
# maps every file to its full pathname (relative to the .dme file)
# If the same filename appears in more than one directory, the earlier
# directory in the list takes preference.
def index_files(file_dirs):
    """Map each resource filename to its full path relative to the .dme.

    Searches every directory in *file_dirs*; when the same filename
    appears in more than one directory, the earlier directory in the
    list takes precedence.  Keys are lowercased because BYOND resource
    names are case insensitive, even on Linux.
    """
    catalog = {}
    # Walk the list in reverse so earlier directories overwrite the
    # entries made by later ones, giving them precedence.
    for directory in reversed(file_dirs):
        # Normalize Windows backslash separators to forward slashes.
        prefix = directory.replace('\\', '/')
        for entry in os.listdir(directory):
            # Names without an extension are never referenced as
            # resources; skip them.
            if entry.find(".") == -1:
                continue
            catalog[entry.lower()] = prefix + '/' + entry
    return catalog
# Recursively search for every .dm/.dmm file in the .dme file directory. For
# each file, search it for any resource names in single quotes, and replace
# them with the full path previously found by index_files()
def rewrite_sources(resources):
    """Expand quoted resource names in every .dm/.dmm file under ".".

    Each single-quoted name found in a source file is replaced with the
    full path recorded in *resources* (keyed by lowercased filename);
    quoted text that is not a known resource is left untouched.  Files
    are rewritten through a ``.tmp`` file that then replaces the
    original.
    """
    # Create a closure for the regex replacement function to capture the
    # resources dictionary which can't be passed directly to this function
    def replace_func(name):
        key = name.group(1).lower()
        if key in resources:
            replacement = resources[key]
        else:
            replacement = name.group(1)
        return "'" + replacement + "'"

    # Search recursively for all .dm and .dmm files
    for (dirpath, dirs, files) in os.walk("."):
        for name in files:
            if not source_pattern.match(name):
                continue
            path = dirpath + '/' + name
            # open() replaces the Python-2-only file() builtin; the
            # context managers close both handles (even on error)
            # before the rename below.
            with open(path, "rt") as source_file, \
                 open(path + ".tmp", "wt") as output_file:
                # Read one line at a time, replacing all single quoted
                # resource names with the full path to that resource.
                for row in source_file:
                    output_file.write(rename_pattern.sub(replace_func, row))
            # Delete original source file and replace with the temporary
            # output.  On Windows, an atomic rename() over an existing
            # file is not possible like it is under POSIX.
            os.remove(path)
            os.rename(path + ".tmp", path)
# Script entry point: read FILE_DIR defines from the .dme, index every
# resource file they contain, then expand quoted names in all sources.
dirs = read_filedirs("tgstation.dme");
resources = index_files(dirs)
rewrite_sources(resources)
| agpl-3.0 |
apporc/neutron | neutron/plugins/ml2/plugin.py | 1 | 76037 | # Copyright (c) 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from eventlet import greenthread
from oslo_config import cfg
from oslo_db import api as oslo_db_api
from oslo_db import exception as os_db_exception
from oslo_log import helpers as log_helpers
from oslo_log import log
from oslo_serialization import jsonutils
from oslo_utils import excutils
from oslo_utils import importutils
from oslo_utils import uuidutils
from sqlalchemy import exc as sql_exc
from sqlalchemy.orm import exc as sa_exc
from neutron._i18n import _, _LE, _LI, _LW
from neutron.agent import securitygroups_rpc as sg_rpc
from neutron.api.rpc.agentnotifiers import dhcp_rpc_agent_api
from neutron.api.rpc.handlers import dhcp_rpc
from neutron.api.rpc.handlers import dvr_rpc
from neutron.api.rpc.handlers import metadata_rpc
from neutron.api.rpc.handlers import resources_rpc
from neutron.api.rpc.handlers import securitygroups_rpc
from neutron.api.v2 import attributes
from neutron.callbacks import events
from neutron.callbacks import exceptions
from neutron.callbacks import registry
from neutron.callbacks import resources
from neutron.common import constants as const
from neutron.common import exceptions as exc
from neutron.common import ipv6_utils
from neutron.common import rpc as n_rpc
from neutron.common import topics
from neutron.common import utils
from neutron.db import address_scope_db
from neutron.db import agents_db
from neutron.db import agentschedulers_db
from neutron.db import allowedaddresspairs_db as addr_pair_db
from neutron.db import api as db_api
from neutron.db import db_base_plugin_v2
from neutron.db import dvr_mac_db
from neutron.db import external_net_db
from neutron.db import extradhcpopt_db
from neutron.db import models_v2
from neutron.db import netmtu_db
from neutron.db.quota import driver # noqa
from neutron.db import securitygroups_db
from neutron.db import securitygroups_rpc_base as sg_db_rpc
from neutron.db import vlantransparent_db
from neutron.extensions import allowedaddresspairs as addr_pair
from neutron.extensions import availability_zone as az_ext
from neutron.extensions import extra_dhcp_opt as edo_ext
from neutron.extensions import portbindings
from neutron.extensions import portsecurity as psec
from neutron.extensions import providernet as provider
from neutron.extensions import vlantransparent
from neutron import manager
from neutron.plugins.common import constants as service_constants
from neutron.plugins.ml2.common import exceptions as ml2_exc
from neutron.plugins.ml2 import config # noqa
from neutron.plugins.ml2 import db
from neutron.plugins.ml2 import driver_api as api
from neutron.plugins.ml2 import driver_context
from neutron.plugins.ml2 import managers
from neutron.plugins.ml2 import models
from neutron.plugins.ml2 import rpc
from neutron.quota import resource_registry
from neutron.services.qos import qos_consts
# Module-level logger for this plugin.
LOG = log.getLogger(__name__)

# Upper bound on port-binding attempts before giving up; normal
# operation is expected to need only a couple of tries.
MAX_BIND_TRIES = 10
class Ml2Plugin(db_base_plugin_v2.NeutronDbPluginV2,
dvr_mac_db.DVRDbMixin,
external_net_db.External_net_db_mixin,
sg_db_rpc.SecurityGroupServerRpcMixin,
agentschedulers_db.AZDhcpAgentSchedulerDbMixin,
addr_pair_db.AllowedAddressPairsMixin,
vlantransparent_db.Vlantransparent_db_mixin,
extradhcpopt_db.ExtraDhcpOptMixin,
netmtu_db.Netmtu_db_mixin,
address_scope_db.AddressScopeDbMixin):
"""Implement the Neutron L2 abstractions using modules.
Ml2Plugin is a Neutron plugin based on separately extensible sets
of network types and mechanisms for connecting to networks of
those types. The network types and mechanisms are implemented as
drivers loaded via Python entry points. Networks can be made up of
multiple segments (not yet fully implemented).
"""
# This attribute specifies whether the plugin supports or not
# bulk/pagination/sorting operations. Name mangling is used in
# order to ensure it is qualified by class
__native_bulk_support = True
__native_pagination_support = True
__native_sorting_support = True
# List of supported extensions
_supported_extension_aliases = ["provider", "external-net", "binding",
"quotas", "security-group", "agent",
"dhcp_agent_scheduler",
"multi-provider", "allowed-address-pairs",
"extra_dhcp_opt", "subnet_allocation",
"net-mtu", "vlan-transparent",
"address-scope", "dns-integration",
"availability_zone",
"network_availability_zone"]
@property
def supported_extension_aliases(self):
if not hasattr(self, '_aliases'):
aliases = self._supported_extension_aliases[:]
aliases += self.extension_manager.extension_aliases()
sg_rpc.disable_security_group_extension_by_config(aliases)
vlantransparent.disable_extension_by_config(aliases)
self._aliases = aliases
return self._aliases
    @resource_registry.tracked_resources(
        network=models_v2.Network,
        port=models_v2.Port,
        subnet=models_v2.Subnet,
        subnetpool=models_v2.SubnetPool,
        security_group=securitygroups_db.SecurityGroup,
        security_group_rule=securitygroups_db.SecurityGroupRule)
    def __init__(self):
        """Load the driver managers, initialize the DB, then the drivers."""
        # First load drivers, then initialize DB, then initialize drivers
        self.type_manager = managers.TypeManager()
        self.extension_manager = managers.ExtensionManager()
        self.mechanism_manager = managers.MechanismManager()
        super(Ml2Plugin, self).__init__()
        self.type_manager.initialize()
        self.extension_manager.initialize()
        self.mechanism_manager.initialize()
        self._setup_dhcp()
        self._start_rpc_notifiers()
        self.add_agent_status_check(self.agent_health_check)
        LOG.info(_LI("Modular L2 Plugin initialization complete"))
    def _setup_rpc(self):
        """Initialize components to support agent communication."""
        # Endpoint objects dispatched on the plugin RPC topic.
        self.endpoints = [
            rpc.RpcCallbacks(self.notifier, self.type_manager),
            securitygroups_rpc.SecurityGroupServerRpcCallback(),
            dvr_rpc.DVRServerRpcCallback(),
            dhcp_rpc.DhcpRpcCallback(),
            agents_db.AgentExtRpcCallback(),
            metadata_rpc.MetadataRpcCallback(),
            resources_rpc.ResourcesPullRpcCallback()
        ]
def _setup_dhcp(self):
"""Initialize components to support DHCP."""
self.network_scheduler = importutils.import_object(
cfg.CONF.network_scheduler_driver
)
self.start_periodic_dhcp_agent_status_check()
@property
def supported_qos_rule_types(self):
return self.mechanism_manager.supported_qos_rule_types
@log_helpers.log_method_call
def _start_rpc_notifiers(self):
"""Initialize RPC notifiers for agents."""
self.notifier = rpc.AgentNotifierApi(topics.AGENT)
self.agent_notifiers[const.AGENT_TYPE_DHCP] = (
dhcp_rpc_agent_api.DhcpAgentNotifyAPI()
)
    @log_helpers.log_method_call
    def start_rpc_listeners(self):
        """Start the RPC loop to let the plugin communicate with agents."""
        self._setup_rpc()
        self.topic = topics.PLUGIN
        self.conn = n_rpc.create_connection()
        self.conn.create_consumer(self.topic, self.endpoints, fanout=False)
        # process state reports despite dedicated rpc workers
        self.conn.create_consumer(topics.REPORTS,
                                  [agents_db.AgentExtRpcCallback()],
                                  fanout=False)
        return self.conn.consume_in_threads()
def start_rpc_state_reports_listener(self):
self.conn_reports = n_rpc.create_connection(new=True)
self.conn_reports.create_consumer(topics.REPORTS,
[agents_db.AgentExtRpcCallback()],
fanout=False)
return self.conn_reports.consume_in_threads()
def _filter_nets_provider(self, context, networks, filters):
return [network
for network in networks
if self.type_manager.network_matches_filters(network, filters)
]
def _check_mac_update_allowed(self, orig_port, port, binding):
unplugged_types = (portbindings.VIF_TYPE_BINDING_FAILED,
portbindings.VIF_TYPE_UNBOUND)
new_mac = port.get('mac_address')
mac_change = (new_mac is not None and
orig_port['mac_address'] != new_mac)
if (mac_change and binding.vif_type not in unplugged_types):
raise exc.PortBound(port_id=orig_port['id'],
vif_type=binding.vif_type,
old_mac=orig_port['mac_address'],
new_mac=port['mac_address'])
return mac_change
    def _process_port_binding(self, mech_context, attrs):
        """Apply requested binding attribute changes to the port binding.

        Updates host, vnic_type and profile on the binding held by
        *mech_context* from *attrs*.  Returns True when anything
        changed, in which case the current binding has been cleared so
        the port can be rebound.
        """
        session = mech_context._plugin_context.session
        binding = mech_context._binding
        port = mech_context.current
        port_id = port['id']
        changes = False

        host = attributes.ATTR_NOT_SPECIFIED
        if attrs and portbindings.HOST_ID in attrs:
            host = attrs.get(portbindings.HOST_ID) or ''

        original_host = binding.host
        if (attributes.is_attr_set(host) and
            original_host != host):
            binding.host = host
            changes = True

        vnic_type = attrs and attrs.get(portbindings.VNIC_TYPE)
        if (attributes.is_attr_set(vnic_type) and
            binding.vnic_type != vnic_type):
            binding.vnic_type = vnic_type
            changes = True

        # treat None as clear of profile.
        profile = None
        if attrs and portbindings.PROFILE in attrs:
            profile = attrs.get(portbindings.PROFILE) or {}

        if profile not in (None, attributes.ATTR_NOT_SPECIFIED,
                           self._get_profile(binding)):
            binding.profile = jsonutils.dumps(profile)
            # The serialized profile must fit the DB column.
            if len(binding.profile) > models.BINDING_PROFILE_LEN:
                msg = _("binding:profile value too large")
                raise exc.InvalidInput(error_message=msg)
            changes = True

        # Unbind the port if needed.
        if changes:
            binding.vif_type = portbindings.VIF_TYPE_UNBOUND
            binding.vif_details = ''
            db.clear_binding_levels(session, port_id, original_host)
            mech_context._clear_binding_levels()

        if port['device_owner'] == const.DEVICE_OWNER_DVR_INTERFACE:
            # DVR distributed ports never keep a single-host binding.
            binding.vif_type = portbindings.VIF_TYPE_UNBOUND
            binding.vif_details = ''
            db.clear_binding_levels(session, port_id, original_host)
            mech_context._clear_binding_levels()
            binding.host = ''

        self._update_port_dict_binding(port, binding)
        return changes
    def _bind_port_if_needed(self, context, allow_notify=False,
                             need_notify=False):
        """Attempt to bind the port, retrying up to MAX_BIND_TRIES times.

        Returns the (possibly replaced) PortContext.  When
        *allow_notify* is set and a notification is pending, a
        port-update notification is sent before returning.
        """
        # Binding limit does not need to be tunable because no
        # more than a couple of attempts should ever be required in
        # normal operation.
        for count in range(1, MAX_BIND_TRIES + 1):
            if count > 1:
                # multiple attempts shouldn't happen very often so we log each
                # attempt after the 1st.
                greenthread.sleep(0)  # yield
                LOG.info(_LI("Attempt %(count)s to bind port %(port)s"),
                         {'count': count, 'port': context.current['id']})

            context, need_notify, try_again = self._attempt_binding(
                context, need_notify)

            if not try_again:
                if allow_notify and need_notify:
                    self._notify_port_updated(context)
                return context

        LOG.error(_LE("Failed to commit binding results for %(port)s "
                      "after %(max)s tries"),
                  {'port': context.current['id'], 'max': MAX_BIND_TRIES})
        return context
    def _attempt_binding(self, context, need_notify):
        """Perform one bind attempt.

        Returns a (context, need_notify, try_again) tuple; try_again is
        True when a concurrent change prevented committing the result.
        """
        # Since the mechanism driver bind_port() calls must be made
        # outside a DB transaction locking the port state, it is
        # possible (but unlikely) that the port's state could change
        # concurrently while these calls are being made. If another
        # thread or process succeeds in binding the port before this
        # thread commits its results, the already committed results are
        # used. If attributes such as binding:host_id,
        # binding:profile, or binding:vnic_type are updated
        # concurrently, the try_again flag is returned to indicate that
        # the commit was unsuccessful.
        plugin_context = context._plugin_context
        port_id = context.current['id']
        binding = context._binding
        try_again = False
        # First, determine whether it is necessary and possible to
        # bind the port.
        if (binding.vif_type != portbindings.VIF_TYPE_UNBOUND
            or not binding.host):
            # We either don't need to bind the port or can't
            return context, need_notify, try_again

        # The port isn't already bound and the necessary
        # information is available, so attempt to bind the port.
        bind_context = self._bind_port(context)

        # Now try to commit result of attempting to bind the port.
        new_context, did_commit = self._commit_port_binding(
            plugin_context, port_id, binding, bind_context)
        if not new_context:
            # The port has been deleted concurrently, so just
            # return the unbound result from the initial
            # transaction that completed before the deletion.
            LOG.debug("Port %s has been deleted concurrently",
                      port_id)
            need_notify = False
            return context, need_notify, try_again
        # Need to notify if we succeed and our results were
        # committed.
        if did_commit and (new_context._binding.vif_type !=
                           portbindings.VIF_TYPE_BINDING_FAILED):
            need_notify = True
            return new_context, need_notify, try_again
        # Either a concurrent change won the race or the binding
        # failed; ask the caller to retry with the refreshed context.
        try_again = True
        return new_context, need_notify, try_again
    def _bind_port(self, orig_context):
        """Run the mechanism drivers' bind_port on a fresh unbound context.

        Returns the new PortContext holding the bind attempt's result.
        """
        # Construct a new PortContext from the one from the previous
        # transaction.
        port = orig_context.current
        orig_binding = orig_context._binding
        # Fresh, unbound binding carrying over the requested inputs.
        new_binding = models.PortBinding(
            host=orig_binding.host,
            vnic_type=orig_binding.vnic_type,
            profile=orig_binding.profile,
            vif_type=portbindings.VIF_TYPE_UNBOUND,
            vif_details=''
        )
        self._update_port_dict_binding(port, new_binding)
        new_context = driver_context.PortContext(
            self, orig_context._plugin_context, port,
            orig_context.network.current, new_binding, None)

        # Attempt to bind the port and return the context with the
        # result.
        self.mechanism_manager.bind_port(new_context)
        return new_context
    def _commit_port_binding(self, plugin_context, port_id, orig_binding,
                             new_context):
        """Try to commit the result of a bind attempt.

        Returns (PortContext, did_commit); (None, False) when the port
        was deleted concurrently.  The result is committed only if the
        port is still unbound (or binding-failed) and the binding
        inputs (host, vnic_type, profile) are unchanged.
        """
        session = plugin_context.session
        new_binding = new_context._binding

        # After we've attempted to bind the port, we begin a
        # transaction, get the current port state, and decide whether
        # to commit the binding results.
        with session.begin(subtransactions=True):
            # Get the current port state and build a new PortContext
            # reflecting this state as original state for subsequent
            # mechanism driver update_port_*commit() calls.
            port_db, cur_binding = db.get_locked_port_and_binding(session,
                                                                  port_id)
            if not port_db:
                # The port has been deleted concurrently.
                return (None, False)
            oport = self._make_port_dict(port_db)
            port = self._make_port_dict(port_db)
            network = new_context.network.current
            if port['device_owner'] == const.DEVICE_OWNER_DVR_INTERFACE:
                # REVISIT(rkukura): The PortBinding instance from the
                # ml2_port_bindings table, returned as cur_binding
                # from db.get_locked_port_and_binding() above, is
                # currently not used for DVR distributed ports, and is
                # replaced here with the DVRPortBinding instance from
                # the ml2_dvr_port_bindings table specific to the host
                # on which the distributed port is being bound. It
                # would be possible to optimize this code to avoid
                # fetching the PortBinding instance in the DVR case,
                # and even to avoid creating the unused entry in the
                # ml2_port_bindings table. But the upcoming resolution
                # for bug 1367391 will eliminate the
                # ml2_dvr_port_bindings table, use the
                # ml2_port_bindings table to store non-host-specific
                # fields for both distributed and non-distributed
                # ports, and introduce a new ml2_port_binding_hosts
                # table for the fields that need to be host-specific
                # in the distributed case. Since the PortBinding
                # instance will then be needed, it does not make sense
                # to optimize this code to avoid fetching it.
                cur_binding = db.get_dvr_port_binding_by_host(
                    session, port_id, orig_binding.host)
            cur_context = driver_context.PortContext(
                self, plugin_context, port, network, cur_binding, None,
                original_port=oport)

            # Commit our binding results only if port has not been
            # successfully bound concurrently by another thread or
            # process and no binding inputs have been changed.
            commit = ((cur_binding.vif_type in
                       [portbindings.VIF_TYPE_UNBOUND,
                        portbindings.VIF_TYPE_BINDING_FAILED]) and
                      orig_binding.host == cur_binding.host and
                      orig_binding.vnic_type == cur_binding.vnic_type and
                      orig_binding.profile == cur_binding.profile)
            if commit:
                # Update the port's binding state with our binding
                # results.
                cur_binding.vif_type = new_binding.vif_type
                cur_binding.vif_details = new_binding.vif_details
                db.clear_binding_levels(session, port_id, cur_binding.host)
                db.set_binding_levels(session, new_context._binding_levels)
                cur_context._binding_levels = new_context._binding_levels

                # Update PortContext's port dictionary to reflect the
                # updated binding state.
                self._update_port_dict_binding(port, cur_binding)

                # Update the port status if requested by the bound driver.
                if (new_context._binding_levels and
                    new_context._new_port_status):
                    port_db.status = new_context._new_port_status
                    port['status'] = new_context._new_port_status

                # Call the mechanism driver precommit methods, commit
                # the results, and call the postcommit methods.
                self.mechanism_manager.update_port_precommit(cur_context)
        if commit:
            # postcommit runs outside the transaction, after the commit.
            self.mechanism_manager.update_port_postcommit(cur_context)

        # Continue, using the port state as of the transaction that
        # just finished, whether that transaction committed new
        # results or discovered concurrent port state changes.
        return (cur_context, commit)
def _update_port_dict_binding(self, port, binding):
port[portbindings.VNIC_TYPE] = binding.vnic_type
port[portbindings.PROFILE] = self._get_profile(binding)
if port['device_owner'] == const.DEVICE_OWNER_DVR_INTERFACE:
port[portbindings.HOST_ID] = ''
port[portbindings.VIF_TYPE] = portbindings.VIF_TYPE_DISTRIBUTED
port[portbindings.VIF_DETAILS] = {}
else:
port[portbindings.HOST_ID] = binding.host
port[portbindings.VIF_TYPE] = binding.vif_type
port[portbindings.VIF_DETAILS] = self._get_vif_details(binding)
def _get_vif_details(self, binding):
if binding.vif_details:
try:
return jsonutils.loads(binding.vif_details)
except Exception:
LOG.error(_LE("Serialized vif_details DB value '%(value)s' "
"for port %(port)s is invalid"),
{'value': binding.vif_details,
'port': binding.port_id})
return {}
def _get_profile(self, binding):
if binding.profile:
try:
return jsonutils.loads(binding.profile)
except Exception:
LOG.error(_LE("Serialized profile DB value '%(value)s' for "
"port %(port)s is invalid"),
{'value': binding.profile,
'port': binding.port_id})
return {}
def _ml2_extend_port_dict_binding(self, port_res, port_db):
# None when called during unit tests for other plugins.
if port_db.port_binding:
self._update_port_dict_binding(port_res, port_db.port_binding)
db_base_plugin_v2.NeutronDbPluginV2.register_dict_extend_funcs(
attributes.PORTS, ['_ml2_extend_port_dict_binding'])
# Register extend dict methods for network and port resources.
# Each mechanism driver that supports extend attribute for the resources
# can add those attribute to the result.
db_base_plugin_v2.NeutronDbPluginV2.register_dict_extend_funcs(
attributes.NETWORKS, ['_ml2_md_extend_network_dict'])
db_base_plugin_v2.NeutronDbPluginV2.register_dict_extend_funcs(
attributes.PORTS, ['_ml2_md_extend_port_dict'])
db_base_plugin_v2.NeutronDbPluginV2.register_dict_extend_funcs(
attributes.SUBNETS, ['_ml2_md_extend_subnet_dict'])
def _ml2_md_extend_network_dict(self, result, netdb):
session = db_api.get_session()
with session.begin(subtransactions=True):
self.extension_manager.extend_network_dict(session, netdb, result)
def _ml2_md_extend_port_dict(self, result, portdb):
session = db_api.get_session()
with session.begin(subtransactions=True):
self.extension_manager.extend_port_dict(session, portdb, result)
def _ml2_md_extend_subnet_dict(self, result, subnetdb):
session = db_api.get_session()
with session.begin(subtransactions=True):
self.extension_manager.extend_subnet_dict(
session, subnetdb, result)
# Note - The following hook methods have "ml2" in their names so
# that they are not called twice during unit tests due to global
# registration of hooks in portbindings_db.py used by other
# plugins.
def _ml2_port_model_hook(self, context, original_model, query):
query = query.outerjoin(models.PortBinding,
(original_model.id ==
models.PortBinding.port_id))
return query
def _ml2_port_result_filter_hook(self, query, filters):
values = filters and filters.get(portbindings.HOST_ID, [])
if not values:
return query
return query.filter(models.PortBinding.host.in_(values))
db_base_plugin_v2.NeutronDbPluginV2.register_model_query_hook(
models_v2.Port,
"ml2_port_bindings",
'_ml2_port_model_hook',
None,
'_ml2_port_result_filter_hook')
def _notify_port_updated(self, mech_context):
port = mech_context.current
segment = mech_context.bottom_bound_segment
if not segment:
# REVISIT(rkukura): This should notify agent to unplug port
network = mech_context.network.current
LOG.warning(_LW("In _notify_port_updated(), no bound segment for "
"port %(port_id)s on network %(network_id)s"),
{'port_id': port['id'],
'network_id': network['id']})
return
self.notifier.port_update(mech_context._plugin_context, port,
segment[api.NETWORK_TYPE],
segment[api.SEGMENTATION_ID],
segment[api.PHYSICAL_NETWORK])
    def _delete_objects(self, context, resource, objects):
        """Best-effort rollback: delete each created resource.

        Failures are logged but never propagated, so every object gets
        a deletion attempt.
        """
        delete_op = getattr(self, 'delete_%s' % resource)
        for obj in objects:
            try:
                delete_op(context, obj['result']['id'])
            except KeyError:
                # Entry never got a 'result'/'id'; nothing to delete.
                LOG.exception(_LE("Could not find %s to delete."),
                              resource)
            except Exception:
                LOG.exception(_LE("Could not delete %(res)s %(id)s."),
                              {'res': resource,
                               'id': obj['result']['id']})
    def _create_bulk_ml2(self, resource, context, request_items):
        """Create many resources of one type, then run postcommit hooks.

        All DB creations happen in a single transaction via the
        per-resource _create_<resource>_db helper; postcommit mechanism
        driver calls run afterwards, outside the transaction. On a
        postcommit failure, everything created here is deleted again.
        """
        objects = []
        collection = "%ss" % resource
        items = request_items[collection]
        try:
            with context.session.begin(subtransactions=True):
                obj_creator = getattr(self, '_create_%s_db' % resource)
                for item in items:
                    attrs = item[resource]
                    result, mech_context = obj_creator(context, item)
                    objects.append({'mech_context': mech_context,
                                    'result': result,
                                    'attributes': attrs})
        except Exception:
            with excutils.save_and_reraise_exception():
                # 'item' is the request entry being processed when the
                # failure occurred (loop variable leaks from the for).
                LOG.exception(_LE("An exception occurred while creating "
                                  "the %(resource)s:%(item)s"),
                              {'resource': resource, 'item': item})
        try:
            postcommit_op = getattr(self.mechanism_manager,
                                    'create_%s_postcommit' % resource)
            for obj in objects:
                postcommit_op(obj['mech_context'])
            return objects
        except ml2_exc.MechanismDriverError:
            with excutils.save_and_reraise_exception():
                resource_ids = [res['result']['id'] for res in objects]
                # 'obj' is the entry whose postcommit call failed.
                LOG.exception(_LE("mechanism_manager.create_%(res)s"
                                  "_postcommit failed for %(res)s: "
                                  "'%(failed_id)s'. Deleting "
                                  "%(res)ss %(resource_ids)s"),
                              {'res': resource,
                               'failed_id': obj['result']['id'],
                               'resource_ids': ', '.join(resource_ids)})
                self._delete_objects(context, resource, objects)
    def _create_network_db(self, context, network):
        """Create a network in the DB and run extension/precommit hooks.

        Returns (result dict, NetworkContext); the caller is expected to
        invoke mechanism_manager.create_network_postcommit with the
        returned context after this transaction commits.
        """
        net_data = network[attributes.NETWORK]
        tenant_id = self._get_tenant_id_for_create(context, net_data)
        session = context.session
        with session.begin(subtransactions=True):
            self._ensure_default_security_group(context, tenant_id)
            result = super(Ml2Plugin, self).create_network(context, network)
            self.extension_manager.process_create_network(context, net_data,
                                                          result)
            self._process_l3_create(context, result, net_data)
            net_data['id'] = result['id']
            self.type_manager.create_network_segments(context, net_data,
                                                      tenant_id)
            self.type_manager.extend_network_dict_provider(context, result)
            mech_context = driver_context.NetworkContext(self, context,
                                                         result)
            self.mechanism_manager.create_network_precommit(mech_context)
            # Persist the MTU only when a positive value was requested.
            if net_data.get(api.MTU, 0) > 0:
                res = super(Ml2Plugin, self).update_network(context,
                    result['id'], {'network': {api.MTU: net_data[api.MTU]}})
                result[api.MTU] = res.get(api.MTU, 0)
            # Availability-zone hints are validated, serialized to a
            # string, and stored on the network record.
            if az_ext.AZ_HINTS in net_data:
                self.validate_availability_zones(context, 'network',
                                                 net_data[az_ext.AZ_HINTS])
                az_hints = az_ext.convert_az_list_to_string(
                                                net_data[az_ext.AZ_HINTS])
                super(Ml2Plugin, self).update_network(context,
                    result['id'], {'network': {az_ext.AZ_HINTS: az_hints}})
                result[az_ext.AZ_HINTS] = az_hints
            # Update the transparent vlan if configured
            if utils.is_extension_supported(self, 'vlan-transparent'):
                vlt = vlantransparent.get_vlan_transparent(net_data)
                super(Ml2Plugin, self).update_network(context,
                    result['id'], {'network': {'vlan_transparent': vlt}})
                result['vlan_transparent'] = vlt
        return result, mech_context
def create_network(self, context, network):
result, mech_context = self._create_network_db(context, network)
try:
self.mechanism_manager.create_network_postcommit(mech_context)
except ml2_exc.MechanismDriverError:
with excutils.save_and_reraise_exception():
LOG.error(_LE("mechanism_manager.create_network_postcommit "
"failed, deleting network '%s'"), result['id'])
self.delete_network(context, result['id'])
return result
def create_network_bulk(self, context, networks):
objects = self._create_bulk_ml2(attributes.NETWORK, context, networks)
return [obj['result'] for obj in objects]
    def update_network(self, context, id, network):
        """Update a network, running extension and mechanism driver hooks.

        Provider attributes are immutable and rejected up front.
        Precommit hooks run inside the transaction; postcommit and the
        QoS-change RPC notification run after it commits.
        """
        net_data = network[attributes.NETWORK]
        provider._raise_if_updates_provider_attributes(net_data)
        session = context.session
        with session.begin(subtransactions=True):
            original_network = super(Ml2Plugin, self).get_network(context, id)
            updated_network = super(Ml2Plugin, self).update_network(context,
                                                                    id,
                                                                    network)
            self.extension_manager.process_update_network(context, net_data,
                                                          updated_network)
            self._process_l3_update(context, updated_network, net_data)
            self.type_manager.extend_network_dict_provider(context,
                                                           updated_network)
            # TODO(QoS): Move out to the extension framework somehow.
            need_network_update_notify = (
                qos_consts.QOS_POLICY_ID in net_data and
                original_network[qos_consts.QOS_POLICY_ID] !=
                updated_network[qos_consts.QOS_POLICY_ID])
            mech_context = driver_context.NetworkContext(
                self, context, updated_network,
                original_network=original_network)
            self.mechanism_manager.update_network_precommit(mech_context)
        # TODO(apech) - handle errors raised by update_network, potentially
        # by re-calling update_network with the previous attributes. For
        # now the error is propogated to the caller, which is expected to
        # either undo/retry the operation or delete the resource.
        self.mechanism_manager.update_network_postcommit(mech_context)
        if need_network_update_notify:
            self.notifier.network_update(context, updated_network)
        return updated_network
def get_network(self, context, id, fields=None):
session = context.session
with session.begin(subtransactions=True):
result = super(Ml2Plugin, self).get_network(context, id, None)
self.type_manager.extend_network_dict_provider(context, result)
return self._fields(result, fields)
def get_networks(self, context, filters=None, fields=None,
sorts=None, limit=None, marker=None, page_reverse=False):
session = context.session
with session.begin(subtransactions=True):
nets = super(Ml2Plugin,
self).get_networks(context, filters, None, sorts,
limit, marker, page_reverse)
for net in nets:
self.type_manager.extend_network_dict_provider(context, net)
nets = self._filter_nets_provider(context, nets, filters)
return [self._fields(net, fields) for net in nets]
def _delete_ports(self, context, port_ids):
for port_id in port_ids:
try:
self.delete_port(context, port_id)
except (exc.PortNotFound, sa_exc.ObjectDeletedError):
# concurrent port deletion can be performed by
# release_dhcp_port caused by concurrent subnet_delete
LOG.info(_LI("Port %s was deleted concurrently"), port_id)
except Exception:
with excutils.save_and_reraise_exception():
LOG.exception(_LE("Exception auto-deleting port %s"),
port_id)
def _delete_subnets(self, context, subnet_ids):
for subnet_id in subnet_ids:
try:
self.delete_subnet(context, subnet_id)
except (exc.SubnetNotFound, sa_exc.ObjectDeletedError):
LOG.info(_LI("Subnet %s was deleted concurrently"),
subnet_id)
except Exception:
with excutils.save_and_reraise_exception():
LOG.exception(_LE("Exception auto-deleting subnet %s"),
subnet_id)
    def delete_network(self, context, id):
        """Delete a network plus its auto-deletable ports and subnets.

        Loops until a transaction finds no remaining ports/subnets:
        each pass collects auto-deletable ports and subnets, deletes
        them outside the transaction (so mechanism drivers run), and
        retries. Raises NetworkInUse if tenant-owned ports exist.
        """
        # REVISIT(rkukura) The super(Ml2Plugin, self).delete_network()
        # function is not used because it auto-deletes ports and
        # subnets from the DB without invoking the derived class's
        # delete_port() or delete_subnet(), preventing mechanism
        # drivers from being called. This approach should be revisited
        # when the API layer is reworked during icehouse.
        LOG.debug("Deleting network %s", id)
        session = context.session
        while True:
            try:
                # REVISIT: Serialize this operation with a semaphore
                # to prevent deadlock waiting to acquire a DB lock
                # held by another thread in the same process, leading
                # to 'lock wait timeout' errors.
                #
                # Process L3 first, since, depending on the L3 plugin, it may
                # involve sending RPC notifications, and/or calling delete_port
                # on this plugin.
                # Additionally, a rollback may not be enough to undo the
                # deletion of a floating IP with certain L3 backends.
                self._process_l3_delete(context, id)
                # Using query().with_lockmode isn't necessary. Foreign-key
                # constraints prevent deletion if concurrent creation happens.
                with session.begin(subtransactions=True):
                    # Get ports to auto-delete.
                    ports = (session.query(models_v2.Port).
                             enable_eagerloads(False).
                             filter_by(network_id=id).all())
                    LOG.debug("Ports to auto-delete: %s", ports)
                    only_auto_del = all(p.device_owner
                                        in db_base_plugin_v2.
                                        AUTO_DELETE_PORT_OWNERS
                                        for p in ports)
                    if not only_auto_del:
                        LOG.debug("Tenant-owned ports exist")
                        raise exc.NetworkInUse(net_id=id)
                    # Get subnets to auto-delete.
                    subnets = (session.query(models_v2.Subnet).
                               enable_eagerloads(False).
                               filter_by(network_id=id).all())
                    LOG.debug("Subnets to auto-delete: %s", subnets)
                    if not (ports or subnets):
                        # Nothing left to clean up: run precommit hooks,
                        # release segments and delete the record, all in
                        # this transaction, then leave the retry loop.
                        network = self.get_network(context, id)
                        mech_context = driver_context.NetworkContext(self,
                                                                     context,
                                                                     network)
                        self.mechanism_manager.delete_network_precommit(
                            mech_context)
                        self.type_manager.release_network_segments(session, id)
                        record = self._get_network(context, id)
                        LOG.debug("Deleting network record %s", record)
                        session.delete(record)
                        # The segment records are deleted via cascade from the
                        # network record, so explicit removal is not necessary.
                        LOG.debug("Committing transaction")
                        break
                    port_ids = [port.id for port in ports]
                    subnet_ids = [subnet.id for subnet in subnets]
            except os_db_exception.DBError as e:
                with excutils.save_and_reraise_exception() as ctxt:
                    if isinstance(e.inner_exception, sql_exc.IntegrityError):
                        # A port was created concurrently; suppress the
                        # error and retry the whole pass.
                        ctxt.reraise = False
                        LOG.warning(_LW("A concurrent port creation has "
                                        "occurred"))
                        continue
            self._delete_ports(context, port_ids)
            self._delete_subnets(context, subnet_ids)
        try:
            self.mechanism_manager.delete_network_postcommit(mech_context)
        except ml2_exc.MechanismDriverError:
            # TODO(apech) - One or more mechanism driver failed to
            # delete the network. Ideally we'd notify the caller of
            # the fact that an error occurred.
            LOG.error(_LE("mechanism_manager.delete_network_postcommit"
                          " failed"))
        self.notifier.network_delete(context, id)
def _create_subnet_db(self, context, subnet):
session = context.session
with session.begin(subtransactions=True):
result = super(Ml2Plugin, self).create_subnet(context, subnet)
self.extension_manager.process_create_subnet(
context, subnet[attributes.SUBNET], result)
network = self.get_network(context, result['network_id'])
mech_context = driver_context.SubnetContext(self, context,
result, network)
self.mechanism_manager.create_subnet_precommit(mech_context)
return result, mech_context
def create_subnet(self, context, subnet):
result, mech_context = self._create_subnet_db(context, subnet)
try:
self.mechanism_manager.create_subnet_postcommit(mech_context)
except ml2_exc.MechanismDriverError:
with excutils.save_and_reraise_exception():
LOG.error(_LE("mechanism_manager.create_subnet_postcommit "
"failed, deleting subnet '%s'"), result['id'])
self.delete_subnet(context, result['id'])
return result
def create_subnet_bulk(self, context, subnets):
objects = self._create_bulk_ml2(attributes.SUBNET, context, subnets)
return [obj['result'] for obj in objects]
def update_subnet(self, context, id, subnet):
session = context.session
with session.begin(subtransactions=True):
original_subnet = super(Ml2Plugin, self).get_subnet(context, id)
updated_subnet = super(Ml2Plugin, self).update_subnet(
context, id, subnet)
self.extension_manager.process_update_subnet(
context, subnet[attributes.SUBNET], updated_subnet)
network = self.get_network(context, updated_subnet['network_id'])
mech_context = driver_context.SubnetContext(
self, context, updated_subnet, network,
original_subnet=original_subnet)
self.mechanism_manager.update_subnet_precommit(mech_context)
# TODO(apech) - handle errors raised by update_subnet, potentially
# by re-calling update_subnet with the previous attributes. For
# now the error is propogated to the caller, which is expected to
# either undo/retry the operation or delete the resource.
self.mechanism_manager.update_subnet_postcommit(mech_context)
return updated_subnet
    def delete_subnet(self, context, id):
        """Delete a subnet, detaching its IPs from ports via update_port.

        Loops: each pass deletes auto-deletable IP allocations inside a
        transaction, then (outside it) calls update_port for remaining
        allocations so mechanism drivers run; when a pass finds no
        allocations left, the subnet record is deleted and the loop ends.
        Raises SubnetInUse if a user-owned allocation remains.
        """
        # REVISIT(rkukura) The super(Ml2Plugin, self).delete_subnet()
        # function is not used because it deallocates the subnet's addresses
        # from ports in the DB without invoking the derived class's
        # update_port(), preventing mechanism drivers from being called.
        # This approach should be revisited when the API layer is reworked
        # during icehouse.
        LOG.debug("Deleting subnet %s", id)
        session = context.session
        while True:
            with session.begin(subtransactions=True):
                record = self._get_subnet(context, id)
                subnet = self._make_subnet_dict(record, None, context=context)
                qry_allocated = (session.query(models_v2.IPAllocation).
                                 filter_by(subnet_id=id).
                                 join(models_v2.Port))
                is_auto_addr_subnet = ipv6_utils.is_auto_address_subnet(subnet)
                # Remove network owned ports, and delete IP allocations
                # for IPv6 addresses which were automatically generated
                # via SLAAC
                if is_auto_addr_subnet:
                    self._subnet_check_ip_allocations_internal_router_ports(
                        context, id)
                else:
                    qry_allocated = (
                        qry_allocated.filter(models_v2.Port.device_owner.
                        in_(db_base_plugin_v2.AUTO_DELETE_PORT_OWNERS)))
                allocated = qry_allocated.all()
                # Delete all the IPAllocation that can be auto-deleted
                if allocated:
                    for x in allocated:
                        session.delete(x)
                LOG.debug("Ports to auto-deallocate: %s", allocated)
                # Check if there are more IP allocations, unless
                # is_auto_address_subnet is True. In that case the check is
                # unnecessary. This additional check not only would be wasteful
                # for this class of subnet, but is also error-prone since when
                # the isolation level is set to READ COMMITTED allocations made
                # concurrently will be returned by this query
                if not is_auto_addr_subnet:
                    alloc = self._subnet_check_ip_allocations(context, id)
                    if alloc:
                        user_alloc = self._subnet_get_user_allocation(
                            context, id)
                        if user_alloc:
                            LOG.info(_LI("Found port (%(port_id)s, %(ip)s) "
                                         "having IP allocation on subnet "
                                         "%(subnet)s, cannot delete"),
                                     {'ip': user_alloc.ip_address,
                                      'port_id': user_alloc.port_id,
                                      'subnet': id})
                            raise exc.SubnetInUse(subnet_id=id)
                        else:
                            # allocation found and it was DHCP port
                            # that appeared after autodelete ports were
                            # removed - need to restart whole operation
                            raise os_db_exception.RetryRequest(
                                exc.SubnetInUse(subnet_id=id))
                db_base_plugin_v2._check_subnet_not_used(context, id)
                # If allocated is None, then all the IPAllocation were
                # correctly deleted during the previous pass.
                if not allocated:
                    network = self.get_network(context, subnet['network_id'])
                    mech_context = driver_context.SubnetContext(self, context,
                                                                subnet,
                                                                network)
                    self.mechanism_manager.delete_subnet_precommit(
                        mech_context)
                    LOG.debug("Deleting subnet record")
                    session.delete(record)
                    # The super(Ml2Plugin, self).delete_subnet() is not called,
                    # so need to manually call delete_subnet for pluggable ipam
                    self.ipam.delete_subnet(context, id)
                    LOG.debug("Committing transaction")
                    break
            for a in allocated:
                if a.port:
                    # calling update_port() for each allocation to remove the
                    # IP from the port and call the MechanismDrivers
                    data = {attributes.PORT:
                            {'fixed_ips': [{'subnet_id': ip.subnet_id,
                                            'ip_address': ip.ip_address}
                                           for ip in a.port.fixed_ips
                                           if ip.subnet_id != id]}}
                    try:
                        self.update_port(context, a.port_id, data)
                    except exc.PortNotFound:
                        LOG.debug("Port %s deleted concurrently", a.port_id)
                    except Exception:
                        with excutils.save_and_reraise_exception():
                            LOG.exception(_LE("Exception deleting fixed_ip "
                                              "from port %s"), a.port_id)
        try:
            self.mechanism_manager.delete_subnet_postcommit(mech_context)
        except ml2_exc.MechanismDriverError:
            # TODO(apech) - One or more mechanism driver failed to
            # delete the subnet. Ideally we'd notify the caller of
            # the fact that an error occurred.
            LOG.error(_LE("mechanism_manager.delete_subnet_postcommit failed"))
# TODO(yalei) - will be simplified after security group and address pair be
# converted to ext driver too.
def _portsec_ext_port_create_processing(self, context, port_data, port):
attrs = port[attributes.PORT]
port_security = ((port_data.get(psec.PORTSECURITY) is None) or
port_data[psec.PORTSECURITY])
# allowed address pair checks
if self._check_update_has_allowed_address_pairs(port):
if not port_security:
raise addr_pair.AddressPairAndPortSecurityRequired()
else:
# remove ATTR_NOT_SPECIFIED
attrs[addr_pair.ADDRESS_PAIRS] = []
if port_security:
self._ensure_default_security_group_on_port(context, port)
elif self._check_update_has_security_groups(port):
raise psec.PortSecurityAndIPRequiredForSecurityGroups()
    def _create_port_db(self, context, port):
        """Create a port in the DB and run extension/precommit hooks.

        Returns (port dict, PortContext); the caller runs the
        postcommit phase and the actual binding with the returned
        context.
        """
        attrs = port[attributes.PORT]
        if not attrs.get('status'):
            attrs['status'] = const.PORT_STATUS_DOWN
        session = context.session
        with session.begin(subtransactions=True):
            dhcp_opts = attrs.get(edo_ext.EXTRADHCPOPTS, [])
            result = super(Ml2Plugin, self).create_port(context, port)
            self.extension_manager.process_create_port(context, attrs, result)
            self._portsec_ext_port_create_processing(context, result, port)
            # sgids must be got after portsec checked with security group
            sgids = self._get_security_groups_on_port(context, port)
            self._process_port_create_security_group(context, result, sgids)
            network = self.get_network(context, result['network_id'])
            binding = db.add_port_binding(session, result['id'])
            mech_context = driver_context.PortContext(self, context, result,
                                                      network, binding, None)
            self._process_port_binding(mech_context, attrs)
            result[addr_pair.ADDRESS_PAIRS] = (
                self._process_create_allowed_address_pairs(
                    context, result,
                    attrs.get(addr_pair.ADDRESS_PAIRS)))
            self._process_port_create_extra_dhcp_opts(context, result,
                                                      dhcp_opts)
            self.mechanism_manager.create_port_precommit(mech_context)
        return result, mech_context
def create_port(self, context, port):
result, mech_context = self._create_port_db(context, port)
# notify any plugin that is interested in port create events
kwargs = {'context': context, 'port': result}
registry.notify(resources.PORT, events.AFTER_CREATE, self, **kwargs)
try:
self.mechanism_manager.create_port_postcommit(mech_context)
except ml2_exc.MechanismDriverError:
with excutils.save_and_reraise_exception():
LOG.error(_LE("mechanism_manager.create_port_postcommit "
"failed, deleting port '%s'"), result['id'])
self.delete_port(context, result['id'])
# REVISIT(rkukura): Is there any point in calling this before
# a binding has been successfully established?
self.notify_security_groups_member_updated(context, result)
try:
bound_context = self._bind_port_if_needed(mech_context)
except ml2_exc.MechanismDriverError:
with excutils.save_and_reraise_exception():
LOG.error(_LE("_bind_port_if_needed "
"failed, deleting port '%s'"), result['id'])
self.delete_port(context, result['id'])
return bound_context.current
    def create_port_bulk(self, context, ports):
        """Create several ports, notify listeners, and bind each one.

        On a binding failure every port created by this call is deleted
        and the exception is re-raised.
        """
        objects = self._create_bulk_ml2(attributes.PORT, context, ports)
        # REVISIT(rkukura): Is there any point in calling this before
        # a binding has been successfully established?
        results = [obj['result'] for obj in objects]
        self.notify_security_groups_member_updated_bulk(context, results)
        # AFTER_CREATE is only emitted for ports whose request supplied
        # a binding host.
        for obj in objects:
            attrs = obj['attributes']
            if attrs and attrs.get(portbindings.HOST_ID):
                kwargs = {'context': context, 'port': obj['result']}
                registry.notify(
                    resources.PORT, events.AFTER_CREATE, self, **kwargs)
        try:
            for obj in objects:
                obj['bound_context'] = self._bind_port_if_needed(
                    obj['mech_context'])
            return [obj['bound_context'].current for obj in objects]
        except ml2_exc.MechanismDriverError:
            with excutils.save_and_reraise_exception():
                resource_ids = [res['result']['id'] for res in objects]
                LOG.error(_LE("_bind_port_if_needed failed. "
                              "Deleting all ports from create bulk '%s'"),
                          resource_ids)
                self._delete_objects(context, attributes.PORT, objects)
    # TODO(yalei) - will be simplified after security group and address pair be
    # converted to ext driver too.
    def _portsec_ext_port_update_processing(self, updated_port, context, port,
                                            id):
        """Enforce port-security constraints when a port update disables it.

        Only acts when the updated port has port security disabled
        (absent means enabled); then rejects the update if address
        pairs or security groups are requested or still present.
        """
        port_security = ((updated_port.get(psec.PORTSECURITY) is None) or
                         updated_port[psec.PORTSECURITY])
        if port_security:
            return
        # check the address-pairs
        if self._check_update_has_allowed_address_pairs(port):
            #  has address pairs in request
            raise addr_pair.AddressPairAndPortSecurityRequired()
        elif (not
              self._check_update_deletes_allowed_address_pairs(port)):
            # not a request for deleting the address-pairs
            updated_port[addr_pair.ADDRESS_PAIRS] = (
                self.get_allowed_address_pairs(context, id))
            # check if address pairs has been in db, if address pairs could
            # be put in extension driver, we can refine here.
            if updated_port[addr_pair.ADDRESS_PAIRS]:
                raise addr_pair.AddressPairAndPortSecurityRequired()
        # checks if security groups were updated adding/modifying
        # security groups, port security is set
        if self._check_update_has_security_groups(port):
            raise psec.PortSecurityAndIPRequiredForSecurityGroups()
        elif (not
              self._check_update_deletes_security_groups(port)):
            # Update did not have security groups passed in. Check
            # that port does not have any security groups already on it.
            filters = {'port_id': [id]}
            security_groups = (
                super(Ml2Plugin, self)._get_port_security_group_bindings(
                    context, filters)
            )
            if security_groups:
                raise psec.PortSecurityPortHasSecurityGroup()
    def update_port(self, context, id, port):
        """Update a port, run driver hooks, and rebind if necessary.

        Inside one transaction the port row is locked, updated, checked
        against port-security rules, and precommit hooks run (once per
        DVR host binding for DVR interface ports). Notifications,
        postcommit hooks and rebinding happen after the commit.
        """
        attrs = port[attributes.PORT]
        need_port_update_notify = False
        session = context.session
        bound_mech_contexts = []
        with session.begin(subtransactions=True):
            port_db, binding = db.get_locked_port_and_binding(session, id)
            if not port_db:
                raise exc.PortNotFound(port_id=id)
            mac_address_updated = self._check_mac_update_allowed(
                port_db, attrs, binding)
            need_port_update_notify |= mac_address_updated
            original_port = self._make_port_dict(port_db)
            updated_port = super(Ml2Plugin, self).update_port(context, id,
                                                              port)
            self.extension_manager.process_update_port(context, attrs,
                                                       updated_port)
            self._portsec_ext_port_update_processing(updated_port, context,
                                                     port, id)
            if (psec.PORTSECURITY in attrs) and (
                    original_port[psec.PORTSECURITY] !=
                    updated_port[psec.PORTSECURITY]):
                need_port_update_notify = True
            # TODO(QoS): Move out to the extension framework somehow.
            # Follow https://review.openstack.org/#/c/169223 for a solution.
            if (qos_consts.QOS_POLICY_ID in attrs and
                    original_port[qos_consts.QOS_POLICY_ID] !=
                    updated_port[qos_consts.QOS_POLICY_ID]):
                need_port_update_notify = True
            if addr_pair.ADDRESS_PAIRS in attrs:
                need_port_update_notify |= (
                    self.update_address_pairs_on_port(context, id, port,
                                                      original_port,
                                                      updated_port))
            need_port_update_notify |= self.update_security_group_on_port(
                context, id, port, original_port, updated_port)
            network = self.get_network(context, original_port['network_id'])
            need_port_update_notify |= self._update_extra_dhcp_opts_on_port(
                context, id, port, updated_port)
            levels = db.get_binding_levels(session, id, binding.host)
            mech_context = driver_context.PortContext(
                self, context, updated_port, network, binding, levels,
                original_port=original_port)
            need_port_update_notify |= self._process_port_binding(
                mech_context, attrs)
            # For DVR router interface ports we need to retrieve the
            # DVRPortbinding context instead of the normal port context.
            # The normal Portbinding context does not have the status
            # of the ports that are required by the l2pop to process the
            # postcommit events.
            # NOTE:Sometimes during the update_port call, the DVR router
            # interface port may not have the port binding, so we cannot
            # create a generic bindinglist that will address both the
            # DVR and non-DVR cases here.
            # TODO(Swami): This code need to be revisited.
            if port_db['device_owner'] == const.DEVICE_OWNER_DVR_INTERFACE:
                dvr_binding_list = db.get_dvr_port_bindings(session, id)
                for dvr_binding in dvr_binding_list:
                    levels = db.get_binding_levels(session, id,
                                                   dvr_binding.host)
                    dvr_mech_context = driver_context.PortContext(
                        self, context, updated_port, network,
                        dvr_binding, levels, original_port=original_port)
                    self.mechanism_manager.update_port_precommit(
                        dvr_mech_context)
                    bound_mech_contexts.append(dvr_mech_context)
            else:
                self.mechanism_manager.update_port_precommit(mech_context)
                bound_mech_contexts.append(mech_context)
        # Notifications must be sent after the above transaction is complete
        kwargs = {
            'context': context,
            'port': updated_port,
            'mac_address_updated': mac_address_updated,
            'original_port': original_port,
        }
        registry.notify(resources.PORT, events.AFTER_UPDATE, self, **kwargs)
        # Note that DVR Interface ports will have bindings on
        # multiple hosts, and so will have multiple mech_contexts,
        # while other ports typically have just one.
        # Since bound_mech_contexts has both the DVR and non-DVR
        # contexts we can manage just with a single for loop.
        try:
            for mech_context in bound_mech_contexts:
                self.mechanism_manager.update_port_postcommit(
                    mech_context)
        except ml2_exc.MechanismDriverError:
            LOG.error(_LE("mechanism_manager.update_port_postcommit "
                          "failed for port %s"), id)
        self.check_and_notify_security_group_member_changed(
            context, original_port, updated_port)
        need_port_update_notify |= self.is_security_group_member_updated(
            context, original_port, updated_port)
        if original_port['admin_state_up'] != updated_port['admin_state_up']:
            need_port_update_notify = True
        # NOTE: In the case of DVR ports, the port-binding is done after
        # router scheduling when sync_routers is called and so this call
        # below may not be required for DVR routed interfaces. But still
        # since we don't have the mech_context for the DVR router interfaces
        # at certain times, we just pass the port-context and return it, so
        # that we don't disturb other methods that are expecting a return
        # value.
        bound_context = self._bind_port_if_needed(
            mech_context,
            allow_notify=True,
            need_notify=need_port_update_notify)
        return bound_context.current
def _process_dvr_port_binding(self, mech_context, context, attrs):
session = mech_context._plugin_context.session
binding = mech_context._binding
port = mech_context.current
port_id = port['id']
if binding.vif_type != portbindings.VIF_TYPE_UNBOUND:
binding.vif_details = ''
binding.vif_type = portbindings.VIF_TYPE_UNBOUND
if binding.host:
db.clear_binding_levels(session, port_id, binding.host)
binding.host = ''
self._update_port_dict_binding(port, binding)
binding.host = attrs and attrs.get(portbindings.HOST_ID)
binding.router_id = attrs and attrs.get('device_id')
    def update_dvr_port_binding(self, context, id, port):
        """(Re)bind a DVR port on the host supplied in the request.

        A binding is created or refreshed only when none exists yet,
        the previous attempt failed, or the owning router changed.
        """
        attrs = port[attributes.PORT]
        host = attrs and attrs.get(portbindings.HOST_ID)
        host_set = attributes.is_attr_set(host)
        if not host_set:
            LOG.error(_LE("No Host supplied to bind DVR Port %s"), id)
            return
        session = context.session
        binding = db.get_dvr_port_binding_by_host(session, id, host)
        device_id = attrs and attrs.get('device_id')
        router_id = binding and binding.get('router_id')
        update_required = (not binding or
            binding.vif_type == portbindings.VIF_TYPE_BINDING_FAILED or
            router_id != device_id)
        if update_required:
            with session.begin(subtransactions=True):
                try:
                    orig_port = super(Ml2Plugin, self).get_port(context, id)
                except exc.PortNotFound:
                    LOG.debug("DVR Port %s has been deleted concurrently", id)
                    return
                if not binding:
                    binding = db.ensure_dvr_port_binding(
                        session, id, host, router_id=device_id)
                network = self.get_network(context, orig_port['network_id'])
                levels = db.get_binding_levels(session, id, host)
                mech_context = driver_context.PortContext(self,
                    context, orig_port, network,
                    binding, levels, original_port=orig_port)
                self._process_dvr_port_binding(mech_context, context, attrs)
            # Binding runs outside the transaction above.
            self._bind_port_if_needed(mech_context)
def _pre_delete_port(self, context, port_id, port_check):
"""Do some preliminary operations before deleting the port."""
LOG.debug("Deleting port %s", port_id)
try:
# notify interested parties of imminent port deletion;
# a failure here prevents the operation from happening
kwargs = {
'context': context,
'port_id': port_id,
'port_check': port_check
}
registry.notify(
resources.PORT, events.BEFORE_DELETE, self, **kwargs)
except exceptions.CallbackFailure as e:
# NOTE(armax): preserve old check's behavior
if len(e.errors) == 1:
raise e.errors[0].error
raise exc.ServicePortInUse(port_id=port_id, reason=e)
    def delete_port(self, context, id, l3_port_check=True):
        """Delete a port, running L3 cleanup and mechanism driver hooks.

        Precommit hooks run inside the transaction (once per host
        binding for DVR interface ports); postcommit, agent and
        callback notifications run in _post_delete_port afterwards.
        Silently returns if the port is already gone.
        """
        self._pre_delete_port(context, id, l3_port_check)
        # TODO(armax): get rid of the l3 dependency in the with block
        removed_routers = []
        router_ids = []
        l3plugin = manager.NeutronManager.get_service_plugins().get(
            service_constants.L3_ROUTER_NAT)
        is_dvr_enabled = utils.is_extension_supported(
            l3plugin, const.L3_DISTRIBUTED_EXT_ALIAS)
        session = context.session
        with session.begin(subtransactions=True):
            port_db, binding = db.get_locked_port_and_binding(session, id)
            if not port_db:
                LOG.debug("The port '%s' was deleted", id)
                return
            port = self._make_port_dict(port_db)
            network = self.get_network(context, port['network_id'])
            bound_mech_contexts = []
            device_owner = port['device_owner']
            if device_owner == const.DEVICE_OWNER_DVR_INTERFACE:
                # DVR interface ports have one binding per host.
                bindings = db.get_dvr_port_bindings(context.session, id)
                for bind in bindings:
                    levels = db.get_binding_levels(context.session, id,
                                                   bind.host)
                    mech_context = driver_context.PortContext(
                        self, context, port, network, bind, levels)
                    self.mechanism_manager.delete_port_precommit(mech_context)
                    bound_mech_contexts.append(mech_context)
            else:
                levels = db.get_binding_levels(context.session, id,
                                               binding.host)
                mech_context = driver_context.PortContext(
                    self, context, port, network, binding, levels)
                if is_dvr_enabled and utils.is_dvr_serviced(device_owner):
                    removed_routers = l3plugin.dvr_deletens_if_no_port(
                        context, id)
                self.mechanism_manager.delete_port_precommit(mech_context)
                bound_mech_contexts.append(mech_context)
            if l3plugin:
                router_ids = l3plugin.disassociate_floatingips(
                    context, id, do_notify=False)
            LOG.debug("Calling delete_port for %(port_id)s owned by %(owner)s",
                      {"port_id": id, "owner": device_owner})
            super(Ml2Plugin, self).delete_port(context, id)
        self._post_delete_port(
            context, port, router_ids, removed_routers, bound_mech_contexts)
def _post_delete_port(
self, context, port, router_ids, removed_routers, bound_mech_contexts):
kwargs = {
'context': context,
'port': port,
'router_ids': router_ids,
'removed_routers': removed_routers
}
registry.notify(resources.PORT, events.AFTER_DELETE, self, **kwargs)
try:
# Note that DVR Interface ports will have bindings on
# multiple hosts, and so will have multiple mech_contexts,
# while other ports typically have just one.
for mech_context in bound_mech_contexts:
self.mechanism_manager.delete_port_postcommit(mech_context)
except ml2_exc.MechanismDriverError:
# TODO(apech) - One or more mechanism driver failed to
# delete the port. Ideally we'd notify the caller of the
# fact that an error occurred.
LOG.error(_LE("mechanism_manager.delete_port_postcommit failed for"
" port %s"), port['id'])
self.notifier.port_delete(context, port['id'])
self.notify_security_groups_member_updated(context, port)
    def get_bound_port_context(self, plugin_context, port_id, host=None,
                               cached_networks=None):
        """Return a bound PortContext for the port whose id starts with
        port_id, or None if the port or its binding cannot be found.

        cached_networks, if given, avoids a get_network DB round trip.
        """
        session = plugin_context.session
        with session.begin(subtransactions=True):
            try:
                # port_id may be a prefix of the full UUID.
                port_db = (session.query(models_v2.Port).
                           enable_eagerloads(False).
                           filter(models_v2.Port.id.startswith(port_id)).
                           one())
            except sa_exc.NoResultFound:
                LOG.debug("No ports have port_id starting with %s",
                          port_id)
                return
            except sa_exc.MultipleResultsFound:
                LOG.error(_LE("Multiple ports have port_id starting with %s"),
                          port_id)
                return
            port = self._make_port_dict(port_db)
            network = (cached_networks or {}).get(port['network_id'])
            if not network:
                network = self.get_network(plugin_context, port['network_id'])
            if port['device_owner'] == const.DEVICE_OWNER_DVR_INTERFACE:
                # DVR interface ports are bound per host.
                binding = db.get_dvr_port_binding_by_host(
                    session, port['id'], host)
                if not binding:
                    LOG.error(_LE("Binding info for DVR port %s not found"),
                              port_id)
                    return None
                levels = db.get_binding_levels(session, port_db.id, host)
                port_context = driver_context.PortContext(
                    self, plugin_context, port, network, binding, levels)
            else:
                # since eager loads are disabled in port_db query
                # related attribute port_binding could disappear in
                # concurrent port deletion.
                # It's not an error condition.
                binding = port_db.port_binding
                if not binding:
                    LOG.info(_LI("Binding info for port %s was not found, "
                                 "it might have been deleted already."),
                             port_id)
                    return
                levels = db.get_binding_levels(session, port_db.id,
                                               port_db.port_binding.host)
                port_context = driver_context.PortContext(
                    self, plugin_context, port, network, binding, levels)
        return self._bind_port_if_needed(port_context)
    @oslo_db_api.wrap_db_retry(
        max_retries=db_api.MAX_RETRIES, retry_on_request=True,
        exception_checker=lambda e: isinstance(e, (sa_exc.StaleDataError,
                                                   os_db_exception.DBDeadlock))
    )
    def update_port_status(self, context, port_id, status, host=None,
                           network=None):
        """
        Returns port_id (non-truncated uuid) if the port exists.
        Otherwise returns None.
        network can be passed in to avoid another get_network call if
        one was already performed by the caller.
        """
        # Retried on StaleDataError/DBDeadlock by the decorator above, so the
        # body must be safe to re-execute from the top.
        updated = False
        session = context.session
        with session.begin(subtransactions=True):
            port = db.get_port(session, port_id)
            if not port:
                LOG.debug("Port %(port)s update to %(val)s by agent not found",
                          {'port': port_id, 'val': status})
                return None
            if (port.status != status and
                port['device_owner'] != const.DEVICE_OWNER_DVR_INTERFACE):
                # Regular (non-DVR) port: update the port row directly and
                # run the precommit phase inside this transaction.
                original_port = self._make_port_dict(port)
                port.status = status
                updated_port = self._make_port_dict(port)
                network = network or self.get_network(
                    context, original_port['network_id'])
                levels = db.get_binding_levels(session, port.id,
                                               port.port_binding.host)
                mech_context = driver_context.PortContext(
                    self, context, updated_port, network, port.port_binding,
                    levels, original_port=original_port)
                self.mechanism_manager.update_port_precommit(mech_context)
                updated = True
            elif port['device_owner'] == const.DEVICE_OWNER_DVR_INTERFACE:
                # DVR port: status lives on the per-host binding row, not on
                # the port itself.
                binding = db.get_dvr_port_binding_by_host(
                    session, port['id'], host)
                if not binding:
                    return
                binding['status'] = status
                binding.update(binding)
                updated = True
        if (updated and
            port['device_owner'] == const.DEVICE_OWNER_DVR_INTERFACE):
            # Second transaction: derive the aggregate DVR port status from
            # all of its per-host bindings and run precommit for it.
            with session.begin(subtransactions=True):
                port = db.get_port(session, port_id)
                if not port:
                    LOG.warning(_LW("Port %s not found during update"),
                                port_id)
                    return
                original_port = self._make_port_dict(port)
                network = network or self.get_network(
                    context, original_port['network_id'])
                port.status = db.generate_dvr_port_status(session, port['id'])
                updated_port = self._make_port_dict(port)
                levels = db.get_binding_levels(session, port_id, host)
                mech_context = (driver_context.PortContext(
                    self, context, updated_port, network,
                    binding, levels, original_port=original_port))
                self.mechanism_manager.update_port_precommit(mech_context)
        if updated:
            # Postcommit runs outside the DB transaction, as required by the
            # mechanism driver API.
            self.mechanism_manager.update_port_postcommit(mech_context)
        if port['device_owner'] == const.DEVICE_OWNER_DVR_INTERFACE:
            db.delete_dvr_port_binding_if_stale(session, binding)
        return port['id']
def port_bound_to_host(self, context, port_id, host):
port = db.get_port(context.session, port_id)
if not port:
LOG.debug("No Port match for: %s", port_id)
return False
if port['device_owner'] == const.DEVICE_OWNER_DVR_INTERFACE:
bindings = db.get_dvr_port_bindings(context.session, port_id)
for b in bindings:
if b.host == host:
return True
LOG.debug("No binding found for DVR port %s", port['id'])
return False
else:
port_host = db.get_port_binding_host(context.session, port_id)
return (port_host == host)
def get_ports_from_devices(self, context, devices):
port_ids_to_devices = dict(
(self._device_to_port_id(context, device), device)
for device in devices)
port_ids = list(port_ids_to_devices.keys())
ports = db.get_ports_and_sgs(context, port_ids)
for port in ports:
# map back to original requested id
port_id = next((port_id for port_id in port_ids
if port['id'].startswith(port_id)), None)
port['device'] = port_ids_to_devices.get(port_id)
return ports
@staticmethod
def _device_to_port_id(context, device):
# REVISIT(rkukura): Consider calling into MechanismDrivers to
# process device names, or having MechanismDrivers supply list
# of device prefixes to strip.
for prefix in const.INTERFACE_PREFIXES:
if device.startswith(prefix):
return device[len(prefix):]
# REVISIT(irenab): Consider calling into bound MD to
# handle the get_device_details RPC
if not uuidutils.is_uuid_like(device):
port = db.get_port_from_device_mac(context, device)
if port:
return port.id
return device
    def get_workers(self):
        """Return extra worker processes requested by mechanism drivers."""
        return self.mechanism_manager.get_workers()
| apache-2.0 |
sleach/python-nmap | setup.py | 1 | 1543 | from distutils.core import setup, Extension
# Import the package itself so nmap.__version__ resolves to the package's
# version string.  (Previously the name `nmap` was bound to a distutils
# Extension object built from .py sources -- which was never passed to
# setup() and shadowed the package, so `nmap.__version__` could not work.)
import nmap

# Install  : python setup.py install
# Register : python setup.py register

setup(
    name='python-nmap',
    version=nmap.__version__,
    author='Alexandre Norman',
    author_email='norman@xael.org',
    # NOTE: the comma after license was missing, which made this whole file
    # a SyntaxError; every keyword argument must be comma-separated.
    license='LICENSE',
    keywords="nmap, portscanner, network, sysadmin",
    # Get more strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
    platforms=[
        "Operating System :: OS Independent",
    ],
    classifiers=[
        "Development Status :: 5 - Production/Stable",
        "Programming Language :: Python",
        "Environment :: Console",
        "Intended Audience :: Developers",
        "Intended Audience :: System Administrators",
        "License :: OSI Approved :: GNU General Public License (GPL)",
        "Operating System :: OS Independent",
        "Topic :: System :: Monitoring",
        "Topic :: System :: Networking",
        "Topic :: System :: Networking :: Firewalls",
        "Topic :: System :: Networking :: Monitoring",
    ],
    packages=['nmap'],
    url='https://github.com/sleach/python-nmap',
    description='This is a python class to use nmap and access scan results from python',
    long_description=open('README.md').read() + "\n" + open('CHANGELOG').read(),
)
| gpl-3.0 |
leopittelli/Django-on-App-Engine-Example | urls.py | 1 | 1067 | from django.conf.urls import patterns, url, include
from django.views.generic.base import TemplateView, RedirectView
from main.views import cached_javascript_catalog
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
# Apps whose translations are exposed through the JavaScript i18n catalog.
js_info_dict = {
    'packages': ('main',),
}
# App Engine friendly 500 handler (djangotoolbox).
handler500 = 'djangotoolbox.errorviews.server_error'
# NOTE: pattern order matters -- Django dispatches to the first match, so the
# catch-all '' include for 'main' must stay last.
urlpatterns = patterns('',
    ('^_ah/warmup$', 'djangoappengine.views.warmup'),
    url(r'^jsi18n/$', cached_javascript_catalog, js_info_dict, name='cached_javascript_catalog'),
    (r'^i18n/', include('django.conf.urls.i18n')),
    url(r'^admin/', include(admin.site.urls)),
    (r'^summernote/', include('django_summernote.urls')),
    (r'^favicon\.ico$', RedirectView.as_view(url='/static/global/favicon.ico')),
    (r'^robots\.txt$', TemplateView.as_view(template_name='main/robots.txt', content_type='text/plain')),
    (r'^sitemap\.xml$', TemplateView.as_view(template_name='main/sitemap.xml', content_type='application/xml')),
    url('', include('main.urls', namespace='main')),
)
| mit |
GunoH/intellij-community | python/helpers/pydev/pydev_tests/test_pydevconsole.py | 10 | 10777 | import sys
import threading
import unittest
import pydevconsole
from _pydev_comm.pydev_rpc import make_rpc_client, start_rpc_server_and_make_client, start_rpc_server
from _pydevd_bundle import pydevd_io
from pydev_console.pydev_protocol import PythonConsoleFrontendService, PythonConsoleBackendService
from pydevconsole import enable_thrift_logging, create_server_handler_factory
# Detect which builtin reads a line from stdin: Python 2 provides
# raw_input(), which Python 3 renamed to input().
try:
    raw_input
except NameError:
    raw_input_name = 'input'
else:
    raw_input_name = 'raw_input'
#=======================================================================================================================
# Test
#=======================================================================================================================
class Test(unittest.TestCase):
    """Integration tests for pydevconsole's RPC-based interpreter.

    Each test stands up a thrift RPC client/server pair on localhost and
    drives the console interpreter through it, capturing stdout in an
    in-memory buffer so assertions can inspect what was printed.
    """
    def test_console_hello(self):
        """The interpreter answers the hello() handshake."""
        self.original_stdout = sys.stdout
        sys.stdout = pydevd_io.IOBuf()
        try:
            sys.stdout.encoding = sys.stdin.encoding
        except AttributeError:
            # In Python 3 encoding is not writable (whereas in Python 2 it doesn't exist).
            pass
        try:
            rpc_client = self.start_client_thread() #@UnusedVariable
            import time
            time.sleep(.3) #let's give it some time to start the threads
            from _pydev_bundle import pydev_localhost
            interpreter = pydevconsole.InterpreterInterface(threading.currentThread(), rpc_client=rpc_client)
            (result,) = interpreter.hello("Hello pydevconsole")
            self.assertEqual(result, "Hello eclipse")
        finally:
            sys.stdout = self.original_stdout
    def test_console_requests(self):
        """Exercise exec, stdin requests, completions and descriptions."""
        self.original_stdout = sys.stdout
        sys.stdout = pydevd_io.IOBuf()
        try:
            rpc_client = self.start_client_thread() #@UnusedVariable
            import time
            time.sleep(.3) #let's give it some time to start the threads
            from _pydev_bundle import pydev_localhost
            from _pydev_bundle.pydev_console_types import CodeFragment
            interpreter = pydevconsole.InterpreterInterface(threading.currentThread(), rpc_client=rpc_client)
            sys.stdout = pydevd_io.IOBuf()
            interpreter.add_exec(CodeFragment('class Foo:\n CONSTANT=1\n'))
            interpreter.add_exec(CodeFragment('foo=Foo()'))
            interpreter.add_exec(CodeFragment('foo.__doc__=None'))
            interpreter.add_exec(CodeFragment('val = %s()' % (raw_input_name,)))
            interpreter.add_exec(CodeFragment('50'))
            interpreter.add_exec(CodeFragment('print (val)'))
            found = sys.stdout.getvalue().split()
            try:
                self.assertEqual(['50', 'input_request'], found)
            except:
                try:
                    self.assertEqual(['input_request'], found) #IPython
                except:
                    self.assertEqual([u'50', u'input_request'], found[1:]) # IPython 5.1
                    self.assertTrue(found[0].startswith(u'Out'))
            # Completion tuples are (name, doc, args, type-id); the accepted
            # alternatives below cover different Python/IPython versions.
            comps = interpreter.do_get_completions('foo.', 'foo.')
            self.assertTrue(
                ('CONSTANT', '', '', '3') in comps or ('CONSTANT', '', '', '4') in comps, \
                'Found: %s' % comps
            )
            comps = interpreter.do_get_completions('"".', '"".')
            self.assertTrue(
                ('__add__', 'x.__add__(y) <==> x+y', '', '3') in comps or
                ('__add__', '', '', '4') in comps or
                ('__add__', 'x.__add__(y) <==> x+y\r\nx.__add__(y) <==> x+y', '()', '2') in comps or
                ('__add__', 'x.\n__add__(y) <==> x+yx.\n__add__(y) <==> x+y', '()', '2'),
                'Did not find __add__ in : %s' % (comps,)
            )
            completions = interpreter.do_get_completions('', '')
            for c in completions:
                if c[0] == 'AssertionError':
                    break
            else:
                self.fail('Could not find AssertionError')
            completions = interpreter.do_get_completions('Assert', 'Assert')
            for c in completions:
                if c[0] == 'RuntimeError':
                    self.fail('Did not expect to find RuntimeError there')
            self.assertTrue(('__doc__', None, '', '3') not in interpreter.do_get_completions('foo.CO', 'foo.'))
            comps = interpreter.do_get_completions('va', 'va')
            self.assertTrue(('val', '', '', '3') in comps or ('vars', '', '', '4') in comps)
            interpreter.add_exec(CodeFragment('s = "mystring"'))
            # Descriptions vary wildly across interpreters/versions, hence
            # the long list of accepted substrings below.
            desc = interpreter.getDescription('val')
            self.assertTrue(desc.find('str(object) -> string') >= 0 or
                            desc == "'input_request'" or
                            desc.find('str(string[, encoding[, errors]]) -> str') >= 0 or
                            desc.find('str(Char* value)') >= 0 or
                            desc.find('str(object=\'\') -> string') >= 0 or
                            desc.find('str(value: Char*)') >= 0 or
                            desc.find('str(object=\'\') -> str') >= 0 or
                            desc.find('unicode(object=\'\') -> unicode object') >= 0 or
                            desc.find('The most base type') >= 0 # Jython 2.7 is providing this :P
                            ,
                            'Could not find what was needed in %s' % desc)
            desc = interpreter.getDescription('val.join')
            self.assertTrue(desc.find('S.join(sequence) -> string') >= 0 or
                            desc.find('S.join(sequence) -> str') >= 0 or
                            desc.find('S.join(iterable) -> string') >= 0 or
                            desc == "<builtin method 'join'>" or
                            desc == "<built-in method join of str object>" or
                            desc.find('str join(str self, list sequence)') >= 0 or
                            desc.find('S.join(iterable) -> str') >= 0 or
                            desc.find('S.join(iterable) -> unicode') >= 0 or
                            desc.find('join(self: str, sequence: list) -> str') >= 0 or
                            desc.find('Concatenate any number of strings.') >= 0,
                            "Could not recognize: %s" % (desc,))
        finally:
            sys.stdout = self.original_stdout
    def create_frontend_handler(self):
        """Build a fake frontend handler that records interpreter callbacks."""
        class HandleRequestInput:
            def __init__(self):
                self.requested_input = False
                self.notified_finished = 0
                self.rpc_client = None
            def requestInput(self, path):
                # Called when the interpreter needs a line from stdin.
                self.requested_input = True
                return 'input_request'
            def notifyFinished(self, needs_more_input):
                self.notified_finished += 1
            def notifyAboutMagic(self, commands, is_auto_magic):
                pass
        return HandleRequestInput()
    def start_client_thread(self):
        """Start a frontend RPC server and return a client connected to it."""
        from _pydev_bundle import pydev_localhost
        enable_thrift_logging()
        # here we start the test server
        server_socket = start_rpc_server_and_make_client(pydev_localhost.get_localhost(), 0,
                                                         PythonConsoleFrontendService,
                                                         PythonConsoleBackendService,
                                                         create_server_handler_factory(self.create_frontend_handler()))
        host, port = server_socket.getsockname()
        import time
        time.sleep(1)
        rpc_client, _ = make_rpc_client(PythonConsoleFrontendService, host, port)
        return rpc_client
    def start_debugger_server_thread(self, debugger_port, socket_code):
        """Start a daemon thread that accepts one connection on debugger_port
        and hands the accepted socket to socket_code."""
        class DebuggerServerThread(threading.Thread):
            def __init__(self, debugger_port, socket_code):
                threading.Thread.__init__(self)
                self.debugger_port = debugger_port
                self.socket_code = socket_code
            def run(self):
                import socket
                s = socket.socket()
                s.bind(('', debugger_port))
                s.listen(1)
                socket, unused_addr = s.accept()
                socket_code(socket)
        debugger_thread = DebuggerServerThread(debugger_port, socket_code)
        debugger_thread.setDaemon(True)
        debugger_thread.start()
        return debugger_thread
    def get_free_addresses(self):
        """Return two distinct free localhost port numbers."""
        from _pydev_bundle.pydev_localhost import get_socket_names
        socket_names = get_socket_names(2, True)
        port0 = socket_names[0][1]
        port1 = socket_names[1][1]
        assert port0 != port1
        assert port0 > 0
        assert port1 > 0
        return port0, port1
    def test_server(self):
        """End-to-end: run the console backend server and drive it via RPC."""
        self.original_stdout = sys.stdout
        sys.stdout = pydevd_io.IOBuf()
        try:
            from _pydev_bundle.pydev_localhost import get_socket_name
            host, port = get_socket_name(close=True)
            class ServerThread(threading.Thread):
                def __init__(self, backend_port):
                    threading.Thread.__init__(self)
                    self.backend_port = backend_port
                def run(self):
                    from _pydev_bundle import pydev_localhost
                    pydevconsole.start_server(self.backend_port)
            server_thread = ServerThread(port)
            server_thread.setDaemon(True)
            server_thread.start()
            import time
            time.sleep(1) #let's give it some time to start the threads
            rpc_client, server_transport = make_rpc_client(PythonConsoleBackendService, host, port)
            server_service = PythonConsoleFrontendService
            server_handler = self.create_frontend_handler()
            start_rpc_server(server_transport, server_service, server_handler)
            rpc_client.execLine('class Foo:')
            rpc_client.execLine(' pass')
            rpc_client.execLine('')
            rpc_client.execLine('foo = Foo()')
            rpc_client.execLine('a = %s()' % (raw_input_name,))
            rpc_client.execLine('print (a)')
            # Poll (with a 2s timeout) until the backend asks for stdin.
            initial = time.time()
            while not server_handler.requested_input:
                if time.time() - initial > 2:
                    raise AssertionError('Did not get the return asked before the timeout.')
                time.sleep(.1)
            found = sys.stdout.getvalue()
            while ['input_request'] != found.split():
                found += sys.stdout.getvalue()
                if time.time() - initial > 2:
                    break
                time.sleep(.1)
            self.assertIn('input_request', found.split())
        finally:
            sys.stdout = self.original_stdout
| apache-2.0 |
ravindrapanda/tensorflow | tensorflow/contrib/tensorboard/plugins/trace/__init__.py | 147 | 1045 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Public API for the Trace plugin."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# pylint: disable=wildcard-import
from tensorflow.contrib.tensorboard.plugins.trace.trace import *
from tensorflow.contrib.tensorboard.plugins.trace.trace_info_pb2 import *
# pylint: enable=wildcard-import
| apache-2.0 |
kangear/git-repo | subcmds/sync.py | 4 | 28299 | #
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import json
import netrc
from optparse import SUPPRESS_HELP
import os
import re
import shutil
import socket
import subprocess
import sys
import time
from pyversion import is_python3
if is_python3():
import urllib.parse
import xmlrpc.client
else:
import imp
import urlparse
import xmlrpclib
urllib = imp.new_module('urllib')
urllib.parse = urlparse
xmlrpc = imp.new_module('xmlrpc')
xmlrpc.client = xmlrpclib
# Fall back to the no-op dummy_threading module on Pythons built without
# thread support.
try:
  import threading as _threading
except ImportError:
  import dummy_threading as _threading
try:
  import resource
  def _rlimit_nofile():
    # Current (soft, hard) limit on the number of open file descriptors.
    return resource.getrlimit(resource.RLIMIT_NOFILE)
except ImportError:
  def _rlimit_nofile():
    # No resource module (e.g. Windows): assume a conservative default.
    return (256, 256)
try:
  import multiprocessing
except ImportError:
  # multiprocessing is optional; cpu-count based tuning is skipped without it.
  multiprocessing = None
from git_command import GIT, git_require
from git_refs import R_HEADS, HEAD
from project import Project
from project import RemoteSpec
from command import Command, MirrorSafeCommand
from error import RepoChangedException, GitError, ManifestParseError
from project import SyncBuffer
from progress import Progress
from wrapper import Wrapper
_ONE_DAY_S = 24 * 60 * 60  # number of seconds in one day
class _FetchError(Exception):
  """Internal error thrown in _FetchHelper() when we don't want stack trace."""
  pass
class Sync(Command, MirrorSafeCommand):
  # Default fetch parallelism; overridden by -j/--jobs or the manifest's
  # sync-j attribute (see _Options).
  jobs = 1
  # Shown in the common command list of 'repo help'.
  common = True
  helpSummary = "Update working tree to the latest revision"
  helpUsage = """
%prog [<project>...]
"""
  helpDescription = """
The '%prog' command synchronizes local project directories
with the remote repositories specified in the manifest.  If a local
project does not yet exist, it will clone a new local directory from
the remote repository and set up tracking branches as specified in
the manifest.  If the local project already exists, '%prog'
will update the remote branches and rebase any new local changes
on top of the new remote changes.
'%prog' will synchronize all projects listed at the command
line.  Projects can be specified either by name, or by a relative
or absolute path to the project's local directory.  If no projects
are specified, '%prog' will synchronize all projects listed in
the manifest.
The -d/--detach option can be used to switch specified projects
back to the manifest revision.  This option is especially helpful
if the project is currently on a topic branch, but the manifest
revision is temporarily needed.
The -s/--smart-sync option can be used to sync to a known good
build as specified by the manifest-server element in the current
manifest.  The -t/--smart-tag option is similar and allows you to
specify a custom tag/label.
The -u/--manifest-server-username and -p/--manifest-server-password
options can be used to specify a username and password to authenticate
with the manifest server when using the -s or -t option.
If -u and -p are not specified when using the -s or -t option, '%prog'
will attempt to read authentication credentials for the manifest server
from the user's .netrc file.
'%prog' will not use authentication credentials from -u/-p or .netrc
if the manifest server specified in the manifest file already includes
credentials.
The -f/--force-broken option can be used to proceed with syncing
other projects if a project sync fails.
The --no-clone-bundle option disables any attempt to use
$URL/clone.bundle to bootstrap a new Git repository from a
resumeable bundle file on a content delivery network.  This
may be necessary if there are problems with the local Python
HTTP client or proxy configuration, but the Git binary works.
The --fetch-submodules option enables fetching Git submodules
of a project from server.
The -c/--current-branch option can be used to only fetch objects that
are on the branch specified by a project's revision.
The --optimized-fetch option can be used to only fetch projects that
are fixed to a sha1 revision if the sha1 revision does not already
exist locally.
SSH Connections
---------------
If at least one project remote URL uses an SSH connection (ssh://,
git+ssh://, or user@host:path syntax) repo will automatically
enable the SSH ControlMaster option when connecting to that host.
This feature permits other projects in the same '%prog' session to
reuse the same SSH tunnel, saving connection setup overheads.
To disable this behavior on UNIX platforms, set the GIT_SSH
environment variable to 'ssh'.  For example:
  export GIT_SSH=ssh
  %prog
Compatibility
~~~~~~~~~~~~~
This feature is automatically disabled on Windows, due to the lack
of UNIX domain socket support.
This feature is not compatible with url.insteadof rewrites in the
user's ~/.gitconfig.  '%prog' is currently not able to perform the
rewrite early enough to establish the ControlMaster tunnel.
If the remote SSH daemon is Gerrit Code Review, version 2.0.10 or
later is required to fix a server side protocol bug.
"""
  def _Options(self, p, show_smart=True):
    """Register 'repo sync' command line options on option parser p.

    show_smart controls whether the -s/--smart-sync and -t/--smart-tag
    options are offered.
    """
    try:
      # Default -j to the manifest's sync-j attribute when present.
      self.jobs = self.manifest.default.sync_j
    except ManifestParseError:
      self.jobs = 1
    p.add_option('-f', '--force-broken',
                 dest='force_broken', action='store_true',
                 help="continue sync even if a project fails to sync")
    p.add_option('-l', '--local-only',
                 dest='local_only', action='store_true',
                 help="only update working tree, don't fetch")
    p.add_option('-n', '--network-only',
                 dest='network_only', action='store_true',
                 help="fetch only, don't update working tree")
    p.add_option('-d', '--detach',
                 dest='detach_head', action='store_true',
                 help='detach projects back to manifest revision')
    p.add_option('-c', '--current-branch',
                 dest='current_branch_only', action='store_true',
                 help='fetch only current branch from server')
    p.add_option('-q', '--quiet',
                 dest='quiet', action='store_true',
                 help='be more quiet')
    p.add_option('-j', '--jobs',
                 dest='jobs', action='store', type='int',
                 help="projects to fetch simultaneously (default %d)" % self.jobs)
    p.add_option('-m', '--manifest-name',
                 dest='manifest_name',
                 help='temporary manifest to use for this sync', metavar='NAME.xml')
    p.add_option('--no-clone-bundle',
                 dest='no_clone_bundle', action='store_true',
                 help='disable use of /clone.bundle on HTTP/HTTPS')
    p.add_option('-u', '--manifest-server-username', action='store',
                 dest='manifest_server_username',
                 help='username to authenticate with the manifest server')
    p.add_option('-p', '--manifest-server-password', action='store',
                 dest='manifest_server_password',
                 help='password to authenticate with the manifest server')
    p.add_option('--fetch-submodules',
                 dest='fetch_submodules', action='store_true',
                 help='fetch submodules from server')
    p.add_option('--no-tags',
                 dest='no_tags', action='store_true',
                 help="don't fetch tags")
    p.add_option('--optimized-fetch',
                 dest='optimized_fetch', action='store_true',
                 help='only fetch projects fixed to sha1 if revision does not exist locally')
    if show_smart:
      p.add_option('-s', '--smart-sync',
                   dest='smart_sync', action='store_true',
                   help='smart sync using manifest from a known good build')
      p.add_option('-t', '--smart-tag',
                   dest='smart_tag', action='store',
                   help='smart sync using manifest from a known tag')
    g = p.add_option_group('repo Version options')
    g.add_option('--no-repo-verify',
                 dest='no_repo_verify', action='store_true',
                 help='do not verify repo source code')
    g.add_option('--repo-upgraded',
                 dest='repo_upgraded', action='store_true',
                 help=SUPPRESS_HELP)
def _FetchProjectList(self, opt, projects, *args, **kwargs):
"""Main function of the fetch threads when jobs are > 1.
Delegates most of the work to _FetchHelper.
Args:
opt: Program options returned from optparse. See _Options().
projects: Projects to fetch.
*args, **kwargs: Remaining arguments to pass to _FetchHelper. See the
_FetchHelper docstring for details.
"""
for project in projects:
success = self._FetchHelper(opt, project, *args, **kwargs)
if not success and not opt.force_broken:
break
  def _FetchHelper(self, opt, project, lock, fetched, pm, sem, err_event):
    """Fetch git objects for a single project.

    Args:
      opt: Program options returned from optparse. See _Options().
      project: Project object for the project to fetch.
      lock: Lock for accessing objects that are shared amongst multiple
          _FetchHelper() threads.
      fetched: set object that we will add project.gitdir to when we're done
          (with our lock held).
      pm: Instance of a Project object. We will call pm.update() (with our
          lock held).
      sem: We'll release() this semaphore when we exit so that another thread
          can be started up.
      err_event: We'll set this event in the case of an error (after printing
          out info about the error).

    Returns:
      Whether the fetch was successful.
    """
    # We'll set to true once we've locked the lock.
    did_lock = False
    if not opt.quiet:
      print('Fetching project %s' % project.name)
    # Encapsulate everything in a try/except/finally so that:
    # - We always set err_event in the case of an exception.
    # - We always make sure we call sem.release().
    # - We always make sure we unlock the lock if we locked it.
    try:
      try:
        start = time.time()
        success = project.Sync_NetworkHalf(
          quiet=opt.quiet,
          current_branch_only=opt.current_branch_only,
          clone_bundle=not opt.no_clone_bundle,
          no_tags=opt.no_tags, archive=self.manifest.IsArchive,
          optimized_fetch=opt.optimized_fetch)
        self._fetch_times.Set(project, time.time() - start)
        # Lock around all the rest of the code, since printing, updating a set
        # and Progress.update() are not thread safe.
        lock.acquire()
        did_lock = True
        if not success:
          print('error: Cannot fetch %s' % project.name, file=sys.stderr)
          if opt.force_broken:
            print('warn: --force-broken, continuing to sync',
                  file=sys.stderr)
          else:
            raise _FetchError()
        fetched.add(project.gitdir)
        pm.update()
      except _FetchError:
        # Expected failure path: flag the error without a stack trace.
        err_event.set()
      except:
        # Unexpected failure: flag the error AND propagate for a trace.
        err_event.set()
        raise
    finally:
      if did_lock:
        lock.release()
      sem.release()
    return success
  def _Fetch(self, projects, opt):
    """Fetch all given projects, possibly in parallel threads.

    Projects sharing one object directory are grouped and fetched serially
    within the group (by _FetchProjectList) to avoid concurrent writes to
    the same gitdir.  Returns the set of gitdirs successfully fetched.
    """
    fetched = set()
    lock = _threading.Lock()
    pm = Progress('Fetching projects', len(projects))
    objdir_project_map = dict()
    for project in projects:
      objdir_project_map.setdefault(project.objdir, []).append(project)
    threads = set()
    # Bound the number of concurrent fetch threads to self.jobs.
    sem = _threading.Semaphore(self.jobs)
    err_event = _threading.Event()
    for project_list in objdir_project_map.values():
      # Check for any errors before running any more tasks.
      # ...we'll let existing threads finish, though.
      if err_event.isSet() and not opt.force_broken:
        break
      sem.acquire()
      kwargs = dict(opt=opt,
                    projects=project_list,
                    lock=lock,
                    fetched=fetched,
                    pm=pm,
                    sem=sem,
                    err_event=err_event)
      if self.jobs > 1:
        t = _threading.Thread(target = self._FetchProjectList,
                              kwargs = kwargs)
        # Ensure that Ctrl-C will not freeze the repo process.
        t.daemon = True
        threads.add(t)
        t.start()
      else:
        self._FetchProjectList(**kwargs)
    for t in threads:
      t.join()
    # If we saw an error, exit with code 1 so that other scripts can check.
    if err_event.isSet():
      print('\nerror: Exited sync due to fetch errors', file=sys.stderr)
      sys.exit(1)
    pm.end()
    self._fetch_times.Save()
    if not self.manifest.IsArchive:
      self._GCProjects(projects)
    return fetched
def _GCProjects(self, projects):
gitdirs = {}
for project in projects:
gitdirs[project.gitdir] = project.bare_git
has_dash_c = git_require((1, 7, 2))
if multiprocessing and has_dash_c:
cpu_count = multiprocessing.cpu_count()
else:
cpu_count = 1
jobs = min(self.jobs, cpu_count)
if jobs < 2:
for bare_git in gitdirs.values():
bare_git.gc('--auto')
return
config = {'pack.threads': cpu_count / jobs if cpu_count > jobs else 1}
threads = set()
sem = _threading.Semaphore(jobs)
err_event = _threading.Event()
def GC(bare_git):
try:
try:
bare_git.gc('--auto', config=config)
except GitError:
err_event.set()
except:
err_event.set()
raise
finally:
sem.release()
for bare_git in gitdirs.values():
if err_event.isSet():
break
sem.acquire()
t = _threading.Thread(target=GC, args=(bare_git,))
t.daemon = True
threads.add(t)
t.start()
for t in threads:
t.join()
if err_event.isSet():
print('\nerror: Exited sync due to gc errors', file=sys.stderr)
sys.exit(1)
def _ReloadManifest(self, manifest_name=None):
if manifest_name:
# Override calls _Unload already
self.manifest.Override(manifest_name)
else:
self.manifest._Unload()
  def UpdateProjectList(self):
    """Rewrite .repo/project.list and delete obsolete project worktrees.

    Compares the previously recorded project paths with the current
    manifest; worktrees no longer in the manifest are removed (unless they
    have uncommitted changes).  Returns 0 on success, -1 when a dirty
    obsolete project blocks the update.
    """
    new_project_paths = []
    for project in self.GetProjects(None, missing_ok=True):
      if project.relpath:
        new_project_paths.append(project.relpath)
    file_name = 'project.list'
    file_path = os.path.join(self.manifest.repodir, file_name)
    old_project_paths = []
    if os.path.exists(file_path):
      fd = open(file_path, 'r')
      try:
        old_project_paths = fd.read().split('\n')
      finally:
        fd.close()
      for path in old_project_paths:
        if not path:
          continue
        if path not in new_project_paths:
          # If the path has already been deleted, we don't need to do it
          if os.path.exists(self.manifest.topdir + '/' + path):
            gitdir = os.path.join(self.manifest.topdir, path, '.git')
            # Build a throwaway Project just to query dirtiness and paths.
            project = Project(
                           manifest = self.manifest,
                           name = path,
                           remote = RemoteSpec('origin'),
                           gitdir = gitdir,
                           objdir = gitdir,
                           worktree = os.path.join(self.manifest.topdir, path),
                           relpath = path,
                           revisionExpr = 'HEAD',
                           revisionId = None,
                           groups = None)
            if project.IsDirty():
              # Refuse to delete local modifications the user hasn't saved.
              print('error: Cannot remove project "%s": uncommitted changes '
                    'are present' % project.relpath, file=sys.stderr)
              print('       commit changes, then run sync again',
                    file=sys.stderr)
              return -1
            else:
              print('Deleting obsolete path %s' % project.worktree,
                    file=sys.stderr)
              shutil.rmtree(project.worktree)
              # Try deleting parent subdirs if they are empty
              project_dir = os.path.dirname(project.worktree)
              while project_dir != self.manifest.topdir:
                try:
                  os.rmdir(project_dir)
                except OSError:
                  break
                project_dir = os.path.dirname(project_dir)
    new_project_paths.sort()
    fd = open(file_path, 'w')
    try:
      fd.write('\n'.join(new_project_paths))
      fd.write('\n')
    finally:
      fd.close()
    return 0
def Execute(self, opt, args):
    """Top-level entry point for `repo sync`.

    Validates mutually exclusive options, optionally fetches a manifest
    from a smart-sync server, runs the network fetch phase, then checks
    out / updates the local working trees.
    """
    if opt.jobs:
        self.jobs = opt.jobs
    if self.jobs > 1:
        # Cap parallelism by the file-descriptor soft limit: each fetch
        # worker needs a handful of fds.
        soft_limit, _ = _rlimit_nofile()
        # NOTE(review): '/' yields a float under Python 3 -- presumably
        # integer division (//) was intended; confirm target interpreter.
        self.jobs = min(self.jobs, (soft_limit - 5) / 3)

    # -- Option sanity checks: these combinations are contradictory. --
    if opt.network_only and opt.detach_head:
        print('error: cannot combine -n and -d', file=sys.stderr)
        sys.exit(1)
    if opt.network_only and opt.local_only:
        print('error: cannot combine -n and -l', file=sys.stderr)
        sys.exit(1)
    if opt.manifest_name and opt.smart_sync:
        print('error: cannot combine -m and -s', file=sys.stderr)
        sys.exit(1)
    if opt.manifest_name and opt.smart_tag:
        print('error: cannot combine -m and -t', file=sys.stderr)
        sys.exit(1)
    if opt.manifest_server_username or opt.manifest_server_password:
        if not (opt.smart_sync or opt.smart_tag):
            print('error: -u and -p may only be combined with -s or -t',
                  file=sys.stderr)
            sys.exit(1)
        if None in [opt.manifest_server_username, opt.manifest_server_password]:
            print('error: both -u and -p must be given', file=sys.stderr)
            sys.exit(1)

    if opt.manifest_name:
        self.manifest.Override(opt.manifest_name)

    manifest_name = opt.manifest_name

    if opt.smart_sync or opt.smart_tag:
        if not self.manifest.manifest_server:
            print('error: cannot smart sync: no manifest server defined in '
                  'manifest', file=sys.stderr)
            sys.exit(1)

        manifest_server = self.manifest.manifest_server
        if not opt.quiet:
            print('Using manifest server %s' % manifest_server)

        # Credentials: only injected when the URL has no embedded user@.
        if not '@' in manifest_server:
            username = None
            password = None
            if opt.manifest_server_username and opt.manifest_server_password:
                username = opt.manifest_server_username
                password = opt.manifest_server_password
            else:
                # Fall back to ~/.netrc for the server's hostname.
                try:
                    info = netrc.netrc()
                except IOError:
                    print('.netrc file does not exist or could not be opened',
                          file=sys.stderr)
                else:
                    try:
                        parse_result = urllib.parse.urlparse(manifest_server)
                        if parse_result.hostname:
                            username, _account, password = \
                                info.authenticators(parse_result.hostname)
                    except TypeError:
                        # TypeError is raised when the given hostname is not present
                        # in the .netrc file.
                        print('No credentials found for %s in .netrc'
                              % parse_result.hostname, file=sys.stderr)
                    except netrc.NetrcParseError as e:
                        print('Error parsing .netrc file: %s' % e, file=sys.stderr)

            if (username and password):
                # Embed credentials into the URL for xmlrpc basic auth.
                manifest_server = manifest_server.replace('://', '://%s:%s@' %
                                                          (username, password),
                                                          1)

        try:
            server = xmlrpc.client.Server(manifest_server)
            if opt.smart_sync:
                p = self.manifest.manifestProject
                b = p.GetBranch(p.CurrentBranch)
                branch = b.merge
                if branch.startswith(R_HEADS):
                    branch = branch[len(R_HEADS):]

                # The build target selects which approved manifest to fetch:
                # explicit SYNC_TARGET wins, else Android's product/variant pair.
                env = os.environ.copy()
                if 'SYNC_TARGET' in env:
                    target = env['SYNC_TARGET']
                    [success, manifest_str] = server.GetApprovedManifest(branch, target)
                elif 'TARGET_PRODUCT' in env and 'TARGET_BUILD_VARIANT' in env:
                    target = '%s-%s' % (env['TARGET_PRODUCT'],
                                        env['TARGET_BUILD_VARIANT'])
                    [success, manifest_str] = server.GetApprovedManifest(branch, target)
                else:
                    [success, manifest_str] = server.GetApprovedManifest(branch)
            else:
                assert(opt.smart_tag)
                [success, manifest_str] = server.GetManifest(opt.smart_tag)

            if success:
                # Persist the fetched manifest and switch over to it.
                manifest_name = "smart_sync_override.xml"
                manifest_path = os.path.join(self.manifest.manifestProject.worktree,
                                             manifest_name)
                try:
                    f = open(manifest_path, 'w')
                    try:
                        f.write(manifest_str)
                    finally:
                        f.close()
                except IOError:
                    print('error: cannot write manifest to %s' % manifest_path,
                          file=sys.stderr)
                    sys.exit(1)
                self._ReloadManifest(manifest_name)
            else:
                print('error: manifest server RPC call failed: %s' %
                      manifest_str, file=sys.stderr)
                sys.exit(1)
        except (socket.error, IOError, xmlrpc.client.Fault) as e:
            print('error: cannot connect to manifest server %s:\n%s'
                  % (self.manifest.manifest_server, e), file=sys.stderr)
            sys.exit(1)
        except xmlrpc.client.ProtocolError as e:
            print('error: cannot connect to manifest server %s:\n%d %s'
                  % (self.manifest.manifest_server, e.errcode, e.errmsg),
                  file=sys.stderr)
            sys.exit(1)

    # Prepare the repo tool and manifest projects themselves.
    rp = self.manifest.repoProject
    rp.PreSync()

    mp = self.manifest.manifestProject
    mp.PreSync()

    if opt.repo_upgraded:
        _PostRepoUpgrade(self.manifest, quiet=opt.quiet)

    if not opt.local_only:
        mp.Sync_NetworkHalf(quiet=opt.quiet,
                            current_branch_only=opt.current_branch_only,
                            no_tags=opt.no_tags,
                            optimized_fetch=opt.optimized_fetch)

    if mp.HasChanges:
        # The manifest itself changed: apply it and re-read before syncing.
        syncbuf = SyncBuffer(mp.config)
        mp.Sync_LocalHalf(syncbuf)
        if not syncbuf.Finish():
            sys.exit(1)
        self._ReloadManifest(manifest_name)
        if opt.jobs is None:
            self.jobs = self.manifest.default.sync_j

    all_projects = self.GetProjects(args,
                                    missing_ok=True,
                                    submodules_ok=opt.fetch_submodules)

    self._fetch_times = _FetchTimes(self.manifest)
    if not opt.local_only:
        to_fetch = []
        now = time.time()
        # Refresh the repo tool itself at most once a day.
        if _ONE_DAY_S <= (now - rp.LastFetch):
            to_fetch.append(rp)
        to_fetch.extend(all_projects)
        # Slowest-first ordering keeps the long fetches from serializing
        # at the end of the parallel phase.
        to_fetch.sort(key=self._fetch_times.Get, reverse=True)

        fetched = self._Fetch(to_fetch, opt)
        _PostRepoFetch(rp, opt.no_repo_verify)
        if opt.network_only:
            # bail out now; the rest touches the working tree
            return

        # Iteratively fetch missing and/or nested unregistered submodules
        previously_missing_set = set()
        while True:
            self._ReloadManifest(manifest_name)
            all_projects = self.GetProjects(args,
                                            missing_ok=True,
                                            submodules_ok=opt.fetch_submodules)
            missing = []
            for project in all_projects:
                if project.gitdir not in fetched:
                    missing.append(project)
            if not missing:
                break
            # Stop us from non-stopped fetching actually-missing repos: If set of
            # missing repos has not been changed from last fetch, we break.
            missing_set = set(p.name for p in missing)
            if previously_missing_set == missing_set:
                break
            previously_missing_set = missing_set
            fetched.update(self._Fetch(missing, opt))

    if self.manifest.IsMirror or self.manifest.IsArchive:
        # bail out now, we have no working tree
        return

    if self.UpdateProjectList():
        sys.exit(1)

    # Local half: check out / rebase each project's working tree.
    syncbuf = SyncBuffer(mp.config,
                         detach_head=opt.detach_head)
    pm = Progress('Syncing work tree', len(all_projects))
    for project in all_projects:
        pm.update()
        if project.worktree:
            project.Sync_LocalHalf(syncbuf)
    pm.end()
    print(file=sys.stderr)
    if not syncbuf.Finish():
        sys.exit(1)

    # If there's a notice that's supposed to print at the end of the sync, print
    # it now...
    if self.manifest.notice:
        print(self.manifest.notice)
def _PostRepoUpgrade(manifest, quiet=False):
    """Run per-project fixups after the repo launcher upgraded itself."""
    wrapper = Wrapper()
    if wrapper.NeedSetupGnuPG():
        wrapper.SetupGnuPG(quiet)
    # Only projects that actually exist on disk get the upgrade hook.
    for project in (p for p in manifest.projects if p.Exists):
        project.PostRepoUpgrade()
def _PostRepoFetch(rp, no_repo_verify=False, verbose=False):
    """Upgrade the repo tool itself if its fetch brought in a new version.

    Raises RepoChangedException so the caller can restart with the new
    version once the upgrade has been applied and verified.
    """
    if not rp.HasChanges:
        # Nothing fetched: optionally report the current version.
        if verbose:
            print('repo version %s is current' % rp.work_git.describe(HEAD),
                  file=sys.stderr)
        return

    print('info: A new version of repo is available', file=sys.stderr)
    print(file=sys.stderr)
    if no_repo_verify or _VerifyTag(rp):
        syncbuf = SyncBuffer(rp.config)
        rp.Sync_LocalHalf(syncbuf)
        if not syncbuf.Finish():
            sys.exit(1)
        print('info: Restarting repo with latest version', file=sys.stderr)
        raise RepoChangedException(['--repo-upgraded'])
    else:
        print('warning: Skipped upgrade to unverified version', file=sys.stderr)
def _VerifyTag(project):
    """Verify that *project*'s current revision carries a valid GPG tag.

    Returns:
      True when `git tag -v` succeeds, or when verification is impossible
      because GnuPG was never configured at `repo init` time; False when
      the revision is unsigned or the signature check fails.
    """
    gpg_dir = os.path.expanduser('~/.repoconfig/gnupg')
    if not os.path.exists(gpg_dir):
        # Fixed: the message previously ended with a stray '"""' left over
        # from a Python 2 triple-quoted print conversion.
        print('warning: GnuPG was not available during last "repo init"\n'
              'warning: Cannot automatically authenticate repo.',
              file=sys.stderr)
        return True

    try:
        cur = project.bare_git.describe(project.GetRevisionId())
    except GitError:
        cur = None

    # A describe output like 'v1.2-3-gabcdef' means we are past the tag,
    # i.e. the exact revision is not itself signed.
    if not cur \
       or re.compile(r'^.*-[0-9]{1,}-g[0-9a-f]{1,}$').match(cur):
        rev = project.revisionExpr
        if rev.startswith(R_HEADS):
            rev = rev[len(R_HEADS):]

        print(file=sys.stderr)
        print("warning: project '%s' branch '%s' is not signed"
              % (project.name, rev), file=sys.stderr)
        return False

    env = os.environ.copy()
    env['GIT_DIR'] = project.gitdir.encode()
    env['GNUPGHOME'] = gpg_dir.encode()

    cmd = [GIT, 'tag', '-v', cur]
    proc = subprocess.Popen(cmd,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE,
                            env=env)
    # communicate() drains both pipes concurrently, avoiding the deadlock
    # sequential stdout.read()/stderr.read() can hit when one pipe's OS
    # buffer fills while we block reading the other.
    out, err = proc.communicate()
    if proc.wait() != 0:
        print(file=sys.stderr)
        print(out, file=sys.stderr)
        print(err, file=sys.stderr)
        print(file=sys.stderr)
        return False
    return True
class _FetchTimes(object):
_ALPHA = 0.5
def __init__(self, manifest):
self._path = os.path.join(manifest.repodir, '.repo_fetchtimes.json')
self._times = None
self._seen = set()
def Get(self, project):
self._Load()
return self._times.get(project.name, _ONE_DAY_S)
def Set(self, project, t):
self._Load()
name = project.name
old = self._times.get(name, t)
self._seen.add(name)
a = self._ALPHA
self._times[name] = (a*t) + ((1-a) * old)
def _Load(self):
if self._times is None:
try:
f = open(self._path)
try:
self._times = json.load(f)
finally:
f.close()
except (IOError, ValueError):
try:
os.remove(self._path)
except OSError:
pass
self._times = {}
def Save(self):
if self._times is None:
return
to_delete = []
for name in self._times:
if name not in self._seen:
to_delete.append(name)
for name in to_delete:
del self._times[name]
try:
f = open(self._path, 'w')
try:
json.dump(self._times, f, indent=2)
finally:
f.close()
except (IOError, TypeError):
try:
os.remove(self._path)
except OSError:
pass
| apache-2.0 |
VanirAOSP/external_chromium_org | third_party/jinja2/compiler.py | 623 | 61785 | # -*- coding: utf-8 -*-
"""
jinja2.compiler
~~~~~~~~~~~~~~~
Compiles nodes into python code.
:copyright: (c) 2010 by the Jinja Team.
:license: BSD, see LICENSE for more details.
"""
from itertools import chain
from copy import deepcopy
from keyword import iskeyword as is_python_keyword
from jinja2 import nodes
from jinja2.nodes import EvalContext
from jinja2.visitor import NodeVisitor
from jinja2.exceptions import TemplateAssertionError
from jinja2.utils import Markup, concat, escape
from jinja2._compat import range_type, next, text_type, string_types, \
iteritems, NativeStringIO, imap
# maps the parser's internal comparison-operator node names to the Python
# source operators emitted into generated code
operators = {
    'eq':       '==',
    'ne':       '!=',
    'gt':       '>',
    'gteq':     '>=',
    'lt':       '<',
    'lteq':     '<=',
    'in':       'in',
    'notin':    'not in'
}

# what method to iterate over items do we want to use for dict iteration
# in generated code?  on 2.x let's go with iteritems, on 3.x with items
if hasattr(dict, 'iteritems'):
    dict_item_iter = 'iteritems'
else:
    dict_item_iter = 'items'
# does if 0: dummy(x) get us x into the scope?
# Runtime probe: some interpreters keep `x` in f's closure even though the
# `if 0:` branch is dead; the compiler needs to know which behavior it got.
def unoptimize_before_dead_code():
    x = 42
    def f():
        if 0: dummy(x)
    return f

# The getattr is necessary for pypy which does not set this attribute if
# no closure is on the function
unoptimize_before_dead_code = bool(
    getattr(unoptimize_before_dead_code(), '__closure__', None))
def generate(node, environment, name, filename, stream=None,
             defer_init=False):
    """Generate the python source for a node tree.

    With no *stream* the generated source is buffered internally and
    returned as a string; otherwise it is written to *stream* directly.
    """
    if not isinstance(node, nodes.Template):
        raise TypeError('Can\'t compile non template nodes')
    gen = CodeGenerator(environment, name, filename, stream, defer_init)
    gen.visit(node)
    return gen.stream.getvalue() if stream is None else None
def has_safe_repr(value):
    """Does the node have a safe representation?

    Safe means repr() of the value round-trips as a Python literal, so
    it can be embedded verbatim into generated code.
    """
    if value is None or value is NotImplemented or value is Ellipsis:
        return True
    if isinstance(value, (bool, int, float, complex, range_type,
                          Markup) + string_types):
        return True
    if isinstance(value, (tuple, list, set, frozenset)):
        # a container is only safe when every element is
        return all(has_safe_repr(item) for item in value)
    if isinstance(value, dict):
        # both keys and values must be safely representable
        return all(has_safe_repr(k) and has_safe_repr(v)
                   for k, v in iteritems(value))
    return False
def find_undeclared(nodes, names):
    """Return the subset of *names* that the given nodes access without
    declaring them first.
    """
    checker = UndeclaredNameVisitor(names)
    try:
        for node in nodes:
            checker.visit(node)
    except VisitorExit:
        # the visitor bails out as soon as every watched name has been
        # seen undeclared -- nothing more to learn from the rest
        pass
    return checker.undeclared
class Identifiers(object):
    """Tracks the status of identifiers in frames."""

    def __init__(self):
        # names known to be declared (outer frames or frame specials)
        self.declared = set()

        # undeclared names inherited from outer scopes
        self.outer_undeclared = set()

        # names read without a visible declaration by this frame or any
        # outer one; may overlap with the declared sets
        self.undeclared = set()

        # names assigned within this frame
        self.declared_locally = set()

        # names bound through function parameters
        self.declared_parameter = set()

    def add_special(self, name):
        """Register a special name like `loop`."""
        self.undeclared.discard(name)
        self.declared.add(name)

    def is_declared(self, name):
        """Check if a name is declared in this or an outer scope."""
        declared_here = (name in self.declared_locally or
                         name in self.declared_parameter)
        return declared_here or name in self.declared

    def copy(self):
        """Return a fully independent deep copy of this tracker."""
        return deepcopy(self)
class Frame(object):
    """Holds compile time information for us."""

    def __init__(self, eval_ctx, parent=None):
        self.eval_ctx = eval_ctx
        self.identifiers = Identifiers()

        # a toplevel frame is the root + soft frames such as if conditions.
        self.toplevel = False

        # the root frame is basically just the outermost frame, so no if
        # conditions.  This information is used to optimize inheritance
        # situations.
        self.rootlevel = False

        # in some dynamic inheritance situations the compiler needs to add
        # write tests around output statements.
        self.require_output_check = parent and parent.require_output_check

        # inside some tags we are using a buffer rather than yield statements.
        # this for example affects {% filter %} or {% macro %}.  If a frame
        # is buffered this variable points to the name of the list used as
        # buffer.
        self.buffer = None

        # the name of the block we're in, otherwise None.
        self.block = parent and parent.block or None

        # a set of actually assigned names
        self.assigned_names = set()

        # the parent of this frame
        self.parent = parent

        if parent is not None:
            # inherit the parent's declarations so lookups resolve through
            # the enclosing scope chain
            self.identifiers.declared.update(
                parent.identifiers.declared |
                parent.identifiers.declared_parameter |
                parent.assigned_names
            )
            self.identifiers.outer_undeclared.update(
                parent.identifiers.undeclared -
                self.identifiers.declared
            )
            self.buffer = parent.buffer

    def copy(self):
        """Create a copy of the current one."""
        rv = object.__new__(self.__class__)
        rv.__dict__.update(self.__dict__)
        # the Identifiers container is duplicated so attribute rebinding is
        # independent; note the contained sets themselves are still shared
        rv.identifiers = object.__new__(self.identifiers.__class__)
        rv.identifiers.__dict__.update(self.identifiers.__dict__)
        return rv

    def inspect(self, nodes):
        """Walk the node and check for identifiers.  If the scope is hard (eg:
        enforce on a python level) overrides from outer scopes are tracked
        differently.
        """
        visitor = FrameIdentifierVisitor(self.identifiers)
        for node in nodes:
            visitor.visit(node)

    def find_shadowed(self, extra=()):
        """Find all the shadowed names.  extra is an iterable of variables
        that may be defined with `add_special` which may occur scoped.
        """
        i = self.identifiers
        # names visible from outside that this frame rebinds locally
        return (i.declared | i.outer_undeclared) & \
               (i.declared_locally | i.declared_parameter) | \
               set(x for x in extra if i.is_declared(x))

    def inner(self):
        """Return an inner frame."""
        return Frame(self.eval_ctx, self)

    def soft(self):
        """Return a soft frame.  A soft frame may not be modified as
        standalone thing as it shares the resources with the frame it
        was created of, but it's not a rootlevel frame any longer.
        """
        rv = self.copy()
        rv.rootlevel = False
        return rv

    __copy__ = copy
class VisitorExit(RuntimeError):
    """Exception used by the `UndeclaredNameVisitor` to signal a stop
    once every watched name has been found."""
class DependencyFinderVisitor(NodeVisitor):
    """A visitor that collects filter and test calls."""

    def __init__(self):
        self.filters = set()
        self.tests = set()

    def visit_Filter(self, node):
        """Record the filter name, then keep descending."""
        self.generic_visit(node)
        self.filters.add(node.name)

    def visit_Test(self, node):
        """Record the test name, then keep descending."""
        self.generic_visit(node)
        self.tests.add(node.name)

    def visit_Block(self, node):
        """Stop visiting at blocks."""
class UndeclaredNameVisitor(NodeVisitor):
    """A visitor that checks if a name is accessed without being
    declared.  This is different from the frame visitor as it will
    not stop at closure frames.
    """

    def __init__(self, names):
        self.names = set(names)      # names still being watched
        self.undeclared = set()      # names seen loaded before declaration

    def visit_Name(self, node):
        if node.ctx == 'load' and node.name in self.names:
            self.undeclared.add(node.name)
            # once every watched name has been observed there is nothing
            # left to learn -- abort the traversal
            if self.undeclared == self.names:
                raise VisitorExit()
        else:
            # any store/param of the name declares it; stop watching
            self.names.discard(node.name)

    def visit_Block(self, node):
        """Stop visiting a blocks."""
class FrameIdentifierVisitor(NodeVisitor):
    """A visitor for `Frame.inspect`."""

    def __init__(self, identifiers):
        self.identifiers = identifiers

    def visit_Name(self, node):
        """All assignments to names go through this function."""
        if node.ctx == 'store':
            self.identifiers.declared_locally.add(node.name)
        elif node.ctx == 'param':
            self.identifiers.declared_parameter.add(node.name)
        elif node.ctx == 'load' and not \
             self.identifiers.is_declared(node.name):
            self.identifiers.undeclared.add(node.name)

    def visit_If(self, node):
        """Visit each branch against a private identifier copy, then merge."""
        self.visit(node.test)
        real_identifiers = self.identifiers

        old_names = real_identifiers.declared_locally | \
                    real_identifiers.declared_parameter

        def inner_visit(nodes):
            # returns the names newly declared inside this branch
            if not nodes:
                return set()
            self.identifiers = real_identifiers.copy()
            for subnode in nodes:
                self.visit(subnode)
            rv = self.identifiers.declared_locally - old_names
            # we have to remember the undeclared variables of this branch
            # because we will have to pull them.
            real_identifiers.undeclared.update(self.identifiers.undeclared)
            self.identifiers = real_identifiers
            return rv

        body = inner_visit(node.body)
        else_ = inner_visit(node.else_ or ())

        # the differences between the two branches are also pulled as
        # undeclared variables
        real_identifiers.undeclared.update(body.symmetric_difference(else_) -
                                           real_identifiers.declared)

        # remember those that are declared.
        real_identifiers.declared_locally.update(body | else_)

    def visit_Macro(self, node):
        """A macro statement declares its name locally."""
        self.identifiers.declared_locally.add(node.name)

    def visit_Import(self, node):
        """An import declares its target name locally."""
        self.generic_visit(node)
        self.identifiers.declared_locally.add(node.target)

    def visit_FromImport(self, node):
        """Each imported name (or its alias) becomes a local declaration."""
        self.generic_visit(node)
        for name in node.names:
            if isinstance(name, tuple):
                # (name, alias) pair -- the alias is what gets bound
                self.identifiers.declared_locally.add(name[1])
            else:
                self.identifiers.declared_locally.add(name)

    def visit_Assign(self, node):
        """Visit assignments in the correct order."""
        # right-hand side first: reads happen before the new binding exists
        self.visit(node.node)
        self.visit(node.target)

    def visit_For(self, node):
        """Visiting stops at for blocks.  However the block sequence
        is visited as part of the outer scope.
        """
        self.visit(node.iter)

    def visit_CallBlock(self, node):
        # only the call expression belongs to the outer scope
        self.visit(node.call)

    def visit_FilterBlock(self, node):
        # only the filter expression belongs to the outer scope
        self.visit(node.filter)

    def visit_Scope(self, node):
        """Stop visiting at scopes."""

    def visit_Block(self, node):
        """Stop visiting at blocks."""
class CompilerExit(Exception):
    """Raised if the compiler encountered a situation where it just
    doesn't make sense to further process the code.  Any block that
    raises such an exception is not further processed.
    """
class CodeGenerator(NodeVisitor):
def __init__(self, environment, name, filename, stream=None,
             defer_init=False):
    """Set up all state needed to translate a template AST into Python
    source written to *stream* (a fresh string buffer when None)."""
    if stream is None:
        stream = NativeStringIO()
    self.environment = environment
    self.name = name
    self.filename = filename
    self.stream = stream
    self.created_block_context = False
    self.defer_init = defer_init

    # aliases for imports
    self.import_aliases = {}

    # a registry for all blocks.  Because blocks are moved out
    # into the global python scope they are registered here
    self.blocks = {}

    # the number of extends statements so far
    self.extends_so_far = 0

    # some templates have a rootlevel extends.  In this case we
    # can safely assume that we're a child template and do some
    # more optimizations.
    self.has_known_extends = False

    # the current line number
    self.code_lineno = 1

    # registry of all filters and tests (global, not block local)
    self.tests = {}
    self.filters = {}

    # the debug information
    self.debug_info = []
    self._write_debug_info = None

    # the number of new lines before the next write()
    self._new_lines = 0

    # the line number of the last written statement
    self._last_line = 0

    # true if nothing was written so far.
    self._first_write = True

    # used by the `temporary_identifier` method to get new
    # unique, temporary identifier
    self._last_identifier = 0

    # the current indentation
    self._indentation = 0
# -- Various compilation helpers
def fail(self, msg, lineno):
    """Fail with a :exc:`TemplateAssertionError` carrying *msg* and the
    template *lineno*, plus this generator's name and filename."""
    raise TemplateAssertionError(msg, lineno, self.name, self.filename)
def temporary_identifier(self):
    """Return a fresh, unique temporary identifier (t_1, t_2, ...)."""
    next_id = self._last_identifier + 1
    self._last_identifier = next_id
    return 't_%d' % next_id
def buffer(self, frame):
    """Enable buffering for the frame from that point onwards.

    Allocates a fresh temporary list in the generated code and records
    its name on the frame so later writes append instead of yielding.
    """
    frame.buffer = self.temporary_identifier()
    self.writeline('%s = []' % frame.buffer)
def return_buffer_contents(self, frame):
    """Return the buffer contents of the frame."""
    if frame.eval_ctx.volatile:
        # autoescape cannot be decided at compile time; branch at runtime
        self.writeline('if context.eval_ctx.autoescape:')
        self.indent()
        self.writeline('return Markup(concat(%s))' % frame.buffer)
        self.outdent()
        self.writeline('else:')
        self.indent()
        self.writeline('return concat(%s)' % frame.buffer)
        self.outdent()
    elif frame.eval_ctx.autoescape:
        self.writeline('return Markup(concat(%s))' % frame.buffer)
    else:
        self.writeline('return concat(%s)' % frame.buffer)
def indent(self):
    """Increase the generated code's indentation level by one step."""
    self._indentation = self._indentation + 1
def outdent(self, step=1):
    """Decrease the generated code's indentation level by *step*."""
    self._indentation = self._indentation - step
def start_write(self, frame, node=None):
    """Yield or write into the frame buffer.

    Emits either a ``yield `` prefix (unbuffered frame) or the opening
    of a buffer ``.append(`` call; pair with :meth:`end_write`.
    """
    if frame.buffer is None:
        self.writeline('yield ', node)
    else:
        self.writeline('%s.append(' % frame.buffer, node)
def end_write(self, frame):
    """End the writing process started by `start_write`."""
    # only the buffered form opened a call that needs closing
    if frame.buffer is not None:
        self.write(')')
def simple_write(self, s, frame, node=None):
    """Simple shortcut for start_write + write + end_write."""
    self.start_write(frame, node)
    self.write(s)
    self.end_write(frame)
def blockvisit(self, nodes, frame):
    """Visit a list of nodes as block in a frame.  If the current frame
    is no buffer a dummy ``if 0: yield None`` is written automatically.
    """
    if frame.buffer is None:
        # guarantees the generated function is a generator even when the
        # block body itself emits nothing
        self.writeline('if 0: yield None')
    else:
        self.writeline('pass')
    try:
        for node in nodes:
            self.visit(node, frame)
    except CompilerExit:
        # a child signalled that further code generation is pointless
        pass
def write(self, x):
    """Write a string into the output stream.

    Pending newlines scheduled by :meth:`newline` are flushed first,
    along with the current indentation and any queued debug-info entry.
    """
    if self._new_lines:
        if not self._first_write:
            self.stream.write('\n' * self._new_lines)
            self.code_lineno += self._new_lines
            if self._write_debug_info is not None:
                # map the template line to the generated source line
                self.debug_info.append((self._write_debug_info,
                                        self.code_lineno))
                self._write_debug_info = None
        self._first_write = False
        # NOTE(review): upstream Jinja2 indents generated code with four
        # spaces per level ('    '); this literal may have been collapsed
        # to a single space by whitespace mangling -- confirm against the
        # pristine file.
        self.stream.write(' ' * self._indentation)
        self._new_lines = 0
    self.stream.write(x)
def writeline(self, x, node=None, extra=0):
    """Combination of newline and write."""
    self.newline(node, extra)
    self.write(x)
def newline(self, node=None, extra=0):
    """Schedule at least ``1 + extra`` newlines before the next write.

    When *node* carries a source line number different from the last
    one seen, remember it so write() can record a debug-info entry.
    """
    wanted = 1 + extra
    if wanted > self._new_lines:
        self._new_lines = wanted
    if node is None:
        return
    if node.lineno != self._last_line:
        self._write_debug_info = node.lineno
        self._last_line = node.lineno
def signature(self, node, frame, extra_kwargs=None):
    """Writes a function call to the stream for the current node.

    A leading comma is added automatically.  The extra keyword
    arguments may not include python keywords otherwise a syntax
    error could occur.  The extra keyword arguments should be given
    as python dict.
    """
    # if any of the given keyword arguments is a python keyword
    # we have to make sure that no invalid call is created.
    kwarg_workaround = False
    for kwarg in chain((x.key for x in node.kwargs), extra_kwargs or ()):
        if is_python_keyword(kwarg):
            kwarg_workaround = True
            break

    for arg in node.args:
        self.write(', ')
        self.visit(arg, frame)

    if not kwarg_workaround:
        # safe to emit keywords directly as name=value pairs
        for kwarg in node.kwargs:
            self.write(', ')
            self.visit(kwarg, frame)
        if extra_kwargs is not None:
            for key, value in iteritems(extra_kwargs):
                self.write(', %s=%s' % (key, value))
    if node.dyn_args:
        self.write(', *')
        self.visit(node.dyn_args, frame)

    if kwarg_workaround:
        # collision fallback: route all keywords through a **{...} dict
        # literal so reserved words never appear as bare kwarg names
        if node.dyn_kwargs is not None:
            self.write(', **dict({')
        else:
            self.write(', **{')
        for kwarg in node.kwargs:
            self.write('%r: ' % kwarg.key)
            self.visit(kwarg.value, frame)
            self.write(', ')
        if extra_kwargs is not None:
            for key, value in iteritems(extra_kwargs):
                self.write('%r: %s, ' % (key, value))
        if node.dyn_kwargs is not None:
            self.write('}, **')
            self.visit(node.dyn_kwargs, frame)
            self.write(')')
        else:
            self.write('}')

    elif node.dyn_kwargs is not None:
        self.write(', **')
        self.visit(node.dyn_kwargs, frame)
def pull_locals(self, frame):
    """Emit code resolving each undeclared identifier of *frame* from
    the template context into an ``l_``-prefixed local variable."""
    template = 'l_%s = context.resolve(%r)'
    for name in frame.identifiers.undeclared:
        self.writeline(template % (name, name))
def pull_dependencies(self, nodes):
    """Pull all the dependencies.

    Every filter and test used anywhere in *nodes* gets a stable local
    alias that is loaded once from the environment registries.
    """
    visitor = DependencyFinderVisitor()
    for node in nodes:
        visitor.visit(node)
    for dependency in 'filters', 'tests':
        mapping = getattr(self, dependency)
        for name in getattr(visitor, dependency):
            if name not in mapping:
                # first sighting: allocate a temporary alias for it
                mapping[name] = self.temporary_identifier()
            self.writeline('%s = environment.%s[%r]' %
                           (mapping[name], dependency, name))
def unoptimize_scope(self, frame):
    """Disable Python optimizations for the frame."""
    # XXX: this is not that nice but it has no real overhead.  It
    # mainly works because python finds the locals before dead code
    # is removed.  If that breaks we have to add a dummy function
    # that just accepts the arguments and does nothing.
    if frame.identifiers.declared:
        # when the interpreter keeps dead-code closures, 'if 0:' suffices;
        # otherwise the dummy() call must be emitted unconditionally
        self.writeline('%sdummy(%s)' % (
            unoptimize_before_dead_code and 'if 0: ' or '',
            ', '.join('l_' + name for name in frame.identifiers.declared)
        ))
def push_scope(self, frame, extra_vars=()):
    """This function returns all the shadowed variables in a dict
    in the form name: alias and will write the required assignments
    into the current scope.  No indentation takes place.

    This also predefines locally declared variables from the loop
    body because under some circumstances it may be the case that
    `extra_vars` is passed to `Frame.find_shadowed`.
    """
    aliases = {}
    # save each shadowed name into a fresh temporary so pop_scope can
    # restore the outer value afterwards
    for name in frame.find_shadowed(extra_vars):
        aliases[name] = ident = self.temporary_identifier()
        self.writeline('%s = l_%s' % (ident, name))
    to_declare = set()
    for name in frame.identifiers.declared_locally:
        if name not in aliases:
            to_declare.add('l_' + name)
    if to_declare:
        # predeclare as 'missing' so the names exist before first use
        self.writeline(' = '.join(to_declare) + ' = missing')
    return aliases
def pop_scope(self, aliases, frame):
    """Restore all aliases and delete unused variables."""
    for name, alias in iteritems(aliases):
        self.writeline('l_%s = %s' % (name, alias))
    to_delete = set()
    for name in frame.identifiers.declared_locally:
        if name not in aliases:
            to_delete.add('l_' + name)
    if to_delete:
        # we cannot use the del statement here because enclosed
        # scopes can trigger a SyntaxError:
        #   a = 42; b = lambda: a; del a
        self.writeline(' = '.join(to_delete) + ' = missing')
def function_scoping(self, node, frame, children=None,
                     find_special=True):
    """In Jinja a few statements require the help of anonymous
    functions.  Those are currently macros and call blocks and in
    the future also recursive loops.  As there is currently
    technical limitation that doesn't allow reading and writing a
    variable in a scope where the initial value is coming from an
    outer scope, this function tries to fall back with a common
    error message.  Additionally the frame passed is modified so
    that the arguments are collected and callers are looked up.

    This will return the modified frame.
    """
    # we have to iterate twice over it, make sure that works
    if children is None:
        children = node.iter_child_nodes()
    children = list(children)
    func_frame = frame.inner()
    func_frame.inspect(children)

    # variables that are undeclared (accessed before declaration) and
    # declared locally *and* part of an outside scope raise a template
    # assertion error.  Reason: we can't generate reasonable code from
    # it without aliasing all the variables.
    # this could be fixed in Python 3 where we have the nonlocal
    # keyword or if we switch to bytecode generation
    overridden_closure_vars = (
        func_frame.identifiers.undeclared &
        func_frame.identifiers.declared &
        (func_frame.identifiers.declared_locally |
         func_frame.identifiers.declared_parameter)
    )
    if overridden_closure_vars:
        self.fail('It\'s not possible to set and access variables '
                  'derived from an outer scope! (affects: %s)' %
                  ', '.join(sorted(overridden_closure_vars)), node.lineno)

    # remove variables from a closure from the frame's undeclared
    # identifiers.
    func_frame.identifiers.undeclared -= (
        func_frame.identifiers.undeclared &
        func_frame.identifiers.declared
    )

    # no special variables for this scope, abort early
    if not find_special:
        return func_frame

    func_frame.accesses_kwargs = False
    func_frame.accesses_varargs = False
    func_frame.accesses_caller = False
    func_frame.arguments = args = ['l_' + x.name for x in node.args]

    # detect which of the implicit macro variables the body actually uses
    undeclared = find_undeclared(children, ('caller', 'kwargs', 'varargs'))

    if 'caller' in undeclared:
        func_frame.accesses_caller = True
        func_frame.identifiers.add_special('caller')
        args.append('l_caller')
    if 'kwargs' in undeclared:
        func_frame.accesses_kwargs = True
        func_frame.identifiers.add_special('kwargs')
        args.append('l_kwargs')
    if 'varargs' in undeclared:
        func_frame.accesses_varargs = True
        func_frame.identifiers.add_special('varargs')
        args.append('l_varargs')
    return func_frame
def macro_body(self, node, frame, children=None):
    """Dump the function def of a macro or call block."""
    frame = self.function_scoping(node, frame, children)
    # macros are delayed, they never require output checks
    frame.require_output_check = False
    args = frame.arguments
    # XXX: this is an ugly fix for the loop nesting bug
    # (tests.test_old_bugs.test_loop_call_bug).  This works around
    # a identifier nesting problem we have in general.  It's just more
    # likely to happen in loops which is why we work around it.  The
    # real solution would be "nonlocal" all the identifiers that are
    # leaking into a new python frame and might be used both unassigned
    # and assigned.
    if 'loop' in frame.identifiers.declared:
        args = args + ['l_loop=l_loop']
    self.writeline('def macro(%s):' % ', '.join(args), node)
    self.indent()
    # macro bodies are always buffered so the rendered text can be
    # returned as the macro's value
    self.buffer(frame)
    self.pull_locals(frame)
    self.blockvisit(node.body, frame)
    self.return_buffer_contents(frame)
    self.outdent()
    return frame
def macro_def(self, node, frame):
    """Dump the macro definition for the def created by macro_body."""
    arg_tuple = ', '.join(repr(x.name) for x in node.args)
    name = getattr(node, 'name', None)
    if len(node.args) == 1:
        # a one-element tuple needs the trailing comma to stay a tuple
        arg_tuple += ','
    self.write('Macro(environment, macro, %r, (%s), (' %
               (name, arg_tuple))
    for arg in node.defaults:
        self.visit(arg, frame)
        self.write(', ')
    # the three booleans tell the runtime Macro which implicit
    # variables (kwargs/varargs/caller) the body accesses
    self.write('), %r, %r, %r)' % (
        bool(frame.accesses_kwargs),
        bool(frame.accesses_varargs),
        bool(frame.accesses_caller)
    ))
def position(self, node):
    """Describe *node*'s location, e.g. ``line 3 in 'template.html'``."""
    if self.name is None:
        return 'line %d' % node.lineno
    return 'line %d in %r' % (node.lineno, self.name)
# -- Statement Visitors
def visit_Template(self, node, frame=None):
    """Compile the template root: emit the runtime imports, the
    ``root`` render function, one ``block_<name>`` function per
    ``{% block %}`` and the debug info table."""
    assert frame is None, 'no root frame allowed'
    eval_ctx = EvalContext(self.environment, self.name)
    from jinja2.runtime import __all__ as exported
    self.writeline('from __future__ import division')
    self.writeline('from jinja2.runtime import ' + ', '.join(exported))
    if not unoptimize_before_dead_code:
        self.writeline('dummy = lambda *x: None')
    # if we want a deferred initialization we cannot move the
    # environment into a local name
    envenv = not self.defer_init and ', environment=environment' or ''
    # do we have an extends tag at all? If not, we can save some
    # overhead by just not processing any inheritance code.
    have_extends = node.find(nodes.Extends) is not None
    # find all blocks
    for block in node.find_all(nodes.Block):
        if block.name in self.blocks:
            self.fail('block %r defined twice' % block.name, block.lineno)
        self.blocks[block.name] = block
    # find all imports and import them
    for import_ in node.find_all(nodes.ImportedName):
        if import_.importname not in self.import_aliases:
            imp = import_.importname
            self.import_aliases[imp] = alias = self.temporary_identifier()
            if '.' in imp:
                module, obj = imp.rsplit('.', 1)
                self.writeline('from %s import %s as %s' %
                               (module, obj, alias))
            else:
                self.writeline('import %s as %s' % (imp, alias))
    # add the load name
    self.writeline('name = %r' % self.name)
    # generate the root render function.
    self.writeline('def root(context%s):' % envenv, extra=1)
    # process the root
    frame = Frame(eval_ctx)
    frame.inspect(node.body)
    frame.toplevel = frame.rootlevel = True
    # only check for a parent template when extends could still occur
    frame.require_output_check = have_extends and not self.has_known_extends
    self.indent()
    if have_extends:
        self.writeline('parent_template = None')
    if 'self' in find_undeclared(node.body, ('self',)):
        frame.identifiers.add_special('self')
        self.writeline('l_self = TemplateReference(context)')
    self.pull_locals(frame)
    self.pull_dependencies(node.body)
    self.blockvisit(node.body, frame)
    self.outdent()
    # make sure that the parent root is called.
    if have_extends:
        if not self.has_known_extends:
            self.indent()
            self.writeline('if parent_template is not None:')
        self.indent()
        self.writeline('for event in parent_template.'
                       'root_render_func(context):')
        self.indent()
        self.writeline('yield event')
        self.outdent(2 + (not self.has_known_extends))
    # at this point we now have the blocks collected and can visit them too.
    for name, block in iteritems(self.blocks):
        block_frame = Frame(eval_ctx)
        block_frame.inspect(block.body)
        block_frame.block = name
        self.writeline('def block_%s(context%s):' % (name, envenv),
                       block, 1)
        self.indent()
        undeclared = find_undeclared(block.body, ('self', 'super'))
        if 'self' in undeclared:
            block_frame.identifiers.add_special('self')
            self.writeline('l_self = TemplateReference(context)')
        if 'super' in undeclared:
            block_frame.identifiers.add_special('super')
            self.writeline('l_super = context.super(%r, '
                           'block_%s)' % (name, name))
        self.pull_locals(block_frame)
        self.pull_dependencies(block.body)
        self.blockvisit(block.body, block_frame)
        self.outdent()
    self.writeline('blocks = {%s}' % ', '.join('%r: block_%s' % (x, x)
                                               for x in self.blocks),
                   extra=1)
    # add a function that returns the debug info
    self.writeline('debug_info = %r' % '&'.join('%s=%s' % x for x
                                                in self.debug_info))
def visit_Block(self, node, frame):
    """Call a block and register it for the template."""
    level = 1
    if frame.toplevel:
        # if we know that we are a child template, there is no need to
        # check if we are one
        if self.has_known_extends:
            return
        if self.extends_so_far > 0:
            self.writeline('if parent_template is None:')
            self.indent()
            level += 1
    # scoped blocks get a context derived from the surrounding locals
    context = node.scoped and 'context.derived(locals())' or 'context'
    self.writeline('for event in context.blocks[%r][0](%s):' % (
                   node.name, context), node)
    self.indent()
    self.simple_write('event', frame)
    self.outdent(level)
def visit_Extends(self, node, frame):
    """Calls the extender."""
    if not frame.toplevel:
        self.fail('cannot use extend from a non top-level scope',
                  node.lineno)
    # if the number of extends statements in general is zero so
    # far, we don't have to add a check if something extended
    # the template before this one.
    if self.extends_so_far > 0:
        # if we have a known extends we just add a template runtime
        # error into the generated code. We could catch that at compile
        # time too, but i welcome it not to confuse users by throwing the
        # same error at different times just "because we can".
        if not self.has_known_extends:
            self.writeline('if parent_template is not None:')
            self.indent()
        self.writeline('raise TemplateRuntimeError(%r)' %
                       'extended multiple times')
        # if we have a known extends already we don't need that code here
        # as we know that the template execution will end here.
        if self.has_known_extends:
            raise CompilerExit()
        else:
            self.outdent()
    self.writeline('parent_template = environment.get_template(', node)
    self.visit(node.template, frame)
    self.write(', %r)' % self.name)
    self.writeline('for name, parent_block in parent_template.'
                   'blocks.%s():' % dict_item_iter)
    self.indent()
    self.writeline('context.blocks.setdefault(name, []).'
                   'append(parent_block)')
    self.outdent()
    # if this extends statement was in the root level we can take
    # advantage of that information and simplify the generated code
    # in the top level from this point onwards
    if frame.rootlevel:
        self.has_known_extends = True
    # and now we have one more
    self.extends_so_far += 1
def visit_Include(self, node, frame):
    """Handles includes."""
    if node.with_context:
        self.unoptimize_scope(frame)
    if node.ignore_missing:
        self.writeline('try:')
        self.indent()
    # pick the narrowest lookup function the template expression allows
    func_name = 'get_or_select_template'
    if isinstance(node.template, nodes.Const):
        if isinstance(node.template.value, string_types):
            func_name = 'get_template'
        elif isinstance(node.template.value, (tuple, list)):
            func_name = 'select_template'
    elif isinstance(node.template, (nodes.Tuple, nodes.List)):
        func_name = 'select_template'
    self.writeline('template = environment.%s(' % func_name, node)
    self.visit(node.template, frame)
    self.write(', %r)' % self.name)
    if node.ignore_missing:
        # swallow TemplateNotFound and only render on success
        self.outdent()
        self.writeline('except TemplateNotFound:')
        self.indent()
        self.writeline('pass')
        self.outdent()
        self.writeline('else:')
        self.indent()
    if node.with_context:
        self.writeline('for event in template.root_render_func('
                       'template.new_context(context.parent, True, '
                       'locals())):')
    else:
        self.writeline('for event in template.module._body_stream:')
    self.indent()
    self.simple_write('event', frame)
    self.outdent()
    if node.ignore_missing:
        self.outdent()
def visit_Import(self, node, frame):
    """Visit regular imports."""
    if node.with_context:
        self.unoptimize_scope(frame)
    self.writeline('l_%s = ' % node.target, node)
    if frame.toplevel:
        # toplevel imports are also stored on the template context
        self.write('context.vars[%r] = ' % node.target)
    self.write('environment.get_template(')
    self.visit(node.template, frame)
    self.write(', %r).' % self.name)
    if node.with_context:
        self.write('make_module(context.parent, True, locals())')
    else:
        self.write('module')
    if frame.toplevel and not node.target.startswith('_'):
        # imported modules are never exported from the template
        self.writeline('context.exported_vars.discard(%r)' % node.target)
    frame.assigned_names.add(node.target)
def visit_FromImport(self, node, frame):
    """Visit named imports."""
    self.newline(node)
    self.write('included_template = environment.get_template(')
    self.visit(node.template, frame)
    self.write(', %r).' % self.name)
    if node.with_context:
        self.write('make_module(context.parent, True)')
    else:
        self.write('module')
    var_names = []
    discarded_names = []
    for name in node.names:
        # names may be plain or (name, alias) tuples
        if isinstance(name, tuple):
            name, alias = name
        else:
            alias = name
        self.writeline('l_%s = getattr(included_template, '
                       '%r, missing)' % (alias, name))
        self.writeline('if l_%s is missing:' % alias)
        self.indent()
        # a missing attribute becomes an undefined with a helpful message
        self.writeline('l_%s = environment.undefined(%r %% '
                       'included_template.__name__, '
                       'name=%r)' %
                       (alias, 'the template %%r (imported on %s) does '
                        'not export the requested name %s' % (
                            self.position(node),
                            repr(name)
                        ), name))
        self.outdent()
        if frame.toplevel:
            var_names.append(alias)
            if not alias.startswith('_'):
                discarded_names.append(alias)
        frame.assigned_names.add(alias)
    if var_names:
        if len(var_names) == 1:
            name = var_names[0]
            self.writeline('context.vars[%r] = l_%s' % (name, name))
        else:
            self.writeline('context.vars.update({%s})' % ', '.join(
                '%r: l_%s' % (name, name) for name in var_names
            ))
    if discarded_names:
        if len(discarded_names) == 1:
            self.writeline('context.exported_vars.discard(%r)' %
                           discarded_names[0])
        else:
            self.writeline('context.exported_vars.difference_'
                           'update((%s))' % ', '.join(imap(repr, discarded_names)))
def visit_For(self, node, frame):
    """Compile a for loop, covering recursive loops, loop filtering
    (``if`` tests) and the special ``loop`` variable."""
    # when calculating the nodes for the inner frame we have to exclude
    # the iterator contents from it
    children = node.iter_child_nodes(exclude=('iter',))
    if node.recursive:
        loop_frame = self.function_scoping(node, frame, children,
                                           find_special=False)
    else:
        loop_frame = frame.inner()
        loop_frame.inspect(children)
    # try to figure out if we have an extended loop. An extended loop
    # is necessary if the loop is in recursive mode if the special loop
    # variable is accessed in the body.
    extended_loop = node.recursive or 'loop' in \
                    find_undeclared(node.iter_child_nodes(
                        only=('body',)), ('loop',))
    # if we don't have an recursive loop we have to find the shadowed
    # variables at that point. Because loops can be nested but the loop
    # variable is a special one we have to enforce aliasing for it.
    if not node.recursive:
        aliases = self.push_scope(loop_frame, ('loop',))
    # otherwise we set up a buffer and add a function def
    else:
        self.writeline('def loop(reciter, loop_render_func, depth=0):', node)
        self.indent()
        self.buffer(loop_frame)
        aliases = {}
    # make sure the loop variable is a special one and raise a template
    # assertion error if a loop tries to write to loop
    if extended_loop:
        self.writeline('l_loop = missing')
        loop_frame.identifiers.add_special('loop')
    for name in node.find_all(nodes.Name):
        if name.ctx == 'store' and name.name == 'loop':
            self.fail('Can\'t assign to special loop variable '
                      'in for-loop target', name.lineno)
    self.pull_locals(loop_frame)
    if node.else_:
        # the indicator tracks whether the body ran at least once
        iteration_indicator = self.temporary_identifier()
        self.writeline('%s = 1' % iteration_indicator)
    # Create a fake parent loop if the else or test section of a
    # loop is accessing the special loop variable and no parent loop
    # exists.
    if 'loop' not in aliases and 'loop' in find_undeclared(
            node.iter_child_nodes(only=('else_', 'test')), ('loop',)):
        self.writeline("l_loop = environment.undefined(%r, name='loop')" %
                       ("'loop' is undefined. the filter section of a loop as well "
                        "as the else block don't have access to the special 'loop'"
                        " variable of the current loop. Because there is no parent "
                        "loop it's undefined. Happened in loop on %s" %
                        self.position(node)))
    self.writeline('for ', node)
    self.visit(node.target, loop_frame)
    self.write(extended_loop and ', l_loop in LoopContext(' or ' in ')
    # if we have an extened loop and a node test, we filter in the
    # "outer frame".
    if extended_loop and node.test is not None:
        self.write('(')
        self.visit(node.target, loop_frame)
        self.write(' for ')
        self.visit(node.target, loop_frame)
        self.write(' in ')
        if node.recursive:
            self.write('reciter')
        else:
            self.visit(node.iter, loop_frame)
        self.write(' if (')
        test_frame = loop_frame.copy()
        self.visit(node.test, test_frame)
        self.write('))')
    elif node.recursive:
        self.write('reciter')
    else:
        self.visit(node.iter, loop_frame)
    if node.recursive:
        self.write(', loop_render_func, depth):')
    else:
        self.write(extended_loop and '):' or ':')
    # tests in not extended loops become a continue
    if not extended_loop and node.test is not None:
        self.indent()
        self.writeline('if not ')
        self.visit(node.test, loop_frame)
        self.write(':')
        self.indent()
        self.writeline('continue')
        self.outdent(2)
    self.indent()
    self.blockvisit(node.body, loop_frame)
    if node.else_:
        self.writeline('%s = 0' % iteration_indicator)
    self.outdent()
    if node.else_:
        self.writeline('if %s:' % iteration_indicator)
        self.indent()
        self.blockvisit(node.else_, loop_frame)
        self.outdent()
    # reset the aliases if there are any.
    if not node.recursive:
        self.pop_scope(aliases, loop_frame)
    # if the node was recursive we have to return the buffer contents
    # and start the iteration code
    if node.recursive:
        self.return_buffer_contents(loop_frame)
        self.outdent()
        self.start_write(frame, node)
        self.write('loop(')
        self.visit(node.iter, frame)
        self.write(', loop)')
        self.end_write(frame)
def visit_If(self, node, frame):
    """Compile an if statement (and optional else) using a soft frame."""
    branch_frame = frame.soft()
    self.writeline('if ', node)
    self.visit(node.test, branch_frame)
    self.write(':')
    self.indent()
    self.blockvisit(node.body, branch_frame)
    self.outdent()
    if not node.else_:
        return
    self.writeline('else:')
    self.indent()
    self.blockvisit(node.else_, branch_frame)
    self.outdent()
def visit_Macro(self, node, frame):
    """Compile a macro definition and bind it to ``l_<name>``."""
    macro_frame = self.macro_body(node, frame)
    self.newline()
    if frame.toplevel:
        # toplevel macros are exported (unless private) and stored on
        # the template context as well
        if not node.name.startswith('_'):
            self.write('context.exported_vars.add(%r)' % node.name)
        self.writeline('context.vars[%r] = ' % node.name)
    self.write('l_%s = ' % node.name)
    self.macro_def(node, macro_frame)
    frame.assigned_names.add(node.name)
def visit_CallBlock(self, node, frame):
    """Compile ``{% call %}``: the block body becomes the ``caller``
    macro which is forwarded to the called macro."""
    children = node.iter_child_nodes(exclude=('call',))
    call_frame = self.macro_body(node, frame, children)
    self.writeline('caller = ')
    self.macro_def(node, call_frame)
    self.start_write(frame, node)
    self.visit_Call(node.call, call_frame, forward_caller=True)
    self.end_write(frame)
def visit_FilterBlock(self, node, frame):
    """Compile ``{% filter %}``: buffer the body and feed the buffer
    through the filter expression."""
    filter_frame = frame.inner()
    filter_frame.inspect(node.iter_child_nodes())
    aliases = self.push_scope(filter_frame)
    self.pull_locals(filter_frame)
    self.buffer(filter_frame)
    self.blockvisit(node.body, filter_frame)
    self.start_write(frame, node)
    # node.filter sees the buffered body as its value (see visit_Filter)
    self.visit_Filter(node.filter, filter_frame)
    self.end_write(frame)
    self.pop_scope(aliases, filter_frame)
def visit_ExprStmt(self, node, frame):
    """An expression statement: evaluate the node, discard the result."""
    self.newline(node)
    self.visit(node.node, frame)
def visit_Output(self, node, frame):
    """Compile an output node: constant-fold adjacent static chunks at
    compile time, then yield / buffer the pieces with autoescaping and
    ``environment.finalize`` applied as needed."""
    # if we have a known extends statement, we don't output anything
    # if we are in a require_output_check section
    if self.has_known_extends and frame.require_output_check:
        return
    if self.environment.finalize:
        finalize = lambda x: text_type(self.environment.finalize(x))
    else:
        finalize = text_type
    # if we are inside a frame that requires output checking, we do so
    outdent_later = False
    if frame.require_output_check:
        self.writeline('if parent_template is None:')
        self.indent()
        outdent_later = True
    # try to evaluate as many chunks as possible into a static
    # string at compile time.
    body = []
    for child in node.nodes:
        try:
            const = child.as_const(frame.eval_ctx)
        except nodes.Impossible:
            body.append(child)
            continue
        # the frame can't be volatile here, because otherwise the
        # as_const() function would raise an Impossible exception
        # at that point.
        try:
            if frame.eval_ctx.autoescape:
                if hasattr(const, '__html__'):
                    const = const.__html__()
                else:
                    const = escape(const)
            const = finalize(const)
        except Exception:
            # if something goes wrong here we evaluate the node
            # at runtime for easier debugging
            body.append(child)
            continue
        # consecutive constants are merged into one list entry
        if body and isinstance(body[-1], list):
            body[-1].append(const)
        else:
            body.append([const])
    # if we have less than 3 nodes or a buffer we yield or extend/append
    if len(body) < 3 or frame.buffer is not None:
        if frame.buffer is not None:
            # for one item we append, for more we extend
            if len(body) == 1:
                self.writeline('%s.append(' % frame.buffer)
            else:
                self.writeline('%s.extend((' % frame.buffer)
            self.indent()
        for item in body:
            if isinstance(item, list):
                val = repr(concat(item))
                if frame.buffer is None:
                    self.writeline('yield ' + val)
                else:
                    self.writeline(val + ', ')
            else:
                if frame.buffer is None:
                    self.writeline('yield ', item)
                else:
                    self.newline(item)
                close = 1
                # escape/to_string choice may have to happen at runtime
                # when the eval context is volatile
                if frame.eval_ctx.volatile:
                    self.write('(context.eval_ctx.autoescape and'
                               ' escape or to_string)(')
                elif frame.eval_ctx.autoescape:
                    self.write('escape(')
                else:
                    self.write('to_string(')
                if self.environment.finalize is not None:
                    self.write('environment.finalize(')
                    close += 1
                self.visit(item, frame)
                self.write(')' * close)
                if frame.buffer is not None:
                    self.write(', ')
        if frame.buffer is not None:
            # close the open parentheses
            self.outdent()
            self.writeline(len(body) == 1 and ')' or '))')
    # otherwise we create a format string as this is faster in that case
    else:
        format = []
        arguments = []
        for item in body:
            if isinstance(item, list):
                # escape literal percent signs for the format string
                format.append(concat(item).replace('%', '%%'))
            else:
                format.append('%s')
                arguments.append(item)
        self.writeline('yield ')
        self.write(repr(concat(format)) + ' % (')
        idx = -1
        self.indent()
        for argument in arguments:
            self.newline(argument)
            close = 0
            if frame.eval_ctx.volatile:
                self.write('(context.eval_ctx.autoescape and'
                           ' escape or to_string)(')
                close += 1
            elif frame.eval_ctx.autoescape:
                self.write('escape(')
                close += 1
            if self.environment.finalize is not None:
                self.write('environment.finalize(')
                close += 1
            self.visit(argument, frame)
            self.write(')' * close + ', ')
        self.outdent()
        self.writeline(')')
    if outdent_later:
        self.outdent()
def visit_Assign(self, node, frame):
    """Compile a ``{% set %}`` assignment."""
    self.newline(node)
    # toplevel assignments however go into the local namespace and
    # the current template's context. We create a copy of the frame
    # here and add a set so that the Name visitor can add the assigned
    # names here.
    if frame.toplevel:
        assignment_frame = frame.copy()
        assignment_frame.toplevel_assignments = set()
    else:
        assignment_frame = frame
    self.visit(node.target, assignment_frame)
    self.write(' = ')
    self.visit(node.node, frame)
    # make sure toplevel assignments are added to the context.
    if frame.toplevel:
        public_names = [x for x in assignment_frame.toplevel_assignments
                        if not x.startswith('_')]
        if len(assignment_frame.toplevel_assignments) == 1:
            name = next(iter(assignment_frame.toplevel_assignments))
            self.writeline('context.vars[%r] = l_%s' % (name, name))
        else:
            self.writeline('context.vars.update({')
            for idx, name in enumerate(assignment_frame.toplevel_assignments):
                if idx:
                    self.write(', ')
                self.write('%r: l_%s' % (name, name))
            self.write('})')
        # only non-underscore names are exported from the template
        if public_names:
            if len(public_names) == 1:
                self.writeline('context.exported_vars.add(%r)' %
                               public_names[0])
            else:
                self.writeline('context.exported_vars.update((%s))' %
                               ', '.join(imap(repr, public_names)))
# -- Expression Visitors
def visit_Name(self, node, frame):
    """Emit the mangled ``l_``-prefixed local for a template name and
    record the assignment on the frame."""
    if node.ctx == 'store' and frame.toplevel:
        frame.toplevel_assignments.add(node.name)
    self.write('l_' + node.name)
    frame.assigned_names.add(node.name)
def visit_Const(self, node, frame):
    """Emit a constant literal; floats use ``str()`` so they keep plain
    decimal notation, everything else uses ``repr()``."""
    value = node.value
    self.write(str(value) if isinstance(value, float) else repr(value))
def visit_TemplateData(self, node, frame):
    """Emit literal template data, marked safe when autoescaping."""
    try:
        self.write(repr(node.as_const(frame.eval_ctx)))
    except nodes.Impossible:
        # volatile eval context: decide Markup vs identity at runtime
        self.write('(context.eval_ctx.autoescape and Markup or identity)(%r)'
                   % node.data)
def visit_Tuple(self, node, frame):
    """Emit a tuple literal; one-element tuples get a trailing comma."""
    self.write('(')
    idx = -1
    for idx, element in enumerate(node.items):
        if idx:
            self.write(', ')
        self.visit(element, frame)
    self.write(',)' if idx == 0 else ')')
def visit_List(self, node, frame):
    """Emit a list literal."""
    self.write('[')
    for position, element in enumerate(node.items):
        if position > 0:
            self.write(', ')
        self.visit(element, frame)
    self.write(']')
def visit_Dict(self, node, frame):
    """Emit a dict literal from the node's key/value pairs."""
    self.write('{')
    for position, pair in enumerate(node.items):
        if position > 0:
            self.write(', ')
        self.visit(pair.key, frame)
        self.write(': ')
        self.visit(pair.value, frame)
    self.write('}')
def binop(operator, interceptable=True):
    """Build a visitor for a binary operator; sandboxed environments
    may route intercepted operators through environment.call_binop."""
    def visitor(self, node, frame):
        if self.environment.sandboxed and \
           operator in self.environment.intercepted_binops:
            self.write('environment.call_binop(context, %r, ' % operator)
            self.visit(node.left, frame)
            self.write(', ')
            self.visit(node.right, frame)
        else:
            self.write('(')
            self.visit(node.left, frame)
            self.write(' %s ' % operator)
            self.visit(node.right, frame)
        # closes either the call_binop(...) or the plain (...) form
        self.write(')')
    return visitor
def uaop(operator, interceptable=True):
    """Build a visitor for a unary operator; sandboxed environments
    may route intercepted operators through environment.call_unop."""
    def visitor(self, node, frame):
        if self.environment.sandboxed and \
           operator in self.environment.intercepted_unops:
            self.write('environment.call_unop(context, %r, ' % operator)
            self.visit(node.node, frame)
        else:
            self.write('(' + operator)
            self.visit(node.node, frame)
        self.write(')')
    return visitor
visit_Add = binop('+')
visit_Sub = binop('-')
visit_Mul = binop('*')
visit_Div = binop('/')
visit_FloorDiv = binop('//')
visit_Pow = binop('**')
visit_Mod = binop('%')
visit_And = binop('and', interceptable=False)
visit_Or = binop('or', interceptable=False)
visit_Pos = uaop('+')
visit_Neg = uaop('-')
visit_Not = uaop('not ', interceptable=False)
# the factories are only needed at class-creation time
del binop, uaop
def visit_Concat(self, node, frame):
    """Emit string concatenation via markup_join/unicode_join, chosen
    at runtime when the eval context is volatile."""
    if frame.eval_ctx.volatile:
        func_name = '(context.eval_ctx.volatile and' \
                    ' markup_join or unicode_join)'
    elif frame.eval_ctx.autoescape:
        func_name = 'markup_join'
    else:
        func_name = 'unicode_join'
    self.write('%s((' % func_name)
    for arg in node.nodes:
        self.visit(arg, frame)
        self.write(', ')
    self.write('))')
def visit_Compare(self, node, frame):
    """Emit a (possibly chained) comparison expression."""
    self.visit(node.expr, frame)
    for operand in node.ops:
        self.visit(operand, frame)
def visit_Operand(self, node, frame):
    """Emit one comparison operand with its operator symbol."""
    # ``operators`` (module level) maps node op names to Python symbols
    self.write(' %s ' % operators[node.op])
    self.visit(node.expr, frame)
def visit_Getattr(self, node, frame):
    """Emit attribute access routed through environment.getattr."""
    self.write('environment.getattr(')
    self.visit(node.node, frame)
    self.write(', {0!r})'.format(node.attr))
def visit_Getitem(self, node, frame):
    """Emit subscript access; slices bypass environment.getitem."""
    # slices bypass the environment getitem method.
    if isinstance(node.arg, nodes.Slice):
        self.visit(node.node, frame)
        self.write('[')
        self.visit(node.arg, frame)
        self.write(']')
    else:
        self.write('environment.getitem(')
        self.visit(node.node, frame)
        self.write(', ')
        self.visit(node.arg, frame)
        self.write(')')
def visit_Slice(self, node, frame):
    """Emit a ``start:stop:step`` slice, omitting absent parts."""
    if node.start is not None:
        self.visit(node.start, frame)
    self.write(':')
    if node.stop is not None:
        self.visit(node.stop, frame)
    if node.step is not None:
        self.write(':')
        self.visit(node.step, frame)
def visit_Filter(self, node, frame):
    """Emit a filter call, prepending context/eval_ctx/environment for
    decorated filters; inside a filter block the buffered body is the
    filtered value."""
    self.write(self.filters[node.name] + '(')
    func = self.environment.filters.get(node.name)
    if func is None:
        self.fail('no filter named %r' % node.name, node.lineno)
    if getattr(func, 'contextfilter', False):
        self.write('context, ')
    elif getattr(func, 'evalcontextfilter', False):
        self.write('context.eval_ctx, ')
    elif getattr(func, 'environmentfilter', False):
        self.write('environment, ')
    # if the filter node is None we are inside a filter block
    # and want to write to the current buffer
    if node.node is not None:
        self.visit(node.node, frame)
    elif frame.eval_ctx.volatile:
        self.write('(context.eval_ctx.autoescape and'
                   ' Markup(concat(%s)) or concat(%s))' %
                   (frame.buffer, frame.buffer))
    elif frame.eval_ctx.autoescape:
        self.write('Markup(concat(%s))' % frame.buffer)
    else:
        self.write('concat(%s)' % frame.buffer)
    self.signature(node, frame)
    self.write(')')
def visit_Test(self, node, frame):
    """Emit a test call (``is`` expressions)."""
    self.write(self.tests[node.name] + '(')
    if node.name not in self.environment.tests:
        self.fail('no test named %r' % node.name, node.lineno)
    self.visit(node.node, frame)
    self.signature(node, frame)
    self.write(')')
def visit_CondExpr(self, node, frame):
    """Emit an inline ``a if cond else b``; a missing else branch
    becomes an undefined object with a helpful message."""
    def write_expr2():
        if node.expr2 is not None:
            return self.visit(node.expr2, frame)
        self.write('environment.undefined(%r)' % ('the inline if-'
                   'expression on %s evaluated to false and '
                   'no else section was defined.' % self.position(node)))
    self.write('(')
    self.visit(node.expr1, frame)
    self.write(' if ')
    self.visit(node.test, frame)
    self.write(' else ')
    write_expr2()
    self.write(')')
def visit_Call(self, node, frame, forward_caller=False):
    """Emit a call routed through context.call (environment.call when
    sandboxed); optionally forwards ``caller`` from a call block."""
    if self.environment.sandboxed:
        self.write('environment.call(context, ')
    else:
        self.write('context.call(')
    self.visit(node.node, frame)
    extra_kwargs = forward_caller and {'caller': 'caller'} or None
    self.signature(node, frame, extra_kwargs)
    self.write(')')
def visit_Keyword(self, node, frame):
    """Emit a ``name=value`` keyword argument."""
    self.write('%s=' % node.key)
    self.visit(node.value, frame)
# -- Unused nodes for extensions
def visit_MarkSafe(self, node, frame):
    """Wrap the expression in Markup() to mark it safe."""
    self.write('Markup(')
    self.visit(node.expr, frame)
    self.write(')')
def visit_MarkSafeIfAutoescape(self, node, frame):
    """Mark the expression safe only when autoescaping is active at
    runtime (identity otherwise)."""
    self.write('(context.eval_ctx.autoescape and Markup or identity)(')
    self.visit(node.expr, frame)
    self.write(')')
def visit_EnvironmentAttribute(self, node, frame):
    """Reference an attribute of the environment in generated code."""
    self.write('environment.%s' % node.name)
def visit_ExtensionAttribute(self, node, frame):
    """Reference an attribute of a loaded extension by identifier."""
    self.write('environment.extensions[{0!r}].{1}'.format(node.identifier, node.name))
def visit_ImportedName(self, node, frame):
    """Write the compile-time alias assigned to this imported name."""
    alias = self.import_aliases[node.importname]
    self.write(alias)
def visit_InternalName(self, node, frame):
    # internal names are emitted verbatim (already valid identifiers)
    self.write(node.name)
def visit_ContextReference(self, node, frame):
    # reference to the template context object in the generated code
    self.write('context')
def visit_Continue(self, node, frame):
    # plain ``continue`` statement (used by extension nodes)
    self.writeline('continue', node)
def visit_Break(self, node, frame):
    # plain ``break`` statement (used by extension nodes)
    self.writeline('break', node)
def visit_Scope(self, node, frame):
    """Run the node body in an inner frame with its own scoped
    (aliased) locals."""
    scope_frame = frame.inner()
    scope_frame.inspect(node.iter_child_nodes())
    aliases = self.push_scope(scope_frame)
    self.pull_locals(scope_frame)
    self.blockvisit(node.body, scope_frame)
    self.pop_scope(aliases, scope_frame)
def visit_EvalContextModifier(self, node, frame):
    """Compile eval-context option assignments (e.g. autoescape) and
    mirror constant values onto the compile-time eval context."""
    for keyword in node.options:
        self.writeline('context.eval_ctx.%s = ' % keyword.key)
        self.visit(keyword.value, frame)
        try:
            val = keyword.value.as_const(frame.eval_ctx)
        except nodes.Impossible:
            # non-constant option: the eval context becomes volatile
            frame.eval_ctx.volatile = True
        else:
            setattr(frame.eval_ctx, keyword.key, val)
def visit_ScopedEvalContextModifier(self, node, frame):
    """Like visit_EvalContextModifier, but restores the previous eval
    context after the body — both at compile time and at runtime."""
    old_ctx_name = self.temporary_identifier()
    safed_ctx = frame.eval_ctx.save()
    self.writeline('%s = context.eval_ctx.save()' % old_ctx_name)
    self.visit_EvalContextModifier(node, frame)
    for child in node.body:
        self.visit(child, frame)
    frame.eval_ctx.revert(safed_ctx)
    self.writeline('context.eval_ctx.revert(%s)' % old_ctx_name)
| bsd-3-clause |
jsirois/commons | src/python/twitter/common/log/parsers.py | 14 | 3099 | from datetime import datetime, timedelta
from twitter.common.lang import total_ordering
# TODO(wickman) Do something that won't break if this is running over NYE?
# NOTE(review): captured once at import time; glog timestamps omit the year,
# so a long-running process that crosses a year boundary will stamp parses
# with the stale year — confirm whether this matters for consumers.
_CURRENT_YEAR = str(datetime.now().year)
class Level(object):
    """Symbolic log severities; larger values are more severe."""

    DEBUG, INFO, WARNING, ERROR, FATAL = 0, 10, 20, 30, 40
@total_ordering
class Line(object):
    """A single parsed log line.

    Subclasses implement ``parse`` for a concrete log format.  Instances
    compare by their parsed fields, ordering primarily by timestamp.
    """

    __slots__ = ('raw', 'level', 'datetime', 'pid', 'source', 'message')

    @classmethod
    def parse(cls, line):
        """parses a line and returns Line if successfully parsed, ValueError/None otherwise."""
        raise NotImplementedError

    @staticmethod
    def parse_order(line, *line_parsers):
        """Given a text line and any number of Line implementations, return the first that matches
        or None if no lines match."""
        for parser in line_parsers:
            try:
                return parser.parse(line)
            except ValueError:
                continue

    def __init__(self, raw, level, dt, pid, source, message):
        self.raw = raw
        self.level = level
        self.datetime = dt
        self.pid = pid
        self.source = source
        self.message = message

    def extend(self, lines):
        """Return a new line with continuation *lines* appended to both
        the raw text and the message."""
        extension = '\n'.join(lines)
        combined_raw = '\n'.join([self.raw, extension])
        combined_message = '\n'.join([self.message, extension])
        return self.__class__(combined_raw, self.level, self.datetime,
                              self.pid, self.source, combined_message)

    def __lt__(self, other):
        return self.datetime < other.datetime

    def __gt__(self, other):
        return self.datetime > other.datetime

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return ((self.datetime, self.level, self.pid, self.source, self.message) ==
                (other.datetime, other.level, other.pid, other.source, other.message))

    def __str__(self):
        return self.raw
class GlogLine(Line):
    """Parser for glog-style lines; judging from the parse logic the
    expected shape is ``<LVL><MMDD> <HH:MM:SS.ffffff> <pid> <source> <msg>``
    where ``<LVL>`` is one of I/W/E/F/D."""

    # leading character encodes the severity
    LEVEL_MAP = {
        'I': Level.INFO,
        'W': Level.WARNING,
        'E': Level.ERROR,
        'F': Level.FATAL,
        'D': Level.DEBUG
    }

    @classmethod
    def split_time(cls, line):
        """Return (level, datetime, remaining fields) or raise ValueError."""
        if len(line) == 0:
            raise ValueError
        if line[0] not in 'IWEFD':
            raise ValueError
        sline = line[1:].split(' ')
        if len(sline) < 2:
            raise ValueError
        # glog omits the year, so prepend the year captured at import time
        t = datetime.strptime(''.join([_CURRENT_YEAR, sline[0], ' ', sline[1]]), '%Y%m%d %H:%M:%S.%f')
        return cls.LEVEL_MAP[line[0]], t, sline[2:]

    @classmethod
    def parse(cls, line):
        level, dt, rest = cls.split_time(line)
        pid, source, message = rest[0], rest[1], ' '.join(rest[2:])
        return cls(line, level, dt, pid, source, message)
class ZooLine(Line):
    """Parser for ZooKeeper client log lines; judging from the parse
    logic the shape is
    ``YYYY-MM-DD HH:MM:SS,fff:<pid>:<ZOO_LEVEL>@<source>:<message>``."""

    LEVEL_MAP = {
        "ZOO_INVALID": 0,
        "ZOO_ERROR": Level.ERROR,
        "ZOO_WARN": Level.WARNING,
        "ZOO_INFO": Level.INFO,
        "ZOO_DEBUG": Level.DEBUG
    }

    @classmethod
    def parse(cls, line):
        sline = line.split(':')
        if len(sline) < 6:
            raise ValueError
        # the first three ':'-separated fields form the timestamp
        t = datetime.strptime(':'.join(sline[0:3]), '%Y-%m-%d %H:%M:%S,%f')
        pid = sline[3]
        ssource = sline[4].split('@')
        # unrecognized level names fall back to 0 (ZOO_INVALID)
        level = cls.LEVEL_MAP.get(ssource[0], 0)
        source = '@'.join(ssource[1:])
        return cls(line, level, t, pid, source, ':'.join(sline[5:]))
| apache-2.0 |
rkashapov/buildbot | master/buildbot/test/unit/test_steps_package_rpm_rpmlint.py | 10 | 2185 | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from __future__ import absolute_import
from __future__ import print_function
from twisted.trial import unittest
from buildbot.process.results import SUCCESS
from buildbot.steps.package.rpm import rpmlint
from buildbot.test.fake.remotecommand import ExpectShell
from buildbot.test.util import steps
class TestRpmLint(steps.BuildStepMixin, unittest.TestCase):
    """Unit tests for the RpmLint build step's command construction."""

    def setUp(self):
        return self.setUpBuildStep()

    def tearDown(self):
        return self.tearDownBuildStep()

    def test_success(self):
        # default invocation runs ``rpmlint -i .``
        self.setupStep(rpmlint.RpmLint())
        self.expectCommands(
            ExpectShell(workdir='wkdir',
                        command=['rpmlint', '-i', '.'])
            + 0)
        self.expectOutcome(
            result=SUCCESS, state_string='Finished checking RPM/SPEC issues')
        return self.runStep()

    def test_fileloc_success(self):
        # ``fileloc`` replaces the default '.' target
        self.setupStep(rpmlint.RpmLint(fileloc='RESULT'))
        self.expectCommands(
            ExpectShell(workdir='wkdir',
                        command=['rpmlint', '-i', 'RESULT'])
            + 0)
        self.expectOutcome(result=SUCCESS)
        return self.runStep()

    def test_config_success(self):
        # ``config`` adds a ``-f <file>`` option before the target
        self.setupStep(rpmlint.RpmLint(config='foo.cfg'))
        self.expectCommands(
            ExpectShell(workdir='wkdir',
                        command=['rpmlint', '-i', '-f', 'foo.cfg', '.'])
            + 0)
        self.expectOutcome(result=SUCCESS)
        return self.runStep()
| gpl-2.0 |
onepercentclub/onepercentclub-site | apps/bluebottle_salesforce/export.py | 1 | 33154 | import csv
import logging
from apps.projects.models import ProjectBudgetLine
from bluebottle.payments.models import OrderPayment
import os
from registration.models import RegistrationProfile
from django.utils import timezone
from django.conf import settings
from apps.cowry_docdata.models import payment_method_mapping
from bluebottle.donations.models import Donation
from apps.recurring_donations.models import MonthlyDonor
from apps.vouchers.models import Voucher, VoucherStatuses
from apps.organizations.models import Organization, OrganizationMember
from bluebottle.fundraisers.models import FundRaiser
from apps.tasks.models import Task, TaskMember
from apps.projects.models import Project
from apps.members.models import Member
logger = logging.getLogger('bluebottle.salesforce')
def generate_organizations_csv_file(path, loglevel):
    """Export every Organization to BLUE2SFDC_Organizations.csv under ``path``.

    Writes one row per organization in the column order declared by the
    header row below (Salesforce field API names).  Text fields are
    encoded to UTF-8 byte strings for the Python 2 csv writer (file is
    opened in binary mode).  Rows that raise are logged and counted
    instead of aborting the export.

    Returns:
        (success_count, error_count) tuple of row counts.
    """
    logger.setLevel(loglevel)
    error_count = 0
    success_count = 0
    filename = 'BLUE2SFDC_Organizations.csv'
    with open(os.path.join(path, filename), 'wb') as csv_outfile:
        csvwriter = csv.writer(csv_outfile, quoting=csv.QUOTE_MINIMAL)
        # Header row: must stay in the same order as the data rows below.
        csvwriter.writerow(["Organization_External_Id__c",
                            "Name",
                            "BillingStreet",
                            "BillingCity",
                            "BillingState",
                            "BillingCountry",
                            "BillingPostalCode",
                            "E_mail_address__c",
                            "Phone",
                            "Website",
                            "Twitter__c",
                            "Facebook__c",
                            "Skype__c",
                            "Tags__c",
                            "Bank_account_name__c",
                            "Bank_account_address__c",
                            "Bank_account_postalcode__c",
                            "Bank_account_city__c",
                            "Bank_account_country__c",
                            "Bank_account_IBAN__c",
                            "Bank_SWIFT__c",
                            "Bank_account_number__c",
                            "Bank_bankname__c",
                            "Bank_address__c",
                            "Bank_postalcode__c",
                            "Bank_city__c",
                            "Bank_country__c",
                            "Organization_created_date__c",
                            "Deleted__c"])
        organizations = Organization.objects.all()
        logger.info("Exporting {0} Organization objects to {1}".format(organizations.count(), filename))
        for organization in organizations:
            try:
                # Both address lines are merged into one billing street field.
                billing_street = organization.address_line1 + " " + organization.address_line2
                # Country FKs are optional; fall back to empty strings.
                if organization.country:
                    billing_country = organization.country.name
                else:
                    billing_country = ''
                if organization.account_bank_country:
                    bank_country = organization.account_bank_country.name
                else:
                    bank_country = ''
                if organization.account_holder_country:
                    bank_account_country = organization.account_holder_country.name
                else:
                    bank_account_country = ''
                # Tags are flattened into one comma-separated string
                # (prepended, so they end up in reverse iteration order).
                tags = ""
                for tag in organization.tags.all():
                    tags = str(tag) + ", " + tags
                # Deletion timestamp only when the record is soft-deleted.
                deleted = ""
                if organization.deleted:
                    deleted = organization.deleted.strftime("%Y-%m-%dT%H:%M:%S.000Z")
                # NOTE(review): created is reduced to a date before being
                # formatted with a time pattern, so the time part is always
                # 00:00:00 -- presumably intentional; confirm.
                csvwriter.writerow([organization.id,
                                    organization.name.encode("utf-8"),
                                    billing_street.encode("utf-8"),
                                    organization.city[:40].encode("utf-8"),  # Salesforce city field limit
                                    organization.state.encode("utf-8"),
                                    billing_country.encode("utf-8"),
                                    organization.postal_code.encode("utf-8"),
                                    organization.email.encode("utf-8"),
                                    organization.phone_number.encode("utf-8"),
                                    organization.website.encode("utf-8"),
                                    organization.twitter.encode("utf-8"),
                                    organization.facebook.encode("utf-8"),
                                    organization.skype.encode("utf-8"),
                                    tags.encode("utf-8"),
                                    organization.account_holder_name.encode("utf-8"),
                                    organization.account_holder_address.encode("utf-8"),
                                    organization.account_holder_postal_code.encode("utf-8"),
                                    organization.account_holder_city.encode("utf-8"),
                                    bank_account_country.encode("utf-8"),
                                    organization.account_iban.encode("utf-8"),
                                    organization.account_bic.encode("utf-8"),
                                    organization.account_number.encode("utf-8"),
                                    organization.account_bank_name.encode("utf-8"),
                                    organization.account_bank_address.encode("utf-8"),
                                    organization.account_bank_postal_code.encode("utf-8"),
                                    organization.account_bank_city.encode("utf-8"),
                                    bank_country.encode("utf-8"),
                                    organization.created.date().strftime("%Y-%m-%dT%H:%M:%S.000Z"),
                                    deleted])
                success_count += 1
            except Exception as e:
                # Log and continue: one broken record must not abort the export.
                error_count += 1
                logger.error("Error while saving organization id {0}: ".format(organization.id) + str(e))
    return success_count, error_count
def generate_users_csv_file(path, loglevel):
    """Export every Member to BLUE2SFDC_Users.csv under ``path``.

    Writes one row per member in the column order declared by the header
    row below (Salesforce Contact field API names).  Optional address,
    registration-activation and monthly-donor bank details are resolved
    per member, with empty-string fallbacks.  Rows that raise are logged
    and counted instead of aborting the export.

    Returns:
        (success_count, error_count) tuple of row counts.
    """
    logger.setLevel(loglevel)
    error_count = 0
    success_count = 0
    filename = 'BLUE2SFDC_Users.csv'
    with open(os.path.join(path, filename), 'wb') as csv_outfile:
        csvwriter = csv.writer(csv_outfile, quoting=csv.QUOTE_ALL)
        # Header row: must stay in the same order as the data rows below.
        csvwriter.writerow(["Contact_External_Id__c",
                            "Category1__c",
                            "FirstName",
                            "LastName",
                            "Gender__c",
                            "Username__c",
                            "Active__c",
                            "Deleted__c",
                            "Member_since__c",
                            "Location__c",
                            "Birthdate",
                            "Email",
                            "Website__c",
                            "Picture_Location__c",
                            "Tags__c",
                            "MailingCity",
                            "MailingStreet",
                            "MailingCountry",
                            "MailingPostalCode",
                            "MailingState",
                            "Receive_newsletter__c",
                            "Primary_language__c",
                            "Available_to_share_time_and_knowledge__c",
                            "Available_to_donate__c",
                            "Availability__c",
                            "Facebook__c",
                            "Twitter__c",
                            "Skype__c",
                            "Has_Activated_Account__c",
                            "Date_Joined__c",
                            "Date_Last_Login__c",
                            "Account_number__c",
                            "Account_holder__c",
                            "Account_city__c",
                            "Account_IBAN__c",
                            "Account_Active_Recurring_Debit__c",
                            "Phone"])
        users = Member.objects.all()
        logger.info("Exporting {0} User objects to {1}".format(users.count(), filename))
        for user in users:
            try:
                # Mailing address comes from the optional address relation.
                if user.address:
                    mailing_city = user.address.city
                    mailing_street = user.address.line1 + ' ' + user.address.line2
                    if user.address.country:
                        mailing_country = user.address.country.name
                    else:
                        mailing_country = ''
                    mailing_postal_code = user.address.postal_code
                    mailing_state = user.address.state
                else:
                    mailing_city = ''
                    mailing_street = ''
                    mailing_country = ''
                    mailing_postal_code = ''
                    mailing_state = ''
                # Salesforce requires a non-empty LastName; use a placeholder.
                if user.last_name.strip():
                    last_name = user.last_name
                else:
                    last_name = "1%MEMBER"
                # Map the model's gender key to its title-cased display value.
                gender = ""
                if user.gender == "male":
                    gender = Member.Gender.values['male'].title()
                elif user.gender == "female":
                    gender = Member.Gender.values['female'].title()
                date_deleted = ""
                if user.deleted:
                    date_deleted = user.deleted.date().strftime("%Y-%m-%dT%H:%M:%S.000Z")
                birth_date = ""
                if user.birthdate:
                    birth_date = user.birthdate.strftime("%Y-%m-%dT%H:%M:%S.000Z")
                # Tags are flattened into one comma-separated string
                # (prepended, so they end up in reverse iteration order).
                tags = ""
                for tag in user.tags.all():
                    tags = str(tag) + ", " + tags
                # Member_since__c is the join *date* (midnight), Date_Joined__c
                # keeps the full timestamp.
                date_joined = ""
                member_since = ""
                if user.date_joined:
                    member_since = user.date_joined.date().strftime("%Y-%m-%dT%H:%M:%S.000Z")
                    date_joined = user.date_joined.strftime("%Y-%m-%dT%H:%M:%S.000Z")
                last_login = ""
                if user.last_login:
                    last_login = user.last_login.strftime("%Y-%m-%dT%H:%M:%S.000Z")
                # Activation status: prefer the RegistrationProfile record;
                # without one, fall back to the heuristic that a user who never
                # logged in after joining (and is inactive) never activated.
                has_activated = False
                try:
                    rp = RegistrationProfile.objects.get(id=user.id)
                    if rp.activation_key == RegistrationProfile.ACTIVATED:
                        has_activated = True
                except RegistrationProfile.DoesNotExist:
                    if not user.is_active and user.date_joined == user.last_login:
                        has_activated = False
                    else:
                        has_activated = True
                # Bank details only exist for monthly donors.
                try:
                    monthly_donor = MonthlyDonor.objects.get(user=user)
                    bank_account_city = monthly_donor.city
                    bank_account_holder = monthly_donor.name
                    bank_account_number = ''
                    bank_account_iban = monthly_donor.iban
                    bank_account_active = monthly_donor.active
                except MonthlyDonor.DoesNotExist:
                    bank_account_city = ''
                    bank_account_holder = ''
                    bank_account_number = ''
                    bank_account_iban = ''
                    bank_account_active = False
                availability = user.available_time
                csvwriter.writerow([user.id,
                                    Member.UserType.values[user.user_type].title(),
                                    user.first_name.encode("utf-8"),
                                    last_name.encode("utf-8"),
                                    gender,
                                    user.username.encode("utf-8"),
                                    int(user.is_active),
                                    date_deleted,
                                    member_since,
                                    user.location.encode("utf-8"),
                                    birth_date,
                                    user.email.encode("utf-8"),
                                    user.website.encode("utf-8"),
                                    user.picture,
                                    tags.encode("utf-8"),
                                    mailing_city.encode("utf-8"),
                                    mailing_street.encode("utf-8"),
                                    mailing_country.encode("utf-8"),
                                    mailing_postal_code.encode("utf-8"),
                                    mailing_state.encode("utf-8"),
                                    int(user.newsletter),
                                    user.primary_language.encode("utf-8"),
                                    int(user.share_time_knowledge),
                                    int(user.share_money),
                                    availability,
                                    user.facebook.encode("utf-8"),
                                    user.twitter.encode("utf-8"),
                                    user.skypename.encode("utf-8"),
                                    int(has_activated),
                                    date_joined,
                                    last_login,
                                    bank_account_number,
                                    bank_account_holder.encode("utf-8"),
                                    bank_account_city.encode("utf-8"),
                                    bank_account_iban.encode("utf-8"),
                                    int(bank_account_active),
                                    user.phone_number.encode("utf-8")])
                success_count += 1
            except Exception as e:
                # Log and continue: one broken record must not abort the export.
                error_count += 1
                logger.error("Error while saving user id {0}: ".format(user.id) + str(e))
    return success_count, error_count
def generate_projects_csv_file(path, loglevel):
    """Export every Project to BLUE2SFDC_Projects.csv under ``path``.

    Writes one row per project in the column order declared by the
    header row below (Salesforce field API names).  Monetary fields are
    emitted as two-decimal strings; donation totals are converted from
    cents by dividing by 100.

    Fix: each row is now wrapped in its own try/except, matching every
    other generate_*_csv_file exporter.  Previously a single broken
    record (e.g. a country whose subregion chain is incomplete) raised
    out of the loop and aborted the whole export, and ``error_count``
    could never be incremented.

    Returns:
        (success_count, error_count) tuple of row counts.
    """
    logger.setLevel(loglevel)
    error_count = 0
    success_count = 0
    filename = 'BLUE2SFDC_Projects.csv'
    with open(os.path.join(path, filename), 'wb') as csv_outfile:
        csvwriter = csv.writer(csv_outfile, quoting=csv.QUOTE_ALL)
        # Header row: must stay in the same order as the data rows below.
        csvwriter.writerow(["Project_External_ID__c",
                            "Project_name__c",
                            "NumerOfPeopleReachedDirect__c",
                            "VideoURL__c",
                            "Date_project_deadline__c",
                            "Is_Campaign__c",
                            "Amount_requested__c",
                            "Amount_at_the_moment__c",
                            "Amount_still_needed__c",
                            "Allow_Overfunding__c",
                            "Date_plan_submitted",
                            "Date_Started__c",
                            "Date_Ended__c",
                            "Date_Funded__c",
                            "Picture_Location__c",
                            "Project_Owner__c",
                            "Organization__c",
                            "Country_in_which_the_project_is_located__c",
                            "Theme__c",
                            "Status_project__c",
                            "Project_created_date__c",
                            "Project_updated_date__c",
                            "Tags__c",
                            "Partner_Organization__c",
                            "Slug__c",
                            "Region__c",
                            "Sub_region__c",
                            "Donation_total__c",
                            "Donation_oo_total__c",
                            "Supporter_count__c",
                            "Supporter_oo_count__c"])
        projects = Project.objects.all()
        logger.info("Exporting {0} Project objects to {1}".format(projects.count(), filename))
        for project in projects:
            try:
                # Geographic fields require the full country->subregion->region
                # chain; empty strings when the country FK is unset.
                country = ''
                region = ''
                sub_region = ''
                if project.country:
                    country = project.country.name.encode("utf-8")
                    region = project.country.subregion.region.name.encode("utf-8")
                    sub_region = project.country.subregion.name.encode("utf-8")
                status = ''
                if project.status:
                    status = project.status.name.encode("utf-8")
                organization_id = ''
                if project.organization:
                    organization_id = project.organization.id
                video_url = ''
                if project.video_url:
                    video_url = project.video_url
                # Lifecycle timestamps are optional; empty string when unset.
                tags = ""
                deadline = ""
                date_submitted = ""
                date_started = ""
                date_ended = ""
                date_funded = ""
                theme = ""
                if project.deadline:
                    deadline = project.deadline.date().strftime("%Y-%m-%dT%H:%M:%S.000Z")
                if project.date_submitted:
                    date_submitted = project.date_submitted.date().strftime("%Y-%m-%dT%H:%M:%S.000Z")
                if project.campaign_started:
                    date_started = project.campaign_started.date().strftime("%Y-%m-%dT%H:%M:%S.000Z")
                if project.campaign_ended:
                    date_ended = project.campaign_ended.date().strftime("%Y-%m-%dT%H:%M:%S.000Z")
                if project.campaign_funded:
                    date_funded = project.campaign_funded.date().strftime("%Y-%m-%dT%H:%M:%S.000Z")
                # Tags are flattened into one comma-separated string
                # (prepended, so they end up in reverse iteration order).
                for tag in project.tags.all():
                    tags = str(tag) + ", " + tags
                if project.theme:
                    theme = project.theme.name
                partner_organization_name = "-"
                if project.partner_organization:
                    partner_organization_name = project.partner_organization.name
                # NOTE(review): Donation_total__c and Donation_oo_total__c use
                # the identical query -- confirm the one-off total is meant to
                # match the overall total here.
                csvwriter.writerow([project.id,
                                    project.title.encode("utf-8"),
                                    project.reach,
                                    video_url.encode("utf-8"),
                                    deadline,
                                    int(project.is_campaign),
                                    "%01.2f" % (project.amount_asked or 0),
                                    "%01.2f" % (project.amount_donated or 0),
                                    "%01.2f" % (project.amount_needed or 0),
                                    int(project.allow_overfunding),
                                    date_submitted,
                                    date_started,
                                    date_ended,
                                    date_funded,
                                    project.image,
                                    project.owner.id,
                                    organization_id,
                                    country,
                                    theme,
                                    status,
                                    project.created.strftime("%Y-%m-%dT%H:%M:%S.000Z"),
                                    project.updated.strftime("%Y-%m-%dT%H:%M:%S.000Z"),
                                    tags[:255],  # Salesforce text field limit
                                    partner_organization_name.encode("utf-8"),
                                    project.slug,
                                    region,
                                    sub_region,
                                    "%01.2f" % ((project.get_money_total(['paid', 'pending'])) / 100),
                                    "%01.2f" % ((project.get_money_total(['paid', 'pending'])) / 100),
                                    project.supporters_count(),
                                    project.supporters_count(True)])
                success_count += 1
            except Exception as e:
                # Log and continue: one broken record must not abort the export.
                error_count += 1
                logger.error("Error while saving project id {0}: ".format(project.id) + str(e))
    return success_count, error_count
def generate_projectbudgetlines_csv_file(path, loglevel):
    """Export all ProjectBudgetLine rows to BLUE2SFDC_Projectbudgetlines.csv.

    Amounts are stored in cents and written as two-decimal strings.
    Returns a (success_count, error_count) pair; failing rows are
    logged and skipped so one bad record never aborts the export.
    """
    logger.setLevel(loglevel)
    success_count = 0
    error_count = 0
    filename = 'BLUE2SFDC_Projectbudgetlines.csv'
    with open(os.path.join(path, filename), 'wb') as outfile:
        writer = csv.writer(outfile, quoting=csv.QUOTE_ALL)
        writer.writerow(["Project_Budget_External_ID__c", "Project__c", "Costs__c", "Description__c"])
        budget_lines = ProjectBudgetLine.objects.all()
        logger.info("Exporting {0} ProjectBudgetLine objects to {1}".format(budget_lines.count(), filename))
        for line in budget_lines:
            try:
                writer.writerow([line.id,
                                 line.project.id,
                                 '%01.2f' % (float(line.amount) / 100),
                                 line.description.encode("utf-8")])
                success_count += 1
            except Exception as e:
                error_count += 1
                logger.error("Error while saving projectbudgetline id {0}: ".format(line.id) + str(e))
    return success_count, error_count
def generate_donations_csv_file(path, loglevel):
    """Export every Donation to BLUE2SFDC_Donations.csv under ``path``.

    Writes one row per donation in the column order declared by the
    header row below.  Opportunity stage/type come from the donation's
    order; the payment method is resolved from the order's latest
    OrderPayment via payment_method_mapping.  Rows that raise are
    logged and counted instead of aborting the export.

    Returns:
        (success_count, error_count) tuple of row counts.
    """
    logger.setLevel(loglevel)
    error_count = 0
    success_count = 0
    filename = 'BLUE2SFDC_Donations.csv'
    with open(os.path.join(path, filename), 'wb') as csv_outfile:
        csvwriter = csv.writer(csv_outfile, quoting=csv.QUOTE_ALL)
        # Header row: must stay in the same order as the data rows below.
        csvwriter.writerow(["Donation_External_ID__c",
                            "Donor__c",
                            "Project__c",
                            "Amount",
                            "CloseDate",
                            "Name",
                            "StageName",
                            "Type",
                            "Donation_created_date__c",
                            "Donation_updated_date__c",
                            "Donation_ready_date__c",
                            "Payment_method__c",
                            "RecordTypeId",
                            "Fundraiser__c"])
        donations = Donation.objects.all()
        logger.info("Exporting {0} Donation objects to {1}".format(donations.count(), filename))
        # Progress counter for the per-row debug log below.
        t = 0
        for donation in donations:
            t += 1
            logger.debug("writing donation {0}/{1}: {2}".format(t, donations.count(), donation.id))
            try:
                # Related ids are optional; empty string when the FK is unset.
                donor_id = ''
                if donation.user:
                    donor_id = donation.user.id
                project_id = ''
                if donation.project:
                    project_id = donation.project.id
                fundraiser_id = ''
                if donation.fundraiser:
                    fundraiser_id = donation.fundraiser.id
                # Donor display name; "Anonymous" when no user or no name.
                if donation.user and donation.user.get_full_name() != '':
                    name = donation.user.get_full_name()
                else:
                    name = "Anonymous"
                donation_ready = ''
                if donation.completed:
                    donation_ready = donation.completed.strftime("%Y-%m-%dT%H:%M:%S.000Z")
                # Get the payment method from the associated order / payment
                payment_method = payment_method_mapping['']  # Maps to Unknown for DocData.
                if donation.order:
                    lp = OrderPayment.get_latest_by_order(donation.order)
                    if lp and lp.payment_method in payment_method_mapping:
                        payment_method = payment_method_mapping[lp.payment_method]
                # NOTE(review): unlike the cents-based exporters, the amount
                # here is not divided by 100 -- presumably Donation.amount is
                # already in currency units; confirm against the model.
                csvwriter.writerow([donation.id,
                                    donor_id,
                                    project_id,
                                    '%01.2f' % (float(donation.amount)),
                                    donation.created.date().strftime("%Y-%m-%dT%H:%M:%S.000Z"),
                                    name.encode("utf-8"),
                                    donation.order.get_status_display(),
                                    donation.order.order_type,
                                    donation.created.strftime("%Y-%m-%dT%H:%M:%S.000Z"),
                                    donation.updated.strftime("%Y-%m-%dT%H:%M:%S.000Z"),
                                    donation_ready,
                                    payment_method.encode("utf-8"),
                                    '012A0000000ZK6FIAW',  # hard-coded Salesforce record type id
                                    fundraiser_id])
                success_count += 1
            except Exception as e:
                # Log and continue: one broken record must not abort the export.
                error_count += 1
                logger.error("Error while saving donation id {0}: ".format(donation.id) + str(e))
    return success_count, error_count
def generate_vouchers_csv_file(path, loglevel):
    """Export all Voucher records to a date-stamped BLUE2SFDC_Vouchers CSV.

    Amounts are stored in cents and written as two-decimal strings.
    Returns a (success_count, error_count) pair; failing rows are
    logged and skipped so one bad record never aborts the export.
    """
    logger.setLevel(loglevel)
    success_count = 0
    error_count = 0
    # The voucher export file name carries the local export date.
    filename = 'BLUE2SFDC_Vouchers_{0}.csv'.format(
        timezone.localtime(timezone.now()).strftime('%Y%m%d'))
    with open(os.path.join(path, filename), 'wb') as outfile:
        writer = csv.writer(outfile, quoting=csv.QUOTE_ALL)
        writer.writerow(["Voucher_External_ID__c", "Purchaser__c", "Amount", "CloseDate", "Name", "Description",
                         "StageName", "RecordTypeId"])
        vouchers = Voucher.objects.all()
        logger.info("Exporting {0} Voucher objects to {1}".format(vouchers.count(), filename))
        for voucher in vouchers:
            try:
                # Fall back to the generic member label when the sender
                # has no usable full name.
                if voucher.sender and voucher.sender.get_full_name() != '':
                    name = voucher.sender.get_full_name()
                else:
                    name = "1%MEMBER"
                row = [voucher.id,
                       voucher.sender.id,
                       '%01.2f' % (float(voucher.amount) / 100),
                       voucher.created.date(),
                       name.encode("utf-8"),
                       voucher.message.encode("utf-8"),
                       VoucherStatuses.values[voucher.status].title(),
                       '012A0000000BxfHIAS']
                writer.writerow(row)
                success_count += 1
            except Exception as e:
                error_count += 1
                logger.error("Error while saving voucher id {0}: ".format(voucher.id) + str(e))
    return success_count, error_count
def generate_tasks_csv_file(path, loglevel):
    """Export every Task to BLUE2SFDC_Tasks.csv under ``path``.

    Writes one row per task in the column order declared by the header
    row below.

    Fix: the per-task field preparation (tags, skill, author,
    date_realized) is now inside the try block.  Previously it ran
    outside, so a failure there (e.g. ``task.skill.name`` being None
    when ``.encode`` is called) raised out of the loop and aborted the
    whole export instead of being logged per row like in the sibling
    exporters.

    Returns:
        (success_count, error_count) tuple of row counts.
    """
    logger.setLevel(loglevel)
    error_count = 0
    success_count = 0
    filename = 'BLUE2SFDC_Tasks.csv'
    with open(os.path.join(path, filename), 'wb') as csv_outfile:
        csvwriter = csv.writer(csv_outfile, quoting=csv.QUOTE_ALL)
        # Header row: must stay in the same order as the data rows below.
        csvwriter.writerow(["Task_External_ID__c",
                            "Project__c",
                            "Deadline__c",
                            "Location_of_the_task__c",
                            "Task_expertise__c",
                            "Task_status__c",
                            "Title__c",
                            "Task_created_date__c",
                            "Tags__c",
                            "Effort__c",
                            "People_Needed__c",
                            "Author__c",
                            "Date_realized__c"])
        tasks = Task.objects.all()
        logger.info("Exporting {0} Task objects to {1}".format(tasks.count(), filename))
        for task in tasks:
            try:
                # Tags are flattened into one comma-separated string
                # (prepended, so they end up in reverse iteration order).
                tags = ""
                for tag in task.tags.all():
                    tags = str(tag) + ", " + tags
                skill = ''
                if task.skill:
                    skill = task.skill.name.encode("utf-8")
                author = ''
                if task.author:
                    author = task.author.id
                # Realisation date only applies to realized tasks.
                date_realized = ''
                if task.status == 'realized' and task.date_status_change:
                    date_realized = task.date_status_change.strftime("%Y-%m-%dT%H:%M:%S.000Z")
                csvwriter.writerow([task.id,
                                    task.project.id,
                                    task.deadline.strftime("%Y-%m-%dT%H:%M:%S.000Z"),
                                    task.location.encode("utf-8"),
                                    skill,
                                    task.status.encode("utf-8"),
                                    task.title.encode("utf-8"),
                                    task.created.strftime("%Y-%m-%dT%H:%M:%S.000Z"),
                                    tags,
                                    task.time_needed.encode("utf-8"),
                                    task.people_needed,
                                    author,
                                    date_realized])
                success_count += 1
            except Exception as e:
                # Log and continue: one broken record must not abort the export.
                error_count += 1
                logger.error("Error while saving task id {0}: ".format(task.id) + str(e))
    return success_count, error_count
def generate_taskmembers_csv_file(path, loglevel):
    """Export all TaskMember links to BLUE2SFDC_Taskmembers.csv.

    Returns a (success_count, error_count) pair; failing rows are
    logged and skipped so one bad record never aborts the export.
    """
    logger.setLevel(loglevel)
    success_count = 0
    error_count = 0
    filename = 'BLUE2SFDC_Taskmembers.csv'
    header = ["Task_Member_External_ID__c",
              "Contacts__c",
              "X1_CLUB_Task__c",
              "Status__c",
              "Taskmember_Created_Date__c"]
    with open(os.path.join(path, filename), 'wb') as outfile:
        writer = csv.writer(outfile, quoting=csv.QUOTE_ALL)
        writer.writerow(header)
        members = TaskMember.objects.all()
        logger.info("Exporting {0} TaskMember objects to {1}".format(members.count(), filename))
        for member in members:
            try:
                row = [member.id,
                       member.member.id,
                       member.task.id,
                       member.status.encode("utf-8"),
                       member.created.strftime("%Y-%m-%dT%H:%M:%S.000Z")]
                writer.writerow(row)
                success_count += 1
            except Exception as e:
                error_count += 1
                logger.error("Error while saving taskmember id {0}: ".format(member.id) + str(e))
    return success_count, error_count
def generate_fundraisers_csv_file(path, loglevel):
    """Export all FundRaiser records to BLUE2SFDC_Fundraisers.csv.

    Amounts are stored in cents and written as two-decimal strings.
    Returns a (success_count, error_count) pair; failing rows are
    logged and skipped so one bad record never aborts the export.
    """
    logger.setLevel(loglevel)
    success_count = 0
    error_count = 0
    filename = 'BLUE2SFDC_Fundraisers.csv'
    header = ["Fundraiser_External_ID__c",
              "Name",
              "Owner__c",
              "Project__c",
              "Picture_Location__c",
              "VideoURL__c",
              "Amount__c",
              "Amount_at_the_moment__c",
              "Deadline__c",
              "Created__c"]
    with open(os.path.join(path, filename), 'wb') as outfile:
        writer = csv.writer(outfile, quoting=csv.QUOTE_ALL)
        writer.writerow(header)
        fundraisers = FundRaiser.objects.all()
        logger.info("Exporting {0} FundRaiser objects to {1}".format(fundraisers.count(), filename))
        for fundraiser in fundraisers:
            try:
                row = [fundraiser.id,
                       fundraiser.title.encode("utf-8"),
                       fundraiser.owner.id,
                       fundraiser.project.id,
                       fundraiser.image,
                       fundraiser.video_url,
                       '%01.2f' % (float(fundraiser.amount) / 100),
                       '%01.2f' % (float(fundraiser.amount_donated) / 100),
                       fundraiser.deadline.strftime("%Y-%m-%dT%H:%M:%S.000Z"),
                       fundraiser.created.strftime("%Y-%m-%dT%H:%M:%S.000Z")]
                writer.writerow(row)
                success_count += 1
            except Exception as e:
                error_count += 1
                logger.error("Error while saving fundraiser id {0}: ".format(fundraiser.id) + str(e))
    return success_count, error_count
def generate_organizationmember_csv_file(path, loglevel):
    """Export all OrganizationMember links to BLUE2SFDC_Organizationmembers.csv.

    Returns a (success_count, error_count) pair; failing rows are
    logged and skipped so one bad record never aborts the export.
    """
    logger.setLevel(loglevel)
    success_count = 0
    error_count = 0
    filename = 'BLUE2SFDC_Organizationmembers.csv'
    with open(os.path.join(path, filename), 'wb') as outfile:
        writer = csv.writer(outfile, quoting=csv.QUOTE_ALL)
        writer.writerow(["Organization_Member_External_Id__c",
                         "Contact__c",
                         "Account__c",
                         "Role__c"])
        members = OrganizationMember.objects.all()
        logger.info("Exporting {0} OrganizationMember objects to {1}".format(members.count(), filename))
        for member in members:
            try:
                writer.writerow([member.id,
                                 member.user.id,
                                 member.organization.id,
                                 member.function.encode("utf-8")])
                success_count += 1
            except Exception as e:
                error_count += 1
                logger.error("Error while saving organization member id {0}: ".format(member.id) + str(e))
    return success_count, error_count
| bsd-3-clause |
AnasGhrab/scikit-learn | examples/model_selection/plot_roc_crossval.py | 247 | 3253 | """
=============================================================
Receiver Operating Characteristic (ROC) with cross validation
=============================================================
Example of Receiver Operating Characteristic (ROC) metric to evaluate
classifier output quality using cross-validation.
ROC curves typically feature true positive rate on the Y axis, and false
positive rate on the X axis. This means that the top left corner of the plot is
the "ideal" point - a false positive rate of zero, and a true positive rate of
one. This is not very realistic, but it does mean that a larger area under the
curve (AUC) is usually better.
The "steepness" of ROC curves is also important, since it is ideal to maximize
the true positive rate while minimizing the false positive rate.
This example shows the ROC response of different datasets, created from K-fold
cross-validation. Taking all of these curves, it is possible to calculate the
mean area under curve, and see the variance of the curve when the
training set is split into different subsets. This roughly shows how the
classifier output is affected by changes in the training data, and how
different the splits generated by K-fold cross-validation are from one another.
.. note::
See also :func:`sklearn.metrics.auc_score`,
:func:`sklearn.cross_validation.cross_val_score`,
:ref:`example_model_selection_plot_roc.py`,
"""
print(__doc__)
import numpy as np
from scipy import interp
import matplotlib.pyplot as plt
from sklearn import svm, datasets
from sklearn.metrics import roc_curve, auc
from sklearn.cross_validation import StratifiedKFold
###############################################################################
# Data IO and generation
# import some data to play with
iris = datasets.load_iris()
X = iris.data
y = iris.target
X, y = X[y != 2], y[y != 2]
n_samples, n_features = X.shape
# Add noisy features
random_state = np.random.RandomState(0)
X = np.c_[X, random_state.randn(n_samples, 200 * n_features)]
###############################################################################
# Classification and ROC analysis
# Run classifier with cross-validation and plot ROC curves
cv = StratifiedKFold(y, n_folds=6)
classifier = svm.SVC(kernel='linear', probability=True,
random_state=random_state)
mean_tpr = 0.0
mean_fpr = np.linspace(0, 1, 100)
all_tpr = []
for i, (train, test) in enumerate(cv):
probas_ = classifier.fit(X[train], y[train]).predict_proba(X[test])
# Compute ROC curve and area the curve
fpr, tpr, thresholds = roc_curve(y[test], probas_[:, 1])
mean_tpr += interp(mean_fpr, fpr, tpr)
mean_tpr[0] = 0.0
roc_auc = auc(fpr, tpr)
plt.plot(fpr, tpr, lw=1, label='ROC fold %d (area = %0.2f)' % (i, roc_auc))
plt.plot([0, 1], [0, 1], '--', color=(0.6, 0.6, 0.6), label='Luck')
mean_tpr /= len(cv)
mean_tpr[-1] = 1.0
mean_auc = auc(mean_fpr, mean_tpr)
plt.plot(mean_fpr, mean_tpr, 'k--',
label='Mean ROC (area = %0.2f)' % mean_auc, lw=2)
plt.xlim([-0.05, 1.05])
plt.ylim([-0.05, 1.05])
plt.xlabel('False Positive Rate')
plt.ylabel('True Positive Rate')
plt.title('Receiver operating characteristic example')
plt.legend(loc="lower right")
plt.show()
| bsd-3-clause |
D4wN/brickv | src/build_data/windows/OpenGL/raw/_GLX.py | 2 | 34484 | # BEGIN GENERATED CONTENT (do not edit below this line)
# This content is generated by gengl.py.
# Wrapper for /usr/include/GL/glx.h
from OpenGL import platform, constant
from ctypes import *
c_void = None
# H (/usr/include/GL/glx.h:26)
GLX_VERSION_1_1 = constant.Constant( 'GLX_VERSION_1_1', 1 )
GLX_VERSION_1_2 = constant.Constant( 'GLX_VERSION_1_2', 1 )
GLX_VERSION_1_3 = constant.Constant( 'GLX_VERSION_1_3', 1 )
GLX_VERSION_1_4 = constant.Constant( 'GLX_VERSION_1_4', 1 )
GLX_USE_GL = constant.Constant( 'GLX_USE_GL', 1 )
GLX_BUFFER_SIZE = constant.Constant( 'GLX_BUFFER_SIZE', 2 )
GLX_LEVEL = constant.Constant( 'GLX_LEVEL', 3 )
GLX_RGBA = constant.Constant( 'GLX_RGBA', 4 )
GLX_DOUBLEBUFFER = constant.Constant( 'GLX_DOUBLEBUFFER', 5 )
GLX_STEREO = constant.Constant( 'GLX_STEREO', 6 )
GLX_AUX_BUFFERS = constant.Constant( 'GLX_AUX_BUFFERS', 7 )
GLX_RED_SIZE = constant.Constant( 'GLX_RED_SIZE', 8 )
GLX_GREEN_SIZE = constant.Constant( 'GLX_GREEN_SIZE', 9 )
GLX_BLUE_SIZE = constant.Constant( 'GLX_BLUE_SIZE', 10 )
GLX_ALPHA_SIZE = constant.Constant( 'GLX_ALPHA_SIZE', 11 )
GLX_DEPTH_SIZE = constant.Constant( 'GLX_DEPTH_SIZE', 12 )
GLX_STENCIL_SIZE = constant.Constant( 'GLX_STENCIL_SIZE', 13 )
GLX_ACCUM_RED_SIZE = constant.Constant( 'GLX_ACCUM_RED_SIZE', 14 )
GLX_ACCUM_GREEN_SIZE = constant.Constant( 'GLX_ACCUM_GREEN_SIZE', 15 )
GLX_ACCUM_BLUE_SIZE = constant.Constant( 'GLX_ACCUM_BLUE_SIZE', 16 )
GLX_ACCUM_ALPHA_SIZE = constant.Constant( 'GLX_ACCUM_ALPHA_SIZE', 17 )
GLX_BAD_SCREEN = constant.Constant( 'GLX_BAD_SCREEN', 1 )
GLX_BAD_ATTRIBUTE = constant.Constant( 'GLX_BAD_ATTRIBUTE', 2 )
GLX_NO_EXTENSION = constant.Constant( 'GLX_NO_EXTENSION', 3 )
GLX_BAD_VISUAL = constant.Constant( 'GLX_BAD_VISUAL', 4 )
GLX_BAD_CONTEXT = constant.Constant( 'GLX_BAD_CONTEXT', 5 )
GLX_BAD_VALUE = constant.Constant( 'GLX_BAD_VALUE', 6 )
GLX_BAD_ENUM = constant.Constant( 'GLX_BAD_ENUM', 7 )
GLX_VENDOR = constant.Constant( 'GLX_VENDOR', 1 )
GLX_VERSION = constant.Constant( 'GLX_VERSION', 2 )
GLX_EXTENSIONS = constant.Constant( 'GLX_EXTENSIONS', 3 )
GLX_CONFIG_CAVEAT = constant.Constant( 'GLX_CONFIG_CAVEAT', 32 )
GLX_DONT_CARE = constant.Constant( 'GLX_DONT_CARE', 4294967295L )
GLX_X_VISUAL_TYPE = constant.Constant( 'GLX_X_VISUAL_TYPE', 34 )
GLX_TRANSPARENT_TYPE = constant.Constant( 'GLX_TRANSPARENT_TYPE', 35 )
GLX_TRANSPARENT_INDEX_VALUE = constant.Constant( 'GLX_TRANSPARENT_INDEX_VALUE', 36 )
GLX_TRANSPARENT_RED_VALUE = constant.Constant( 'GLX_TRANSPARENT_RED_VALUE', 37 )
GLX_TRANSPARENT_GREEN_VALUE = constant.Constant( 'GLX_TRANSPARENT_GREEN_VALUE', 38 )
GLX_TRANSPARENT_BLUE_VALUE = constant.Constant( 'GLX_TRANSPARENT_BLUE_VALUE', 39 )
GLX_TRANSPARENT_ALPHA_VALUE = constant.Constant( 'GLX_TRANSPARENT_ALPHA_VALUE', 40 )
GLX_WINDOW_BIT = constant.Constant( 'GLX_WINDOW_BIT', 1 )
GLX_PIXMAP_BIT = constant.Constant( 'GLX_PIXMAP_BIT', 2 )
GLX_PBUFFER_BIT = constant.Constant( 'GLX_PBUFFER_BIT', 4 )
GLX_AUX_BUFFERS_BIT = constant.Constant( 'GLX_AUX_BUFFERS_BIT', 16 )
GLX_FRONT_LEFT_BUFFER_BIT = constant.Constant( 'GLX_FRONT_LEFT_BUFFER_BIT', 1 )
GLX_FRONT_RIGHT_BUFFER_BIT = constant.Constant( 'GLX_FRONT_RIGHT_BUFFER_BIT', 2 )
GLX_BACK_LEFT_BUFFER_BIT = constant.Constant( 'GLX_BACK_LEFT_BUFFER_BIT', 4 )
GLX_BACK_RIGHT_BUFFER_BIT = constant.Constant( 'GLX_BACK_RIGHT_BUFFER_BIT', 8 )
GLX_DEPTH_BUFFER_BIT = constant.Constant( 'GLX_DEPTH_BUFFER_BIT', 32 )
GLX_STENCIL_BUFFER_BIT = constant.Constant( 'GLX_STENCIL_BUFFER_BIT', 64 )
GLX_ACCUM_BUFFER_BIT = constant.Constant( 'GLX_ACCUM_BUFFER_BIT', 128 )
GLX_NONE = constant.Constant( 'GLX_NONE', 32768 )
GLX_SLOW_CONFIG = constant.Constant( 'GLX_SLOW_CONFIG', 32769 )
GLX_TRUE_COLOR = constant.Constant( 'GLX_TRUE_COLOR', 32770 )
GLX_DIRECT_COLOR = constant.Constant( 'GLX_DIRECT_COLOR', 32771 )
GLX_PSEUDO_COLOR = constant.Constant( 'GLX_PSEUDO_COLOR', 32772 )
GLX_STATIC_COLOR = constant.Constant( 'GLX_STATIC_COLOR', 32773 )
GLX_GRAY_SCALE = constant.Constant( 'GLX_GRAY_SCALE', 32774 )
GLX_STATIC_GRAY = constant.Constant( 'GLX_STATIC_GRAY', 32775 )
GLX_TRANSPARENT_RGB = constant.Constant( 'GLX_TRANSPARENT_RGB', 32776 )
GLX_TRANSPARENT_INDEX = constant.Constant( 'GLX_TRANSPARENT_INDEX', 32777 )
GLX_VISUAL_ID = constant.Constant( 'GLX_VISUAL_ID', 32779 )
GLX_SCREEN = constant.Constant( 'GLX_SCREEN', 32780 )
GLX_NON_CONFORMANT_CONFIG = constant.Constant( 'GLX_NON_CONFORMANT_CONFIG', 32781 )
GLX_DRAWABLE_TYPE = constant.Constant( 'GLX_DRAWABLE_TYPE', 32784 )
GLX_RENDER_TYPE = constant.Constant( 'GLX_RENDER_TYPE', 32785 )
GLX_X_RENDERABLE = constant.Constant( 'GLX_X_RENDERABLE', 32786 )
GLX_FBCONFIG_ID = constant.Constant( 'GLX_FBCONFIG_ID', 32787 )
GLX_RGBA_TYPE = constant.Constant( 'GLX_RGBA_TYPE', 32788 )
GLX_COLOR_INDEX_TYPE = constant.Constant( 'GLX_COLOR_INDEX_TYPE', 32789 )
GLX_MAX_PBUFFER_WIDTH = constant.Constant( 'GLX_MAX_PBUFFER_WIDTH', 32790 )
GLX_MAX_PBUFFER_HEIGHT = constant.Constant( 'GLX_MAX_PBUFFER_HEIGHT', 32791 )
GLX_MAX_PBUFFER_PIXELS = constant.Constant( 'GLX_MAX_PBUFFER_PIXELS', 32792 )
GLX_PRESERVED_CONTENTS = constant.Constant( 'GLX_PRESERVED_CONTENTS', 32795 )
GLX_LARGEST_PBUFFER = constant.Constant( 'GLX_LARGEST_PBUFFER', 32796 )
GLX_WIDTH = constant.Constant( 'GLX_WIDTH', 32797 )
GLX_HEIGHT = constant.Constant( 'GLX_HEIGHT', 32798 )
GLX_EVENT_MASK = constant.Constant( 'GLX_EVENT_MASK', 32799 )
GLX_DAMAGED = constant.Constant( 'GLX_DAMAGED', 32800 )
GLX_SAVED = constant.Constant( 'GLX_SAVED', 32801 )
GLX_WINDOW = constant.Constant( 'GLX_WINDOW', 32802 )
GLX_PBUFFER = constant.Constant( 'GLX_PBUFFER', 32803 )
GLX_PBUFFER_HEIGHT = constant.Constant( 'GLX_PBUFFER_HEIGHT', 32832 )
GLX_PBUFFER_WIDTH = constant.Constant( 'GLX_PBUFFER_WIDTH', 32833 )
GLX_RGBA_BIT = constant.Constant( 'GLX_RGBA_BIT', 1 )
GLX_COLOR_INDEX_BIT = constant.Constant( 'GLX_COLOR_INDEX_BIT', 2 )
GLX_PBUFFER_CLOBBER_MASK = constant.Constant( 'GLX_PBUFFER_CLOBBER_MASK', 134217728 )
GLX_SAMPLE_BUFFERS = constant.Constant( 'GLX_SAMPLE_BUFFERS', 100000 )
GLX_SAMPLES = constant.Constant( 'GLX_SAMPLES', 100001 )
# Opaque GLX handle types.  __GLXcontextRec's real layout lives inside the
# GL driver, so it is declared here only as a placeholder struct; the
# duplicated class definition below is an artifact of the code generator
# (the second definition simply rebinds the same name).
class struct___GLXcontextRec(Structure):
    __slots__ = [
    ]
struct___GLXcontextRec._fields_ = [
    ('_opaque_struct', c_int)
]

class struct___GLXcontextRec(Structure):
    __slots__ = [
    ]
struct___GLXcontextRec._fields_ = [
    ('_opaque_struct', c_int)
]

GLXContext = POINTER(struct___GLXcontextRec) 	# /usr/include/GL/glx.h:178
# XID is the generic X11 resource identifier; GLX drawable handles alias it.
XID = c_ulong 	# /usr/include/X11/X.h:71
GLXPixmap = XID 	# /usr/include/GL/glx.h:179
GLXDrawable = XID 	# /usr/include/GL/glx.h:180
# Opaque framebuffer-configuration handle; like __GLXcontextRec above, the
# struct is a generator placeholder and the duplicate definition rebinds
# the same name.
class struct___GLXFBConfigRec(Structure):
    __slots__ = [
    ]
struct___GLXFBConfigRec._fields_ = [
    ('_opaque_struct', c_int)
]

class struct___GLXFBConfigRec(Structure):
    __slots__ = [
    ]
struct___GLXFBConfigRec._fields_ = [
    ('_opaque_struct', c_int)
]

GLXFBConfig = POINTER(struct___GLXFBConfigRec) 	# /usr/include/GL/glx.h:182
# The remaining GLX object handles are all plain XIDs.
GLXFBConfigID = XID 	# /usr/include/GL/glx.h:183
GLXContextID = XID 	# /usr/include/GL/glx.h:184
GLXWindow = XID 	# /usr/include/GL/glx.h:185
GLXPbuffer = XID 	# /usr/include/GL/glx.h:186
# ctypes mirrors of the Xlib visual structures needed by the GLX entry
# points below.  The classes are declared first and their _fields_ are
# assigned afterwards so that self-referential pointers (XExtData.next)
# can be expressed.
class struct_anon_100(Structure):
    __slots__ = [
        'visual',
        'visualid',
        'screen',
        'depth',
        'class',
        'red_mask',
        'green_mask',
        'blue_mask',
        'colormap_size',
        'bits_per_rgb',
    ]

class struct_anon_17(Structure):
    __slots__ = [
        'ext_data',
        'visualid',
        'class',
        'red_mask',
        'green_mask',
        'blue_mask',
        'bits_per_rgb',
        'map_entries',
    ]

class struct__XExtData(Structure):
    __slots__ = [
        'number',
        'next',
        'free_private',
        'private_data',
    ]

XPointer = c_char_p 	# /usr/include/X11/Xlib.h:108
struct__XExtData._fields_ = [
    ('number', c_int),
    ('next', POINTER(struct__XExtData)),
    ('free_private', POINTER(CFUNCTYPE(c_int, POINTER(struct__XExtData)))),
    ('private_data', XPointer),
]

XExtData = struct__XExtData 	# /usr/include/X11/Xlib.h:187
VisualID = c_ulong 	# /usr/include/X11/X.h:81
struct_anon_17._fields_ = [
    ('ext_data', POINTER(XExtData)),
    ('visualid', VisualID),
    ('class', c_int),
    ('red_mask', c_ulong),
    ('green_mask', c_ulong),
    ('blue_mask', c_ulong),
    ('bits_per_rgb', c_int),
    ('map_entries', c_int),
]

Visual = struct_anon_17 	# /usr/include/X11/Xlib.h:270
struct_anon_100._fields_ = [
    ('visual', POINTER(Visual)),
    ('visualid', VisualID),
    ('screen', c_int),
    ('depth', c_int),
    ('class', c_int),
    ('red_mask', c_ulong),
    ('green_mask', c_ulong),
    ('blue_mask', c_ulong),
    ('colormap_size', c_int),
    ('bits_per_rgb', c_int),
]

XVisualInfo = struct_anon_100 	# /usr/include/X11/Xutil.h:296
# Opaque Xlib Display connection handle; placeholder struct with a
# generator-produced duplicate definition, as above.
class struct__XDisplay(Structure):
    __slots__ = [
    ]
struct__XDisplay._fields_ = [
    ('_opaque_struct', c_int)
]

class struct__XDisplay(Structure):
    __slots__ = [
    ]
struct__XDisplay._fields_ = [
    ('_opaque_struct', c_int)
]

Display = struct__XDisplay 	# /usr/include/X11/Xlib.h:519
glXChooseVisual = platform.createBaseFunction(
'glXChooseVisual', dll=platform.GL, resultType=POINTER(XVisualInfo),
argTypes=[POINTER(Display), c_int, POINTER(c_int)],
doc='glXChooseVisual( POINTER(Display)(dpy), c_int(screen), POINTER(c_int)(attribList) ) -> POINTER(XVisualInfo)',
argNames=['dpy', 'screen', 'attribList'],
)
glXCreateContext = platform.createBaseFunction(
'glXCreateContext', dll=platform.GL, resultType=GLXContext,
argTypes=[POINTER(Display), POINTER(XVisualInfo), GLXContext, c_int],
doc='glXCreateContext( POINTER(Display)(dpy), POINTER(XVisualInfo)(vis), GLXContext(shareList), c_int(direct) ) -> GLXContext',
argNames=['dpy', 'vis', 'shareList', 'direct'],
)
glXDestroyContext = platform.createBaseFunction(
'glXDestroyContext', dll=platform.GL, resultType=None,
argTypes=[POINTER(Display), GLXContext],
doc='glXDestroyContext( POINTER(Display)(dpy), GLXContext(ctx) ) -> None',
argNames=['dpy', 'ctx'],
)
glXMakeCurrent = platform.createBaseFunction(
'glXMakeCurrent', dll=platform.GL, resultType=c_int,
argTypes=[POINTER(Display), GLXDrawable, GLXContext],
doc='glXMakeCurrent( POINTER(Display)(dpy), GLXDrawable(drawable), GLXContext(ctx) ) -> c_int',
argNames=['dpy', 'drawable', 'ctx'],
)
glXCopyContext = platform.createBaseFunction(
'glXCopyContext', dll=platform.GL, resultType=None,
argTypes=[POINTER(Display), GLXContext, GLXContext, c_ulong],
doc='glXCopyContext( POINTER(Display)(dpy), GLXContext(src), GLXContext(dst), c_ulong(mask) ) -> None',
argNames=['dpy', 'src', 'dst', 'mask'],
)
glXSwapBuffers = platform.createBaseFunction(
'glXSwapBuffers', dll=platform.GL, resultType=None,
argTypes=[POINTER(Display), GLXDrawable],
doc='glXSwapBuffers( POINTER(Display)(dpy), GLXDrawable(drawable) ) -> None',
argNames=['dpy', 'drawable'],
)
Pixmap = XID # /usr/include/X11/X.h:107
glXCreateGLXPixmap = platform.createBaseFunction(
'glXCreateGLXPixmap', dll=platform.GL, resultType=GLXPixmap,
argTypes=[POINTER(Display), POINTER(XVisualInfo), Pixmap],
doc='glXCreateGLXPixmap( POINTER(Display)(dpy), POINTER(XVisualInfo)(visual), Pixmap(pixmap) ) -> GLXPixmap',
argNames=['dpy', 'visual', 'pixmap'],
)
glXDestroyGLXPixmap = platform.createBaseFunction(
'glXDestroyGLXPixmap', dll=platform.GL, resultType=None,
argTypes=[POINTER(Display), GLXPixmap],
doc='glXDestroyGLXPixmap( POINTER(Display)(dpy), GLXPixmap(pixmap) ) -> None',
argNames=['dpy', 'pixmap'],
)
glXQueryExtension = platform.createBaseFunction(
'glXQueryExtension', dll=platform.GL, resultType=c_int,
argTypes=[POINTER(Display), POINTER(c_int), POINTER(c_int)],
doc='glXQueryExtension( POINTER(Display)(dpy), POINTER(c_int)(errorb), POINTER(c_int)(event) ) -> c_int',
argNames=['dpy', 'errorb', 'event'],
)
glXQueryVersion = platform.createBaseFunction(
'glXQueryVersion', dll=platform.GL, resultType=c_int,
argTypes=[POINTER(Display), POINTER(c_int), POINTER(c_int)],
doc='glXQueryVersion( POINTER(Display)(dpy), POINTER(c_int)(maj), POINTER(c_int)(min) ) -> c_int',
argNames=['dpy', 'maj', 'min'],
)
glXIsDirect = platform.createBaseFunction(
'glXIsDirect', dll=platform.GL, resultType=c_int,
argTypes=[POINTER(Display), GLXContext],
doc='glXIsDirect( POINTER(Display)(dpy), GLXContext(ctx) ) -> c_int',
argNames=['dpy', 'ctx'],
)
glXGetConfig = platform.createBaseFunction(
'glXGetConfig', dll=platform.GL, resultType=c_int,
argTypes=[POINTER(Display), POINTER(XVisualInfo), c_int, POINTER(c_int)],
doc='glXGetConfig( POINTER(Display)(dpy), POINTER(XVisualInfo)(visual), c_int(attrib), POINTER(c_int)(value) ) -> c_int',
argNames=['dpy', 'visual', 'attrib', 'value'],
)
glXGetCurrentContext = platform.createBaseFunction(
'glXGetCurrentContext', dll=platform.GL, resultType=GLXContext,
argTypes=[],
doc='glXGetCurrentContext( ) -> GLXContext',
argNames=[],
)
glXGetCurrentDrawable = platform.createBaseFunction(
'glXGetCurrentDrawable', dll=platform.GL, resultType=GLXDrawable,
argTypes=[],
doc='glXGetCurrentDrawable( ) -> GLXDrawable',
argNames=[],
)
glXWaitGL = platform.createBaseFunction(
'glXWaitGL', dll=platform.GL, resultType=None,
argTypes=[],
doc='glXWaitGL( ) -> None',
argNames=[],
)
glXWaitX = platform.createBaseFunction(
'glXWaitX', dll=platform.GL, resultType=None,
argTypes=[],
doc='glXWaitX( ) -> None',
argNames=[],
)
Font = XID # /usr/include/X11/X.h:105
glXUseXFont = platform.createBaseFunction(
'glXUseXFont', dll=platform.GL, resultType=None,
argTypes=[Font, c_int, c_int, c_int],
doc='glXUseXFont( Font(font), c_int(first), c_int(count), c_int(list) ) -> None',
argNames=['font', 'first', 'count', 'list'],
)
glXQueryExtensionsString = platform.createBaseFunction(
'glXQueryExtensionsString', dll=platform.GL, resultType=c_char_p,
argTypes=[POINTER(Display), c_int],
doc='glXQueryExtensionsString( POINTER(Display)(dpy), c_int(screen) ) -> c_char_p',
argNames=['dpy', 'screen'],
)
glXQueryServerString = platform.createBaseFunction(
'glXQueryServerString', dll=platform.GL, resultType=c_char_p,
argTypes=[POINTER(Display), c_int, c_int],
doc='glXQueryServerString( POINTER(Display)(dpy), c_int(screen), c_int(name) ) -> c_char_p',
argNames=['dpy', 'screen', 'name'],
)
glXGetClientString = platform.createBaseFunction(
'glXGetClientString', dll=platform.GL, resultType=c_char_p,
argTypes=[POINTER(Display), c_int],
doc='glXGetClientString( POINTER(Display)(dpy), c_int(name) ) -> c_char_p',
argNames=['dpy', 'name'],
)
glXGetCurrentDisplay = platform.createBaseFunction(
'glXGetCurrentDisplay', dll=platform.GL, resultType=POINTER(Display),
argTypes=[],
doc='glXGetCurrentDisplay( ) -> POINTER(Display)',
argNames=[],
)
glXChooseFBConfig = platform.createBaseFunction(
'glXChooseFBConfig', dll=platform.GL, resultType=POINTER(GLXFBConfig),
argTypes=[POINTER(Display), c_int, POINTER(c_int), POINTER(c_int)],
doc='glXChooseFBConfig( POINTER(Display)(dpy), c_int(screen), POINTER(c_int)(attribList), POINTER(c_int)(nitems) ) -> POINTER(GLXFBConfig)',
argNames=['dpy', 'screen', 'attribList', 'nitems'],
)
glXGetFBConfigAttrib = platform.createBaseFunction(
'glXGetFBConfigAttrib', dll=platform.GL, resultType=c_int,
argTypes=[POINTER(Display), GLXFBConfig, c_int, POINTER(c_int)],
doc='glXGetFBConfigAttrib( POINTER(Display)(dpy), GLXFBConfig(config), c_int(attribute), POINTER(c_int)(value) ) -> c_int',
argNames=['dpy', 'config', 'attribute', 'value'],
)
glXGetFBConfigs = platform.createBaseFunction(
'glXGetFBConfigs', dll=platform.GL, resultType=POINTER(GLXFBConfig),
argTypes=[POINTER(Display), c_int, POINTER(c_int)],
doc='glXGetFBConfigs( POINTER(Display)(dpy), c_int(screen), POINTER(c_int)(nelements) ) -> POINTER(GLXFBConfig)',
argNames=['dpy', 'screen', 'nelements'],
)
glXGetVisualFromFBConfig = platform.createBaseFunction(
'glXGetVisualFromFBConfig', dll=platform.GL, resultType=POINTER(XVisualInfo),
argTypes=[POINTER(Display), GLXFBConfig],
doc='glXGetVisualFromFBConfig( POINTER(Display)(dpy), GLXFBConfig(config) ) -> POINTER(XVisualInfo)',
argNames=['dpy', 'config'],
)
Window = XID # /usr/include/X11/X.h:101
glXCreateWindow = platform.createBaseFunction(
'glXCreateWindow', dll=platform.GL, resultType=GLXWindow,
argTypes=[POINTER(Display), GLXFBConfig, Window, POINTER(c_int)],
doc='glXCreateWindow( POINTER(Display)(dpy), GLXFBConfig(config), Window(win), POINTER(c_int)(attribList) ) -> GLXWindow',
argNames=['dpy', 'config', 'win', 'attribList'],
)
glXDestroyWindow = platform.createBaseFunction(
'glXDestroyWindow', dll=platform.GL, resultType=None,
argTypes=[POINTER(Display), GLXWindow],
doc='glXDestroyWindow( POINTER(Display)(dpy), GLXWindow(window) ) -> None',
argNames=['dpy', 'window'],
)
glXCreatePixmap = platform.createBaseFunction(
'glXCreatePixmap', dll=platform.GL, resultType=GLXPixmap,
argTypes=[POINTER(Display), GLXFBConfig, Pixmap, POINTER(c_int)],
doc='glXCreatePixmap( POINTER(Display)(dpy), GLXFBConfig(config), Pixmap(pixmap), POINTER(c_int)(attribList) ) -> GLXPixmap',
argNames=['dpy', 'config', 'pixmap', 'attribList'],
)
glXDestroyPixmap = platform.createBaseFunction(
'glXDestroyPixmap', dll=platform.GL, resultType=None,
argTypes=[POINTER(Display), GLXPixmap],
doc='glXDestroyPixmap( POINTER(Display)(dpy), GLXPixmap(pixmap) ) -> None',
argNames=['dpy', 'pixmap'],
)
glXCreatePbuffer = platform.createBaseFunction(
'glXCreatePbuffer', dll=platform.GL, resultType=GLXPbuffer,
argTypes=[POINTER(Display), GLXFBConfig, POINTER(c_int)],
doc='glXCreatePbuffer( POINTER(Display)(dpy), GLXFBConfig(config), POINTER(c_int)(attribList) ) -> GLXPbuffer',
argNames=['dpy', 'config', 'attribList'],
)
glXDestroyPbuffer = platform.createBaseFunction(
'glXDestroyPbuffer', dll=platform.GL, resultType=None,
argTypes=[POINTER(Display), GLXPbuffer],
doc='glXDestroyPbuffer( POINTER(Display)(dpy), GLXPbuffer(pbuf) ) -> None',
argNames=['dpy', 'pbuf'],
)
glXQueryDrawable = platform.createBaseFunction(
'glXQueryDrawable', dll=platform.GL, resultType=None,
argTypes=[POINTER(Display), GLXDrawable, c_int, POINTER(c_uint)],
doc='glXQueryDrawable( POINTER(Display)(dpy), GLXDrawable(draw), c_int(attribute), POINTER(c_uint)(value) ) -> None',
argNames=['dpy', 'draw', 'attribute', 'value'],
)
glXCreateNewContext = platform.createBaseFunction(
'glXCreateNewContext', dll=platform.GL, resultType=GLXContext,
argTypes=[POINTER(Display), GLXFBConfig, c_int, GLXContext, c_int],
doc='glXCreateNewContext( POINTER(Display)(dpy), GLXFBConfig(config), c_int(renderType), GLXContext(shareList), c_int(direct) ) -> GLXContext',
argNames=['dpy', 'config', 'renderType', 'shareList', 'direct'],
)
glXMakeContextCurrent = platform.createBaseFunction(
'glXMakeContextCurrent', dll=platform.GL, resultType=c_int,
argTypes=[POINTER(Display), GLXDrawable, GLXDrawable, GLXContext],
doc='glXMakeContextCurrent( POINTER(Display)(dpy), GLXDrawable(draw), GLXDrawable(read), GLXContext(ctx) ) -> c_int',
argNames=['dpy', 'draw', 'read', 'ctx'],
)
glXGetCurrentReadDrawable = platform.createBaseFunction(
'glXGetCurrentReadDrawable', dll=platform.GL, resultType=GLXDrawable,
argTypes=[],
doc='glXGetCurrentReadDrawable( ) -> GLXDrawable',
argNames=[],
)
glXQueryContext = platform.createBaseFunction(
'glXQueryContext', dll=platform.GL, resultType=c_int,
argTypes=[POINTER(Display), GLXContext, c_int, POINTER(c_int)],
doc='glXQueryContext( POINTER(Display)(dpy), GLXContext(ctx), c_int(attribute), POINTER(c_int)(value) ) -> c_int',
argNames=['dpy', 'ctx', 'attribute', 'value'],
)
glXSelectEvent = platform.createBaseFunction(
'glXSelectEvent', dll=platform.GL, resultType=None,
argTypes=[POINTER(Display), GLXDrawable, c_ulong],
doc='glXSelectEvent( POINTER(Display)(dpy), GLXDrawable(drawable), c_ulong(mask) ) -> None',
argNames=['dpy', 'drawable', 'mask'],
)
glXGetSelectedEvent = platform.createBaseFunction(
'glXGetSelectedEvent', dll=platform.GL, resultType=None,
argTypes=[POINTER(Display), GLXDrawable, POINTER(c_ulong)],
doc='glXGetSelectedEvent( POINTER(Display)(dpy), GLXDrawable(drawable), POINTER(c_ulong)(mask) ) -> None',
argNames=['dpy', 'drawable', 'mask'],
)
GLubyte = c_ubyte # /usr/include/GL/gl.h:154
glXGetProcAddress = platform.createBaseFunction(
'glXGetProcAddress', dll=platform.GL, resultType=POINTER(CFUNCTYPE(None)),
argTypes=[POINTER(GLubyte)],
doc='glXGetProcAddress( POINTER(GLubyte)(procname) ) -> POINTER(CFUNCTYPE(None))',
argNames=['procname'],
)
# GLXEXT_LEGACY (/usr/include/GL/glx.h:298)
# VERSION_1_3 (/usr/include/GL/glxext.h:53)
# VERSION_1_4 (/usr/include/GL/glxext.h:112)
# ARB_get_proc_address (/usr/include/GL/glxext.h:117)
# ARB_multisample (/usr/include/GL/glxext.h:120)
# ARB_fbconfig_float (/usr/include/GL/glxext.h:125)
# SGIS_multisample (/usr/include/GL/glxext.h:130)
# EXT_visual_info (/usr/include/GL/glxext.h:135)
# SGI_swap_control (/usr/include/GL/glxext.h:154)
# SGI_video_sync (/usr/include/GL/glxext.h:157)
# SGI_make_current_read (/usr/include/GL/glxext.h:160)
# SGIX_video_source (/usr/include/GL/glxext.h:163)
# EXT_visual_rating (/usr/include/GL/glxext.h:166)
# EXT_import_context (/usr/include/GL/glxext.h:173)
# SGIX_fbconfig (/usr/include/GL/glxext.h:179)
# SGIX_pbuffer (/usr/include/GL/glxext.h:193)
# SGI_cushion (/usr/include/GL/glxext.h:221)
# SGIX_video_resize (/usr/include/GL/glxext.h:224)
# SGIX_dmbuffer (/usr/include/GL/glxext.h:229)
# SGIX_swap_group (/usr/include/GL/glxext.h:233)
# SGIX_swap_barrier (/usr/include/GL/glxext.h:236)
# SGIS_blended_overlay (/usr/include/GL/glxext.h:239)
# SGIS_shared_multisample (/usr/include/GL/glxext.h:243)
# SUN_get_transparent_index (/usr/include/GL/glxext.h:248)
# 3DFX_multisample (/usr/include/GL/glxext.h:251)
# MESA_copy_sub_buffer (/usr/include/GL/glxext.h:256)
# MESA_pixmap_colormap (/usr/include/GL/glxext.h:259)
# MESA_release_buffers (/usr/include/GL/glxext.h:262)
# MESA_set_3dfx_mode (/usr/include/GL/glxext.h:265)
# SGIX_visual_select_group (/usr/include/GL/glxext.h:270)
# OML_swap_method (/usr/include/GL/glxext.h:274)
# OML_sync_control (/usr/include/GL/glxext.h:281)
# NV_float_buffer (/usr/include/GL/glxext.h:284)
# SGIX_hyperpipe (/usr/include/GL/glxext.h:288)
# MESA_agp_offset (/usr/include/GL/glxext.h:301)
# EXT_fbconfig_packed_float (/usr/include/GL/glxext.h:304)
# EXT_framebuffer_sRGB (/usr/include/GL/glxext.h:309)
# EXT_texture_from_pixmap (/usr/include/GL/glxext.h:313)
# ARB_get_proc_address (/usr/include/GL/glxext.h:352)
# SGIX_video_source (/usr/include/GL/glxext.h:356)
# SGIX_fbconfig (/usr/include/GL/glxext.h:360)
# SGIX_pbuffer (/usr/include/GL/glxext.h:365)
# VERSION_1_3 (/usr/include/GL/glxext.h:419)
# VERSION_1_4 (/usr/include/GL/glxext.h:461)
# ARB_get_proc_address (/usr/include/GL/glxext.h:469)
# ARB_multisample (/usr/include/GL/glxext.h:477)
# ARB_fbconfig_float (/usr/include/GL/glxext.h:481)
# SGIS_multisample (/usr/include/GL/glxext.h:485)
# EXT_visual_info (/usr/include/GL/glxext.h:489)
# SGI_swap_control (/usr/include/GL/glxext.h:493)
# SGI_video_sync (/usr/include/GL/glxext.h:501)
# SGI_make_current_read (/usr/include/GL/glxext.h:511)
# SGIX_video_source (/usr/include/GL/glxext.h:521)
# EXT_visual_rating (/usr/include/GL/glxext.h:533)
# EXT_import_context (/usr/include/GL/glxext.h:537)
# SGIX_fbconfig (/usr/include/GL/glxext.h:553)
# SGIX_pbuffer (/usr/include/GL/glxext.h:571)
# SGI_cushion (/usr/include/GL/glxext.h:587)
# SGIX_video_resize (/usr/include/GL/glxext.h:595)
# SGIX_dmbuffer (/usr/include/GL/glxext.h:611)
# SGIX_swap_group (/usr/include/GL/glxext.h:621)
# SGIX_swap_barrier (/usr/include/GL/glxext.h:629)
# SUN_get_transparent_index (/usr/include/GL/glxext.h:639)
# MESA_copy_sub_buffer (/usr/include/GL/glxext.h:647)
# MESA_pixmap_colormap (/usr/include/GL/glxext.h:655)
# MESA_release_buffers (/usr/include/GL/glxext.h:663)
# MESA_set_3dfx_mode (/usr/include/GL/glxext.h:671)
# SGIX_visual_select_group (/usr/include/GL/glxext.h:679)
# OML_swap_method (/usr/include/GL/glxext.h:683)
# OML_sync_control (/usr/include/GL/glxext.h:687)
# NV_float_buffer (/usr/include/GL/glxext.h:703)
# SGIX_hyperpipe (/usr/include/GL/glxext.h:707)
# MESA_agp_offset (/usr/include/GL/glxext.h:754)
# EXT_fbconfig_packed_float (/usr/include/GL/glxext.h:762)
# EXT_framebuffer_sRGB (/usr/include/GL/glxext.h:766)
# EXT_texture_from_pixmap (/usr/include/GL/glxext.h:770)
# NV_vertex_array_range (/usr/include/GL/glx.h:330)
GLsizei = c_int # /usr/include/GL/gl.h:157
GLfloat = c_float # /usr/include/GL/gl.h:158
glXAllocateMemoryNV = platform.createBaseFunction(
'glXAllocateMemoryNV', dll=platform.GL, resultType=POINTER(c_void),
argTypes=[GLsizei, GLfloat, GLfloat, GLfloat],
doc='glXAllocateMemoryNV( GLsizei(size), GLfloat(readfreq), GLfloat(writefreq), GLfloat(priority) ) -> POINTER(c_void)',
argNames=['size', 'readfreq', 'writefreq', 'priority'],
)
GLvoid = None # /usr/include/GL/gl.h:150
glXFreeMemoryNV = platform.createBaseFunction(
'glXFreeMemoryNV', dll=platform.GL, resultType=None,
argTypes=[POINTER(GLvoid)],
doc='glXFreeMemoryNV( POINTER(GLvoid)(pointer) ) -> None',
argNames=['pointer'],
)
# MESA_allocate_memory (/usr/include/GL/glx.h:344)
GLX_MESA_allocate_memory = constant.Constant( 'GLX_MESA_allocate_memory', 1 )
glXAllocateMemoryMESA = platform.createBaseFunction(
'glXAllocateMemoryMESA', dll=platform.GL, resultType=POINTER(c_void),
argTypes=[POINTER(Display), c_int, c_size_t, c_float, c_float, c_float],
doc='glXAllocateMemoryMESA( POINTER(Display)(dpy), c_int(scrn), c_size_t(size), c_float(readfreq), c_float(writefreq), c_float(priority) ) -> POINTER(c_void)',
argNames=['dpy', 'scrn', 'size', 'readfreq', 'writefreq', 'priority'],
)
glXFreeMemoryMESA = platform.createBaseFunction(
'glXFreeMemoryMESA', dll=platform.GL, resultType=None,
argTypes=[POINTER(Display), c_int, POINTER(None)],
doc='glXFreeMemoryMESA( POINTER(Display)(dpy), c_int(scrn), POINTER(None)(pointer) ) -> None',
argNames=['dpy', 'scrn', 'pointer'],
)
GLuint = c_uint # /usr/include/GL/gl.h:156
glXGetMemoryOffsetMESA = platform.createBaseFunction(
'glXGetMemoryOffsetMESA', dll=platform.GL, resultType=GLuint,
argTypes=[POINTER(Display), c_int, POINTER(None)],
doc='glXGetMemoryOffsetMESA( POINTER(Display)(dpy), c_int(scrn), POINTER(None)(pointer) ) -> GLuint',
argNames=['dpy', 'scrn', 'pointer'],
)
# ARB_render_texture (/usr/include/GL/glx.h:361)
GLX_ARB_render_texture = constant.Constant( 'GLX_ARB_render_texture', 1 )
glXBindTexImageARB = platform.createBaseFunction(
'glXBindTexImageARB', dll=platform.GL, resultType=c_int,
argTypes=[POINTER(Display), GLXPbuffer, c_int],
doc='glXBindTexImageARB( POINTER(Display)(dpy), GLXPbuffer(pbuffer), c_int(buffer) ) -> c_int',
argNames=['dpy', 'pbuffer', 'buffer'],
)
glXReleaseTexImageARB = platform.createBaseFunction(
'glXReleaseTexImageARB', dll=platform.GL, resultType=c_int,
argTypes=[POINTER(Display), GLXPbuffer, c_int],
doc='glXReleaseTexImageARB( POINTER(Display)(dpy), GLXPbuffer(pbuffer), c_int(buffer) ) -> c_int',
argNames=['dpy', 'pbuffer', 'buffer'],
)
glXDrawableAttribARB = platform.createBaseFunction(
'glXDrawableAttribARB', dll=platform.GL, resultType=c_int,
argTypes=[POINTER(Display), GLXDrawable, POINTER(c_int)],
doc='glXDrawableAttribARB( POINTER(Display)(dpy), GLXDrawable(draw), POINTER(c_int)(attribList) ) -> c_int',
argNames=['dpy', 'draw', 'attribList'],
)
# NV_float_buffer (/usr/include/GL/glx.h:374)
# MESA_swap_frame_usage (/usr/include/GL/glx.h:386)
GLX_MESA_swap_frame_usage = constant.Constant( 'GLX_MESA_swap_frame_usage', 1 )
glXGetFrameUsageMESA = platform.createBaseFunction(
'glXGetFrameUsageMESA', dll=platform.GL, resultType=c_int,
argTypes=[POINTER(Display), GLXDrawable, POINTER(c_float)],
doc='glXGetFrameUsageMESA( POINTER(Display)(dpy), GLXDrawable(drawable), POINTER(c_float)(usage) ) -> c_int',
argNames=['dpy', 'drawable', 'usage'],
)
glXBeginFrameTrackingMESA = platform.createBaseFunction(
'glXBeginFrameTrackingMESA', dll=platform.GL, resultType=c_int,
argTypes=[POINTER(Display), GLXDrawable],
doc='glXBeginFrameTrackingMESA( POINTER(Display)(dpy), GLXDrawable(drawable) ) -> c_int',
argNames=['dpy', 'drawable'],
)
glXEndFrameTrackingMESA = platform.createBaseFunction(
'glXEndFrameTrackingMESA', dll=platform.GL, resultType=c_int,
argTypes=[POINTER(Display), GLXDrawable],
doc='glXEndFrameTrackingMESA( POINTER(Display)(dpy), GLXDrawable(drawable) ) -> c_int',
argNames=['dpy', 'drawable'],
)
glXQueryFrameTrackingMESA = platform.createBaseFunction(
'glXQueryFrameTrackingMESA', dll=platform.GL, resultType=c_int,
argTypes=[POINTER(Display), GLXDrawable, POINTER(c_int64), POINTER(c_int64), POINTER(c_float)],
doc='glXQueryFrameTrackingMESA( POINTER(Display)(dpy), GLXDrawable(drawable), POINTER(c_int64)(swapCount), POINTER(c_int64)(missedFrames), POINTER(c_float)(lastMissedUsage) ) -> c_int',
argNames=['dpy', 'drawable', 'swapCount', 'missedFrames', 'lastMissedUsage'],
)
# MESA_swap_control (/usr/include/GL/glx.h:406)
GLX_MESA_swap_control = constant.Constant( 'GLX_MESA_swap_control', 1 )
glXSwapIntervalMESA = platform.createBaseFunction(
'glXSwapIntervalMESA', dll=platform.GL, resultType=c_int,
argTypes=[c_uint],
doc='glXSwapIntervalMESA( c_uint(interval) ) -> c_int',
argNames=['interval'],
)
glXGetSwapIntervalMESA = platform.createBaseFunction(
'glXGetSwapIntervalMESA', dll=platform.GL, resultType=c_int,
argTypes=[],
doc='glXGetSwapIntervalMESA( ) -> c_int',
argNames=[],
)
# EXT_texture_from_pixmap (/usr/include/GL/glx.h:423)
class struct_anon_108(Structure):
__slots__ = [
'event_type',
'draw_type',
'serial',
'send_event',
'display',
'drawable',
'buffer_mask',
'aux_buffer',
'x',
'y',
'width',
'height',
'count',
]
struct_anon_108._fields_ = [
('event_type', c_int),
('draw_type', c_int),
('serial', c_ulong),
('send_event', c_int),
('display', POINTER(Display)),
('drawable', GLXDrawable),
('buffer_mask', c_uint),
('aux_buffer', c_uint),
('x', c_int),
('y', c_int),
('width', c_int),
('height', c_int),
('count', c_int),
]
GLXPbufferClobberEvent = struct_anon_108 # /usr/include/GL/glx.h:489
class struct___GLXEvent(Union):
__slots__ = [
'glxpbufferclobber',
'pad',
]
struct___GLXEvent._fields_ = [
('glxpbufferclobber', GLXPbufferClobberEvent),
('pad', c_long * 24),
]
GLXEvent = struct___GLXEvent # /usr/include/GL/glx.h:494
__all__ = ['GLX_VERSION_1_1', 'GLX_VERSION_1_2', 'GLX_VERSION_1_3',
'GLX_VERSION_1_4', 'GLX_USE_GL', 'GLX_BUFFER_SIZE', 'GLX_LEVEL', 'GLX_RGBA',
'GLX_DOUBLEBUFFER', 'GLX_STEREO', 'GLX_AUX_BUFFERS', 'GLX_RED_SIZE',
'GLX_GREEN_SIZE', 'GLX_BLUE_SIZE', 'GLX_ALPHA_SIZE', 'GLX_DEPTH_SIZE',
'GLX_STENCIL_SIZE', 'GLX_ACCUM_RED_SIZE', 'GLX_ACCUM_GREEN_SIZE',
'GLX_ACCUM_BLUE_SIZE', 'GLX_ACCUM_ALPHA_SIZE', 'GLX_BAD_SCREEN',
'GLX_BAD_ATTRIBUTE', 'GLX_NO_EXTENSION', 'GLX_BAD_VISUAL', 'GLX_BAD_CONTEXT',
'GLX_BAD_VALUE', 'GLX_BAD_ENUM', 'GLX_VENDOR', 'GLX_VERSION',
'GLX_EXTENSIONS', 'GLX_CONFIG_CAVEAT', 'GLX_DONT_CARE', 'GLX_X_VISUAL_TYPE',
'GLX_TRANSPARENT_TYPE', 'GLX_TRANSPARENT_INDEX_VALUE',
'GLX_TRANSPARENT_RED_VALUE', 'GLX_TRANSPARENT_GREEN_VALUE',
'GLX_TRANSPARENT_BLUE_VALUE', 'GLX_TRANSPARENT_ALPHA_VALUE', 'GLX_WINDOW_BIT',
'GLX_PIXMAP_BIT', 'GLX_PBUFFER_BIT', 'GLX_AUX_BUFFERS_BIT',
'GLX_FRONT_LEFT_BUFFER_BIT', 'GLX_FRONT_RIGHT_BUFFER_BIT',
'GLX_BACK_LEFT_BUFFER_BIT', 'GLX_BACK_RIGHT_BUFFER_BIT',
'GLX_DEPTH_BUFFER_BIT', 'GLX_STENCIL_BUFFER_BIT', 'GLX_ACCUM_BUFFER_BIT',
'GLX_NONE', 'GLX_SLOW_CONFIG', 'GLX_TRUE_COLOR', 'GLX_DIRECT_COLOR',
'GLX_PSEUDO_COLOR', 'GLX_STATIC_COLOR', 'GLX_GRAY_SCALE', 'GLX_STATIC_GRAY',
'GLX_TRANSPARENT_RGB', 'GLX_TRANSPARENT_INDEX', 'GLX_VISUAL_ID', 'GLX_SCREEN',
'GLX_NON_CONFORMANT_CONFIG', 'GLX_DRAWABLE_TYPE', 'GLX_RENDER_TYPE',
'GLX_X_RENDERABLE', 'GLX_FBCONFIG_ID', 'GLX_RGBA_TYPE',
'GLX_COLOR_INDEX_TYPE', 'GLX_MAX_PBUFFER_WIDTH', 'GLX_MAX_PBUFFER_HEIGHT',
'GLX_MAX_PBUFFER_PIXELS', 'GLX_PRESERVED_CONTENTS', 'GLX_LARGEST_PBUFFER',
'GLX_WIDTH', 'GLX_HEIGHT', 'GLX_EVENT_MASK', 'GLX_DAMAGED', 'GLX_SAVED',
'GLX_WINDOW', 'GLX_PBUFFER', 'GLX_PBUFFER_HEIGHT', 'GLX_PBUFFER_WIDTH',
'GLX_RGBA_BIT', 'GLX_COLOR_INDEX_BIT', 'GLX_PBUFFER_CLOBBER_MASK',
'GLX_SAMPLE_BUFFERS', 'GLX_SAMPLES', 'GLXContext', 'GLXPixmap', 'GLXDrawable',
'GLXFBConfig', 'GLXFBConfigID', 'GLXContextID', 'GLXWindow', 'GLXPbuffer',
'glXChooseVisual', 'glXCreateContext', 'glXDestroyContext', 'glXMakeCurrent',
'glXCopyContext', 'glXSwapBuffers', 'glXCreateGLXPixmap',
'glXDestroyGLXPixmap', 'glXQueryExtension', 'glXQueryVersion', 'glXIsDirect',
'glXGetConfig', 'glXGetCurrentContext', 'glXGetCurrentDrawable', 'glXWaitGL',
'glXWaitX', 'glXUseXFont', 'glXQueryExtensionsString', 'glXQueryServerString',
'glXGetClientString', 'glXGetCurrentDisplay', 'glXChooseFBConfig',
'glXGetFBConfigAttrib', 'glXGetFBConfigs', 'glXGetVisualFromFBConfig',
'glXCreateWindow', 'glXDestroyWindow', 'glXCreatePixmap', 'glXDestroyPixmap',
'glXCreatePbuffer', 'glXDestroyPbuffer', 'glXQueryDrawable',
'glXCreateNewContext', 'glXMakeContextCurrent', 'glXGetCurrentReadDrawable',
'glXQueryContext', 'glXSelectEvent', 'glXGetSelectedEvent',
'glXGetProcAddress', 'glXAllocateMemoryNV', 'glXFreeMemoryNV',
'GLX_MESA_allocate_memory', 'glXAllocateMemoryMESA', 'glXFreeMemoryMESA',
'glXGetMemoryOffsetMESA', 'GLX_ARB_render_texture', 'glXBindTexImageARB',
'glXReleaseTexImageARB', 'glXDrawableAttribARB', 'GLX_MESA_swap_frame_usage',
'glXGetFrameUsageMESA', 'glXBeginFrameTrackingMESA',
'glXEndFrameTrackingMESA', 'glXQueryFrameTrackingMESA',
'GLX_MESA_swap_control', 'glXSwapIntervalMESA', 'glXGetSwapIntervalMESA',
'GLXPbufferClobberEvent', 'GLXEvent']
# END GENERATED CONTENT (do not edit above this line)
| gpl-2.0 |
Aasmi/scikit-learn | examples/ensemble/plot_adaboost_hastie_10_2.py | 355 | 3576 | """
=============================
Discrete versus Real AdaBoost
=============================
This example is based on Figure 10.2 from Hastie et al 2009 [1] and illustrates
the difference in performance between the discrete SAMME [2] boosting
algorithm and real SAMME.R boosting algorithm. Both algorithms are evaluated
on a binary classification task where the target Y is a non-linear function
of 10 input features.
Discrete SAMME AdaBoost adapts based on errors in predicted class labels
whereas real SAMME.R uses the predicted class probabilities.
.. [1] T. Hastie, R. Tibshirani and J. Friedman, "Elements of Statistical
Learning Ed. 2", Springer, 2009.
.. [2] J. Zhu, H. Zou, S. Rosset, T. Hastie, "Multi-class AdaBoost", 2009.
"""
print(__doc__)
# Author: Peter Prettenhofer <peter.prettenhofer@gmail.com>,
# Noel Dawe <noel.dawe@gmail.com>
#
# License: BSD 3 clause
import numpy as np
import matplotlib.pyplot as plt
from sklearn import datasets
from sklearn.tree import DecisionTreeClassifier
from sklearn.metrics import zero_one_loss
from sklearn.ensemble import AdaBoostClassifier
n_estimators = 400
# A learning rate of 1. may not be optimal for both SAMME and SAMME.R
learning_rate = 1.
X, y = datasets.make_hastie_10_2(n_samples=12000, random_state=1)
X_test, y_test = X[2000:], y[2000:]
X_train, y_train = X[:2000], y[:2000]
dt_stump = DecisionTreeClassifier(max_depth=1, min_samples_leaf=1)
dt_stump.fit(X_train, y_train)
dt_stump_err = 1.0 - dt_stump.score(X_test, y_test)
dt = DecisionTreeClassifier(max_depth=9, min_samples_leaf=1)
dt.fit(X_train, y_train)
dt_err = 1.0 - dt.score(X_test, y_test)
ada_discrete = AdaBoostClassifier(
base_estimator=dt_stump,
learning_rate=learning_rate,
n_estimators=n_estimators,
algorithm="SAMME")
ada_discrete.fit(X_train, y_train)
ada_real = AdaBoostClassifier(
base_estimator=dt_stump,
learning_rate=learning_rate,
n_estimators=n_estimators,
algorithm="SAMME.R")
ada_real.fit(X_train, y_train)
fig = plt.figure()
ax = fig.add_subplot(111)
ax.plot([1, n_estimators], [dt_stump_err] * 2, 'k-',
label='Decision Stump Error')
ax.plot([1, n_estimators], [dt_err] * 2, 'k--',
label='Decision Tree Error')
ada_discrete_err = np.zeros((n_estimators,))
for i, y_pred in enumerate(ada_discrete.staged_predict(X_test)):
ada_discrete_err[i] = zero_one_loss(y_pred, y_test)
ada_discrete_err_train = np.zeros((n_estimators,))
for i, y_pred in enumerate(ada_discrete.staged_predict(X_train)):
ada_discrete_err_train[i] = zero_one_loss(y_pred, y_train)
ada_real_err = np.zeros((n_estimators,))
for i, y_pred in enumerate(ada_real.staged_predict(X_test)):
ada_real_err[i] = zero_one_loss(y_pred, y_test)
ada_real_err_train = np.zeros((n_estimators,))
for i, y_pred in enumerate(ada_real.staged_predict(X_train)):
ada_real_err_train[i] = zero_one_loss(y_pred, y_train)
ax.plot(np.arange(n_estimators) + 1, ada_discrete_err,
label='Discrete AdaBoost Test Error',
color='red')
ax.plot(np.arange(n_estimators) + 1, ada_discrete_err_train,
label='Discrete AdaBoost Train Error',
color='blue')
ax.plot(np.arange(n_estimators) + 1, ada_real_err,
label='Real AdaBoost Test Error',
color='orange')
ax.plot(np.arange(n_estimators) + 1, ada_real_err_train,
label='Real AdaBoost Train Error',
color='green')
ax.set_ylim((0.0, 0.5))
ax.set_xlabel('n_estimators')
ax.set_ylabel('error rate')
leg = ax.legend(loc='upper right', fancybox=True)
leg.get_frame().set_alpha(0.7)
plt.show()
| bsd-3-clause |
sxjscience/tvm | python/tvm/topi/cuda/__init__.py | 2 | 1997 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=redefined-builtin, wildcard-import
"""CUDA specific declaration and schedules."""
from __future__ import absolute_import as _abs
from .conv1d import *
from .conv1d_transpose_ncw import *
from .conv2d import *
from .conv2d_hwcn import *
from .conv2d_int8 import *
from .conv2d_winograd import *
from .conv2d_nhwc_winograd import *
from .depthwise_conv2d import *
from .group_conv2d_nchw import *
from . import conv2d_alter_op
from .conv2d_transpose_nchw import *
from .conv3d_transpose_ncdhw import *
from .deformable_conv2d import *
from .conv3d import *
from .conv3d_winograd import *
from . import conv3d_alter_op
from .reduction import schedule_reduce
from .softmax import *
from .injective import schedule_injective, schedule_elemwise, schedule_broadcast
from .dense import *
from .pooling import *
from .nn import schedule_lrn
from .batch_matmul import *
from .vision import *
from .ssd import *
from .nms import get_valid_counts, non_max_suppression
from .rcnn import *
from .sort import *
from .conv2d_nhwc_tensorcore import *
from .conv3d_ndhwc_tensorcore import *
from .dense_tensorcore import *
from .conv2d_hwnc_tensorcore import *
from .correlation import *
from .sparse import *
| apache-2.0 |
poeschlr/kicad-3d-models-in-freecad | cadquery/FCAD_script_generator/Filter/cq_kyocera.py | 2 | 13873 | # -*- coding: utf-8 -*-
#!/usr/bin/python
#
# from https://bitbucket.org/hyOzd/freecad-macros
# author hyOzd
# This is a
#
## requirements
## cadquery FreeCAD plugin
## https://github.com/jmwright/cadquery-freecad-module
#
## to run the script just do: freecad main_generator.py modelName
## e.g. c:\freecad\bin\freecad main_generator.py DIP8
#
## the script will generate STEP and VRML parametric models
## to be used with kicad StepUp script
#
#* These are a FreeCAD & cadquery tools *
#* to export generated models in STEP & VRML format. *
#* *
#* cadquery script for generating QFP/SOIC/SSOP/TSSOP models in STEP AP214 *
#* Copyright (c) 2015 *
#* Maurice https://launchpad.net/~easyw *
#* All trademarks within this guide belong to their legitimate owners. *
#* *
#* This program is free software; you can redistribute it and/or modify *
#* it under the terms of the GNU Lesser General Public License (LGPL) *
#* as published by the Free Software Foundation; either version 2 of *
#* the License, or (at your option) any later version. *
#* for detail see the LICENCE text file. *
#* *
#* This program is distributed in the hope that it will be useful, *
#* but WITHOUT ANY WARRANTY; without even the implied warranty of *
#* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
#* GNU Library General Public License for more details. *
#* *
#* You should have received a copy of the GNU Library General Public *
#* License along with this program; if not, write to the Free Software *
#* Foundation, Inc., *
#* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA *
#* *
#****************************************************************************
#
# Most of these models are based on
# http://service.powerdynamics.com/ec/Catalog17/Section%2007.pdf
#
import cq_common # modules parameters
from cq_common import *
import sys
import math
class cq_kyocera():
    """
    Generator for Kyocera SF14-series SAW filter 3D models (cadquery/FreeCAD).

    Model parameters are looked up in ``self.all_params``, a dict mapping
    model IDs to ``Params`` namedtuples.
    NOTE(review): ``all_params`` is defined at module level below; it is
    presumably bound to the class/instance by the calling script -- confirm
    against main_generator.py.
    """

    def __init__(self):
        # Default shader color keys; may be overridden per model
        # via set_colors().
        self.body_top_color_key = 'brown body'          # Top color
        self.body_color_key = 'black body'              # Body color
        self.pin_color_key = 'metal grey pins'          # Pin color
        self.npth_pin_color_key = 'brown body'          # NPTH Pin color

    def set_colors(self, modelID):
        """ Apply any per-model color overrides from the params table. """
        params = self.all_params[modelID]
        if params.body_top_color_key is not None:
            self.body_top_color_key = params.body_top_color_key
        if params.body_color_key is not None:
            self.body_color_key = params.body_color_key
        if params.pin_color_key is not None:
            self.pin_color_key = params.pin_color_key
        if params.npth_pin_color_key is not None:
            self.npth_pin_color_key = params.npth_pin_color_key

    def get_model_name(self, modelID):
        """ Return the model name for modelID, or 'xxUNKNOWNxxx' if unknown. """
        # Plain dict membership instead of the original O(n) key scan.
        if modelID in self.all_params:
            return self.all_params[modelID].modelName
        return 'xxUNKNOWNxxx'

    def get_dest_3D_dir(self, modelID):
        """ Return the destination directory prefix for modelID. """
        if modelID in self.all_params:
            prefix = self.all_params[modelID].dest_dir_prefix
            if prefix is not None:
                return prefix
        return 'Filter.3dshapes'

    def model_exist(self, modelID):
        """ Return True if modelID is a known model. """
        return modelID in self.all_params

    def get_list_all(self):
        """ Return the list of all known model IDs. """
        return list(self.all_params)

    def set_rotation(self, params):
        """ Set rotation angles (degrees); currently no model is rotated. """
        self.rotatex = 0.0  # Rotation around x-axis if required
        self.rotatey = 0.0  # Rotation around y-axis if required
        self.rotatez = 0.0  # Rotation around z-axis if required

    def set_translate(self, modelID):
        """ Set the final translation; currently no model is translated. """
        self.translate = (0.0, 0.0, 0.0)

    def _orient(self, case):
        """
        Apply the configured rotations and the translation to a shape.

        Shared by all make_* methods (the original duplicated this code in
        each of them). Rotations are applied only when strictly positive,
        matching the original behaviour.
        """
        if self.rotatex > 0.0:
            case = case.rotate((0, 0, 0), (1, 0, 0), self.rotatex)
        if self.rotatey > 0.0:
            case = case.rotate((0, 0, 0), (0, 1, 0), self.rotatey)
        if self.rotatez > 0.0:
            case = case.rotate((0, 0, 0), (0, 0, 1), self.rotatez)
        return case.translate(self.translate)

    def make_3D_model(self, modelID):
        """
        Build top marker, body, pins and NPTH pins for modelID, colorize
        them and fuse everything into one document object.

        Returns a dict mapping diffuse colors (alpha stripped) to shader
        color keys, used later for VRML material substitution.
        """
        FreeCAD.Console.PrintMessage('\r\n')
        FreeCAD.Console.PrintMessage('make_3D_model 1 \r\n')
        self.set_colors(modelID)
        self.set_translate(modelID)
        self.set_rotation(modelID)
        FreeCAD.Console.PrintMessage('make_3D_model 2 \r\n')
        case_top = self.make_top(modelID)
        show(case_top)
        FreeCAD.Console.PrintMessage('make_3D_model 3 \r\n')
        case = self.make_body(modelID)
        show(case)
        FreeCAD.Console.PrintMessage('make_3D_model 4 \r\n')
        pins = self.make_pin(modelID)
        show(pins)
        FreeCAD.Console.PrintMessage('make_3D_model 5 \r\n')
        npth_pins = self.make_npth_pin(modelID)
        show(npth_pins)
        FreeCAD.Console.PrintMessage('make_3D_model 6 \r\n')
        doc = FreeCAD.ActiveDocument
        objs = GetListOfObjects(FreeCAD, doc)
        body_top_color_key = self.body_top_color_key
        body_color_key = self.body_color_key
        pin_color_key = self.pin_color_key
        npth_pin_color_key = self.npth_pin_color_key
        body_top_color = shaderColors.named_colors[body_top_color_key].getDiffuseFloat()
        body_color = shaderColors.named_colors[body_color_key].getDiffuseFloat()
        pin_color = shaderColors.named_colors[pin_color_key].getDiffuseFloat()
        npth_pin_color = shaderColors.named_colors[npth_pin_color_key].getDiffuseFloat()
        # Objects were shown above in top/body/pins/npth order, so the
        # indices below line up with that order.
        Color_Objects(Gui, objs[0], body_top_color)
        Color_Objects(Gui, objs[1], body_color)
        Color_Objects(Gui, objs[2], pin_color)
        Color_Objects(Gui, objs[3], npth_pin_color)
        col_body_top = Gui.ActiveDocument.getObject(objs[0].Name).DiffuseColor[0]
        col_body = Gui.ActiveDocument.getObject(objs[1].Name).DiffuseColor[0]
        col_pin = Gui.ActiveDocument.getObject(objs[2].Name).DiffuseColor[0]
        col_npth_pin = Gui.ActiveDocument.getObject(objs[3].Name).DiffuseColor[0]
        # [:-1] strips the alpha component from the RGBA tuple.
        material_substitutions = {
            col_body_top[:-1]: body_top_color_key,
            col_body[:-1]: body_color_key,
            col_pin[:-1]: pin_color_key,
            col_npth_pin[:-1]: npth_pin_color_key
        }
        expVRML.say(material_substitutions)
        # Fuse all shapes pairwise until a single object remains.
        while len(objs) > 1:
            FuseObjs_wColors(FreeCAD, FreeCADGui, doc.Name, objs[0].Name, objs[1].Name)
            del objs
            objs = GetListOfObjects(FreeCAD, doc)
        return material_substitutions

    def make_top(self, modelID):
        """ Create the pin-1 marker dot sitting on the top face. """
        params = self.all_params[modelID]
        W = params.W  # Width
        L = params.L  # Length
        H = params.H  # Height
        #
        # Pin 1 marker: small cylinder near one corner of the top face.
        #
        case = cq.Workplane("XY").workplane(offset=H - 0.1).moveTo(0.0 - (W / 2.0) + (W / 5.0), 0.0 - (L / 2.0) + (L / 5.0)).circle(0.05, False).extrude(0.1)
        return self._orient(case)

    def make_body(self, modelID):
        """ Create the upper part of the body (from H1 up to H). """
        FreeCAD.Console.PrintMessage('make_body 1 \r\n')
        params = self.all_params[modelID]
        W = params.W    # Width
        L = params.L    # Length
        H = params.H    # Height
        H1 = params.H1  # Height 1 (split between base and upper body)
        #
        # Make body: a box with all vertical edges and the top face filleted.
        #
        case = cq.Workplane("XY").workplane(offset=H1).moveTo(0.0, 0.0).rect(W, L).extrude(H - H1)
        case = case.faces("<X").edges("<Y").fillet(0.03)
        case = case.faces("<X").edges(">Y").fillet(0.03)
        case = case.faces(">X").edges("<Y").fillet(0.03)
        case = case.faces(">X").edges(">Y").fillet(0.03)
        case = case.faces(">Z").fillet(0.03)
        # Cut a recess where make_top's pin-1 dot sits, so the dot shows
        # in its own color after fusing.
        case1 = cq.Workplane("XY").workplane(offset=H - 0.1).moveTo(0.0 - (W / 2.0) + (W / 5.0), 0.0 - (L / 2.0) + (L / 5.0)).circle(0.05, False).extrude(0.2)
        case = case.cut(case1)
        return self._orient(case)

    def make_pin(self, modelID):
        """ Create the solder pads for the known series, else a tiny dummy. """
        params = self.all_params[modelID]
        serie = params.serie  # Serie
        if serie == 'SF14':
            # Five pads: one on one long side, two pairs on the corners of
            # the other side.
            case = cq.Workplane("XY").workplane(offset=-0.005).moveTo(-0.5, 0.0).rect(0.25, 0.325).extrude(0.1)
            case1 = cq.Workplane("XY").workplane(offset=-0.005).moveTo(0.0, 0.0 - 0.2875).rect(0.25, 0.325).extrude(0.1)
            case = case.union(case1)
            case1 = cq.Workplane("XY").workplane(offset=-0.005).moveTo(0.5, 0.0 - 0.2875).rect(0.25, 0.325).extrude(0.1)
            case = case.union(case1)
            case1 = cq.Workplane("XY").workplane(offset=-0.005).moveTo(0.5, 0.2875).rect(0.25, 0.325).extrude(0.1)
            case = case.union(case1)
            case1 = cq.Workplane("XY").workplane(offset=-0.005).moveTo(0.0, 0.2875).rect(0.25, 0.325).extrude(0.1)
            case = case.union(case1)
        else:
            #
            # Make dummy: a tiny solid so downstream coloring/fusing still
            # sees four objects.
            #
            # Fix: the original passed circle(0.01, 0.01); a truthy second
            # argument (forConstruction) makes the circle construction-only
            # geometry, so the extrude would produce no solid. Use False,
            # as the other make_* methods do.
            case = cq.Workplane("XY").workplane(offset=0.1).moveTo(0.0, 0.0).circle(0.01, False).extrude(0.01)
        return self._orient(case)

    def make_npth_pin(self, modelID):
        """ Create the lower part of the body (from the PCB surface to H1). """
        params = self.all_params[modelID]
        W = params.W    # Width
        L = params.L    # Length
        H1 = params.H1  # Height 1 (base height)
        #
        # Base: a box with the four vertical edges filleted.
        #
        case = cq.Workplane("XY").workplane(offset=0.0).moveTo(0.0, 0.0).rect(W, L).extrude(H1)
        case = case.faces("<X").edges("<Y").fillet(0.03)
        case = case.faces("<X").edges(">Y").fillet(0.03)
        case = case.faces(">X").edges("<Y").fillet(0.03)
        case = case.faces(">X").edges(">Y").fillet(0.03)
        return self._orient(case)
##enabling optional/default values to None
def namedtuple_with_defaults(typename, field_names, default_values=()):
    """
    Create a namedtuple class whose fields default to None unless a value
    is supplied in ``default_values`` (a mapping of field name -> default,
    or a positional sequence of defaults).
    """
    # Local imports: this module otherwise relies on names pulled in via
    # ``from cq_common import *``, which may not provide ``collections``.
    import collections
    import collections.abc

    T = collections.namedtuple(typename, field_names)
    # Temporarily default every field to None so the prototype instance
    # below can be built even when default_values covers only some fields.
    T.__new__.__defaults__ = (None,) * len(T._fields)
    # Fix: ``collections.Mapping`` was removed in Python 3.10;
    # ``collections.abc.Mapping`` is the supported spelling.
    if isinstance(default_values, collections.abc.Mapping):
        prototype = T(**default_values)
    else:
        prototype = T(*default_values)
    T.__new__.__defaults__ = tuple(prototype)
    return T
# Params describes a single filter model; namedtuple_with_defaults lets
# entries omit the optional fields (they default to None).
Params = namedtuple_with_defaults("Params", [
    'modelName',            # modelName
    'W',                    # Width
    'L',                    # Length
    'H',                    # Overall height
    'H1',                   # Height 1
    'serie',                # Serie
    'npth_pin_color_key',   # NPTH Pin color
    'body_top_color_key',   # Top color
    'body_color_key',       # Body colour
    'pin_color_key',        # Pin color
    'dest_dir_prefix'       # Destination directory
])

# Catalogue of supported models, keyed by model ID (footprint/part name).
all_params = {
    #
    # https://global.kyocera.com/prdct/electro/product/pdf/sf14_tdlte.pdf
    #
    'Filter_1411-5_1.4x1.1mm': Params(
        modelName = 'Filter_1411-5_1.4x1.1mm',  # Model name
        W = 1.40,        # Width
        L = 1.10,        # Length
        H = 0.7,         # Overall height
        H1 = 0.20,       # Height 1
        serie = 'SF14',  # Serie
    ),
    # NOTE(review): this entry reuses the Filter_1411-5 model name --
    # presumably a part-number alias for the same package; confirm intended.
    'SF14-1575F5UUC1': Params(
        modelName = 'Filter_1411-5_1.4x1.1mm',  # Model name
        W = 1.40,        # Width
        L = 1.10,        # Length
        H = 0.7,         # Overall height
        H1 = 0.20,       # Height 1
        serie = 'SF14',  # Serie
    ),
}
| gpl-2.0 |
miptliot/edx-platform | common/test/acceptance/pages/studio/component_editor.py | 3 | 6866 | from bok_choy.page_object import PageObject
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support.ui import Select
from common.test.acceptance.pages.common.utils import click_css
from common.test.acceptance.tests.helpers import get_selected_option_text, select_option_by_text
class BaseComponentEditorView(PageObject):
    """
    Shared :class:`.PageObject` behaviour for the component and visibility
    editors. Assumes the default xmodule editor markup is rendered.
    """
    BODY_SELECTOR = '.xblock-editor'

    def __init__(self, browser, locator):
        """
        Args:
            browser (selenium.webdriver): The Selenium-controlled browser that this page is loaded in.
            locator (str): The locator that identifies which xblock this :class:`.xblock-editor` relates to.
        """
        super(BaseComponentEditorView, self).__init__(browser)
        self.locator = locator

    def is_browser_on_page(self):
        editor_css = '%s[data-locator="%s"]' % (self.BODY_SELECTOR, self.locator)
        return self.q(css=editor_css).present

    def _bounded_selector(self, selector):
        """
        Scope ``selector`` so it only matches within this particular
        `ComponentEditorView` instance.
        """
        return '%s[data-locator="%s"] %s' % (self.BODY_SELECTOR, self.locator, selector)

    def url(self):
        """
        Returns None because this is not directly accessible via URL.
        """
        return None

    def save(self):
        """
        Clicks save button.
        """
        click_css(self, 'a.action-save')

    def cancel(self):
        """
        Clicks cancel button.
        """
        click_css(self, 'a.action-cancel', require_notification=False)
class ComponentEditorView(BaseComponentEditorView):
    """
    A :class:`.PageObject` representing the rendered view of a component editor.
    """
    def get_setting_element(self, label):
        """
        Returns the setting input element with the given label (display name)
        within the Settings modal, or None if no such setting exists.
        """
        # Make sure the "Settings" tab is active before querying its fields.
        settings_button = self.q(css='.edit-xblock-modal .editor-modes .settings-button')
        if settings_button.is_present():
            settings_button.click()
        setting_labels = self.q(css=self._bounded_selector('.metadata_edit .wrapper-comp-setting .setting-label'))
        # Labels and inputs are queried separately but appear in the same DOM
        # order, so the matching label's index selects the paired input.
        for index, setting in enumerate(setting_labels):
            if setting.text == label:
                return self.q(css=self._bounded_selector('.metadata_edit div.wrapper-comp-setting .setting-input'))[index]
        return None

    def set_field_value_and_save(self, label, value):
        """
        Sets the text field with given label (display name) to the specified value, and presses Save.
        """
        elem = self.get_setting_element(label)

        # Clear the current value, set the new one, then
        # Tab to move to the next field (so change event is triggered).
        elem.clear()
        elem.send_keys(value)
        elem.send_keys(Keys.TAB)
        self.save()

    def set_select_value_and_save(self, label, value):
        """
        Sets the select with given label (display name) to the specified value, and presses Save.
        """
        elem = self.get_setting_element(label)
        select = Select(elem)
        select.select_by_value(value)
        self.save()

    def get_selected_option_text(self, label):
        """
        Returns the text of the first selected option for the select with
        given label (display name), or None if the setting does not exist.
        """
        elem = self.get_setting_element(label)
        if elem:
            select = Select(elem)
            return select.first_selected_option.text
        else:
            return None
class ComponentVisibilityEditorView(BaseComponentEditorView):
    """
    A :class:`.PageObject` representing the rendered view of a component visibility editor.
    """
    # CSS (relative to this editor) of one partition-group checkbox field.
    OPTION_SELECTOR = '.partition-group-control .field'
    # Display names of the built-in partition schemes.
    ALL_LEARNERS_AND_STAFF = 'All Learners and Staff'
    CONTENT_GROUP_PARTITION = 'Content Groups'
    ENROLLMENT_TRACK_PARTITION = "Enrollment Track Groups"

    @property
    def all_group_options(self):
        """
        Return all partition groups.
        """
        return self.q(css=self._bounded_selector(self.OPTION_SELECTOR)).results

    @property
    def current_groups_message(self):
        """
        This returns the message shown at the top of the visibility dialog about the
        current visibility state (at the time that the dialog was opened).
        For example, "Access is restricted to: All Learners and Staff".
        """
        return self.q(css=self._bounded_selector('.visibility-header'))[0].text

    @property
    def selected_partition_scheme(self):
        """
        Return the selected partition scheme (or "All Learners and Staff"
        if no partitioning is selected).
        """
        selector = self.q(css=self._bounded_selector('.partition-visibility select'))
        return get_selected_option_text(selector)

    def select_partition_scheme(self, partition_name):
        """
        Sets the selected partition scheme to the one with the
        matching name.
        """
        selector = self.q(css=self._bounded_selector('.partition-visibility select'))
        # focus_out fires the change handler that re-renders the group list.
        select_option_by_text(selector, partition_name, focus_out=True)

    @property
    def selected_groups(self):
        """
        Return all selected partition groups. If none are selected,
        returns an empty array.
        """
        results = []
        for option in self.all_group_options:
            checkbox = option.find_element_by_css_selector('input')
            if checkbox.is_selected():
                results.append(option)
        return results

    def select_group(self, group_name, save=True):
        """
        Select the first group which has a label matching `group_name`.

        Arguments:
            group_name (str): The name of the group.
            save (boolean): Whether the "save" button should be clicked
                afterwards.

        Returns:
            bool: Whether a group with the provided name was found and clicked.
        """
        for option in self.all_group_options:
            if group_name in option.text:
                checkbox = option.find_element_by_css_selector('input')
                # Note: this toggles the checkbox rather than forcing it on.
                checkbox.click()
                if save:
                    self.save()
                return True
        return False

    def select_groups_in_partition_scheme(self, partition_name, group_names):
        """
        Select groups in the provided partition scheme. The "save"
        button is clicked afterwards.
        """
        self.select_partition_scheme(partition_name)
        for label in group_names:
            self.select_group(label, save=False)
        self.save()
| agpl-3.0 |
pombredanne/splash | splash/browser_tab.py | 1 | 42052 | # -*- coding: utf-8 -*-
from __future__ import absolute_import
import base64
import functools
import json
import os
import weakref
import uuid
from PyQt5.QtCore import QObject, QSize, Qt, QTimer, pyqtSlot
from PyQt5.QtNetwork import QNetworkRequest
from PyQt5.QtWebKitWidgets import QWebPage
from PyQt5.QtWebKit import QWebSettings
from twisted.internet import defer
from twisted.python import log
import six
from splash import defaults
from splash.har.qt import cookies2har
from splash.qtrender_image import QtImageRenderer
from splash.qtutils import (OPERATION_QT_CONSTANTS, WrappedSignal, qt2py,
qurl2ascii, to_qurl)
from splash.render_options import validate_size_str
from splash.qwebpage import SplashQWebPage, SplashQWebView
from splash.exceptions import JsError, OneShotCallbackError
from splash.utils import to_bytes
def skip_if_closing(meth):
    """
    Decorator for BrowserTab methods: while the tab is shutting down
    (``self._closing`` is set), the wrapped method becomes a logged no-op
    and returns None instead of touching Qt objects being destroyed.
    """
    @functools.wraps(meth)
    def guarded(self, *args, **kwargs):
        if not self._closing:
            return meth(self, *args, **kwargs)
        self.logger.log(
            "%s is not called because BrowserTab is closing" % meth.__name__,
            min_level=2,
        )
    return guarded
class BrowserTab(QObject):
    """
    An object for controlling a single browser tab (QWebView).

    It is created by splash.pool.Pool. Pool attaches to tab's deferred
    and waits until either a callback or an errback is called, then destroys
    a BrowserTab.

    XXX: are cookies shared between "browser tabs"? In real browsers they are,
    but maybe this is not what we want.
    """

    def __init__(self, network_manager, splash_proxy_factory, verbosity,
                 render_options, visible=False):
        """ Create a new browser tab. """
        QObject.__init__(self)
        # Pool waits on this deferred; fired via return_result/return_error.
        self.deferred = defer.Deferred()
        self.network_manager = network_manager
        self.verbosity = verbosity
        self.visible = visible
        self._uid = render_options.get_uid()
        # _closing is set once close() starts; _closing_normally
        # distinguishes a scripted close from an external window close.
        self._closing = False
        self._closing_normally = False
        # Strong references to pending QTimers (prevents premature GC).
        self._active_timers = set()
        # Weak maps, so a collected timer drops out automatically.
        self._timers_to_cancel_on_redirect = weakref.WeakKeyDictionary()  # timer: callback
        self._timers_to_cancel_on_error = weakref.WeakKeyDictionary()  # timer: callback
        self._callback_proxies_to_cancel = weakref.WeakSet()
        self._js_console = None
        # JS snippets injected into every new page (see autoload()).
        self._autoload_scripts = []
        self.logger = _BrowserTabLogger(uid=self._uid, verbosity=verbosity)
        self._init_webpage(verbosity, network_manager, splash_proxy_factory,
                           render_options)
        self.http_client = _SplashHttpClient(self.web_page)
    def _init_webpage(self, verbosity, network_manager, splash_proxy_factory, render_options):
        """ Create and initialize QWebPage and QWebView """
        self.web_page = SplashQWebPage(verbosity)
        self.web_page.setNetworkAccessManager(network_manager)
        self.web_page.splash_proxy_factory = splash_proxy_factory
        self.web_page.render_options = render_options
        self._set_default_webpage_options(self.web_page)
        self._setup_webpage_events()
        self.web_view = SplashQWebView()
        self.web_view.setPage(self.web_page)
        # Let Qt delete the view when the window is closed.
        self.web_view.setAttribute(Qt.WA_DeleteOnClose, True)
        self.web_view.onBeforeClose = self._on_before_close
        if self.visible:
            self.web_view.move(0, 0)
            self.web_view.show()
        self.set_viewport(defaults.VIEWPORT_SIZE)
        # XXX: hack to ensure that default window size is not 640x480.
        self.web_view.resize(
            QSize(*map(int, defaults.VIEWPORT_SIZE.split('x'))))
    def set_js_enabled(self, val):
        """ Enable or disable JavaScript execution for this tab. """
        settings = self.web_page.settings()
        settings.setAttribute(QWebSettings.JavascriptEnabled, val)

    def get_js_enabled(self):
        """ Return True if JavaScript execution is enabled. """
        settings = self.web_page.settings()
        return settings.testAttribute(QWebSettings.JavascriptEnabled)

    def set_private_mode_enabled(self, val):
        """ Toggle private browsing; local storage is disabled while private. """
        settings = self.web_page.settings()
        settings.setAttribute(QWebSettings.PrivateBrowsingEnabled, bool(val))
        settings.setAttribute(QWebSettings.LocalStorageEnabled, not bool(val))

    def get_private_mode_enabled(self):
        """ Return True if private browsing is enabled. """
        settings = self.web_page.settings()
        return settings.testAttribute(QWebSettings.PrivateBrowsingEnabled)
    def _set_default_webpage_options(self, web_page):
        """
        Set QWebPage options.
        TODO: allow to customize them.
        """
        settings = web_page.settings()
        settings.setAttribute(QWebSettings.JavascriptEnabled, True)
        settings.setAttribute(QWebSettings.PluginsEnabled, False)
        settings.setAttribute(QWebSettings.LocalContentCanAccessRemoteUrls, True)
        # Scrollbars are only useful when the window is actually shown.
        scroll_bars = Qt.ScrollBarAsNeeded if self.visible else Qt.ScrollBarAlwaysOff
        web_page.mainFrame().setScrollBarPolicy(Qt.Vertical, scroll_bars)
        web_page.mainFrame().setScrollBarPolicy(Qt.Horizontal, scroll_bars)
        if self.visible:
            # Enable the WebKit inspector for interactive debugging.
            web_page.settings().setAttribute(QWebSettings.DeveloperExtrasEnabled, True)
    def _setup_webpage_events(self):
        """ Connect mainFrame signals to this tab's handlers. """
        # WrappedSignal provides per-connection bookkeeping so that
        # set_content/go can pair a specific load with its callbacks.
        self._load_finished = WrappedSignal(self.web_page.mainFrame().loadFinished)
        self.web_page.mainFrame().loadFinished.connect(self._on_load_finished)
        self.web_page.mainFrame().urlChanged.connect(self._on_url_changed)
        self.web_page.mainFrame().javaScriptWindowObjectCleared.connect(self._on_javascript_window_object_cleared)
        self.logger.add_web_page(self.web_page)
    def return_result(self, result):
        """ Return a result to the Pool. """
        if self._result_already_returned():
            # NOTE(review): this only logs; the callback below still fires
            # and would raise AlreadyCalledError -- confirm this is intended.
            self.logger.log("error: result is already returned", min_level=1)

        self.deferred.callback(result)
        # self.deferred = None

    def return_error(self, error):
        """ Return an error to the Pool. """
        if self._result_already_returned():
            self.logger.log("error: result is already returned", min_level=1)

        self.deferred.errback(error)
        # self.deferred = None

    def _result_already_returned(self):
        """ Return True if an error or a result is already returned to Pool """
        return self.deferred.called
    def set_custom_headers(self, headers):
        """
        Set custom HTTP headers to be sent with each request. Passed headers
        are merged with QWebKit default headers, overwriting QWebKit values
        in case of conflicts.
        """
        self.web_page.custom_headers = headers

    def set_resource_timeout(self, timeout):
        """ Set a default timeout for HTTP requests, in seconds. """
        self.web_page.resource_timeout = timeout

    def get_resource_timeout(self):
        """ Get a default timeout for HTTP requests, in seconds. """
        return self.web_page.resource_timeout

    def set_images_enabled(self, enabled):
        """ Enable or disable automatic image loading. """
        self.web_page.settings().setAttribute(QWebSettings.AutoLoadImages,
                                              enabled)

    def get_images_enabled(self):
        """ Return True if automatic image loading is enabled. """
        settings = self.web_page.settings()
        return settings.testAttribute(QWebSettings.AutoLoadImages)
    def set_viewport(self, size, raise_if_empty=False):
        """
        Set viewport size.
        If size is "full" viewport size is detected automatically.
        If can also be "<width>x<height>".

        .. note::

           This will update all JS geometry variables, but window resize event
           is delivered asynchronously and so ``window.resize`` will not be
           invoked until control is yielded to the event loop.
        """
        if size == 'full':
            size = self.web_page.mainFrame().contentsSize()
            self.logger.log("Contents size: %s" % size, min_level=2)
            if size.isEmpty():
                if raise_if_empty:
                    raise RuntimeError("Cannot detect viewport size")
                else:
                    size = defaults.VIEWPORT_SIZE
                    self.logger.log("Viewport is empty, falling back to: %s" %
                                    size)

        if not isinstance(size, QSize):
            # ``size`` is a "<w>x<h>" string at this point.
            validate_size_str(size)
            w, h = map(int, size.split('x'))
            size = QSize(w, h)
        self.web_page.setViewportSize(size)
        self._force_relayout()
        w, h = int(size.width()), int(size.height())
        self.logger.log("viewport size is set to %sx%s" % (w, h), min_level=2)
        return w, h
    def _force_relayout(self):
        """Force a relayout of the web page contents."""
        # setPreferredContentsSize may be used to force a certain size for
        # layout purposes. Passing an invalid size resets the override and
        # tells the QWebPage to use the size as requested by the document.
        # This is in fact the default behavior, so we don't change anything.
        #
        # The side-effect of this operation is a forced synchronous relayout of
        # the page.
        self.web_page.setPreferredContentsSize(QSize())

    def lock_navigation(self):
        """ Disallow further navigation in this tab. """
        self.web_page.navigation_locked = True

    def unlock_navigation(self):
        """ Allow navigation in this tab again. """
        self.web_page.navigation_locked = False
    def set_content(self, data, callback, errback, mime_type=None, baseurl=None):
        """
        Set page contents to ``data``, then wait until page loads.
        Invoke a callback if load was successful or errback if it wasn't.
        """
        if mime_type is None:
            mime_type = "text/html; charset=utf-8"
        if baseurl is None:
            baseurl = ''
        # Pair this load with its callbacks; the returned connection id lets
        # _on_content_ready disconnect exactly this handler later.
        callback_id = self._load_finished.connect(
            self._on_content_ready,
            callback=callback,
            errback=errback,
        )
        self.logger.log("callback %s is connected to loadFinished" % callback_id, min_level=3)
        self.web_page.mainFrame().setContent(data, mime_type, to_qurl(baseurl))
    def set_user_agent(self, value):
        """ Set User-Agent header for future requests """
        self.http_client.set_user_agent(value)

    def get_cookies(self):
        """ Return a list of all cookies in the current cookiejar """
        # Cookies are converted to HAR-format dicts.
        return cookies2har(self.web_page.cookiejar.allCookies())

    def init_cookies(self, cookies):
        """ Replace all current cookies with ``cookies`` """
        self.web_page.cookiejar.init(cookies)

    def clear_cookies(self):
        """ Remove all cookies. Return a number of cookies deleted. """
        return self.web_page.cookiejar.clear()

    def delete_cookies(self, name=None, url=None):
        """
        Delete cookies with name == ``name``.

        If ``url`` is not None then only those cookies are deleted which
        are to be added when a request is sent to ``url``.

        Return a number of cookies deleted.
        """
        return self.web_page.cookiejar.delete(name, url)

    def add_cookie(self, cookie):
        """ Add a single cookie to the cookiejar. """
        return self.web_page.cookiejar.add(cookie)

    @property
    def url(self):
        """ Current URL """
        return six.text_type(self.web_page.mainFrame().url().toString())
    def go(self, url, callback, errback, baseurl=None, http_method='GET',
           body=None, headers=None):
        """
        Go to an URL. This is similar to entering an URL in
        address tab and pressing Enter.

        Exactly one of ``callback``/``errback`` will eventually be invoked
        when the load finishes or fails.
        """
        self.store_har_timing("_onStarted")

        if body is not None:
            body = to_bytes(body)

        if baseurl:
            # If baseurl is used, we download the page manually,
            # then set its contents to the QWebPage and let it
            # download related resources and render the result.
            cb = functools.partial(
                self._on_baseurl_request_finished,
                callback=callback,
                errback=errback,
                baseurl=baseurl,
                url=url,
            )
            self.http_client.request(url,
                callback=cb,
                method=http_method,
                body=body,
                headers=headers,
                follow_redirects=True,
            )
        else:
            # if not self._goto_callbacks.isempty():
            #     self.logger.log("Only a single concurrent 'go' request is supported. "
            #                     "Previous go requests will be cancelled.", min_level=1)
            #     # When a new URL is loaded to mainFrame an errback will
            #     # be called, so we're not cancelling this callback manually.

            callback_id = self._load_finished.connect(
                self._on_content_ready,
                callback=callback,
                errback=errback,
            )
            self.logger.log("callback %s is connected to loadFinished" % callback_id, min_level=3)
            self._load_url_to_mainframe(url, http_method, body, headers=headers)
    def stop_loading(self):
        """
        Stop loading of the current page and all pending page
        refresh/redirect requests.
        """
        self.logger.log("stop_loading", min_level=2)
        self.web_view.pageAction(QWebPage.StopScheduledPageRefresh)
        self.web_view.stop()

    def register_callback(self, event, callback):
        """ Register a callback for an event """
        self.web_page.callbacks[event].append(callback)

    def clear_callbacks(self, event):
        """ Unregister all callbacks for an event """
        del self.web_page.callbacks[event][:]

    # def remove_callback(self, event, callback):
    #     """ Unregister a callback for an event """
    #     self.web_page.callbacks[event].remove(callback)
    @skip_if_closing
    def close(self):
        """ Destroy this tab """
        self.logger.log("close is requested by a script", min_level=2)
        self._closing = True
        self._closing_normally = True
        self.web_view.pageAction(QWebPage.StopScheduledPageRefresh)
        self.web_view.stop()
        self.web_view.close()
        # deleteLater() schedules Qt-side destruction for the next event
        # loop iteration instead of deleting synchronously.
        self.web_page.deleteLater()
        self.web_view.deleteLater()
        self._cancel_all_timers()

    def _on_before_close(self):
        # self._closing = True
        # self._cancel_all_timers()
        # if not self._closing_normally:
        #     self.return_error(Exception("Window is closed by user"))
        return True  # don't close the window
    @skip_if_closing
    def _on_load_finished(self, ok):
        """
        This callback is called for all web_page.mainFrame()
        loadFinished events.
        """
        if self.web_page.maybe_redirect(ok):
            self.logger.log("Redirect or other non-fatal error detected", min_level=2)
            return

        if self.web_page.is_ok(ok):  # or maybe_redirect:
            self.logger.log("loadFinished: ok", min_level=2)
        else:
            # A real failure: timers registered to die on error are cancelled
            # (and their error callbacks invoked).
            self._cancel_timers(self._timers_to_cancel_on_error)

            if self.web_page.error_loading(ok):
                self.logger.log("loadFinished: %s" % (str(self.web_page.error_info)), min_level=1)
            else:
                self.logger.log("loadFinished: unknown error", min_level=1)
    def _on_baseurl_request_finished(self, callback, errback, baseurl, url):
        """
        This method is called when ``baseurl`` is used and a
        reply for the first request is received.
        """
        self.logger.log("baseurl_request_finished", min_level=2)
        # sender() is the QNetworkReply whose finished() signal fired.
        reply = self.sender()
        mime_type = reply.header(QNetworkRequest.ContentTypeHeader)
        data = reply.readAll()
        self.set_content(
            data=data,
            callback=callback,
            errback=errback,
            mime_type=mime_type,
            baseurl=baseurl,
        )
        if reply.error():
            self.logger.log("Error loading %s: %s" % (url, reply.errorString()), min_level=1)
    def _load_url_to_mainframe(self, url, http_method, body=None, headers=None):
        """ Load ``url`` into the main frame using the given HTTP method. """
        request = self.http_client.request_obj(url, headers=headers, body=body)
        meth = OPERATION_QT_CONSTANTS[http_method]
        if body is None:  # PyQT doesn't support body=None
            self.web_page.mainFrame().load(request, meth)
        else:
            assert isinstance(body, bytes)
            self.web_page.mainFrame().load(request, meth, body)
    @skip_if_closing
    def _on_content_ready(self, ok, callback, errback, callback_id):
        """
        This method is called when a QWebPage finishes loading its contents.
        """
        if self.web_page.maybe_redirect(ok):
            # XXX: It assumes loadFinished will be called again because
            # redirect happens. If redirect is detected improperly,
            # loadFinished won't be called again, and Splash will return
            # the result only after a timeout.
            return

        self.logger.log("loadFinished: disconnecting callback %s" % callback_id, min_level=3)
        self._load_finished.disconnect(callback_id)

        if self.web_page.is_ok(ok):
            callback()
        elif self.web_page.error_loading(ok):
            # XXX: maybe return a meaningful error page instead of generic
            # error message?
            errback(self.web_page.error_info)
        else:
            # XXX: it means ok=False. When does it happen?
            errback(self.web_page.error_info)
    def wait(self, time_ms, callback, onredirect=None, onerror=None):
        """
        Wait for time_ms, then run callback.

        If onredirect is True then the timer is cancelled if redirect happens.
        If onredirect is callable then in case of redirect the timer is
        cancelled and this callable is called.

        If onerror is True then the timer is cancelled if a render error
        happens. If onerror is callable then in case of a render error the
        timer is cancelled and this callable is called.
        """
        timer = QTimer()
        timer.setSingleShot(True)
        timer_callback = functools.partial(self._on_wait_timeout,
                                           timer=timer,
                                           callback=callback,
                                           )
        timer.timeout.connect(timer_callback)

        self.logger.log("waiting %sms; timer %s" % (time_ms, id(timer)), min_level=2)

        timer.start(time_ms)
        # Keep a strong reference so the timer isn't garbage-collected
        # before it fires.
        self._active_timers.add(timer)
        if onredirect:
            self._timers_to_cancel_on_redirect[timer] = onredirect
        if onerror:
            self._timers_to_cancel_on_error[timer] = onerror

    def _on_wait_timeout(self, timer, callback):
        """ Fire a wait() callback and drop all bookkeeping for its timer. """
        self.logger.log("wait timeout for %s" % id(timer), min_level=2)
        if timer in self._active_timers:
            self._active_timers.remove(timer)
        self._timers_to_cancel_on_redirect.pop(timer, None)
        self._timers_to_cancel_on_error.pop(timer, None)
        callback()
    def _cancel_timer(self, timer, errback=None):
        """ Stop a single timer; call ``errback`` (if callable) with error info. """
        self.logger.log("cancelling timer %s" % id(timer), min_level=2)
        if timer in self._active_timers:
            self._active_timers.remove(timer)
        try:
            timer.stop()
            if callable(errback):
                self.logger.log("calling timer errback", min_level=2)
                errback(self.web_page.error_info)
        finally:
            # Always schedule Qt-side destruction, even if errback raised.
            timer.deleteLater()

    def _cancel_timers(self, timers):
        """ Cancel every timer in ``timers`` (a timer -> callback mapping). """
        # Iterate over a snapshot: _cancel_timer/pop mutate the mapping.
        for timer, oncancel in list(timers.items()):
            self._cancel_timer(timer, oncancel)
            timers.pop(timer, None)

    def _cancel_all_timers(self):
        """ Cancel all outstanding timers and one-shot callback proxies. """
        total_len = len(self._active_timers) + len(self._callback_proxies_to_cancel)
        self.logger.log("cancelling %d remaining timers" % total_len, min_level=2)
        for timer in list(self._active_timers):
            self._cancel_timer(timer)
        for callback_proxy in self._callback_proxies_to_cancel:
            callback_proxy.use_up()
    def _on_url_changed(self, url):
        # Record the redirect in HAR and cancel timers that were scheduled
        # to die on redirect.
        self.web_page.har.store_redirect(six.text_type(url.toString()))
        self._cancel_timers(self._timers_to_cancel_on_redirect)

    def run_js_file(self, filename, handle_errors=True):
        """
        Load JS library from file ``filename`` to the current frame.
        """
        with open(filename, 'rb') as f:
            script = f.read().decode('utf-8')
            self.runjs(script, handle_errors=handle_errors)

    def run_js_files(self, folder, handle_errors=True):
        """
        Load all JS libraries from ``folder`` folder to the current frame.
        """
        for jsfile in os.listdir(folder):
            if jsfile.endswith('.js'):
                filename = os.path.join(folder, jsfile)
                self.run_js_file(filename, handle_errors=handle_errors)

    def autoload(self, js_source):
        """ Execute JS code before each page load """
        self._autoload_scripts.append(js_source)

    def autoload_reset(self):
        """ Remove all scripts scheduled for auto-loading """
        self._autoload_scripts = []

    def _on_javascript_window_object_cleared(self):
        # Re-inject autoload scripts whenever a fresh JS context is created.
        for script in self._autoload_scripts:
            # XXX: handle_errors=False is used to execute autoload scripts
            # in a global context (not inside a closure).
            # One difference is how are `function foo(){}` statements handled:
            # if executed globally, `foo` becomes an attribute of window;
            # if executed in a closure, `foo` is a name local to this closure.
            self.runjs(script, handle_errors=False)
    def http_get(self, url, callback, headers=None, follow_redirects=True):
        """ Send a GET request; call a callback with the reply as an argument. """
        self.http_client.get(url,
            callback=callback,
            headers=headers,
            follow_redirects=follow_redirects
        )

    def http_post(self, url, callback, headers=None, follow_redirects=True,
                  body=None):
        """ Send a POST request; call a callback with the reply as an argument. """
        if body is not None:
            body = to_bytes(body)
        self.http_client.post(url,
            callback=callback,
            headers=headers,
            follow_redirects=follow_redirects,
            body=body)
def evaljs(self, js_source, handle_errors=True):
    """
    Run JS code in page context and return the result.
    If a JavaScript exception or a syntax error happens
    and :param:`handle_errors` is True then a Python JsError
    exception is raised.
    """
    frame = self.web_page.mainFrame()
    if not handle_errors:
        # Raw mode: evaluate directly and convert the Qt result to Python.
        return qt2py(frame.evaluateJavaScript(js_source))
    # json.dumps of a one-element list yields '["..."]'; [1:-1] strips
    # the brackets, leaving a safely-escaped JS string literal.
    escaped = json.dumps([js_source], ensure_ascii=False)[1:-1]
    # Wrap the script so JS-side errors are captured and reported as a
    # structured {error, errorType, errorMessage, errorRepr} object
    # instead of propagating inside Qt.
    wrapped = """
(function(script_text){
try{
return {error: false, result: eval(script_text)}
}
catch(e){
return {
error: true,
errorType: e.name,
errorMessage: e.message,
errorRepr: e.toString(),
}
}
})(%(script_text)s)
""" % dict(script_text=escaped)
    res = qt2py(frame.evaluateJavaScript(wrapped))
    if not isinstance(res, dict):
        # The wrapper always returns an object; anything else means the
        # evaluation itself failed in an unexpected way.
        raise JsError({
            'type': "unknown",
            'js_error_message': res,
            'message': "unknown JS error: {!r}".format(res)
        })
    if res.get("error", False):
        err_message = res.get('errorMessage')
        err_type = res.get('errorType', '<custom JS error>')
        err_repr = res.get('errorRepr', "<unknown JS error>")
        if err_message is None:
            # Thrown values without a .message (e.g. `throw "oops"`)
            # fall back to their string representation.
            err_message = err_repr
        raise JsError({
            'type': 'js_error',
            'js_error_type': err_type,
            'js_error_message': err_message,
            'js_error': err_repr,
            'message': "JS error: {!r}".format(err_repr)
        })
    return res.get("result", None)
def runjs(self, js_source, handle_errors=True):
    """ Run JS code in page context, discarding its result. """
    # Appending ";undefined" makes the whole script evaluate to
    # `undefined`, so Qt never has to build a (potentially costly)
    # result object for a value we would throw away anyway.
    self.evaljs(js_source + ";undefined", handle_errors=handle_errors)
def wait_for_resume(self, js_source, callback, errback, timeout):
    """
    Run some Javascript asynchronously.
    The JavaScript must contain a method called `main()` that accepts
    one argument. The first argument will be an object with `resume()`
    and `error()` methods. The code _must_ call one of these functions
    before the timeout or else it will be canceled.
    Note: this cleans up the JavaScript global variable that it creates,
    but QT seems to notice when a JS GV is deleted and it destroys the
    underlying C++ object. Therefore, we can only delete the JS GV _after_
    the user's code has called us back. This should change in QT5, since
    it will then be possible to specify a different object ownership
    policy when calling addToJavaScriptWindowObject().
    """
    frame = self.web_page.mainFrame()
    # json.dumps produces '"..."'; [1:-1] strips the quotes so the text
    # can be embedded inside the double-quoted eval() literal below.
    script_text = json.dumps(js_source)[1:-1]
    # The proxy exposes resume()/error() to JS; it is registered for
    # cancellation so a page teardown doesn't leave it dangling.
    callback_proxy = OneShotCallbackProxy(self, callback, errback, timeout)
    self._callback_proxies_to_cancel.add(callback_proxy)
    frame.addToJavaScriptWindowObject(callback_proxy.name, callback_proxy)
    # Wrapper: eval the user script (turning an eval-time error into a
    # throwing main()), then drive main() with a `splash` object whose
    # resume/error forward to the Python proxy. Callbacks are deferred
    # with setTimeout(..., 0), and the window-global proxy name is
    # deleted only afterwards (see docstring note about Qt ownership).
    wrapped = """
(function () {
try {
eval("%(script_text)s");
} catch (err) {
var main = function (splash) {
throw err;
}
}
(function () {
var returnObject = {};
var deleteCallbackLater = function () {
setTimeout(function () {delete window["%(callback_name)s"]}, 0);
}
var splash = {
'error': function (message) {
setTimeout(function () {
window["%(callback_name)s"].error(message, false);
deleteCallbackLater();
}, 0);
},
'resume': function (value) {
returnObject['value'] = value;
setTimeout(function () {
window["%(callback_name)s"].resume(returnObject);
deleteCallbackLater();
}, 0);
},
'set': function (key, value) {
returnObject[key] = value;
}
};
try {
if (typeof main === 'undefined') {
throw "wait_for_resume(): no main() function defined";
}
main(splash);
} catch (err) {
setTimeout(function () {
window["%(callback_name)s"].error(err, true);
deleteCallbackLater();
}, 0);
}
})();
})();undefined
""" % dict(script_text=script_text, callback_name=callback_proxy.name)
    def cancel_callback():
        # Fired when the JS window object is cleared (e.g. navigation):
        # the proxy can never be called back, so cancel it explicitly.
        callback_proxy.cancel(reason='javascript window object cleared')
    self.logger.log("wait_for_resume wrapped script:\n%s" % wrapped,
                    min_level=3)
    frame.javaScriptWindowObjectCleared.connect(cancel_callback)
    frame.evaluateJavaScript(wrapped)
def store_har_timing(self, name):
    """ Store a named timing event in the page's HAR log. """
    message = "HAR event: %s" % name
    self.logger.log(message, min_level=3)
    self.web_page.har.store_timing(name)
def _jsconsole_enable(self):
    # TODO: add public interface or make console available by default
    # Lazily install a `console` object into the JS window so that
    # console.log() messages are captured on the Python side; a second
    # call is a no-op.
    if self._js_console is not None:
        return
    self._js_console = _JavascriptConsole()
    frame = self.web_page.mainFrame()
    frame.addToJavaScriptWindowObject('console', self._js_console)
def _jsconsole_messages(self):
    # TODO: add public interface or make console available by default
    # Return a shallow copy so callers can't mutate the console buffer.
    if self._js_console is None:
        return []
    return list(self._js_console.messages)
def html(self):
    """ Return the HTML of the current main frame. """
    self.logger.log("getting HTML", min_level=2)
    html_text = self.web_page.mainFrame().toHtml()
    self.store_har_timing("_onHtmlRendered")
    return html_text
def _get_image(self, image_format, width, height, render_all, scale_method):
    # Render the page with QtImageRenderer, optionally expanding the
    # viewport to cover the full page first; the previous viewport size
    # is always restored afterwards.
    old_size = self.web_page.viewportSize()
    try:
        if render_all:
            self.logger.log("Rendering whole page contents (RENDER_ALL)",
                            min_level=2)
            self.set_viewport('full')
        renderer = QtImageRenderer(
            self.web_page, self.logger, image_format,
            width=width, height=height, scale_method=scale_method)
        image = renderer.render_qwebpage()
    finally:
        if old_size != self.web_page.viewportSize():
            # Let's not generate extra "set size" messages in the log.
            self.web_page.setViewportSize(old_size)
    self.store_har_timing("_onScreenshotPrepared")
    return image
def png(self, width=None, height=None, b64=False, render_all=False,
        scale_method=None):
    """ Return a screenshot of the page in PNG format.

    With ``b64`` set, the PNG bytes are returned base64-encoded as a
    native string; otherwise raw bytes are returned.
    """
    self.logger.log(
        "Getting PNG: width=%s, height=%s, "
        "render_all=%s, scale_method=%s" %
        (width, height, render_all, scale_method), min_level=2)
    data = self._get_image('PNG', width, height, render_all,
                           scale_method).to_png()
    if b64:
        data = base64.b64encode(data).decode('utf-8')
    self.store_har_timing("_onPngRendered")
    return data
def jpeg(self, width=None, height=None, b64=False, render_all=False,
         scale_method=None, quality=None):
    """Return a screenshot of the page in JPEG format.

    ``quality`` is forwarded to the JPEG encoder; with ``b64`` set the
    JPEG bytes are returned base64-encoded as a native string.
    """
    self.logger.log(
        "Getting JPEG: width=%s, height=%s, "
        "render_all=%s, scale_method=%s, quality=%s" %
        (width, height, render_all, scale_method, quality), min_level=2)
    data = self._get_image('JPEG', width, height, render_all,
                           scale_method).to_jpeg(quality=quality)
    if b64:
        data = base64.b64encode(data).decode('utf-8')
    self.store_har_timing("_onJpegRendered")
    return data
def iframes_info(self, children=True, html=True):
    """ Return information about all iframes of the main frame. """
    self.logger.log("getting iframes", min_level=3)
    info = self._frame_to_dict(self.web_page.mainFrame(), children, html)
    self.store_har_timing("_onIframesRendered")
    return info
def har(self, reset=False):
    """ Return HAR information; optionally drop it afterwards. """
    self.logger.log("getting HAR", min_level=3)
    info = self.web_page.har.todict()
    if reset:
        self.har_reset()
    return info
def har_reset(self):
    """ Drop the HAR information collected so far. """
    self.logger.log("HAR information is reset", min_level=3)
    return self.web_page.reset_har()
def history(self):
    """ Return the history of 'main' HTTP requests. """
    self.logger.log("getting history", min_level=3)
    return self.web_page.har.get_history()
def last_http_status(self):
    """
    Return the HTTP status code of the currently loaded webpage,
    or None when it is not available.
    """
    return self.web_page.har.get_last_http_status()
def _frame_to_dict(self, frame, children=True, html=True):
    # Serialize a QWebFrame to a plain dict: URL, requested URL,
    # geometry (x, y, w, h) and title; optionally the frame HTML and a
    # recursive list of child frames (children each carry "frameName").
    g = frame.geometry()
    res = {
        "url": six.text_type(frame.url().toString()),
        "requestedUrl": six.text_type(frame.requestedUrl().toString()),
        "geometry": (g.x(), g.y(), g.width(), g.height()),
        "title": six.text_type(frame.title())
    }
    if html:
        res["html"] = six.text_type(frame.toHtml())
    if children:
        # Recurse with children=True so the whole subtree is captured.
        res["childFrames"] = [
            self._frame_to_dict(f, True, html)
            for f in frame.childFrames()
        ]
        res["frameName"] = six.text_type(frame.frameName())
    return res
class _SplashHttpClient(QObject):
    """ Wrapper class for making HTTP requests on behalf of a SplashQWebPage """
    def __init__(self, web_page):
        super(_SplashHttpClient, self).__init__()
        # In-flight QNetworkReply objects; entries are added in
        # _send_request and removed in _delete_reply.
        self._replies = set()
        self.web_page = web_page
        self.network_manager = web_page.networkAccessManager()

    def set_user_agent(self, value):
        """ Set User-Agent header for future requests """
        self.web_page.custom_user_agent = value

    def request_obj(self, url, headers=None, body=None):
        """ Return a QNetworkRequest object """
        request = QNetworkRequest()
        request.setUrl(to_qurl(url))
        request.setOriginatingObject(self.web_page.mainFrame())
        if headers is not None:
            # Explicit headers override the page-level custom headers.
            self.web_page.skip_custom_headers = True
            self._set_request_headers(request, headers)
        if body and not request.hasRawHeader(b"content-type"):
            # there is POST body but no content-type
            # QT will set this header, but it will complain so better to do this here
            request.setRawHeader(b"content-type", b"application/x-www-form-urlencoded")
        return request

    def request(self, url, callback, method='GET', body=None,
                headers=None, follow_redirects=True, max_redirects=5):
        """
        Create a request and return a QNetworkReply object with callback
        connected.

        Redirects are followed (up to ``max_redirects``) by
        _on_request_finished before the callback fires.
        """
        cb = functools.partial(
            self._on_request_finished,
            callback=callback,
            method=method,
            body=body,
            headers=headers,
            follow_redirects=follow_redirects,
            redirects_remaining=max_redirects,
        )
        return self._send_request(url, cb, method=method, body=body, headers=headers)

    def get(self, url, callback, headers=None, follow_redirects=True):
        """ Send a GET HTTP request; call the callback with the reply. """
        cb = functools.partial(
            self._return_reply,
            callback=callback,
            url=url,
        )
        self.request(url, cb, headers=headers, follow_redirects=follow_redirects)

    def post(self, url, callback, headers=None, follow_redirects=True, body=None):
        """ Send HTTP POST request;
        call the callback with the reply.
        """
        cb = functools.partial(self._return_reply, callback=callback, url=url)
        self.request(url, cb, headers=headers, follow_redirects=follow_redirects, body=body,
                     method="POST")

    def _send_request(self, url, callback, method='GET', body=None,
                      headers=None):
        # XXX: The caller must ensure self._delete_reply is called in a callback.
        if method.upper() not in ["POST", "GET"]:
            raise NotImplementedError()
        if body is not None:
            assert isinstance(body, bytes)
        request = self.request_obj(url, headers=headers, body=body)
        if method.upper() == "POST":
            reply = self.network_manager.post(request, body)
        else:
            reply = self.network_manager.get(request)
        reply.finished.connect(callback)
        # Track the reply so it isn't garbage collected mid-flight.
        self._replies.add(reply)
        return reply

    def _on_request_finished(self, callback, method, body, headers,
                             follow_redirects, redirects_remaining):
        """ Handle redirects and call the callback. """
        reply = self.sender()
        try:
            if not follow_redirects:
                callback()
                return
            if not redirects_remaining:
                callback()  # XXX: should it be an error?
                return
            redirect_url = reply.attribute(QNetworkRequest.RedirectionTargetAttribute)
            if redirect_url is None:  # no redirect
                callback()
                return
            # handle redirects after POST request
            if method.upper() == "POST":
                # Browsers commonly re-issue the redirect target as GET
                # without the original body; mirror that here.
                method = "GET"
                body = None
            redirect_url = reply.url().resolved(redirect_url)
            self.request(
                url=redirect_url,
                callback=callback,
                method=method,
                body=body,
                headers=headers,
                follow_redirects=follow_redirects,
                max_redirects=redirects_remaining-1,
            )
        finally:
            # Always release the finished reply, even if callback raised.
            self._delete_reply(reply)

    def _return_reply(self, callback, url):
        # Adapter: deliver the finished reply object to the user callback.
        reply = self.sender()
        callback(reply)

    def _set_request_headers(self, request, headers):
        """ Set HTTP headers for the request. """
        if isinstance(headers, dict):
            headers = headers.items()
        for name, value in headers or []:
            request.setRawHeader(to_bytes(name), to_bytes(value))
            if name.lower() == 'user-agent':
                # Keep the page-level UA in sync with the explicit header.
                self.set_user_agent(value)

    def _delete_reply(self, reply):
        self._replies.remove(reply)
        reply.close()
        reply.deleteLater()
class _JavascriptConsole(QObject):
    """ Collects messages logged by in-page JavaScript via console.log(). """

    def __init__(self, parent=None):
        # Captured console messages, in arrival order (text type).
        self.messages = []
        super(_JavascriptConsole, self).__init__(parent)

    @pyqtSlot(str)
    def log(self, message):
        """ Slot invoked from JS; store the message as text. """
        self.messages.append(six.text_type(message))
class _BrowserTabLogger(object):
    """ This class logs various events that happen with QWebPage """
    def __init__(self, uid, verbosity):
        # uid: identifier prefixed to every log line;
        # verbosity: threshold used both for wiring signals and filtering.
        self.uid = uid
        self.verbosity = verbosity

    def add_web_page(self, web_page):
        # Connect per-page and per-frame signals according to verbosity;
        # noisier signals require a higher verbosity level.
        frame = web_page.mainFrame()
        # setup logging
        if self.verbosity >= 4:
            web_page.loadStarted.connect(self.on_load_started)
            frame.loadFinished.connect(self.on_frame_load_finished)
            frame.loadStarted.connect(self.on_frame_load_started)
            frame.contentsSizeChanged.connect(self.on_contents_size_changed)
            # TODO: on_repaint
        if self.verbosity >= 3:
            frame.javaScriptWindowObjectCleared.connect(self.on_javascript_window_object_cleared)
            frame.initialLayoutCompleted.connect(self.on_initial_layout_completed)
            frame.urlChanged.connect(self.on_url_changed)

    def on_load_started(self):
        self.log("loadStarted")

    def on_frame_load_finished(self, ok):
        self.log("mainFrame().LoadFinished %s" % ok)

    def on_frame_load_started(self):
        self.log("mainFrame().loadStarted")

    @pyqtSlot('QSize')
    def on_contents_size_changed(self, sz):
        self.log("mainFrame().contentsSizeChanged: %s" % sz)

    def on_javascript_window_object_cleared(self):
        self.log("mainFrame().javaScriptWindowObjectCleared")

    def on_initial_layout_completed(self):
        self.log("mainFrame().initialLayoutCompleted")

    def on_url_changed(self, url):
        self.log("mainFrame().urlChanged %s" % qurl2ascii(url))

    def log(self, message, min_level=None):
        # Drop messages below the verbosity threshold; escape non-ASCII
        # so log output stays plain ASCII; prefix with the tab uid.
        if min_level is not None and self.verbosity < min_level:
            return
        if isinstance(message, six.text_type):
            message = message.encode('unicode-escape').decode('ascii')
        message = "[%s] %s" % (self.uid, message)
        log.msg(message, system='render')
class OneShotCallbackProxy(QObject):
    """
    A proxy object that allows JavaScript to run Python callbacks.
    This creates a JavaScript-compatible object (can be added to `window`)
    that has functions `resume()` and `error()` that can be connected to
    Python callbacks.
    It is "one shot" because either `resume()` or `error()` should be called
    exactly _once_. It raises an exception if the combined number of calls
    to these methods is greater than 1.
    If timeout is zero, then the timeout is disabled.
    """
    def __init__(self, parent, callback, errback, timeout=0):
        # Unique window-global name under which JS sees this proxy.
        self.name = str(uuid.uuid1())
        self._used_up = False
        self._callback = callback
        self._errback = errback
        if timeout < 0:
            raise ValueError('OneShotCallbackProxy timeout must be >= 0.')
        elif timeout == 0:
            self._timer = None
        elif timeout > 0:
            # Single-shot watchdog: fires _timed_out if neither resume()
            # nor error() arrives within `timeout` seconds.
            self._timer = QTimer()
            self._timer.setSingleShot(True)
            self._timer.timeout.connect(self._timed_out)
            self._timer.start(timeout * 1000)
        # Parent is set last so the QObject is fully initialised first.
        super(OneShotCallbackProxy, self).__init__(parent)

    @pyqtSlot('QVariantMap')
    def resume(self, value=None):
        if self._used_up:
            raise OneShotCallbackError("resume() called on a one shot"
                                       " callback that was already used up.")
        self.use_up()
        self._callback(qt2py(value))

    @pyqtSlot(str, bool)
    def error(self, message, raise_=False):
        if self._used_up:
            raise OneShotCallbackError("error() called on a one shot"
                                       " callback that was already used up.")
        self.use_up()
        self._errback(message, raise_)

    def cancel(self, reason):
        # Administrative cancellation (e.g. page teardown): mark used up
        # and report via the errback without raising in JS.
        self.use_up()
        self._errback("One shot callback canceled due to: %s." % reason,
                      raise_=False)

    def _timed_out(self):
        self.use_up()
        self._errback("One shot callback timed out while waiting for"
                      " resume() or error().", raise_=False)

    def use_up(self):
        # Idempotent: marks the proxy consumed and stops the watchdog.
        self._used_up = True
        if self._timer is not None and self._timer.isActive():
            self._timer.stop()
| bsd-3-clause |
jrutila/django-scheduler | schedule/periods.py | 5 | 14879 | from __future__ import unicode_literals
from six.moves.builtins import range
from six.moves.builtins import object
import pytz
import datetime
import calendar as standardlib_calendar
from django.conf import settings
from django.utils.translation import ugettext
from django.utils.encoding import python_2_unicode_compatible
from django.template.defaultfilters import date as date_filter
from django.utils.dates import WEEKDAYS, WEEKDAYS_ABBR
from schedule.conf.settings import SHOW_CANCELLED_OCCURRENCES
from schedule.models import Occurrence
from django.utils import timezone
# Build ordered weekday name/abbreviation lists, rotated to honour the
# project's FIRST_DAY_OF_WEEK setting (evaluated once at import time).
weekday_names = []
weekday_abbrs = []
if settings.FIRST_DAY_OF_WEEK == 1:
    # The calendar week starts on Monday
    for i in range(7):
        weekday_names.append(WEEKDAYS[i])
        weekday_abbrs.append(WEEKDAYS_ABBR[i])
else:
    # The calendar week starts on Sunday, not Monday
    weekday_names.append(WEEKDAYS[6])
    weekday_abbrs.append(WEEKDAYS_ABBR[6])
    for i in range(6):
        weekday_names.append(WEEKDAYS[i])
        weekday_abbrs.append(WEEKDAYS_ABBR[i])
class Period(object):
    """
    This class represents a period of time. It can return a set of occurrences
    based on its events, and its time period (start and end).

    Start and end are stored internally in UTC (``utc_start``/``utc_end``);
    the ``start``/``end`` properties convert back to the period's timezone,
    or return naive datetimes when timezone support is disabled.
    """
    def __init__(self, events, start, end, parent_persisted_occurrences=None,
                 occurrence_pool=None, tzinfo=pytz.utc):
        self.utc_start = self._normalize_timezone_to_utc(start, tzinfo)
        self.utc_end = self._normalize_timezone_to_utc(end, tzinfo)
        self.events = events
        self.tzinfo = self._get_tzinfo(tzinfo)
        self.occurrence_pool = occurrence_pool
        if parent_persisted_occurrences is not None:
            # Inherit the parent period's cached persisted occurrences so
            # sub-periods don't re-query the database.
            self._persisted_occurrences = parent_persisted_occurrences

    def _normalize_timezone_to_utc(self, point_in_time, tzinfo):
        """Return ``point_in_time`` expressed in UTC (aware when USE_TZ)."""
        if point_in_time.tzinfo is not None:
            return point_in_time.astimezone(pytz.utc)
        if tzinfo is not None:
            return tzinfo.localize(point_in_time).astimezone(pytz.utc)
        if settings.USE_TZ:
            return pytz.utc.localize(point_in_time)
        else:
            if timezone.is_aware(point_in_time):
                return timezone.make_naive(point_in_time, pytz.utc)
            else:
                return point_in_time

    def __eq__(self, period):
        return self.utc_start == period.utc_start and self.utc_end == period.utc_end and self.events == period.events

    def __ne__(self, period):
        return self.utc_start != period.utc_start or self.utc_end != period.utc_end or self.events != period.events

    def _get_tzinfo(self, tzinfo):
        # Timezone info is only meaningful when USE_TZ is enabled.
        return tzinfo if settings.USE_TZ else None

    def _get_sorted_occurrences(self):
        """Collect occurrences overlapping this period, sorted by time."""
        occurrences = []
        if hasattr(self, "occurrence_pool") and self.occurrence_pool is not None:
            # A pre-computed pool exists (e.g. from a parent period):
            # filter it instead of recomputing from the events.
            for occurrence in self.occurrence_pool:
                if occurrence.start <= self.utc_end and occurrence.end >= self.utc_start:
                    occurrences.append(occurrence)
            return occurrences
        for event in self.events:
            event_occurrences = event.get_occurrences(self.start, self.end)
            occurrences += event_occurrences
        return sorted(occurrences)

    def cached_get_sorted_occurrences(self):
        # Memoize the (potentially expensive) occurrence computation.
        if hasattr(self, '_occurrences'):
            return self._occurrences
        occs = self._get_sorted_occurrences()
        self._occurrences = occs
        return occs
    occurrences = property(cached_get_sorted_occurrences)

    def get_persisted_occurrences(self):
        # BUG FIX: the hasattr() check used the misspelled attribute name
        # '_persisted_occurrenes', so the cache never hit and the queryset
        # was rebuilt on every call.
        if hasattr(self, '_persisted_occurrences'):
            return self._persisted_occurrences
        else:
            self._persisted_occurrences = Occurrence.objects.filter(event__in=self.events)
            return self._persisted_occurrences

    def classify_occurrence(self, occurrence):
        """
        Classify how ``occurrence`` relates to this period.

        Returns None when the occurrence is outside the period (or
        cancelled and hidden); otherwise a dict with a 'class' key:
        1 = started and ended inside, 0 = only started inside,
        3 = only ended inside, 2 = spans the whole period.
        """
        if occurrence.cancelled and not SHOW_CANCELLED_OCCURRENCES:
            return
        if occurrence.start > self.end or occurrence.end < self.start:
            return None
        started = False
        ended = False
        if self.utc_start <= occurrence.start < self.utc_end:
            started = True
        if self.utc_start <= occurrence.end < self.utc_end:
            ended = True
        if started and ended:
            return {'occurrence': occurrence, 'class': 1}
        elif started:
            return {'occurrence': occurrence, 'class': 0}
        elif ended:
            return {'occurrence': occurrence, 'class': 3}
        # it existed during this period but it didn't begin or end within it
        # so it must have just continued
        return {'occurrence': occurrence, 'class': 2}

    def get_occurrence_partials(self):
        """Return classified-occurrence dicts for occurrences in period."""
        occurrence_dicts = []
        for occurrence in self.occurrences:
            occurrence = self.classify_occurrence(occurrence)
            if occurrence:
                occurrence_dicts.append(occurrence)
        return occurrence_dicts

    def get_occurrences(self):
        return self.occurrences

    def has_occurrences(self):
        return any(self.classify_occurrence(o) for o in self.occurrences)

    def get_time_slot(self, start, end):
        """Return a sub-Period for [start, end], or None if out of range."""
        if start >= self.start and end <= self.end:
            return Period(self.events, start, end)
        return None

    def create_sub_period(self, cls, start=None, tzinfo=None):
        # NOTE: ``cls`` is expected to be a Period subclass whose __init__
        # signature is (events, date, parent_persisted_occurrences,
        # occurrence_pool, tzinfo) — e.g. Month/Week/Day.
        if tzinfo is None:
            tzinfo = self.tzinfo
        start = start or self.start
        return cls(self.events, start, self.get_persisted_occurrences(), self.occurrences, tzinfo)

    def get_periods(self, cls, tzinfo=None):
        """Yield consecutive ``cls`` sub-periods covering this period."""
        if tzinfo is None:
            tzinfo = self.tzinfo
        period = self.create_sub_period(cls)
        while period.start < self.end:
            yield self.create_sub_period(cls, period.start, tzinfo)
            period = next(period)

    @property
    def start(self):
        # Period start in local tz (aware) or naive when USE_TZ is off.
        if self.tzinfo is not None:
            return self.utc_start.astimezone(self.tzinfo)
        return self.utc_start.replace(tzinfo=None)

    @property
    def end(self):
        # Period end in local tz (aware) or naive when USE_TZ is off.
        if self.tzinfo is not None:
            return self.utc_end.astimezone(self.tzinfo)
        return self.utc_end.replace(tzinfo=None)
@python_2_unicode_compatible
class Year(Period):
    """A one-year period (January 1st to the next January 1st)."""
    def __init__(self, events, date=None, parent_persisted_occurrences=None, tzinfo=pytz.utc):
        self.tzinfo = self._get_tzinfo(tzinfo)
        if date is None:
            date = timezone.now()
        start, end = self._get_year_range(date)
        super(Year, self).__init__(events, start, end, parent_persisted_occurrences, tzinfo=tzinfo)

    def get_months(self):
        """Yield the twelve Month sub-periods of this year."""
        return self.get_periods(Month)

    def next_year(self):
        return Year(self.events, self.end, tzinfo=self.tzinfo)
    next = __next__ = next_year

    def prev_year(self):
        start = datetime.datetime(self.start.year - 1, self.start.month, self.start.day)
        return Year(self.events, start, tzinfo=self.tzinfo)
    prev = prev_year

    def _get_year_range(self, year):
        """Return (start, end) datetimes spanning the year of ``year``."""
        # If tzinfo is not none get the local start of the year and convert it to utc.
        naive_start = datetime.datetime(year.year, datetime.datetime.min.month, datetime.datetime.min.day)
        naive_end = datetime.datetime(year.year + 1, datetime.datetime.min.month, datetime.datetime.min.day)
        start = naive_start
        end = naive_end
        if self.tzinfo is not None:
            local_start = self.tzinfo.localize(naive_start)
            local_end = self.tzinfo.localize(naive_end)
            start = local_start.astimezone(pytz.utc)
            end = local_end.astimezone(pytz.utc)
        return start, end

    def __str__(self):
        # BUG FIX: __str__ must return text; it previously returned the
        # bare int self.start.year, which makes str(year_obj) raise
        # TypeError (__str__ returned non-string).
        return str(self.start.year)
@python_2_unicode_compatible
class Month(Period):
    """
    The month period has functions for retrieving the week periods within this period
    and day periods within the date.
    """
    def __init__(self, events, date=None, parent_persisted_occurrences=None,
                 occurrence_pool=None, tzinfo=pytz.utc):
        self.tzinfo = self._get_tzinfo(tzinfo)
        if date is None:
            date = timezone.now()
        start, end = self._get_month_range(date)
        super(Month, self).__init__(events, start, end,
                                    parent_persisted_occurrences, occurrence_pool, tzinfo=tzinfo)

    def get_weeks(self):
        return self.get_periods(Week)

    def get_days(self):
        return self.get_periods(Day)

    def get_day(self, daynumber):
        # daynumber is 1-based: get_day(1) is the first day of the month.
        date = self.start
        if daynumber > 1:
            date += datetime.timedelta(days=daynumber - 1)
        return self.create_sub_period(Day, date)

    def next_month(self):
        return Month(self.events, self.end, tzinfo=self.tzinfo)
    next = __next__ = next_month

    def prev_month(self):
        # Step back one day from the month start, then snap to day 1.
        # NOTE(review): .replace(tzinfo=...) with a pytz zone skips pytz's
        # localize() normalization — presumably acceptable here since the
        # codebase does the same elsewhere; confirm for DST edge dates.
        start = (self.start - datetime.timedelta(days=1)).replace(day=1, tzinfo=self.tzinfo)
        return Month(self.events, start, tzinfo=self.tzinfo)
    prev = prev_month

    def current_year(self):
        return Year(self.events, self.start, tzinfo=self.tzinfo)

    def prev_year(self):
        start = datetime.datetime.min.replace(year=self.start.year - 1, tzinfo=self.tzinfo)
        return Year(self.events, start, tzinfo=self.tzinfo)

    def next_year(self):
        start = datetime.datetime.min.replace(year=self.start.year + 1, tzinfo=self.tzinfo)
        return Year(self.events, start, tzinfo=self.tzinfo)

    def _get_month_range(self, month):
        # Return (start, end) covering the calendar month of ``month``,
        # converted to UTC when a timezone is configured.
        year = month.year
        month = month.month
        # If tzinfo is not none get the local start of the month and convert it to utc.
        naive_start = datetime.datetime.min.replace(year=year, month=month)
        if month == 12:
            # December rolls over into January of the next year.
            naive_end = datetime.datetime.min.replace(month=1, year=year + 1, day=1)
        else:
            naive_end = datetime.datetime.min.replace(month=month + 1, year=year, day=1)
        start = naive_start
        end = naive_end
        if self.tzinfo is not None:
            local_start = self.tzinfo.localize(naive_start)
            local_end = self.tzinfo.localize(naive_end)
            start = local_start.astimezone(pytz.utc)
            end = local_end.astimezone(pytz.utc)
        return start, end

    def __str__(self):
        return self.name()

    def name(self):
        # English month name from the stdlib calendar module.
        return standardlib_calendar.month_name[self.start.month]

    def year(self):
        return self.start.year
@python_2_unicode_compatible
class Week(Period):
    """
    The Week period that has functions for retrieving Day periods within it
    """
    def __init__(self, events, date=None, parent_persisted_occurrences=None,
                 occurrence_pool=None, tzinfo=pytz.utc):
        self.tzinfo = self._get_tzinfo(tzinfo)
        if date is None:
            date = timezone.now()
        start, end = self._get_week_range(date)
        super(Week, self).__init__(events, start, end,
                                   parent_persisted_occurrences, occurrence_pool, tzinfo=tzinfo)

    def prev_week(self):
        return Week(self.events, self.start - datetime.timedelta(days=7), tzinfo=self.tzinfo)
    prev = prev_week

    def next_week(self):
        return Week(self.events, self.end, tzinfo=self.tzinfo)
    next = __next__ = next_week

    def current_month(self):
        return Month(self.events, self.start, tzinfo=self.tzinfo)

    def current_year(self):
        return Year(self.events, self.start, tzinfo=self.tzinfo)

    def get_days(self):
        return self.get_periods(Day)

    def _get_week_range(self, week):
        # Return (start, end) covering the calendar week containing
        # ``week``, honouring FIRST_DAY_OF_WEEK, converted to UTC when a
        # timezone is configured.
        if isinstance(week, datetime.datetime):
            week = week.date()
        # Adjust the start datetime to midnight of the week datetime
        naive_start = datetime.datetime.combine(week, datetime.time.min)
        # Adjust the start datetime to Monday or Sunday of the current week
        if settings.FIRST_DAY_OF_WEEK == 1:
            # The week begins on Monday
            sub_days = naive_start.isoweekday() - 1
        else:
            # The week begins on Sunday
            sub_days = naive_start.isoweekday()
            if sub_days == 7:
                sub_days = 0
        if sub_days > 0:
            naive_start = naive_start - datetime.timedelta(days=sub_days)
        naive_end = naive_start + datetime.timedelta(days=7)
        if self.tzinfo is not None:
            local_start = self.tzinfo.localize(naive_start)
            local_end = self.tzinfo.localize(naive_end)
            start = local_start.astimezone(pytz.utc)
            end = local_end.astimezone(pytz.utc)
        else:
            start = naive_start
            end = naive_end
        return start, end

    def __str__(self):
        date_format = 'l, %s' % settings.DATE_FORMAT
        return ugettext('Week: %(start)s-%(end)s') % {
            'start': date_filter(self.start, date_format),
            'end': date_filter(self.end, date_format),
        }
@python_2_unicode_compatible
class Day(Period):
    """A single-day period (local midnight to the next local midnight)."""
    def __init__(self, events, date=None, parent_persisted_occurrences=None,
                 occurrence_pool=None, tzinfo=pytz.utc):
        self.tzinfo = self._get_tzinfo(tzinfo)
        if date is None:
            date = timezone.now()
        start, end = self._get_day_range(date)
        super(Day, self).__init__(events, start, end,
                                  parent_persisted_occurrences, occurrence_pool, tzinfo=tzinfo)

    def _get_day_range(self, date):
        # Return (start, end) covering the calendar day of ``date``,
        # converted to UTC when a timezone is configured.
        if isinstance(date, datetime.datetime):
            date = date.date()
        naive_start = datetime.datetime.combine(date, datetime.time.min)
        naive_end = datetime.datetime.combine(date + datetime.timedelta(days=1), datetime.time.min)
        if self.tzinfo is not None:
            local_start = self.tzinfo.localize(naive_start)
            local_end = self.tzinfo.localize(naive_end)
            start = local_start.astimezone(pytz.utc)
            end = local_end.astimezone(pytz.utc)
        else:
            start = naive_start
            end = naive_end
        return start, end

    def __str__(self):
        date_format = 'l, %s' % settings.DATE_FORMAT
        return ugettext('Day: %(start)s-%(end)s') % {
            'start': date_filter(self.start, date_format),
            'end': date_filter(self.end, date_format),
        }

    def prev_day(self):
        return Day(self.events, self.start - datetime.timedelta(days=1), tzinfo=self.tzinfo)
    prev = prev_day

    def next_day(self):
        return Day(self.events, self.end, tzinfo=self.tzinfo)
    next = __next__ = next_day

    def current_year(self):
        return Year(self.events, self.start, tzinfo=self.tzinfo)

    def current_month(self):
        return Month(self.events, self.start, tzinfo=self.tzinfo)

    def current_week(self):
        return Week(self.events, self.start, tzinfo=self.tzinfo)
| bsd-3-clause |
chethenry/pyOBD | obd2_codes.py | 16 | 120616 | #!/usr/bin/env python
###########################################################################
# obd_sensors.py
#
# Copyright 2004 Donour Sizemore (donour@uchicago.edu)
# Copyright 2009 Secons Ltd. (www.obdtester.com)
#
# This file is part of pyOBD.
#
# pyOBD is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# pyOBD is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with pyOBD; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
###########################################################################
pcodes = {
"P0001": "Fuel Volume Regulator Control Circuit/Open",
"P0002": "Fuel Volume Regulator Control Circuit Range/Performance",
"P0003": "Fuel Volume Regulator Control Circuit Low",
"P0004": "Fuel Volume Regulator Control Circuit High",
"P0005": "Fuel Shutoff Valve 'A' Control Circuit/Open",
"P0006": "Fuel Shutoff Valve 'A' Control Circuit Low",
"P0007": "Fuel Shutoff Valve 'A' Control Circuit High",
"P0008": "Engine Position System Performance",
"P0009": "Engine Position System Performance",
"P0010": "'A' Camshaft Position Actuator Circuit",
"P0011": "'A' Camshaft Position - Timing Over-Advanced or System Performance",
"P0012": "'A' Camshaft Position - Timing Over-Retarded",
"P0013": "'B' Camshaft Position - Actuator Circuit",
"P0014": "'B' Camshaft Position - Timing Over-Advanced or System Performance",
"P0015": "'B' Camshaft Position - Timing Over-Retarded",
"P0016": "Crankshaft Position - Camshaft Position Correlation",
"P0017": "Crankshaft Position - Camshaft Position Correlation",
"P0018": "Crankshaft Position - Camshaft Position Correlation",
"P0019": "Crankshaft Position - Camshaft Position Correlation",
"P0020": "'A' Camshaft Position Actuator Circuit",
"P0021": "'A' Camshaft Position - Timing Over-Advanced or System Performance",
"P0022": "'A' Camshaft Position - Timing Over-Retarded",
"P0023": "'B' Camshaft Position - Actuator Circuit",
"P0024": "'B' Camshaft Position - Timing Over-Advanced or System Performance",
"P0025": "'B' Camshaft Position - Timing Over-Retarded",
"P0026": "Intake Valve Control Solenoid Circuit Range/Performance",
"P0027": "Exhaust Valve Control Solenoid Circuit Range/Performance",
"P0028": "Intake Valve Control Solenoid Circuit Range/Performance",
"P0029": "Exhaust Valve Control Solenoid Circuit Range/Performance",
"P0030": "HO2S Heater Control Circuit",
"P0031": "HO2S Heater Control Circuit Low",
"P0032": "HO2S Heater Control Circuit High",
"P0033": "Turbo Charger Bypass Valve Control Circuit",
"P0034": "Turbo Charger Bypass Valve Control Circuit Low",
"P0035": "Turbo Charger Bypass Valve Control Circuit High",
"P0036": "HO2S Heater Control Circuit",
"P0037": "HO2S Heater Control Circuit Low",
"P0038": "HO2S Heater Control Circuit High",
"P0039": "Turbo/Super Charger Bypass Valve Control Circuit Range/Performance",
"P0040": "O2 Sensor Signals Swapped Bank 1 Sensor 1/ Bank 2 Sensor 1",
"P0041": "O2 Sensor Signals Swapped Bank 1 Sensor 2/ Bank 2 Sensor 2",
"P0042": "HO2S Heater Control Circuit",
"P0043": "HO2S Heater Control Circuit Low",
"P0044": "HO2S Heater Control Circuit High",
"P0045": "Turbo/Super Charger Boost Control Solenoid Circuit/Open",
"P0046": "Turbo/Super Charger Boost Control Solenoid Circuit Range/Performance",
"P0047": "Turbo/Super Charger Boost Control Solenoid Circuit Low",
"P0048": "Turbo/Super Charger Boost Control Solenoid Circuit High",
"P0049": "Turbo/Super Charger Turbine Overspeed",
"P0050": "HO2S Heater Control Circuit",
"P0051": "HO2S Heater Control Circuit Low",
"P0052": "HO2S Heater Control Circuit High",
"P0053": "HO2S Heater Resistance",
"P0054": "HO2S Heater Resistance",
"P0055": "HO2S Heater Resistance",
"P0056": "HO2S Heater Control Circuit",
"P0057": "HO2S Heater Control Circuit Low",
"P0058": "HO2S Heater Control Circuit High",
"P0059": "HO2S Heater Resistance",
"P0060": "HO2S Heater Resistance",
"P0061": "HO2S Heater Resistance",
"P0062": "HO2S Heater Control Circuit",
"P0063": "HO2S Heater Control Circuit Low",
"P0064": "HO2S Heater Control Circuit High",
"P0065": "Air Assisted Injector Control Range/Performance",
"P0066": "Air Assisted Injector Control Circuit or Circuit Low",
"P0067": "Air Assisted Injector Control Circuit High",
"P0068": "MAP/MAF - Throttle Position Correlation",
"P0069": "Manifold Absolute Pressure - Barometric Pressure Correlation",
"P0070": "Ambient Air Temperature Sensor Circuit",
"P0071": "Ambient Air Temperature Sensor Range/Performance",
"P0072": "Ambient Air Temperature Sensor Circuit Low",
"P0073": "Ambient Air Temperature Sensor Circuit High",
"P0074": "Ambient Air Temperature Sensor Circuit Intermittent",
"P0075": "Intake Valve Control Solenoid Circuit",
"P0076": "Intake Valve Control Solenoid Circuit Low",
"P0077": "Intake Valve Control Solenoid Circuit High",
"P0078": "Exhaust Valve Control Solenoid Circuit",
"P0079": "Exhaust Valve Control Solenoid Circuit Low",
"P0080": "Exhaust Valve Control Solenoid Circuit High",
"P0081": "Intake Valve Control Solenoid Circuit",
"P0082": "Intake Valve Control Solenoid Circuit Low",
"P0083": "Intake Valve Control Solenoid Circuit High",
"P0084": "Exhaust Valve Control Solenoid Circuit",
"P0085": "Exhaust Valve Control Solenoid Circuit Low",
"P0086": "Exhaust Valve Control Solenoid Circuit High",
"P0087": "Fuel Rail/System Pressure - Too Low",
"P0088": "Fuel Rail/System Pressure - Too High",
"P0089": "Fuel Pressure Regulator 1 Performance",
"P0090": "Fuel Pressure Regulator 1 Control Circuit",
"P0091": "Fuel Pressure Regulator 1 Control Circuit Low",
"P0092": "Fuel Pressure Regulator 1 Control Circuit High",
"P0093": "Fuel System Leak Detected - Large Leak",
"P0094": "Fuel System Leak Detected - Small Leak",
"P0095": "Intake Air Temperature Sensor 2 Circuit",
"P0096": "Intake Air Temperature Sensor 2 Circuit Range/Performance",
"P0097": "Intake Air Temperature Sensor 2 Circuit Low",
"P0098": "Intake Air Temperature Sensor 2 Circuit High",
"P0099": "Intake Air Temperature Sensor 2 Circuit Intermittent/Erratic",
"P0100": "Mass or Volume Air Flow Circuit",
"P0101": "Mass or Volume Air Flow Circuit Range/Performance",
"P0102": "Mass or Volume Air Flow Circuit Low Input",
"P0103": "Mass or Volume Air Flow Circuit High Input",
"P0104": "Mass or Volume Air Flow Circuit Intermittent",
"P0105": "Manifold Absolute Pressure/Barometric Pressure Circuit",
"P0106": "Manifold Absolute Pressure/Barometric Pressure Circuit Range/Performance",
"P0107": "Manifold Absolute Pressure/Barometric Pressure Circuit Low Input",
"P0108": "Manifold Absolute Pressure/Barometric Pressure Circuit High Input",
"P0109": "Manifold Absolute Pressure/Barometric Pressure Circuit Intermittent",
"P0110": "Intake Air Temperature Sensor 1 Circuit",
"P0111": "Intake Air Temperature Sensor 1 Circuit Range/Performance",
"P0112": "Intake Air Temperature Sensor 1 Circuit Low",
"P0113": "Intake Air Temperature Sensor 1 Circuit High",
"P0114": "Intake Air Temperature Sensor 1 Circuit Intermittent",
"P0115": "Engine Coolant Temperature Circuit",
"P0116": "Engine Coolant Temperature Circuit Range/Performance",
"P0117": "Engine Coolant Temperature Circuit Low",
"P0118": "Engine Coolant Temperature Circuit High",
"P0119": "Engine Coolant Temperature Circuit Intermittent",
"P0120": "Throttle/Pedal Position Sensor/Switch 'A' Circuit",
"P0121": "Throttle/Pedal Position Sensor/Switch 'A' Circuit Range/Performance",
"P0122": "Throttle/Pedal Position Sensor/Switch 'A' Circuit Low",
"P0123": "Throttle/Pedal Position Sensor/Switch 'A' Circuit High",
"P0124": "Throttle/Pedal Position Sensor/Switch 'A' Circuit Intermittent",
"P0125": "Insufficient Coolant Temperature for Closed Loop Fuel Control",
"P0126": "Insufficient Coolant Temperature for Stable Operation",
"P0127": "Intake Air Temperature Too High",
"P0128": "Coolant Thermostat (Coolant Temperature Below Thermostat Regulating Temperature)",
"P0129": "Barometric Pressure Too Low",
"P0130": "O2 Sensor Circuit",
"P0131": "O2 Sensor Circuit Low Voltage",
"P0132": "O2 Sensor Circuit High Voltage",
"P0133": "O2 Sensor Circuit Slow Response",
"P0134": "O2 Sensor Circuit No Activity Detected",
"P0135": "O2 Sensor Heater Circuit",
"P0136": "O2 Sensor Circuit",
"P0137": "O2 Sensor Circuit Low Voltage",
"P0138": "O2 Sensor Circuit High Voltage",
"P0139": "O2 Sensor Circuit Slow Response",
"P0140": "O2 Sensor Circuit No Activity Detected",
"P0141": "O2 Sensor Heater Circuit",
"P0142": "O2 Sensor Circuit",
"P0143": "O2 Sensor Circuit Low Voltage",
"P0144": "O2 Sensor Circuit High Voltage",
"P0145": "O2 Sensor Circuit Slow Response",
"P0146": "O2 Sensor Circuit No Activity Detected",
"P0147": "O2 Sensor Heater Circuit",
"P0148": "Fuel Delivery Error",
"P0149": "Fuel Timing Error",
"P0150": "O2 Sensor Circuit",
"P0151": "O2 Sensor Circuit Low Voltage",
"P0152": "O2 Sensor Circuit High Voltage",
"P0153": "O2 Sensor Circuit Slow Response",
"P0154": "O2 Sensor Circuit No Activity Detected",
"P0155": "O2 Sensor Heater Circuit",
"P0156": "O2 Sensor Circuit",
"P0157": "O2 Sensor Circuit Low Voltage",
"P0158": "O2 Sensor Circuit High Voltage",
"P0159": "O2 Sensor Circuit Slow Response",
"P0160": "O2 Sensor Circuit No Activity Detected",
"P0161": "O2 Sensor Heater Circuit",
"P0162": "O2 Sensor Circuit",
"P0163": "O2 Sensor Circuit Low Voltage",
"P0164": "O2 Sensor Circuit High Voltage",
"P0165": "O2 Sensor Circuit Slow Response",
"P0166": "O2 Sensor Circuit No Activity Detected",
"P0167": "O2 Sensor Heater Circuit",
"P0168": "Fuel Temperature Too High",
"P0169": "Incorrect Fuel Composition",
"P0170": "Fuel Trim",
"P0171": "System Too Lean",
"P0172": "System Too Rich",
"P0173": "Fuel Trim",
"P0174": "System Too Lean",
"P0175": "System Too Rich",
"P0176": "Fuel Composition Sensor Circuit",
"P0177": "Fuel Composition Sensor Circuit Range/Performance",
"P0178": "Fuel Composition Sensor Circuit Low",
"P0179": "Fuel Composition Sensor Circuit High",
"P0180": "Fuel Temperature Sensor A Circuit",
"P0181": "Fuel Temperature Sensor A Circuit Range/Performance",
"P0182": "Fuel Temperature Sensor A Circuit Low",
"P0183": "Fuel Temperature Sensor A Circuit High",
"P0184": "Fuel Temperature Sensor A Circuit Intermittent",
"P0185": "Fuel Temperature Sensor B Circuit",
"P0186": "Fuel Temperature Sensor B Circuit Range/Performance",
"P0187": "Fuel Temperature Sensor B Circuit Low",
"P0188": "Fuel Temperature Sensor B Circuit High",
"P0189": "Fuel Temperature Sensor B Circuit Intermittent",
"P0190": "Fuel Rail Pressure Sensor Circuit",
"P0191": "Fuel Rail Pressure Sensor Circuit Range/Performance",
"P0192": "Fuel Rail Pressure Sensor Circuit Low",
"P0193": "Fuel Rail Pressure Sensor Circuit High",
"P0194": "Fuel Rail Pressure Sensor Circuit Intermittent",
"P0195": "Engine Oil Temperature Sensor",
"P0196": "Engine Oil Temperature Sensor Range/Performance",
"P0197": "Engine Oil Temperature Sensor Low",
"P0198": "Engine Oil Temperature Sensor High",
"P0199": "Engine Oil Temperature Sensor Intermittent",
"P0200": "Injector Circuit/Open",
"P0201": "Injector Circuit/Open - Cylinder 1",
"P0202": "Injector Circuit/Open - Cylinder 2",
"P0203": "Injector Circuit/Open - Cylinder 3",
"P0204": "Injector Circuit/Open - Cylinder 4",
"P0205": "Injector Circuit/Open - Cylinder 5",
"P0206": "Injector Circuit/Open - Cylinder 6",
"P0207": "Injector Circuit/Open - Cylinder 7",
"P0208": "Injector Circuit/Open - Cylinder 8",
"P0209": "Injector Circuit/Open - Cylinder 9",
"P0210": "Injector Circuit/Open - Cylinder 10",
"P0211": "Injector Circuit/Open - Cylinder 11",
"P0212": "Injector Circuit/Open - Cylinder 12",
"P0213": "Cold Start Injector 1",
"P0214": "Cold Start Injector 2",
"P0215": "Engine Shutoff Solenoid",
"P0216": "Injector/Injection Timing Control Circuit",
"P0217": "Engine Coolant Over Temperature Condition",
"P0218": "Transmission Fluid Over Temperature Condition",
"P0219": "Engine Overspeed Condition",
"P0220": "Throttle/Pedal Position Sensor/Switch 'B' Circuit",
"P0221": "Throttle/Pedal Position Sensor/Switch 'B' Circuit Range/Performance",
"P0222": "Throttle/Pedal Position Sensor/Switch 'B' Circuit Low",
"P0223": "Throttle/Pedal Position Sensor/Switch 'B' Circuit High",
"P0224": "Throttle/Pedal Position Sensor/Switch 'B' Circuit Intermittent",
"P0225": "Throttle/Pedal Position Sensor/Switch 'C' Circuit",
"P0226": "Throttle/Pedal Position Sensor/Switch 'C' Circuit Range/Performance",
"P0227": "Throttle/Pedal Position Sensor/Switch 'C' Circuit Low",
"P0228": "Throttle/Pedal Position Sensor/Switch 'C' Circuit High",
"P0229": "Throttle/Pedal Position Sensor/Switch 'C' Circuit Intermittent",
"P0230": "Fuel Pump Primary Circuit",
"P0231": "Fuel Pump Secondary Circuit Low",
"P0232": "Fuel Pump Secondary Circuit High",
"P0233": "Fuel Pump Secondary Circuit Intermittent",
"P0234": "Turbo/Super Charger Overboost Condition",
"P0235": "Turbo/Super Charger Boost Sensor 'A' Circuit",
"P0236": "Turbo/Super Charger Boost Sensor 'A' Circuit Range/Performance",
"P0237": "Turbo/Super Charger Boost Sensor 'A' Circuit Low",
"P0238": "Turbo/Super Charger Boost Sensor 'A' Circuit High",
"P0239": "Turbo/Super Charger Boost Sensor 'B' Circuit",
"P0240": "Turbo/Super Charger Boost Sensor 'B' Circuit Range/Performance",
"P0241": "Turbo/Super Charger Boost Sensor 'B' Circuit Low",
"P0242": "Turbo/Super Charger Boost Sensor 'B' Circuit High",
"P0243": "Turbo/Super Charger Wastegate Solenoid 'A'",
"P0244": "Turbo/Super Charger Wastegate Solenoid 'A' Range/Performance",
"P0245": "Turbo/Super Charger Wastegate Solenoid 'A' Low",
"P0246": "Turbo/Super Charger Wastegate Solenoid 'A' High",
"P0247": "Turbo/Super Charger Wastegate Solenoid 'B'",
"P0248": "Turbo/Super Charger Wastegate Solenoid 'B' Range/Performance",
"P0249": "Turbo/Super Charger Wastegate Solenoid 'B' Low",
"P0250": "Turbo/Super Charger Wastegate Solenoid 'B' High",
"P0251": "Injection Pump Fuel Metering Control 'A' (Cam/Rotor/Injector)",
"P0252": "Injection Pump Fuel Metering Control 'A' Range/Performance (Cam/Rotor/Injector)",
"P0253": "Injection Pump Fuel Metering Control 'A' Low (Cam/Rotor/Injector)",
"P0254": "Injection Pump Fuel Metering Control 'A' High (Cam/Rotor/Injector)",
"P0255": "Injection Pump Fuel Metering Control 'A' Intermittent (Cam/Rotor/Injector)",
"P0256": "Injection Pump Fuel Metering Control 'B' (Cam/Rotor/Injector)",
"P0257": "Injection Pump Fuel Metering Control 'B' Range/Performance (Cam/Rotor/Injector)",
"P0258": "Injection Pump Fuel Metering Control 'B' Low (Cam/Rotor/Injector)",
"P0259": "Injection Pump Fuel Metering Control 'B' High (Cam/Rotor/Injector)",
"P0260": "Injection Pump Fuel Metering Control 'B' Intermittent (Cam/Rotor/Injector)",
"P0261": "Cylinder 1 Injector Circuit Low",
"P0262": "Cylinder 1 Injector Circuit High",
"P0263": "Cylinder 1 Contribution/Balance",
"P0264": "Cylinder 2 Injector Circuit Low",
"P0265": "Cylinder 2 Injector Circuit High",
"P0266": "Cylinder 2 Contribution/Balance",
"P0267": "Cylinder 3 Injector Circuit Low",
"P0268": "Cylinder 3 Injector Circuit High",
"P0269": "Cylinder 3 Contribution/Balance",
"P0270": "Cylinder 4 Injector Circuit Low",
"P0271": "Cylinder 4 Injector Circuit High",
"P0272": "Cylinder 4 Contribution/Balance",
"P0273": "Cylinder 5 Injector Circuit Low",
"P0274": "Cylinder 5 Injector Circuit High",
"P0275": "Cylinder 5 Contribution/Balance",
"P0276": "Cylinder 6 Injector Circuit Low",
"P0277": "Cylinder 6 Injector Circuit High",
"P0278": "Cylinder 6 Contribution/Balance",
"P0279": "Cylinder 7 Injector Circuit Low",
"P0280": "Cylinder 7 Injector Circuit High",
"P0281": "Cylinder 7 Contribution/Balance",
"P0282": "Cylinder 8 Injector Circuit Low",
"P0283": "Cylinder 8 Injector Circuit High",
"P0284": "Cylinder 8 Contribution/Balance",
"P0285": "Cylinder 9 Injector Circuit Low",
"P0286": "Cylinder 9 Injector Circuit High",
"P0287": "Cylinder 9 Contribution/Balance",
"P0288": "Cylinder 10 Injector Circuit Low",
"P0289": "Cylinder 10 Injector Circuit High",
"P0290": "Cylinder 10 Contribution/Balance",
"P0291": "Cylinder 11 Injector Circuit Low",
"P0292": "Cylinder 11 Injector Circuit High",
"P0293": "Cylinder 11 Contribution/Balance",
"P0294": "Cylinder 12 Injector Circuit Low",
"P0295": "Cylinder 12 Injector Circuit High",
"P0296": "Cylinder 12 Contribution/Balance",
"P0297": "Vehicle Overspeed Condition",
"P0298": "Engine Oil Over Temperature",
"P0299": "Turbo/Super Charger Underboost",
"P0300": "Random/Multiple Cylinder Misfire Detected",
"P0301": "Cylinder 1 Misfire Detected",
"P0302": "Cylinder 2 Misfire Detected",
"P0303": "Cylinder 3 Misfire Detected",
"P0304": "Cylinder 4 Misfire Detected",
"P0305": "Cylinder 5 Misfire Detected",
"P0306": "Cylinder 6 Misfire Detected",
"P0307": "Cylinder 7 Misfire Detected",
"P0308": "Cylinder 8 Misfire Detected",
"P0309": "Cylinder 9 Misfire Detected",
"P0310": "Cylinder 10 Misfire Detected",
"P0311": "Cylinder 11 Misfire Detected",
"P0312": "Cylinder 12 Misfire Detected",
"P0313": "Misfire Detected with Low Fuel",
"P0314": "Single Cylinder Misfire (Cylinder not Specified)",
"P0315": "Crankshaft Position System Variation Not Learned",
"P0316": "Engine Misfire Detected on Startup (First 1000 Revolutions)",
"P0317": "Rough Road Hardware Not Present",
"P0318": "Rough Road Sensor 'A' Signal Circuit",
"P0319": "Rough Road Sensor 'B'",
"P0320": "Ignition/Distributor Engine Speed Input Circuit",
"P0321": "Ignition/Distributor Engine Speed Input Circuit Range/Performance",
"P0322": "Ignition/Distributor Engine Speed Input Circuit No Signal",
"P0323": "Ignition/Distributor Engine Speed Input Circuit Intermittent",
"P0324": "Knock Control System Error",
"P0325": "Knock Sensor 1 Circuit",
"P0326": "Knock Sensor 1 Circuit Range/Performance",
"P0327": "Knock Sensor 1 Circuit Low",
"P0328": "Knock Sensor 1 Circuit High",
"P0329": "Knock Sensor 1 Circuit Input Intermittent",
"P0330": "Knock Sensor 2 Circuit",
"P0331": "Knock Sensor 2 Circuit Range/Performance",
"P0332": "Knock Sensor 2 Circuit Low",
"P0333": "Knock Sensor 2 Circuit High",
"P0334": "Knock Sensor 2 Circuit Input Intermittent",
"P0335": "Crankshaft Position Sensor 'A' Circuit",
"P0336": "Crankshaft Position Sensor 'A' Circuit Range/Performance",
"P0337": "Crankshaft Position Sensor 'A' Circuit Low",
"P0338": "Crankshaft Position Sensor 'A' Circuit High",
"P0339": "Crankshaft Position Sensor 'A' Circuit Intermittent",
"P0340": "Camshaft Position Sensor 'A' Circuit",
"P0341": "Camshaft Position Sensor 'A' Circuit Range/Performance",
"P0342": "Camshaft Position Sensor 'A' Circuit Low",
"P0343": "Camshaft Position Sensor 'A' Circuit High",
"P0344": "Camshaft Position Sensor 'A' Circuit Intermittent",
"P0345": "Camshaft Position Sensor 'A' Circuit",
"P0346": "Camshaft Position Sensor 'A' Circuit Range/Performance",
"P0347": "Camshaft Position Sensor 'A' Circuit Low",
"P0348": "Camshaft Position Sensor 'A' Circuit High",
"P0349": "Camshaft Position Sensor 'A' Circuit Intermittent",
"P0350": "Ignition Coil Primary/Secondary Circuit",
"P0351": "Ignition Coil 'A' Primary/Secondary Circuit",
"P0352": "Ignition Coil 'B' Primary/Secondary Circuit",
"P0353": "Ignition Coil 'C' Primary/Secondary Circuit",
"P0354": "Ignition Coil 'D' Primary/Secondary Circuit",
"P0355": "Ignition Coil 'E' Primary/Secondary Circuit",
"P0356": "Ignition Coil 'F' Primary/Secondary Circuit",
"P0357": "Ignition Coil 'G' Primary/Secondary Circuit",
"P0358": "Ignition Coil 'H' Primary/Secondary Circuit",
"P0359": "Ignition Coil 'I' Primary/Secondary Circuit",
"P0360": "Ignition Coil 'J' Primary/Secondary Circuit",
"P0361": "Ignition Coil 'K' Primary/Secondary Circuit",
"P0362": "Ignition Coil 'L' Primary/Secondary Circuit",
"P0363": "Misfire Detected - Fueling Disabled",
"P0364": "Reserved",
"P0365": "Camshaft Position Sensor 'B' Circuit",
"P0366": "Camshaft Position Sensor 'B' Circuit Range/Performance",
"P0367": "Camshaft Position Sensor 'B' Circuit Low",
"P0368": "Camshaft Position Sensor 'B' Circuit High",
"P0369": "Camshaft Position Sensor 'B' Circuit Intermittent",
"P0370": "Timing Reference High Resolution Signal 'A'",
"P0371": "Timing Reference High Resolution Signal 'A' Too Many Pulses",
"P0372": "Timing Reference High Resolution Signal 'A' Too Few Pulses",
"P0373": "Timing Reference High Resolution Signal 'A' Intermittent/Erratic Pulses",
"P0374": "Timing Reference High Resolution Signal 'A' No Pulse",
"P0375": "Timing Reference High Resolution Signal 'B'",
"P0376": "Timing Reference High Resolution Signal 'B' Too Many Pulses",
"P0377": "Timing Reference High Resolution Signal 'B' Too Few Pulses",
"P0378": "Timing Reference High Resolution Signal 'B' Intermittent/Erratic Pulses",
"P0379": "Timing Reference High Resolution Signal 'B' No Pulses",
"P0380": "Glow Plug/Heater Circuit 'A'",
"P0381": "Glow Plug/Heater Indicator Circuit",
"P0382": "Glow Plug/Heater Circuit 'B'",
"P0383": "Reserved by SAE J2012",
"P0384": "Reserved by SAE J2012",
"P0385": "Crankshaft Position Sensor 'B' Circuit",
"P0386": "Crankshaft Position Sensor 'B' Circuit Range/Performance",
"P0387": "Crankshaft Position Sensor 'B' Circuit Low",
"P0388": "Crankshaft Position Sensor 'B' Circuit High",
"P0389": "Crankshaft Position Sensor 'B' Circuit Intermittent",
"P0390": "Camshaft Position Sensor 'B' Circuit",
"P0391": "Camshaft Position Sensor 'B' Circuit Range/Performance",
"P0392": "Camshaft Position Sensor 'B' Circuit Low",
"P0393": "Camshaft Position Sensor 'B' Circuit High",
"P0394": "Camshaft Position Sensor 'B' Circuit Intermittent",
"P0400": "Exhaust Gas Recirculation Flow",
"P0401": "Exhaust Gas Recirculation Flow Insufficient Detected",
"P0402": "Exhaust Gas Recirculation Flow Excessive Detected",
"P0403": "Exhaust Gas Recirculation Control Circuit",
"P0404": "Exhaust Gas Recirculation Control Circuit Range/Performance",
"P0405": "Exhaust Gas Recirculation Sensor 'A' Circuit Low",
"P0406": "Exhaust Gas Recirculation Sensor 'A' Circuit High",
"P0407": "Exhaust Gas Recirculation Sensor 'B' Circuit Low",
"P0408": "Exhaust Gas Recirculation Sensor 'B' Circuit High",
"P0409": "Exhaust Gas Recirculation Sensor 'A' Circuit",
"P0410": "Secondary Air Injection System",
"P0411": "Secondary Air Injection System Incorrect Flow Detected",
"P0412": "Secondary Air Injection System Switching Valve 'A' Circuit",
"P0413": "Secondary Air Injection System Switching Valve 'A' Circuit Open",
"P0414": "Secondary Air Injection System Switching Valve 'A' Circuit Shorted",
"P0415": "Secondary Air Injection System Switching Valve 'B' Circuit",
"P0416": "Secondary Air Injection System Switching Valve 'B' Circuit Open",
"P0417": "Secondary Air Injection System Switching Valve 'B' Circuit Shorted",
"P0418": "Secondary Air Injection System Control 'A' Circuit",
"P0419": "Secondary Air Injection System Control 'B' Circuit",
"P0420": "Catalyst System Efficiency Below Threshold",
"P0421": "Warm Up Catalyst Efficiency Below Threshold",
"P0422": "Main Catalyst Efficiency Below Threshold",
"P0423": "Heated Catalyst Efficiency Below Threshold",
"P0424": "Heated Catalyst Temperature Below Threshold",
"P0425": "Catalyst Temperature Sensor",
"P0426": "Catalyst Temperature Sensor Range/Performance",
"P0427": "Catalyst Temperature Sensor Low",
"P0428": "Catalyst Temperature Sensor High",
"P0429": "Catalyst Heater Control Circuit",
"P0430": "Catalyst System Efficiency Below Threshold",
"P0431": "Warm Up Catalyst Efficiency Below Threshold",
"P0432": "Main Catalyst Efficiency Below Threshold",
"P0433": "Heated Catalyst Efficiency Below Threshold",
"P0434": "Heated Catalyst Temperature Below Threshold",
"P0435": "Catalyst Temperature Sensor",
"P0436": "Catalyst Temperature Sensor Range/Performance",
"P0437": "Catalyst Temperature Sensor Low",
"P0438": "Catalyst Temperature Sensor High",
"P0439": "Catalyst Heater Control Circuit",
"P0440": "Evaporative Emission System",
"P0441": "Evaporative Emission System Incorrect Purge Flow",
"P0442": "Evaporative Emission System Leak Detected (small leak)",
"P0443": "Evaporative Emission System Purge Control Valve Circuit",
"P0444": "Evaporative Emission System Purge Control Valve Circuit Open",
"P0445": "Evaporative Emission System Purge Control Valve Circuit Shorted",
"P0446": "Evaporative Emission System Vent Control Circuit",
"P0447": "Evaporative Emission System Vent Control Circuit Open",
"P0448": "Evaporative Emission System Vent Control Circuit Shorted",
"P0449": "Evaporative Emission System Vent Valve/Solenoid Circuit",
"P0450": "Evaporative Emission System Pressure Sensor/Switch",
"P0451": "Evaporative Emission System Pressure Sensor/Switch Range/Performance",
"P0452": "Evaporative Emission System Pressure Sensor/Switch Low",
"P0453": "Evaporative Emission System Pressure Sensor/Switch High",
"P0454": "Evaporative Emission System Pressure Sensor/Switch Intermittent",
"P0455": "Evaporative Emission System Leak Detected (large leak)",
"P0456": "Evaporative Emission System Leak Detected (very small leak)",
"P0457": "Evaporative Emission System Leak Detected (fuel cap loose/off)",
"P0458": "Evaporative Emission System Purge Control Valve Circuit Low",
"P0459": "Evaporative Emission System Purge Control Valve Circuit High",
"P0460": "Fuel Level Sensor 'A' Circuit",
"P0461": "Fuel Level Sensor 'A' Circuit Range/Performance",
"P0462": "Fuel Level Sensor 'A' Circuit Low",
"P0463": "Fuel Level Sensor 'A' Circuit High",
"P0464": "Fuel Level Sensor 'A' Circuit Intermittent",
"P0465": "EVAP Purge Flow Sensor Circuit",
"P0466": "EVAP Purge Flow Sensor Circuit Range/Performance",
"P0467": "EVAP Purge Flow Sensor Circuit Low",
"P0468": "EVAP Purge Flow Sensor Circuit High",
"P0469": "EVAP Purge Flow Sensor Circuit Intermittent",
"P0470": "Exhaust Pressure Sensor",
"P0471": "Exhaust Pressure Sensor Range/Performance",
"P0472": "Exhaust Pressure Sensor Low",
"P0473": "Exhaust Pressure Sensor High",
"P0474": "Exhaust Pressure Sensor Intermittent",
"P0475": "Exhaust Pressure Control Valve",
"P0476": "Exhaust Pressure Control Valve Range/Performance",
"P0477": "Exhaust Pressure Control Valve Low",
"P0478": "Exhaust Pressure Control Valve High",
"P0479": "Exhaust Pressure Control Valve Intermittent",
"P0480": "Fan 1 Control Circuit",
"P0481": "Fan 2 Control Circuit",
"P0482": "Fan 3 Control Circuit",
"P0483": "Fan Rationality Check",
"P0484": "Fan Circuit Over Current",
"P0485": "Fan Power/Ground Circuit",
"P0486": "Exhaust Gas Recirculation Sensor 'B' Circuit",
"P0487": "Exhaust Gas Recirculation Throttle Position Control Circuit",
"P0488": "Exhaust Gas Recirculation Throttle Position Control Range/Performance",
"P0489": "Exhaust Gas Recirculation Control Circuit Low",
"P0490": "Exhaust Gas Recirculation Control Circuit High",
"P0491": "Secondary Air Injection System Insufficient Flow",
"P0492": "Secondary Air Injection System Insufficient Flow",
"P0493": "Fan Overspeed",
"P0494": "Fan Speed Low",
"P0495": "Fan Speed High",
"P0496": "Evaporative Emission System High Purge Flow",
"P0497": "Evaporative Emission System Low Purge Flow",
"P0498": "Evaporative Emission System Vent Valve Control Circuit Low",
"P0499": "Evaporative Emission System Vent Valve Control Circuit High",
"P0500": "Vehicle Speed Sensor 'A'",
"P0501": "Vehicle Speed Sensor 'A' Range/Performance",
"P0502": "Vehicle Speed Sensor 'A' Circuit Low Input",
"P0503": "Vehicle Speed Sensor 'A' Intermittent/Erratic/High",
"P0504": "Brake Switch 'A'/'B' Correlation",
"P0505": "Idle Air Control System",
"P0506": "Idle Air Control System RPM Lower Than Expected",
"P0507": "Idle Air Control System RPM Higher Than Expected",
"P0508": "Idle Air Control System Circuit Low",
"P0509": "Idle Air Control System Circuit High",
"P0510": "Closed Throttle Position Switch",
"P0511": "Idle Air Control Circuit",
"P0512": "Starter Request Circuit",
"P0513": "Incorrect Immobilizer Key",
"P0514": "Battery Temperature Sensor Circuit Range/Performance",
"P0515": "Battery Temperature Sensor Circuit",
"P0516": "Battery Temperature Sensor Circuit Low",
"P0517": "Battery Temperature Sensor Circuit High",
"P0518": "Idle Air Control Circuit Intermittent",
"P0519": "Idle Air Control System Performance",
"P0520": "Engine Oil Pressure Sensor/Switch Circuit",
"P0521": "Engine Oil Pressure Sensor/Switch Range/Performance",
"P0522": "Engine Oil Pressure Sensor/Switch Low Voltage",
"P0523": "Engine Oil Pressure Sensor/Switch High Voltage",
"P0524": "Engine Oil Pressure Too Low",
"P0525": "Cruise Control Servo Control Circuit Range/Performance",
"P0526": "Fan Speed Sensor Circuit",
"P0527": "Fan Speed Sensor Circuit Range/Performance",
"P0528": "Fan Speed Sensor Circuit No Signal",
"P0529": "Fan Speed Sensor Circuit Intermittent",
"P0530": "A/C Refrigerant Pressure Sensor 'A' Circuit",
"P0531": "A/C Refrigerant Pressure Sensor 'A' Circuit Range/Performance",
"P0532": "A/C Refrigerant Pressure Sensor 'A' Circuit Low",
"P0533": "A/C Refrigerant Pressure Sensor 'A' Circuit High",
"P0534": "Air Conditioner Refrigerant Charge Loss",
"P0535": "A/C Evaporator Temperature Sensor Circuit",
"P0536": "A/C Evaporator Temperature Sensor Circuit Range/Performance",
"P0537": "A/C Evaporator Temperature Sensor Circuit Low",
"P0538": "A/C Evaporator Temperature Sensor Circuit High",
"P0539": "A/C Evaporator Temperature Sensor Circuit Intermittent",
"P0540": "Intake Air Heater 'A' Circuit",
"P0541": "Intake Air Heater 'A' Circuit Low",
"P0542": "Intake Air Heater 'A' Circuit High",
"P0543": "Intake Air Heater 'A' Circuit Open",
"P0544": "Exhaust Gas Temperature Sensor Circuit",
"P0545": "Exhaust Gas Temperature Sensor Circuit Low",
"P0546": "Exhaust Gas Temperature Sensor Circuit High",
"P0547": "Exhaust Gas Temperature Sensor Circuit",
"P0548": "Exhaust Gas Temperature Sensor Circuit Low",
"P0549": "Exhaust Gas Temperature Sensor Circuit High",
"P0550": "Power Steering Pressure Sensor/Switch Circuit",
"P0551": "Power Steering Pressure Sensor/Switch Circuit Range/Performance",
"P0552": "Power Steering Pressure Sensor/Switch Circuit Low Input",
"P0553": "Power Steering Pressure Sensor/Switch Circuit High Input",
"P0554": "Power Steering Pressure Sensor/Switch Circuit Intermittent",
"P0555": "Brake Booster Pressure Sensor Circuit",
"P0556": "Brake Booster Pressure Sensor Circuit Range/Performance",
"P0557": "Brake Booster Pressure Sensor Circuit Low Input",
"P0558": "Brake Booster Pressure Sensor Circuit High Input",
"P0559": "Brake Booster Pressure Sensor Circuit Intermittent",
"P0560": "System Voltage",
"P0561": "System Voltage Unstable",
"P0562": "System Voltage Low",
"P0563": "System Voltage High",
"P0564": "Cruise Control Multi-Function Input 'A' Circuit",
"P0565": "Cruise Control On Signal",
"P0566": "Cruise Control Off Signal",
"P0567": "Cruise Control Resume Signal",
"P0568": "Cruise Control Set Signal",
"P0569": "Cruise Control Coast Signal",
"P0570": "Cruise Control Accelerate Signal",
"P0571": "Brake Switch 'A' Circuit",
"P0572": "Brake Switch 'A' Circuit Low",
"P0573": "Brake Switch 'A' Circuit High",
"P0574": "Cruise Control System - Vehicle Speed Too High",
"P0575": "Cruise Control Input Circuit",
"P0576": "Cruise Control Input Circuit Low",
"P0577": "Cruise Control Input Circuit High",
"P0578": "Cruise Control Multi-Function Input 'A' Circuit Stuck",
"P0579": "Cruise Control Multi-Function Input 'A' Circuit Range/Performance",
"P0580": "Cruise Control Multi-Function Input 'A' Circuit Low",
"P0581": "Cruise Control Multi-Function Input 'A' Circuit High",
"P0582": "Cruise Control Vacuum Control Circuit/Open",
"P0583": "Cruise Control Vacuum Control Circuit Low",
"P0584": "Cruise Control Vacuum Control Circuit High",
"P0585": "Cruise Control Multi-Function Input 'A'/'B' Correlation",
"P0586": "Cruise Control Vent Control Circuit/Open",
"P0587": "Cruise Control Vent Control Circuit Low",
"P0588": "Cruise Control Vent Control Circuit High",
"P0589": "Cruise Control Multi-Function Input 'B' Circuit",
"P0590": "Cruise Control Multi-Function Input 'B' Circuit Stuck",
"P0591": "Cruise Control Multi-Function Input 'B' Circuit Range/Performance",
"P0592": "Cruise Control Multi-Function Input 'B' Circuit Low",
"P0593": "Cruise Control Multi-Function Input 'B' Circuit High",
"P0594": "Cruise Control Servo Control Circuit/Open",
"P0595": "Cruise Control Servo Control Circuit Low",
"P0596": "Cruise Control Servo Control Circuit High",
"P0597": "Thermostat Heater Control Circuit/Open",
"P0598": "Thermostat Heater Control Circuit Low",
"P0599": "Thermostat Heater Control Circuit High",
"P0600": "Serial Communication Link",
"P0601": "Internal Control Module Memory Check Sum Error",
"P0602": "Control Module Programming Error",
"P0603": "Internal Control Module Keep Alive Memory (KAM) Error",
"P0604": "Internal Control Module Random Access Memory (RAM) Error",
"P0605": "Internal Control Module Read Only Memory (ROM) Error",
"P0606": "ECM/PCM Processor",
"P0607": "Control Module Performance",
"P0608": "Control Module VSS Output 'A'",
"P0609": "Control Module VSS Output 'B'",
"P0610": "Control Module Vehicle Options Error",
"P0611": "Fuel Injector Control Module Performance",
"P0612": "Fuel Injector Control Module Relay Control",
"P0613": "TCM Processor",
"P0614": "ECM / TCM Incompatible",
"P0615": "Starter Relay Circuit",
"P0616": "Starter Relay Circuit Low",
"P0617": "Starter Relay Circuit High",
"P0618": "Alternative Fuel Control Module KAM Error",
"P0619": "Alternative Fuel Control Module RAM/ROM Error",
"P0620": "Generator Control Circuit",
"P0621": "Generator Lamp/L Terminal Circuit",
"P0622": "Generator Field/F Terminal Circuit",
"P0623": "Generator Lamp Control Circuit",
"P0624": "Fuel Cap Lamp Control Circuit",
"P0625": "Generator Field/F Terminal Circuit Low",
"P0626": "Generator Field/F Terminal Circuit High",
"P0627": "Fuel Pump 'A' Control Circuit /Open",
"P0628": "Fuel Pump 'A' Control Circuit Low",
"P0629": "Fuel Pump 'A' Control Circuit High",
"P0630": "VIN Not Programmed or Incompatible - ECM/PCM",
"P0631": "VIN Not Programmed or Incompatible - TCM",
"P0632": "Odometer Not Programmed - ECM/PCM",
"P0633": "Immobilizer Key Not Programmed - ECM/PCM",
"P0634": "PCM/ECM/TCM Internal Temperature Too High",
"P0635": "Power Steering Control Circuit",
"P0636": "Power Steering Control Circuit Low",
"P0637": "Power Steering Control Circuit High",
"P0638": "Throttle Actuator Control Range/Performance",
"P0639": "Throttle Actuator Control Range/Performance",
"P0640": "Intake Air Heater Control Circuit",
"P0641": "Sensor Reference Voltage 'A' Circuit/Open",
"P0642": "Sensor Reference Voltage 'A' Circuit Low",
"P0643": "Sensor Reference Voltage 'A' Circuit High",
"P0644": "Driver Display Serial Communication Circuit",
"P0645": "A/C Clutch Relay Control Circuit",
"P0646": "A/C Clutch Relay Control Circuit Low",
"P0647": "A/C Clutch Relay Control Circuit High",
"P0648": "Immobilizer Lamp Control Circuit",
"P0649": "Speed Control Lamp Control Circuit",
"P0650": "Malfunction Indicator Lamp (MIL) Control Circuit",
"P0651": "Sensor Reference Voltage 'B' Circuit/Open",
"P0652": "Sensor Reference Voltage 'B' Circuit Low",
"P0653": "Sensor Reference Voltage 'B' Circuit High",
"P0654": "Engine RPM Output Circuit",
"P0655": "Engine Hot Lamp Output Control Circuit",
"P0656": "Fuel Level Output Circuit",
"P0657": "Actuator Supply Voltage 'A' Circuit/Open",
"P0658": "Actuator Supply Voltage 'A' Circuit Low",
"P0659": "Actuator Supply Voltage 'A' Circuit High",
"P0660": "Intake Manifold Tuning Valve Control Circuit/Open",
"P0661": "Intake Manifold Tuning Valve Control Circuit Low",
"P0662": "Intake Manifold Tuning Valve Control Circuit High",
"P0663": "Intake Manifold Tuning Valve Control Circuit/Open",
"P0664": "Intake Manifold Tuning Valve Control Circuit Low",
"P0665": "Intake Manifold Tuning Valve Control Circuit High",
"P0666": "PCM/ECM/TCM Internal Temperature Sensor Circuit",
"P0667": "PCM/ECM/TCM Internal Temperature Sensor Range/Performance",
"P0668": "PCM/ECM/TCM Internal Temperature Sensor Circuit Low",
"P0669": "PCM/ECM/TCM Internal Temperature Sensor Circuit High",
"P0670": "Glow Plug Module Control Circuit",
"P0671": "Cylinder 1 Glow Plug Circuit",
"P0672": "Cylinder 2 Glow Plug Circuit",
"P0673": "Cylinder 3 Glow Plug Circuit",
"P0674": "Cylinder 4 Glow Plug Circuit",
"P0675": "Cylinder 5 Glow Plug Circuit",
"P0676": "Cylinder 6 Glow Plug Circuit",
"P0677": "Cylinder 7 Glow Plug Circuit",
"P0678": "Cylinder 8 Glow Plug Circuit",
"P0679": "Cylinder 9 Glow Plug Circuit",
"P0680": "Cylinder 10 Glow Plug Circuit",
"P0681": "Cylinder 11 Glow Plug Circuit",
"P0682": "Cylinder 12 Glow Plug Circuit",
"P0683": "Glow Plug Control Module to PCM Communication Circuit",
"P0684": "Glow Plug Control Module to PCM Communication Circuit Range/Performance",
"P0685": "ECM/PCM Power Relay Control Circuit /Open",
"P0686": "ECM/PCM Power Relay Control Circuit Low",
"P0687": "ECM/PCM Power Relay Control Circuit High",
"P0688": "ECM/PCM Power Relay Sense Circuit /Open",
"P0689": "ECM/PCM Power Relay Sense Circuit Low",
"P0690": "ECM/PCM Power Relay Sense Circuit High",
"P0691": "Fan 1 Control Circuit Low",
"P0692": "Fan 1 Control Circuit High",
"P0693": "Fan 2 Control Circuit Low",
"P0694": "Fan 2 Control Circuit High",
"P0695": "Fan 3 Control Circuit Low",
"P0696": "Fan 3 Control Circuit High",
"P0697": "Sensor Reference Voltage 'C' Circuit/Open",
"P0698": "Sensor Reference Voltage 'C' Circuit Low",
"P0699": "Sensor Reference Voltage 'C' Circuit High",
"P0700": "Transmission Control System (MIL Request)",
"P0701": "Transmission Control System Range/Performance",
"P0702": "Transmission Control System Electrical",
"P0703": "Brake Switch 'B' Circuit",
"P0704": "Clutch Switch Input Circuit Malfunction",
"P0705": "Transmission Range Sensor Circuit Malfunction (PRNDL Input)",
"P0706": "Transmission Range Sensor Circuit Range/Performance",
"P0707": "Transmission Range Sensor Circuit Low",
"P0708": "Transmission Range Sensor Circuit High",
"P0709": "Transmission Range Sensor Circuit Intermittent",
"P0710": "Transmission Fluid Temperature Sensor 'A' Circuit",
"P0711": "Transmission Fluid Temperature Sensor 'A' Circuit Range/Performance",
"P0712": "Transmission Fluid Temperature Sensor 'A' Circuit Low",
"P0713": "Transmission Fluid Temperature Sensor 'A' Circuit High",
"P0714": "Transmission Fluid Temperature Sensor 'A' Circuit Intermittent",
"P0715": "Input/Turbine Speed Sensor 'A' Circuit",
"P0716": "Input/Turbine Speed Sensor 'A' Circuit Range/Performance",
"P0717": "Input/Turbine Speed Sensor 'A' Circuit No Signal",
"P0718": "Input/Turbine Speed Sensor 'A' Circuit Intermittent",
"P0719": "Brake Switch 'B' Circuit Low",
"P0720": "Output Speed Sensor Circuit",
"P0721": "Output Speed Sensor Circuit Range/Performance",
"P0722": "Output Speed Sensor Circuit No Signal",
"P0723": "Output Speed Sensor Circuit Intermittent",
"P0724": "Brake Switch 'B' Circuit High",
"P0725": "Engine Speed Input Circuit",
"P0726": "Engine Speed Input Circuit Range/Performance",
"P0727": "Engine Speed Input Circuit No Signal",
"P0728": "Engine Speed Input Circuit Intermittent",
"P0729": "Gear 6 Incorrect Ratio",
"P0730": "Incorrect Gear Ratio",
"P0731": "Gear 1 Incorrect Ratio",
"P0732": "Gear 2 Incorrect Ratio",
"P0733": "Gear 3 Incorrect Ratio",
"P0734": "Gear 4 Incorrect Ratio",
"P0735": "Gear 5 Incorrect Ratio",
"P0736": "Reverse Incorrect Ratio",
"P0737": "TCM Engine Speed Output Circuit",
"P0738": "TCM Engine Speed Output Circuit Low",
"P0739": "TCM Engine Speed Output Circuit High",
"P0740": "Torque Converter Clutch Circuit/Open",
"P0741": "Torque Converter Clutch Circuit Performance or Stuck Off",
"P0742": "Torque Converter Clutch Circuit Stuck On",
"P0743": "Torque Converter Clutch Circuit Electrical",
"P0744": "Torque Converter Clutch Circuit Intermittent",
"P0745": "Pressure Control Solenoid 'A'",
"P0746": "Pressure Control Solenoid 'A' Performance or Stuck Off",
"P0747": "Pressure Control Solenoid 'A' Stuck On",
"P0748": "Pressure Control Solenoid 'A' Electrical",
"P0749": "Pressure Control Solenoid 'A' Intermittent",
"P0750": "Shift Solenoid 'A'",
"P0751": "Shift Solenoid 'A' Performance or Stuck Off",
"P0752": "Shift Solenoid 'A' Stuck On",
"P0753": "Shift Solenoid 'A' Electrical",
"P0754": "Shift Solenoid 'A' Intermittent",
"P0755": "Shift Solenoid 'B'",
"P0756": "Shift Solenoid 'B' Performance or Stuck Off",
"P0757": "Shift Solenoid 'B' Stuck On",
"P0758": "Shift Solenoid 'B' Electrical",
"P0759": "Shift Solenoid 'B' Intermittent",
"P0760": "Shift Solenoid 'C'",
"P0761": "Shift Solenoid 'C' Performance or Stuck Off",
"P0762": "Shift Solenoid 'C' Stuck On",
"P0763": "Shift Solenoid 'C' Electrical",
"P0764": "Shift Solenoid 'C' Intermittent",
"P0765": "Shift Solenoid 'D'",
"P0766": "Shift Solenoid 'D' Performance or Stuck Off",
"P0767": "Shift Solenoid 'D' Stuck On",
"P0768": "Shift Solenoid 'D' Electrical",
"P0769": "Shift Solenoid 'D' Intermittent",
"P0770": "Shift Solenoid 'E'",
"P0771": "Shift Solenoid 'E' Performance or Stuck Off",
"P0772": "Shift Solenoid 'E' Stuck On",
"P0773": "Shift Solenoid 'E' Electrical",
"P0774": "Shift Solenoid 'E' Intermittent",
"P0775": "Pressure Control Solenoid 'B'",
"P0776": "Pressure Control Solenoid 'B' Performance or Stuck off",
"P0777": "Pressure Control Solenoid 'B' Stuck On",
"P0778": "Pressure Control Solenoid 'B' Electrical",
"P0779": "Pressure Control Solenoid 'B' Intermittent",
"P0780": "Shift Error",
"P0781": "1-2 Shift",
"P0782": "2-3 Shift",
"P0783": "3-4 Shift",
"P0784": "4-5 Shift",
"P0785": "Shift/Timing Solenoid",
"P0786": "Shift/Timing Solenoid Range/Performance",
"P0787": "Shift/Timing Solenoid Low",
"P0788": "Shift/Timing Solenoid High",
"P0789": "Shift/Timing Solenoid Intermittent",
"P0790": "Normal/Performance Switch Circuit",
"P0791": "Intermediate Shaft Speed Sensor 'A' Circuit",
"P0792": "Intermediate Shaft Speed Sensor 'A' Circuit Range/Performance",
"P0793": "Intermediate Shaft Speed Sensor 'A' Circuit No Signal",
"P0794": "Intermediate Shaft Speed Sensor 'A' Circuit Intermittent",
"P0795": "Pressure Control Solenoid 'C'",
"P0796": "Pressure Control Solenoid 'C' Performance or Stuck off",
"P0797": "Pressure Control Solenoid 'C' Stuck On",
"P0798": "Pressure Control Solenoid 'C' Electrical",
"P0799": "Pressure Control Solenoid 'C' Intermittent",
"P0800": "Transfer Case Control System (MIL Request)",
"P0801": "Reverse Inhibit Control Circuit",
"P0802": "Transmission Control System MIL Request Circuit/Open",
"P0803": "1-4 Upshift (Skip Shift) Solenoid Control Circuit",
"P0804": "1-4 Upshift (Skip Shift) Lamp Control Circuit",
"P0805": "Clutch Position Sensor Circuit",
"P0806": "Clutch Position Sensor Circuit Range/Performance",
"P0807": "Clutch Position Sensor Circuit Low",
"P0808": "Clutch Position Sensor Circuit High",
"P0809": "Clutch Position Sensor Circuit Intermittent",
"P0810": "Clutch Position Control Error",
"P0811": "Excessive Clutch Slippage",
"P0812": "Reverse Input Circuit",
"P0813": "Reverse Output Circuit",
"P0814": "Transmission Range Display Circuit",
"P0815": "Upshift Switch Circuit",
"P0816": "Downshift Switch Circuit",
"P0817": "Starter Disable Circuit",
"P0818": "Driveline Disconnect Switch Input Circuit",
"P0819": "Up and Down Shift Switch to Transmission Range Correlation",
"P0820": "Gear Lever X-Y Position Sensor Circuit",
"P0821": "Gear Lever X Position Circuit",
"P0822": "Gear Lever Y Position Circuit",
"P0823": "Gear Lever X Position Circuit Intermittent",
"P0824": "Gear Lever Y Position Circuit Intermittent",
"P0825": "Gear Lever Push-Pull Switch (Shift Anticipate)",
"P0826": "Up and Down Shift Switch Circuit",
"P0827": "Up and Down Shift Switch Circuit Low",
"P0828": "Up and Down Shift Switch Circuit High",
"P0829": "5-6 Shift",
"P0830": "Clutch Pedal Switch 'A' Circuit",
"P0831": "Clutch Pedal Switch 'A' Circuit Low",
"P0832": "Clutch Pedal Switch 'A' Circuit High",
"P0833": "Clutch Pedal Switch 'B' Circuit",
"P0834": "Clutch Pedal Switch 'B' Circuit Low",
"P0835": "Clutch Pedal Switch 'B' Circuit High",
"P0836": "Four Wheel Drive (4WD) Switch Circuit",
"P0837": "Four Wheel Drive (4WD) Switch Circuit Range/Performance",
"P0838": "Four Wheel Drive (4WD) Switch Circuit Low",
"P0839": "Four Wheel Drive (4WD) Switch Circuit High",
"P0840": "Transmission Fluid Pressure Sensor/Switch 'A' Circuit",
"P0841": "Transmission Fluid Pressure Sensor/Switch 'A' Circuit Range/Performance",
"P0842": "Transmission Fluid Pressure Sensor/Switch 'A' Circuit Low",
"P0843": "Transmission Fluid Pressure Sensor/Switch 'A' Circuit High",
"P0844": "Transmission Fluid Pressure Sensor/Switch 'A' Circuit Intermittent",
"P0845": "Transmission Fluid Pressure Sensor/Switch 'B' Circuit",
"P0846": "Transmission Fluid Pressure Sensor/Switch 'B' Circuit Range/Performance",
"P0847": "Transmission Fluid Pressure Sensor/Switch 'B' Circuit Low",
"P0848": "Transmission Fluid Pressure Sensor/Switch 'B' Circuit High",
"P0849": "Transmission Fluid Pressure Sensor/Switch 'B' Circuit Intermittent",
"P0850": "Park/Neutral Switch Input Circuit",
"P0851": "Park/Neutral Switch Input Circuit Low",
"P0852": "Park/Neutral Switch Input Circuit High",
"P0853": "Drive Switch Input Circuit",
"P0854": "Drive Switch Input Circuit Low",
"P0855": "Drive Switch Input Circuit High",
"P0856": "Traction Control Input Signal",
"P0857": "Traction Control Input Signal Range/Performance",
"P0858": "Traction Control Input Signal Low",
"P0859": "Traction Control Input Signal High",
"P0860": "Gear Shift Module Communication Circuit",
"P0861": "Gear Shift Module Communication Circuit Low",
"P0862": "Gear Shift Module Communication Circuit High",
"P0863": "TCM Communication Circuit",
"P0864": "TCM Communication Circuit Range/Performance",
"P0865": "TCM Communication Circuit Low",
"P0866": "TCM Communication Circuit High",
"P0867": "Transmission Fluid Pressure",
"P0868": "Transmission Fluid Pressure Low",
"P0869": "Transmission Fluid Pressure High",
"P0870": "Transmission Fluid Pressure Sensor/Switch 'C' Circuit",
"P0871": "Transmission Fluid Pressure Sensor/Switch 'C' Circuit Range/Performance",
"P0872": "Transmission Fluid Pressure Sensor/Switch 'C' Circuit Low",
"P0873": "Transmission Fluid Pressure Sensor/Switch 'C' Circuit High",
"P0874": "Transmission Fluid Pressure Sensor/Switch 'C' Circuit Intermittent",
"P0875": "Transmission Fluid Pressure Sensor/Switch 'D' Circuit",
"P0876": "Transmission Fluid Pressure Sensor/Switch 'D' Circuit Range/Performance",
"P0877": "Transmission Fluid Pressure Sensor/Switch 'D' Circuit Low",
"P0878": "Transmission Fluid Pressure Sensor/Switch 'D' Circuit High",
"P0879": "Transmission Fluid Pressure Sensor/Switch 'D' Circuit Intermittent",
"P0880": "TCM Power Input Signal",
"P0881": "TCM Power Input Signal Range/Performance",
"P0882": "TCM Power Input Signal Low",
"P0883": "TCM Power Input Signal High",
"P0884": "TCM Power Input Signal Intermittent",
"P0885": "TCM Power Relay Control Circuit/Open",
"P0886": "TCM Power Relay Control Circuit Low",
"P0887": "TCM Power Relay Control Circuit High",
"P0888": "TCM Power Relay Sense Circuit",
"P0889": "TCM Power Relay Sense Circuit Range/Performance",
"P0890": "TCM Power Relay Sense Circuit Low",
"P0891": "TCM Power Relay Sense Circuit High",
"P0892": "TCM Power Relay Sense Circuit Intermittent",
"P0893": "Multiple Gears Engaged",
"P0894": "Transmission Component Slipping",
"P0895": "Shift Time Too Short",
"P0896": "Shift Time Too Long",
"P0897": "Transmission Fluid Deteriorated",
"P0898": "Transmission Control System MIL Request Circuit Low",
"P0899": "Transmission Control System MIL Request Circuit High",
"P0900": "Clutch Actuator Circuit/Open",
"P0901": "Clutch Actuator Circuit Range/Performance",
"P0902": "Clutch Actuator Circuit Low",
"P0903": "Clutch Actuator Circuit High",
"P0904": "Gate Select Position Circuit",
"P0905": "Gate Select Position Circuit Range/Performance",
"P0906": "Gate Select Position Circuit Low",
"P0907": "Gate Select Position Circuit High",
"P0908": "Gate Select Position Circuit Intermittent",
"P0909": "Gate Select Control Error",
"P0910": "Gate Select Actuator Circuit/Open",
"P0911": "Gate Select Actuator Circuit Range/Performance",
"P0912": "Gate Select Actuator Circuit Low",
"P0913": "Gate Select Actuator Circuit High",
"P0914": "Gear Shift Position Circuit",
"P0915": "Gear Shift Position Circuit Range/Performance",
"P0916": "Gear Shift Position Circuit Low",
"P0917": "Gear Shift Position Circuit High",
"P0918": "Gear Shift Position Circuit Intermittent",
"P0919": "Gear Shift Position Control Error",
"P0920": "Gear Shift Forward Actuator Circuit/Open",
"P0921": "Gear Shift Forward Actuator Circuit Range/Performance",
"P0922": "Gear Shift Forward Actuator Circuit Low",
"P0923": "Gear Shift Forward Actuator Circuit High",
"P0924": "Gear Shift Reverse Actuator Circuit/Open",
"P0925": "Gear Shift Reverse Actuator Circuit Range/Performance",
"P0926": "Gear Shift Reverse Actuator Circuit Low",
"P0927": "Gear Shift Reverse Actuator Circuit High",
"P0928": "Gear Shift Lock Solenoid Control Circuit/Open",
"P0929": "Gear Shift Lock Solenoid Control Circuit Range/Performance",
"P0930": "Gear Shift Lock Solenoid Control Circuit Low",
"P0931": "Gear Shift Lock Solenoid Control Circuit High",
"P0932": "Hydraulic Pressure Sensor Circuit",
"P0933": "Hydraulic Pressure Sensor Range/Performance",
"P0934": "Hydraulic Pressure Sensor Circuit Low",
"P0935": "Hydraulic Pressure Sensor Circuit High",
"P0936": "Hydraulic Pressure Sensor Circuit Intermittent",
"P0937": "Hydraulic Oil Temperature Sensor Circuit",
"P0938": "Hydraulic Oil Temperature Sensor Range/Performance",
"P0939": "Hydraulic Oil Temperature Sensor Circuit Low",
"P0940": "Hydraulic Oil Temperature Sensor Circuit High",
"P0941": "Hydraulic Oil Temperature Sensor Circuit Intermittent",
"P0942": "Hydraulic Pressure Unit",
"P0943": "Hydraulic Pressure Unit Cycling Period Too Short",
"P0944": "Hydraulic Pressure Unit Loss of Pressure",
"P0945": "Hydraulic Pump Relay Circuit/Open",
"P0946": "Hydraulic Pump Relay Circuit Range/Performance",
"P0947": "Hydraulic Pump Relay Circuit Low",
"P0948": "Hydraulic Pump Relay Circuit High",
"P0949": "Auto Shift Manual Adaptive Learning Not Complete",
"P0950": "Auto Shift Manual Control Circuit",
"P0951": "Auto Shift Manual Control Circuit Range/Performance",
"P0952": "Auto Shift Manual Control Circuit Low",
"P0953": "Auto Shift Manual Control Circuit High",
"P0954": "Auto Shift Manual Control Circuit Intermittent",
"P0955": "Auto Shift Manual Mode Circuit",
"P0956": "Auto Shift Manual Mode Circuit Range/Performance",
"P0957": "Auto Shift Manual Mode Circuit Low",
"P0958": "Auto Shift Manual Mode Circuit High",
"P0959": "Auto Shift Manual Mode Circuit Intermittent",
"P0960": "Pressure Control Solenoid 'A' Control Circuit/Open",
"P0961": "Pressure Control Solenoid 'A' Control Circuit Range/Performance",
"P0962": "Pressure Control Solenoid 'A' Control Circuit Low",
"P0963": "Pressure Control Solenoid 'A' Control Circuit High",
"P0964": "Pressure Control Solenoid 'B' Control Circuit/Open",
"P0965": "Pressure Control Solenoid 'B' Control Circuit Range/Performance",
"P0966": "Pressure Control Solenoid 'B' Control Circuit Low",
"P0967": "Pressure Control Solenoid 'B' Control Circuit High",
"P0968": "Pressure Control Solenoid 'C' Control Circuit/Open",
"P0969": "Pressure Control Solenoid 'C' Control Circuit Range/Performance",
"P0970": "Pressure Control Solenoid 'C' Control Circuit Low",
"P0971": "Pressure Control Solenoid 'C' Control Circuit High",
"P0972": "Shift Solenoid 'A' Control Circuit Range/Performance",
"P0973": "Shift Solenoid 'A' Control Circuit Low",
"P0974": "Shift Solenoid 'A' Control Circuit High",
"P0975": "Shift Solenoid 'B' Control Circuit Range/Performance",
"P0976": "Shift Solenoid 'B' Control Circuit Low",
"P0977": "Shift Solenoid 'B' Control Circuit High",
"P0978": "Shift Solenoid 'C' Control Circuit Range/Performance",
"P0979": "Shift Solenoid 'C' Control Circuit Low",
"P0980": "Shift Solenoid 'C' Control Circuit High",
"P0981": "Shift Solenoid 'D' Control Circuit Range/Performance",
"P0982": "Shift Solenoid 'D' Control Circuit Low",
"P0983": "Shift Solenoid 'D' Control Circuit High",
"P0984": "Shift Solenoid 'E' Control Circuit Range/Performance",
"P0985": "Shift Solenoid 'E' Control Circuit Low",
"P0986": "Shift Solenoid 'E' Control Circuit High",
"P0987": "Transmission Fluid Pressure Sensor/Switch 'E' Circuit",
"P0988": "Transmission Fluid Pressure Sensor/Switch 'E' Circuit Range/Performance",
"P0989": "Transmission Fluid Pressure Sensor/Switch 'E' Circuit Low",
"P0990": "Transmission Fluid Pressure Sensor/Switch 'E' Circuit High",
"P0991": "Transmission Fluid Pressure Sensor/Switch 'E' Circuit Intermittent",
"P0992": "Transmission Fluid Pressure Sensor/Switch 'F' Circuit",
"P0993": "Transmission Fluid Pressure Sensor/Switch 'F' Circuit Range/Performance",
"P0994": "Transmission Fluid Pressure Sensor/Switch 'F' Circuit Low",
"P0995": "Transmission Fluid Pressure Sensor/Switch 'F' Circuit High",
"P0996": "Transmission Fluid Pressure Sensor/Switch 'F' Circuit Intermittent",
"P0997": "Shift Solenoid 'F' Control Circuit Range/Performance",
"P0998": "Shift Solenoid 'F' Control Circuit Low",
"P0999": "Shift Solenoid 'F' Control Circuit High",
"P0A00": "Motor Electronics Coolant Temperature Sensor Circuit",
"P0A01": "Motor Electronics Coolant Temperature Sensor Circuit Range/Performance",
"P0A02": "Motor Electronics Coolant Temperature Sensor Circuit Low",
"P0A03": "Motor Electronics Coolant Temperature Sensor Circuit High",
"P0A04": "Motor Electronics Coolant Temperature Sensor Circuit Intermittent",
"P0A05": "Motor Electronics Coolant Pump Control Circuit/Open",
"P0A06": "Motor Electronics Coolant Pump Control Circuit Low",
"P0A07": "Motor Electronics Coolant Pump Control Circuit High",
"P0A08": "DC/DC Converter Status Circuit",
"P0A09": "DC/DC Converter Status Circuit Low Input",
"P0A10": "DC/DC Converter Status Circuit High Input",
"P0A11": "DC/DC Converter Enable Circuit/Open",
"P0A12": "DC/DC Converter Enable Circuit Low",
"P0A13": "DC/DC Converter Enable Circuit High",
"P0A14": "Engine Mount Control Circuit/Open",
"P0A15": "Engine Mount Control Circuit Low",
"P0A16": "Engine Mount Control Circuit High",
"P0A17": "Motor Torque Sensor Circuit",
"P0A18": "Motor Torque Sensor Circuit Range/Performance",
"P0A19": "Motor Torque Sensor Circuit Low",
"P0A20": "Motor Torque Sensor Circuit High",
"P0A21": "Motor Torque Sensor Circuit Intermittent",
"P0A22": "Generator Torque Sensor Circuit",
"P0A23": "Generator Torque Sensor Circuit Range/Performance",
"P0A24": "Generator Torque Sensor Circuit Low",
"P0A25": "Generator Torque Sensor Circuit High",
"P0A26": "Generator Torque Sensor Circuit Intermittent",
"P0A27": "Battery Power Off Circuit",
"P0A28": "Battery Power Off Circuit Low",
"P0A29": "Battery Power Off Circuit High",
"P2000": "NOx Trap Efficiency Below Threshold",
"P2001": "NOx Trap Efficiency Below Threshold",
"P2002": "Particulate Trap Efficiency Below Threshold",
"P2003": "Particulate Trap Efficiency Below Threshold",
"P2004": "Intake Manifold Runner Control Stuck Open",
"P2005": "Intake Manifold Runner Control Stuck Open",
"P2006": "Intake Manifold Runner Control Stuck Closed",
"P2007": "Intake Manifold Runner Control Stuck Closed",
"P2008": "Intake Manifold Runner Control Circuit/Open",
"P2009": "Intake Manifold Runner Control Circuit Low",
"P2010": "Intake Manifold Runner Control Circuit High",
"P2011": "Intake Manifold Runner Control Circuit/Open",
"P2012": "Intake Manifold Runner Control Circuit Low",
"P2013": "Intake Manifold Runner Control Circuit High",
"P2014": "Intake Manifold Runner Position Sensor/Switch Circuit",
"P2015": "Intake Manifold Runner Position Sensor/Switch Circuit Range/Performance",
"P2016": "Intake Manifold Runner Position Sensor/Switch Circuit Low",
"P2017": "Intake Manifold Runner Position Sensor/Switch Circuit High",
"P2018": "Intake Manifold Runner Position Sensor/Switch Circuit Intermittent",
"P2019": "Intake Manifold Runner Position Sensor/Switch Circuit",
"P2020": "Intake Manifold Runner Position Sensor/Switch Circuit Range/Performance",
"P2021": "Intake Manifold Runner Position Sensor/Switch Circuit Low",
"P2022": "Intake Manifold Runner Position Sensor/Switch Circuit High",
"P2023": "Intake Manifold Runner Position Sensor/Switch Circuit Intermittent",
"P2024": "Evaporative Emissions (EVAP) Fuel Vapor Temperature Sensor Circuit",
"P2025": "Evaporative Emissions (EVAP) Fuel Vapor Temperature Sensor Performance",
"P2026": "Evaporative Emissions (EVAP) Fuel Vapor Temperature Sensor Circuit Low Voltage",
"P2027": "Evaporative Emissions (EVAP) Fuel Vapor Temperature Sensor Circuit High Voltage",
"P2028": "Evaporative Emissions (EVAP) Fuel Vapor Temperature Sensor Circuit Intermittent",
"P2029": "Fuel Fired Heater Disabled",
"P2030": "Fuel Fired Heater Performance",
"P2031": "Exhaust Gas Temperature Sensor Circuit",
"P2032": "Exhaust Gas Temperature Sensor Circuit Low",
"P2033": "Exhaust Gas Temperature Sensor Circuit High",
"P2034": "Exhaust Gas Temperature Sensor Circuit",
"P2035": "Exhaust Gas Temperature Sensor Circuit Low",
"P2036": "Exhaust Gas Temperature Sensor Circuit High",
"P2037": "Reductant Injection Air Pressure Sensor Circuit",
"P2038": "Reductant Injection Air Pressure Sensor Circuit Range/Performance",
"P2039": "Reductant Injection Air Pressure Sensor Circuit Low Input",
"P2040": "Reductant Injection Air Pressure Sensor Circuit High Input",
"P2041": "Reductant Injection Air Pressure Sensor Circuit Intermittent",
"P2042": "Reductant Temperature Sensor Circuit",
"P2043": "Reductant Temperature Sensor Circuit Range/Performance",
"P2044": "Reductant Temperature Sensor Circuit Low Input",
"P2045": "Reductant Temperature Sensor Circuit High Input",
"P2046": "Reductant Temperature Sensor Circuit Intermittent",
"P2047": "Reductant Injector Circuit/Open",
"P2048": "Reductant Injector Circuit Low",
"P2049": "Reductant Injector Circuit High",
"P2050": "Reductant Injector Circuit/Open",
"P2051": "Reductant Injector Circuit Low",
"P2052": "Reductant Injector Circuit High",
"P2053": "Reductant Injector Circuit/Open",
"P2054": "Reductant Injector Circuit Low",
"P2055": "Reductant Injector Circuit High",
"P2056": "Reductant Injector Circuit/Open",
"P2057": "Reductant Injector Circuit Low",
"P2058": "Reductant Injector Circuit High",
"P2059": "Reductant Injection Air Pump Control Circuit/Open",
"P2060": "Reductant Injection Air Pump Control Circuit Low",
"P2061": "Reductant Injection Air Pump Control Circuit High",
"P2062": "Reductant Supply Control Circuit/Open",
"P2063": "Reductant Supply Control Circuit Low",
"P2064": "Reductant Supply Control Circuit High",
"P2065": "Fuel Level Sensor 'B' Circuit",
"P2066": "Fuel Level Sensor 'B' Performance",
"P2067": "Fuel Level Sensor 'B' Circuit Low",
"P2068": "Fuel Level Sensor 'B' Circuit High",
"P2069": "Fuel Level Sensor 'B' Circuit Intermittent",
"P2070": "Intake Manifold Tuning (IMT) Valve Stuck Open",
"P2071": "Intake Manifold Tuning (IMT) Valve Stuck Closed",
"P2075": "Intake Manifold Tuning (IMT) Valve Position Sensor/Switch Circuit",
"P2076": "Intake Manifold Tuning (IMT) Valve Position Sensor/Switch Circuit Range/Performance",
"P2077": "Intake Manifold Tuning (IMT) Valve Position Sensor/Switch Circuit Low",
"P2078": "Intake Manifold Tuning (IMT) Valve Position Sensor/Switch Circuit High",
"P2079": "Intake Manifold Tuning (IMT) Valve Position Sensor/Switch Circuit Intermittent",
"P2080": "Exhaust Gas Temperature Sensor Circuit Range/Performance",
"P2081": "Exhaust Gas Temperature Sensor Circuit Intermittent",
"P2082": "Exhaust Gas Temperature Sensor Circuit Range/Performance",
"P2083": "Exhaust Gas Temperature Sensor Circuit Intermittent",
"P2084": "Exhaust Gas Temperature Sensor Circuit Range/Performance",
"P2085": "Exhaust Gas Temperature Sensor Circuit Intermittent",
"P2086": "Exhaust Gas Temperature Sensor Circuit Range/Performance",
"P2087": "Exhaust Gas Temperature Sensor Circuit Intermittent",
"P2088": "'A' Camshaft Position Actuator Control Circuit Low",
"P2089": "'A' Camshaft Position Actuator Control Circuit High",
"P2090": "'B' Camshaft Position Actuator Control Circuit Low",
"P2091": "'B' Camshaft Position Actuator Control Circuit High",
"P2092": "'A' Camshaft Position Actuator Control Circuit Low",
"P2093": "'A' Camshaft Position Actuator Control Circuit High",
"P2094": "'B' Camshaft Position Actuator Control Circuit Low",
"P2095": "'B' Camshaft Position Actuator Control Circuit High",
"P2096": "Post Catalyst Fuel Trim System Too Lean",
"P2097": "Post Catalyst Fuel Trim System Too Rich",
"P2098": "Post Catalyst Fuel Trim System Too Lean",
"P2099": "Post Catalyst Fuel Trim System Too Rich",
"P2100": "Throttle Actuator Control Motor Circuit/Open",
"P2101": "Throttle Actuator Control Motor Circuit Range/Performance",
"P2102": "Throttle Actuator Control Motor Circuit Low",
"P2103": "Throttle Actuator Control Motor Circuit High",
"P2104": "Throttle Actuator Control System - Forced Idle",
"P2105": "Throttle Actuator Control System - Forced Engine Shutdown",
"P2106": "Throttle Actuator Control System - Forced Limited Power",
"P2107": "Throttle Actuator Control Module Processor",
"P2108": "Throttle Actuator Control Module Performance",
"P2109": "Throttle/Pedal Position Sensor 'A' Minimum Stop Performance",
"P2110": "Throttle Actuator Control System - Forced Limited RPM",
"P2111": "Throttle Actuator Control System - Stuck Open",
"P2112": "Throttle Actuator Control System - Stuck Closed",
"P2113": "Throttle/Pedal Position Sensor 'B' Minimum Stop Performance",
"P2114": "Throttle/Pedal Position Sensor 'C' Minimum Stop Performance",
"P2115": "Throttle/Pedal Position Sensor 'D' Minimum Stop Performance",
"P2116": "Throttle/Pedal Position Sensor 'E' Minimum Stop Performance",
"P2117": "Throttle/Pedal Position Sensor 'F' Minimum Stop Performance",
"P2118": "Throttle Actuator Control Motor Current Range/Performance",
"P2119": "Throttle Actuator Control Throttle Body Range/Performance",
"P2120": "Throttle/Pedal Position Sensor/Switch 'D' Circuit",
"P2121": "Throttle/Pedal Position Sensor/Switch 'D' Circuit Range/Performance",
"P2122": "Throttle/Pedal Position Sensor/Switch 'D' Circuit Low Input",
"P2123": "Throttle/Pedal Position Sensor/Switch 'D' Circuit High Input",
"P2124": "Throttle/Pedal Position Sensor/Switch 'D' Circuit Intermittent",
"P2125": "Throttle/Pedal Position Sensor/Switch 'E' Circuit",
"P2126": "Throttle/Pedal Position Sensor/Switch 'E' Circuit Range/Performance",
"P2127": "Throttle/Pedal Position Sensor/Switch 'E' Circuit Low Input",
"P2128": "Throttle/Pedal Position Sensor/Switch 'E' Circuit High Input",
"P2129": "Throttle/Pedal Position Sensor/Switch 'E' Circuit Intermittent",
"P2130": "Throttle/Pedal Position Sensor/Switch 'F' Circuit",
"P2131": "Throttle/Pedal Position Sensor/Switch 'F' Circuit Range Performance",
"P2132": "Throttle/Pedal Position Sensor/Switch 'F' Circuit Low Input",
"P2133": "Throttle/Pedal Position Sensor/Switch 'F' Circuit High Input",
"P2134": "Throttle/Pedal Position Sensor/Switch 'F' Circuit Intermittent",
"P2135": "Throttle/Pedal Position Sensor/Switch 'A' / 'B' Voltage Correlation",
"P2136": "Throttle/Pedal Position Sensor/Switch 'A' / 'C' Voltage Correlation",
"P2137": "Throttle/Pedal Position Sensor/Switch 'B' / 'C' Voltage Correlation",
"P2138": "Throttle/Pedal Position Sensor/Switch 'D' / 'E' Voltage Correlation",
"P2139": "Throttle/Pedal Position Sensor/Switch 'D' / 'F' Voltage Correlation",
"P2140": "Throttle/Pedal Position Sensor/Switch 'E' / 'F' Voltage Correlation",
"P2141": "Exhaust Gas Recirculation Throttle Control Circuit Low",
"P2142": "Exhaust Gas Recirculation Throttle Control Circuit High",
"P2143": "Exhaust Gas Recirculation Vent Control Circuit/Open",
"P2144": "Exhaust Gas Recirculation Vent Control Circuit Low",
"P2145": "Exhaust Gas Recirculation Vent Control Circuit High",
"P2146": "Fuel Injector Group 'A' Supply Voltage Circuit/Open",
"P2147": "Fuel Injector Group 'A' Supply Voltage Circuit Low",
"P2148": "Fuel Injector Group 'A' Supply Voltage Circuit High",
"P2149": "Fuel Injector Group 'B' Supply Voltage Circuit/Open",
"P2150": "Fuel Injector Group 'B' Supply Voltage Circuit Low",
"P2151": "Fuel Injector Group 'B' Supply Voltage Circuit High",
"P2152": "Fuel Injector Group 'C' Supply Voltage Circuit/Open",
"P2153": "Fuel Injector Group 'C' Supply Voltage Circuit Low",
"P2154": "Fuel Injector Group 'C' Supply Voltage Circuit High",
"P2155": "Fuel Injector Group 'D' Supply Voltage Circuit/Open",
"P2156": "Fuel Injector Group 'D' Supply Voltage Circuit Low",
"P2157": "Fuel Injector Group 'D' Supply Voltage Circuit High",
"P2158": "Vehicle Speed Sensor 'B'",
"P2159": "Vehicle Speed Sensor 'B' Range/Performance",
"P2160": "Vehicle Speed Sensor 'B' Circuit Low",
"P2161": "Vehicle Speed Sensor 'B' Intermittent/Erratic",
"P2162": "Vehicle Speed Sensor 'A' / 'B' Correlation",
"P2163": "Throttle/Pedal Position Sensor 'A' Maximum Stop Performance",
"P2164": "Throttle/Pedal Position Sensor 'B' Maximum Stop Performance",
"P2165": "Throttle/Pedal Position Sensor 'C' Maximum Stop Performance",
"P2166": "Throttle/Pedal Position Sensor 'D' Maximum Stop Performance",
"P2167": "Throttle/Pedal Position Sensor 'E' Maximum Stop Performance",
"P2168": "Throttle/Pedal Position Sensor 'F' Maximum Stop Performance",
"P2169": "Exhaust Pressure Regulator Vent Solenoid Control Circuit/Open",
"P2170": "Exhaust Pressure Regulator Vent Solenoid Control Circuit Low",
"P2171": "Exhaust Pressure Regulator Vent Solenoid Control Circuit High",
"P2172": "Throttle Actuator Control System - Sudden High Airflow Detected",
"P2173": "Throttle Actuator Control System - High Airflow Detected",
"P2174": "Throttle Actuator Control System - Sudden Low Airflow Detected",
"P2175": "Throttle Actuator Control System - Low Airflow Detected",
"P2176": "Throttle Actuator Control System - Idle Position Not Learned",
"P2177": "System Too Lean Off Idle",
"P2178": "System Too Rich Off Idle",
"P2179": "System Too Lean Off Idle",
"P2180": "System Too Rich Off Idle",
"P2181": "Cooling System Performance",
"P2182": "Engine Coolant Temperature Sensor 2 Circuit",
"P2183": "Engine Coolant Temperature Sensor 2 Circuit Range/Performance",
"P2184": "Engine Coolant Temperature Sensor 2 Circuit Low",
"P2185": "Engine Coolant Temperature Sensor 2 Circuit High",
"P2186": "Engine Coolant Temperature Sensor 2 Circuit Intermittent/Erratic",
"P2187": "System Too Lean at Idle",
"P2188": "System Too Rich at Idle",
"P2189": "System Too Lean at Idle",
"P2190": "System Too Rich at Idle",
"P2191": "System Too Lean at Higher Load",
"P2192": "System Too Rich at Higher Load",
"P2193": "System Too Lean at Higher Load",
"P2194": "System Too Rich at Higher Load",
"P2195": "O2 Sensor Signal Stuck Lean",
"P2196": "O2 Sensor Signal Stuck Rich",
"P2197": "O2 Sensor Signal Stuck Lean",
"P2198": "O2 Sensor Signal Stuck Rich",
"P2199": "Intake Air Temperature Sensor 1 / 2 Correlation",
"P2200": "NOx Sensor Circuit",
"P2201": "NOx Sensor Circuit Range/Performance",
"P2202": "NOx Sensor Circuit Low Input",
"P2203": "NOx Sensor Circuit High Input",
"P2204": "NOx Sensor Circuit Intermittent Input",
"P2205": "NOx Sensor Heater Control Circuit/Open",
"P2206": "NOx Sensor Heater Control Circuit Low",
"P2207": "NOx Sensor Heater Control Circuit High",
"P2208": "NOx Sensor Heater Sense Circuit",
"P2209": "NOx Sensor Heater Sense Circuit Range/Performance",
"P2210": "NOx Sensor Heater Sense Circuit Low Input",
"P2211": "NOx Sensor Heater Sense Circuit High Input",
"P2212": "NOx Sensor Heater Sense Circuit Intermittent",
"P2213": "NOx Sensor Circuit",
"P2214": "NOx Sensor Circuit Range/Performance",
"P2215": "NOx Sensor Circuit Low Input",
"P2216": "NOx Sensor Circuit High Input",
"P2217": "NOx Sensor Circuit Intermittent Input",
"P2218": "NOx Sensor Heater Control Circuit/Open",
"P2219": "NOx Sensor Heater Control Circuit Low",
"P2220": "NOx Sensor Heater Control Circuit High",
"P2221": "NOx Sensor Heater Sense Circuit",
"P2222": "NOx Sensor Heater Sense Circuit Range/Performance",
"P2223": "NOx Sensor Heater Sense Circuit Low",
"P2224": "NOx Sensor Heater Sense Circuit High",
"P2225": "NOx Sensor Heater Sense Circuit Intermittent",
"P2226": "Barometric Pressure Circuit",
"P2227": "Barometric Pressure Circuit Range/Performance",
"P2228": "Barometric Pressure Circuit Low",
"P2229": "Barometric Pressure Circuit High",
"P2230": "Barometric Pressure Circuit Intermittent",
"P2231": "O2 Sensor Signal Circuit Shorted to Heater Circuit",
"P2232": "O2 Sensor Signal Circuit Shorted to Heater Circuit",
"P2233": "O2 Sensor Signal Circuit Shorted to Heater Circuit",
"P2234": "O2 Sensor Signal Circuit Shorted to Heater Circuit",
"P2235": "O2 Sensor Signal Circuit Shorted to Heater Circuit",
"P2236": "O2 Sensor Signal Circuit Shorted to Heater Circuit",
"P2237": "O2 Sensor Positive Current Control Circuit/Open",
"P2238": "O2 Sensor Positive Current Control Circuit Low",
"P2239": "O2 Sensor Positive Current Control Circuit High",
"P2240": "O2 Sensor Positive Current Control Circuit/Open",
"P2241": "O2 Sensor Positive Current Control Circuit Low",
"P2242": "O2 Sensor Positive Current Control Circuit High",
"P2243": "O2 Sensor Reference Voltage Circuit/Open",
"P2244": "O2 Sensor Reference Voltage Performance",
"P2245": "O2 Sensor Reference Voltage Circuit Low",
"P2246": "O2 Sensor Reference Voltage Circuit High",
"P2247": "O2 Sensor Reference Voltage Circuit/Open",
"P2248": "O2 Sensor Reference Voltage Performance",
"P2249": "O2 Sensor Reference Voltage Circuit Low",
"P2250": "O2 Sensor Reference Voltage Circuit High",
"P2251": "O2 Sensor Negative Current Control Circuit/Open",
"P2252": "O2 Sensor Negative Current Control Circuit Low",
"P2253": "O2 Sensor Negative Current Control Circuit High",
"P2254": "O2 Sensor Negative Current Control Circuit/Open",
"P2255": "O2 Sensor Negative Current Control Circuit Low",
"P2256": "O2 Sensor Negative Current Control Circuit High",
"P2257": "Secondary Air Injection System Control 'A' Circuit Low",
"P2258": "Secondary Air Injection System Control 'A' Circuit High",
"P2259": "Secondary Air Injection System Control 'B' Circuit Low",
"P2260": "Secondary Air Injection System Control 'B' Circuit High",
"P2261": "Turbo/Super Charger Bypass Valve - Mechanical",
"P2262": "Turbo Boost Pressure Not Detected - Mechanical",
"P2263": "Turbo/Super Charger Boost System Performance",
"P2264": "Water in Fuel Sensor Circuit",
"P2265": "Water in Fuel Sensor Circuit Range/Performance",
"P2266": "Water in Fuel Sensor Circuit Low",
"P2267": "Water in Fuel Sensor Circuit High",
"P2268": "Water in Fuel Sensor Circuit Intermittent",
"P2269": "Water in Fuel Condition",
"P2270": "O2 Sensor Signal Stuck Lean",
"P2271": "O2 Sensor Signal Stuck Rich",
"P2272": "O2 Sensor Signal Stuck Lean",
"P2273": "O2 Sensor Signal Stuck Rich",
"P2274": "O2 Sensor Signal Stuck Lean",
"P2275": "O2 Sensor Signal Stuck Rich",
"P2276": "O2 Sensor Signal Stuck Lean",
"P2277": "O2 Sensor Signal Stuck Rich",
"P2278": "O2 Sensor Signals Swapped Bank 1 Sensor 3 / Bank 2 Sensor 3",
"P2279": "Intake Air System Leak",
"P2280": "Air Flow Restriction / Air Leak Between Air Filter and MAF",
"P2281": "Air Leak Between MAF and Throttle Body",
"P2282": "Air Leak Between Throttle Body and Intake Valves",
"P2283": "Injector Control Pressure Sensor Circuit",
"P2284": "Injector Control Pressure Sensor Circuit Range/Performance",
"P2285": "Injector Control Pressure Sensor Circuit Low",
"P2286": "Injector Control Pressure Sensor Circuit High",
"P2287": "Injector Control Pressure Sensor Circuit Intermittent",
"P2288": "Injector Control Pressure Too High",
"P2289": "Injector Control Pressure Too High - Engine Off",
"P2290": "Injector Control Pressure Too Low",
"P2291": "Injector Control Pressure Too Low - Engine Cranking",
"P2292": "Injector Control Pressure Erratic",
"P2293": "Fuel Pressure Regulator 2 Performance",
"P2294": "Fuel Pressure Regulator 2 Control Circuit",
"P2295": "Fuel Pressure Regulator 2 Control Circuit Low",
"P2296": "Fuel Pressure Regulator 2 Control Circuit High",
"P2297": "O2 Sensor Out of Range During Deceleration",
"P2298": "O2 Sensor Out of Range During Deceleration",
"P2299": "Brake Pedal Position / Accelerator Pedal Position Incompatible",
"P2300": "Ignition Coil 'A' Primary Control Circuit Low",
"P2301": "Ignition Coil 'A' Primary Control Circuit High",
"P2302": "Ignition Coil 'A' Secondary Circuit",
"P2303": "Ignition Coil 'B' Primary Control Circuit Low",
"P2304": "Ignition Coil 'B' Primary Control Circuit High",
"P2305": "Ignition Coil 'B' Secondary Circuit",
"P2306": "Ignition Coil 'C' Primary Control Circuit Low",
"P2307": "Ignition Coil 'C' Primary Control Circuit High",
"P2308": "Ignition Coil 'C' Secondary Circuit",
"P2309": "Ignition Coil 'D' Primary Control Circuit Low",
"P2310": "Ignition Coil 'D' Primary Control Circuit High",
"P2311": "Ignition Coil 'D' Secondary Circuit",
"P2312": "Ignition Coil 'E' Primary Control Circuit Low",
"P2313": "Ignition Coil 'E' Primary Control Circuit High",
"P2314": "Ignition Coil 'E' Secondary Circuit",
"P2315": "Ignition Coil 'F' Primary Control Circuit Low",
"P2316": "Ignition Coil 'F' Primary Control Circuit High",
"P2317": "Ignition Coil 'F' Secondary Circuit",
"P2318": "Ignition Coil 'G' Primary Control Circuit Low",
"P2319": "Ignition Coil 'G' Primary Control Circuit High",
"P2320": "Ignition Coil 'G' Secondary Circuit",
"P2321": "Ignition Coil 'H' Primary Control Circuit Low",
"P2322": "Ignition Coil 'H' Primary Control Circuit High",
"P2323": "Ignition Coil 'H' Secondary Circuit",
"P2324": "Ignition Coil 'I' Primary Control Circuit Low",
"P2325": "Ignition Coil 'I' Primary Control Circuit High",
"P2326": "Ignition Coil 'I' Secondary Circuit",
"P2327": "Ignition Coil 'J' Primary Control Circuit Low",
"P2328": "Ignition Coil 'J' Primary Control Circuit High",
"P2329": "Ignition Coil 'J' Secondary Circuit",
"P2330": "Ignition Coil 'K' Primary Control Circuit Low",
"P2331": "Ignition Coil 'K' Primary Control Circuit High",
"P2332": "Ignition Coil 'K' Secondary Circuit",
"P2333": "Ignition Coil 'L' Primary Control Circuit Low",
"P2334": "Ignition Coil 'L' Primary Control Circuit High",
"P2335": "Ignition Coil 'L' Secondary Circuit",
"P2336": "Cylinder #1 Above Knock Threshold",
"P2337": "Cylinder #2 Above Knock Threshold",
"P2338": "Cylinder #3 Above Knock Threshold",
"P2339": "Cylinder #4 Above Knock Threshold",
"P2340": "Cylinder #5 Above Knock Threshold",
"P2341": "Cylinder #6 Above Knock Threshold",
"P2342": "Cylinder #7 Above Knock Threshold",
"P2343": "Cylinder #8 Above Knock Threshold",
"P2344": "Cylinder #9 Above Knock Threshold",
"P2345": "Cylinder #10 Above Knock Threshold",
"P2346": "Cylinder #11 Above Knock Threshold",
"P2347": "Cylinder #12 Above Knock Threshold",
"P2400": "Evaporative Emission System Leak Detection Pump Control Circuit/Open",
"P2401": "Evaporative Emission System Leak Detection Pump Control Circuit Low",
"P2402": "Evaporative Emission System Leak Detection Pump Control Circuit High",
"P2403": "Evaporative Emission System Leak Detection Pump Sense Circuit/Open",
"P2404": "Evaporative Emission System Leak Detection Pump Sense Circuit Range/Performance",
"P2405": "Evaporative Emission System Leak Detection Pump Sense Circuit Low",
"P2406": "Evaporative Emission System Leak Detection Pump Sense Circuit High",
"P2407": "Evaporative Emission System Leak Detection Pump Sense Circuit Intermittent/Erratic",
"P2408": "Fuel Cap Sensor/Switch Circuit",
"P2409": "Fuel Cap Sensor/Switch Circuit Range/Performance",
"P2410": "Fuel Cap Sensor/Switch Circuit Low",
"P2411": "Fuel Cap Sensor/Switch Circuit High",
"P2412": "Fuel Cap Sensor/Switch Circuit Intermittent/Erratic",
"P2413": "Exhaust Gas Recirculation System Performance",
"P2414": "O2 Sensor Exhaust Sample Error",
"P2415": "O2 Sensor Exhaust Sample Error",
"P2416": "O2 Sensor Signals Swapped Bank 1 Sensor 2 / Bank 1 Sensor 3",
"P2417": "O2 Sensor Signals Swapped Bank 2 Sensor 2 / Bank 2 Sensor 3",
"P2418": "Evaporative Emission System Switching Valve Control Circuit / Open",
"P2419": "Evaporative Emission System Switching Valve Control Circuit Low",
"P2420": "Evaporative Emission System Switching Valve Control Circuit High",
"P2421": "Evaporative Emission System Vent Valve Stuck Open",
"P2422": "Evaporative Emission System Vent Valve Stuck Closed",
"P2423": "HC Adsorption Catalyst Efficiency Below Threshold",
"P2424": "HC Adsorption Catalyst Efficiency Below Threshold",
"P2425": "Exhaust Gas Recirculation Cooling Valve Control Circuit/Open",
"P2426": "Exhaust Gas Recirculation Cooling Valve Control Circuit Low",
"P2427": "Exhaust Gas Recirculation Cooling Valve Control Circuit High",
"P2428": "Exhaust Gas Temperature Too High",
"P2429": "Exhaust Gas Temperature Too High",
"P2430": "Secondary Air Injection System Air Flow/Pressure Sensor Circuit",
"P2431": "Secondary Air Injection System Air Flow/Pressure Sensor Circuit Range/Performance",
"P2432": "Secondary Air Injection System Air Flow/Pressure Sensor Circuit Low",
"P2433": "Secondary Air Injection System Air Flow/Pressure Sensor Circuit High",
"P2434": "Secondary Air Injection System Air Flow/Pressure Sensor Circuit Intermittent/Erratic",
"P2435": "Secondary Air Injection System Air Flow/Pressure Sensor Circuit",
"P2436": "Secondary Air Injection System Air Flow/Pressure Sensor Circuit Range/Performance",
"P2437": "Secondary Air Injection System Air Flow/Pressure Sensor Circuit Low",
"P2438": "Secondary Air Injection System Air Flow/Pressure Sensor Circuit High",
"P2439": "Secondary Air Injection System Air Flow/Pressure Sensor Circuit Intermittent/Erratic",
"P2440": "Secondary Air Injection System Switching Valve Stuck Open",
"P2441": "Secondary Air Injection System Switching Valve Stuck Closed",
"P2442": "Secondary Air Injection System Switching Valve Stuck Open",
"P2443": "Secondary Air Injection System Switching Valve Stuck Closed",
"P2444": "Secondary Air Injection System Pump Stuck On",
"P2445": "Secondary Air Injection System Pump Stuck Off",
"P2446": "Secondary Air Injection System Pump Stuck On",
"P2447": "Secondary Air Injection System Pump Stuck Off",
"P2500": "Generator Lamp/L-Terminal Circuit Low",
"P2501": "Generator Lamp/L-Terminal Circuit High",
"P2502": "Charging System Voltage",
"P2503": "Charging System Voltage Low",
"P2504": "Charging System Voltage High",
"P2505": "ECM/PCM Power Input Signal",
"P2506": "ECM/PCM Power Input Signal Range/Performance",
"P2507": "ECM/PCM Power Input Signal Low",
"P2508": "ECM/PCM Power Input Signal High",
"P2509": "ECM/PCM Power Input Signal Intermittent",
"P2510": "ECM/PCM Power Relay Sense Circuit Range/Performance",
"P2511": "ECM/PCM Power Relay Sense Circuit Intermittent",
"P2512": "Event Data Recorder Request Circuit/ Open",
"P2513": "Event Data Recorder Request Circuit Low",
"P2514": "Event Data Recorder Request Circuit High",
"P2515": "A/C Refrigerant Pressure Sensor 'B' Circuit",
"P2516": "A/C Refrigerant Pressure Sensor 'B' Circuit Range/Performance",
"P2517": "A/C Refrigerant Pressure Sensor 'B' Circuit Low",
"P2518": "A/C Refrigerant Pressure Sensor 'B' Circuit High",
"P2519": "A/C Request 'A' Circuit",
"P2520": "A/C Request 'A' Circuit Low",
"P2521": "A/C Request 'A' Circuit High",
"P2522": "A/C Request 'B' Circuit",
"P2523": "A/C Request 'B' Circuit Low",
"P2524": "A/C Request 'B' Circuit High",
"P2525": "Vacuum Reservoir Pressure Sensor Circuit",
"P2526": "Vacuum Reservoir Pressure Sensor Circuit Range/Performance",
"P2527": "Vacuum Reservoir Pressure Sensor Circuit Low",
"P2528": "Vacuum Reservoir Pressure Sensor Circuit High",
"P2529": "Vacuum Reservoir Pressure Sensor Circuit Intermittent",
"P2530": "Ignition Switch Run Position Circuit",
"P2531": "Ignition Switch Run Position Circuit Low",
"P2532": "Ignition Switch Run Position Circuit High",
"P2533": "Ignition Switch Run/Start Position Circuit",
"P2534": "Ignition Switch Run/Start Position Circuit Low",
"P2535": "Ignition Switch Run/Start Position Circuit High",
"P2536": "Ignition Switch Accessory Position Circuit",
"P2537": "Ignition Switch Accessory Position Circuit Low",
"P2538": "Ignition Switch Accessory Position Circuit High",
"P2539": "Low Pressure Fuel System Sensor Circuit",
"P2540": "Low Pressure Fuel System Sensor Circuit Range/Performance",
"P2541": "Low Pressure Fuel System Sensor Circuit Low",
"P2542": "Low Pressure Fuel System Sensor Circuit High",
"P2543": "Low Pressure Fuel System Sensor Circuit Intermittent",
"P2544": "Torque Management Request Input Signal 'A'",
"P2545": "Torque Management Request Input Signal 'A' Range/Performance",
"P2546": "Torque Management Request Input Signal 'A' Low",
"P2547": "Torque Management Request Input Signal 'A' High",
"P2548": "Torque Management Request Input Signal 'B'",
"P2549": "Torque Management Request Input Signal 'B' Range/Performance",
"P2550": "Torque Management Request Input Signal 'B' Low",
"P2551": "Torque Management Request Input Signal 'B' High",
"P2552": "Throttle/Fuel Inhibit Circuit",
"P2553": "Throttle/Fuel Inhibit Circuit Range/Performance",
"P2554": "Throttle/Fuel Inhibit Circuit Low",
"P2555": "Throttle/Fuel Inhibit Circuit High",
"P2556": "Engine Coolant Level Sensor/Switch Circuit",
"P2557": "Engine Coolant Level Sensor/Switch Circuit Range/Performance",
"P2558": "Engine Coolant Level Sensor/Switch Circuit Low",
"P2559": "Engine Coolant Level Sensor/Switch Circuit High",
"P2560": "Engine Coolant Level Low",
"P2561": "A/C Control Module Requested MIL Illumination",
"P2562": "Turbocharger Boost Control Position Sensor Circuit",
"P2563": "Turbocharger Boost Control Position Sensor Circuit Range/Performance",
"P2564": "Turbocharger Boost Control Position Sensor Circuit Low",
"P2565": "Turbocharger Boost Control Position Sensor Circuit High",
"P2566": "Turbocharger Boost Control Position Sensor Circuit Intermittent",
"P2567": "Direct Ozone Reduction Catalyst Temperature Sensor Circuit",
"P2568": "Direct Ozone Reduction Catalyst Temperature Sensor Circuit Range/Performance",
"P2569": "Direct Ozone Reduction Catalyst Temperature Sensor Circuit Low",
"P2570": "Direct Ozone Reduction Catalyst Temperature Sensor Circuit High",
"P2571": "Direct Ozone Reduction Catalyst Temperature Sensor Circuit Intermittent/Erratic",
"P2572": "Direct Ozone Reduction Catalyst Deterioration Sensor Circuit",
"P2573": "Direct Ozone Reduction Catalyst Deterioration Sensor Circuit Range/Performance",
"P2574": "Direct Ozone Reduction Catalyst Deterioration Sensor Circuit Low",
"P2575": "Direct Ozone Reduction Catalyst Deterioration Sensor Circuit High",
"P2576": "Direct Ozone Reduction Catalyst Deterioration Sensor Circuit Intermittent/Erratic",
"P2577": "Direct Ozone Reduction Catalyst Efficiency Below Threshold",
"P2600": "Coolant Pump Control Circuit/Open",
"P2601": "Coolant Pump Control Circuit Range/Performance",
"P2602": "Coolant Pump Control Circuit Low",
"P2603": "Coolant Pump Control Circuit High",
"P2604": "Intake Air Heater 'A' Circuit Range/Performance",
"P2605": "Intake Air Heater 'A' Circuit/Open",
"P2606": "Intake Air Heater 'B' Circuit Range/Performance",
"P2607": "Intake Air Heater 'B' Circuit Low",
"P2608": "Intake Air Heater 'B' Circuit High",
"P2609": "Intake Air Heater System Performance",
"P2610": "ECM/PCM Internal Engine Off Timer Performance",
"P2611": "A/C Refrigerant Distribution Valve Control Circuit/Open",
"P2612": "A/C Refrigerant Distribution Valve Control Circuit Low",
"P2613": "A/C Refrigerant Distribution Valve Control Circuit High",
"P2614": "Camshaft Position Signal Output Circuit/Open",
"P2615": "Camshaft Position Signal Output Circuit Low",
"P2616": "Camshaft Position Signal Output Circuit High",
"P2617": "Crankshaft Position Signal Output Circuit/Open",
"P2618": "Crankshaft Position Signal Output Circuit Low",
"P2619": "Crankshaft Position Signal Output Circuit High",
"P2620": "Throttle Position Output Circuit/Open",
"P2621": "Throttle Position Output Circuit Low",
"P2622": "Throttle Position Output Circuit High",
"P2623": "Injector Control Pressure Regulator Circuit/Open",
"P2624": "Injector Control Pressure Regulator Circuit Low",
"P2625": "Injector Control Pressure Regulator Circuit High",
"P2626": "O2 Sensor Pumping Current Trim Circuit/Open",
"P2627": "O2 Sensor Pumping Current Trim Circuit Low",
"P2628": "O2 Sensor Pumping Current Trim Circuit High",
"P2629": "O2 Sensor Pumping Current Trim Circuit/Open",
"P2630": "O2 Sensor Pumping Current Trim Circuit Low",
"P2631": "O2 Sensor Pumping Current Trim Circuit High",
"P2632": "Fuel Pump 'B' Control Circuit /Open",
"P2633": "Fuel Pump 'B' Control Circuit Low",
"P2634": "Fuel Pump 'B' Control Circuit High",
"P2635": "Fuel Pump 'A' Low Flow / Performance",
"P2636": "Fuel Pump 'B' Low Flow / Performance",
"P2637": "Torque Management Feedback Signal 'A'",
"P2638": "Torque Management Feedback Signal 'A' Range/Performance",
"P2639": "Torque Management Feedback Signal 'A' Low",
"P2640": "Torque Management Feedback Signal 'A' High",
"P2641": "Torque Management Feedback Signal 'B'",
"P2642": "Torque Management Feedback Signal 'B' Range/Performance",
"P2643": "Torque Management Feedback Signal 'B' Low",
"P2644": "Torque Management Feedback Signal 'B' High",
"P2645": "'A' Rocker Arm Actuator Control Circuit/Open",
"P2646": "'A' Rocker Arm Actuator System Performance or Stuck Off",
"P2647": "'A' Rocker Arm Actuator System Stuck On",
"P2648": "'A' Rocker Arm Actuator Control Circuit Low",
"P2649": "'A' Rocker Arm Actuator Control Circuit High",
"P2650": "'B' Rocker Arm Actuator Control Circuit/Open",
"P2651": "'B' Rocker Arm Actuator System Performance or Stuck Off",
"P2652": "'B' Rocker Arm Actuator System Stuck On",
"P2653": "'B' Rocker Arm Actuator Control Circuit Low",
"P2654": "'B' Rocker Arm Actuator Control Circuit High",
"P2655": "'A' Rocker Arm Actuator Control Circuit/Open",
"P2656": "'A' Rocker Arm Actuator System Performance or Stuck Off",
"P2657": "'A' Rocker Arm Actuator System Stuck On",
"P2658": "'A' Rocker Arm Actuator Control Circuit Low",
"P2659": "'A' Rocker Arm Actuator Control Circuit High",
"P2660": "'B' Rocker Arm Actuator Control Circuit/Open",
"P2661": "'B' Rocker Arm Actuator System Performance or Stuck Off",
"P2662": "'B' Rocker Arm Actuator System Stuck On",
"P2663": "'B' Rocker Arm Actuator Control Circuit Low",
"P2664": "'B' Rocker Arm Actuator Control Circuit High",
"P2665": "Fuel Shutoff Valve 'B' Control Circuit/Open",
"P2666": "Fuel Shutoff Valve 'B' Control Circuit Low",
"P2667": "Fuel Shutoff Valve 'B' Control Circuit High",
"P2668": "Fuel Mode Indicator Lamp Control Circuit",
"P2669": "Actuator Supply Voltage 'B' Circuit /Open",
"P2670": "Actuator Supply Voltage 'B' Circuit Low",
"P2671": "Actuator Supply Voltage 'B' Circuit High",
"P2700": "Transmission Friction Element 'A' Apply Time Range/Performance",
"P2701": "Transmission Friction Element 'B' Apply Time Range/Performance",
"P2702": "Transmission Friction Element 'C' Apply Time Range/Performance",
"P2703": "Transmission Friction Element 'D' Apply Time Range/Performance",
"P2704": "Transmission Friction Element 'E' Apply Time Range/Performance",
"P2705": "Transmission Friction Element 'F' Apply Time Range/Performance",
"P2706": "Shift Solenoid 'F'",
"P2707": "Shift Solenoid 'F' Performance or Stuck Off",
"P2708": "Shift Solenoid 'F' Stuck On",
"P2709": "Shift Solenoid 'F' Electrical",
"P2710": "Shift Solenoid 'F' Intermittent",
"P2711": "Unexpected Mechanical Gear Disengagement",
"P2712": "Hydraulic Power Unit Leakage",
"P2713": "Pressure Control Solenoid 'D'",
"P2714": "Pressure Control Solenoid 'D' Performance or Stuck Off",
"P2715": "Pressure Control Solenoid 'D' Stuck On",
"P2716": "Pressure Control Solenoid 'D' Electrical",
"P2717": "Pressure Control Solenoid 'D' Intermittent",
"P2718": "Pressure Control Solenoid 'D' Control Circuit / Open",
"P2719": "Pressure Control Solenoid 'D' Control Circuit Range/Performance",
"P2720": "Pressure Control Solenoid 'D' Control Circuit Low",
"P2721": "Pressure Control Solenoid 'D' Control Circuit High",
"P2722": "Pressure Control Solenoid 'E'",
"P2723": "Pressure Control Solenoid 'E' Performance or Stuck Off",
"P2724": "Pressure Control Solenoid 'E' Stuck On",
"P2725": "Pressure Control Solenoid 'E' Electrical",
"P2726": "Pressure Control Solenoid 'E' Intermittent",
"P2727": "Pressure Control Solenoid 'E' Control Circuit / Open",
"P2728": "Pressure Control Solenoid 'E' Control Circuit Range/Performance",
"P2729": "Pressure Control Solenoid 'E' Control Circuit Low",
"P2730": "Pressure Control Solenoid 'E' Control Circuit High",
"P2731": "Pressure Control Solenoid 'F'",
"P2732": "Pressure Control Solenoid 'F' Performance or Stuck Off",
"P2733": "Pressure Control Solenoid 'F' Stuck On",
"P2734": "Pressure Control Solenoid 'F' Electrical",
"P2735": "Pressure Control Solenoid 'F' Intermittent",
"P2736": "Pressure Control Solenoid 'F' Control Circuit/Open",
"P2737": "Pressure Control Solenoid 'F' Control Circuit Range/Performance",
"P2738": "Pressure Control Solenoid 'F' Control Circuit Low",
"P2739": "Pressure Control Solenoid 'F' Control Circuit High",
"P2740": "Transmission Fluid Temperature Sensor 'B' Circuit",
"P2741": "Transmission Fluid Temperature Sensor 'B' Circuit Range Performance",
"P2742": "Transmission Fluid Temperature Sensor 'B' Circuit Low",
"P2743": "Transmission Fluid Temperature Sensor 'B' Circuit High",
"P2744": "Transmission Fluid Temperature Sensor 'B' Circuit Intermittent",
"P2745": "Intermediate Shaft Speed Sensor 'B' Circuit",
"P2746": "Intermediate Shaft Speed Sensor 'B' Circuit Range/Performance",
"P2747": "Intermediate Shaft Speed Sensor 'B' Circuit No Signal",
"P2748": "Intermediate Shaft Speed Sensor 'B' Circuit Intermittent",
"P2749": "Intermediate Shaft Speed Sensor 'C' Circuit",
"P2750": "Intermediate Shaft Speed Sensor 'C' Circuit Range/Performance",
"P2751": "Intermediate Shaft Speed Sensor 'C' Circuit No Signal",
"P2752": "Intermediate Shaft Speed Sensor 'C' Circuit Intermittent",
"P2753": "Transmission Fluid Cooler Control Circuit/Open",
"P2754": "Transmission Fluid Cooler Control Circuit Low",
"P2755": "Transmission Fluid Cooler Control Circuit High",
"P2756": "Torque Converter Clutch Pressure Control Solenoid",
"P2757": "Torque Converter Clutch Pressure Control Solenoid Control Circuit Performance or Stuck Off",
"P2758": "Torque Converter Clutch Pressure Control Solenoid Control Circuit Stuck On",
"P2759": "Torque Converter Clutch Pressure Control Solenoid Control Circuit Electrical",
"P2760": "Torque Converter Clutch Pressure Control Solenoid Control Circuit Intermittent",
"P2761": "Torque Converter Clutch Pressure Control Solenoid Control Circuit/Open",
"P2762": "Torque Converter Clutch Pressure Control Solenoid Control Circuit Range/Performance",
"P2763": "Torque Converter Clutch Pressure Control Solenoid Control Circuit High",
"P2764": "Torque Converter Clutch Pressure Control Solenoid Control Circuit Low",
"P2765": "Input/Turbine Speed Sensor 'B' Circuit",
"P2766": "Input/Turbine Speed Sensor 'B' Circuit Range/Performance",
"P2767": "Input/Turbine Speed Sensor 'B' Circuit No Signal",
"P2768": "Input/Turbine Speed Sensor 'B' Circuit Intermittent",
"P2769": "Torque Converter Clutch Circuit Low",
"P2770": "Torque Converter Clutch Circuit High",
"P2771": "Four Wheel Drive (4WD) Low Switch Circuit",
"P2772": "Four Wheel Drive (4WD) Low Switch Circuit Range/Performance",
"P2773": "Four Wheel Drive (4WD) Low Switch Circuit Low",
"P2774": "Four Wheel Drive (4WD) Low Switch Circuit High",
"P2775": "Upshift Switch Circuit Range/Performance",
"P2776": "Upshift Switch Circuit Low",
"P2777": "Upshift Switch Circuit High",
"P2778": "Upshift Switch Circuit Intermittent/Erratic",
"P2779": "Downshift Switch Circuit Range/Performance",
"P2780": "Downshift Switch Circuit Low",
"P2781": "Downshift Switch Circuit High",
"P2782": "Downshift Switch Circuit Intermittent/Erratic",
"P2783": "Torque Converter Temperature Too High",
"P2784": "Input/Turbine Speed Sensor 'A'/'B' Correlation",
"P2785": "Clutch Actuator Temperature Too High",
"P2786": "Gear Shift Actuator Temperature Too High",
"P2787": "Clutch Temperature Too High",
"P2788": "Auto Shift Manual Adaptive Learning at Limit",
"P2789": "Clutch Adaptive Learning at Limit",
"P2790": "Gate Select Direction Circuit",
"P2791": "Gate Select Direction Circuit Low",
"P2792": "Gate Select Direction Circuit High",
"P2793": "Gear Shift Direction Circuit",
"P2794": "Gear Shift Direction Circuit Low",
"P2795": "Gear Shift Direction Circuit High",
"P2A00": "O2 Sensor Circuit Range/Performance",
"P2A01": "O2 Sensor Circuit Range/Performance",
"P2A02": "O2 Sensor Circuit Range/Performance",
"P2A03": "O2 Sensor Circuit Range/Performance",
"P2A04": "O2 Sensor Circuit Range/Performance",
"P2A05": "O2 Sensor Circuit Range/Performance",
"P3400": "Cylinder Deactivation System",
"P3401": "Cylinder 1 Deactivation/Intake Valve Control Circuit/Open",
"P3402": "Cylinder 1 Deactivation/Intake Valve Control Performance",
"P3403": "Cylinder 1 Deactivation/Intake Valve Control Circuit Low",
"P3404": "Cylinder 1 Deactivation/Intake Valve Control Circuit High",
"P3405": "Cylinder 1 Exhaust Valve Control Circuit/Open",
"P3406": "Cylinder 1 Exhaust Valve Control Performance",
"P3407": "Cylinder 1 Exhaust Valve Control Circuit Low",
"P3408": "Cylinder 1 Exhaust Valve Control Circuit High",
"P3409": "Cylinder 2 Deactivation/Intake Valve Control Circuit/Open",
"P3410": "Cylinder 2 Deactivation/Intake Valve Control Performance",
"P3411": "Cylinder 2 Deactivation/Intake Valve Control Circuit Low",
"P3412": "Cylinder 2 Deactivation/Intake Valve Control Circuit High",
"P3413": "Cylinder 2 Exhaust Valve Control Circuit/Open",
"P3414": "Cylinder 2 Exhaust Valve Control Performance",
"P3415": "Cylinder 2 Exhaust Valve Control Circuit Low",
"P3416": "Cylinder 2 Exhaust Valve Control Circuit High",
"P3417": "Cylinder 3 Deactivation/Intake Valve Control Circuit/Open",
"P3418": "Cylinder 3 Deactivation/Intake Valve Control Performance",
"P3419": "Cylinder 3 Deactivation/Intake Valve Control Circuit Low",
"P3420": "Cylinder 3 Deactivation/Intake Valve Control Circuit High",
"P3421": "Cylinder 3 Exhaust Valve Control Circuit/Open",
"P3422": "Cylinder 3 Exhaust Valve Control Performance",
"P3423": "Cylinder 3 Exhaust Valve Control Circuit Low",
"P3424": "Cylinder 3 Exhaust Valve Control Circuit High",
"P3425": "Cylinder 4 Deactivation/Intake Valve Control Circuit/Open",
"P3426": "Cylinder 4 Deactivation/Intake Valve Control Performance",
"P3427": "Cylinder 4 Deactivation/Intake Valve Control Circuit Low",
"P3428": "Cylinder 4 Deactivation/Intake Valve Control Circuit High",
"P3429": "Cylinder 4 Exhaust Valve Control Circuit/Open",
"P3430": "Cylinder 4 Exhaust Valve Control Performance",
"P3431": "Cylinder 4 Exhaust Valve Control Circuit Low",
"P3432": "Cylinder 4 Exhaust Valve Control Circuit High",
"P3433": "Cylinder 5 Deactivation/Intake Valve Control Circuit/Open",
"P3434": "Cylinder 5 Deactivation/Intake Valve Control Performance",
"P3435": "Cylinder 5 Deactivation/Intake Valve Control Circuit Low",
"P3436": "Cylinder 5 Deactivation/Intake Valve Control Circuit High",
"P3437": "Cylinder 5 Exhaust Valve Control Circuit/Open",
"P3438": "Cylinder 5 Exhaust Valve Control Performance",
"P3439": "Cylinder 5 Exhaust Valve Control Circuit Low",
"P3440": "Cylinder 5 Exhaust Valve Control Circuit High",
"P3441": "Cylinder 6 Deactivation/Intake Valve Control Circuit/Open",
"P3442": "Cylinder 6 Deactivation/Intake Valve Control Performance",
"P3443": "Cylinder 6 Deactivation/Intake Valve Control Circuit Low",
"P3444": "Cylinder 6 Deactivation/Intake Valve Control Circuit High",
"P3445": "Cylinder 6 Exhaust Valve Control Circuit/Open",
"P3446": "Cylinder 6 Exhaust Valve Control Performance",
"P3447": "Cylinder 6 Exhaust Valve Control Circuit Low",
"P3448": "Cylinder 6 Exhaust Valve Control Circuit High",
"P3449": "Cylinder 7 Deactivation/Intake Valve Control Circuit/Open",
"P3450": "Cylinder 7 Deactivation/Intake Valve Control Performance",
"P3451": "Cylinder 7 Deactivation/Intake Valve Control Circuit Low",
"P3452": "Cylinder 7 Deactivation/Intake Valve Control Circuit High",
"P3453": "Cylinder 7 Exhaust Valve Control Circuit/Open",
"P3454": "Cylinder 7 Exhaust Valve Control Performance",
"P3455": "Cylinder 7 Exhaust Valve Control Circuit Low",
"P3456": "Cylinder 7 Exhaust Valve Control Circuit High",
"P3457": "Cylinder 8 Deactivation/Intake Valve Control Circuit/Open",
"P3458": "Cylinder 8 Deactivation/Intake Valve Control Performance",
"P3459": "Cylinder 8 Deactivation/Intake Valve Control Circuit Low",
"P3460": "Cylinder 8 Deactivation/Intake Valve Control Circuit High",
"P3461": "Cylinder 8 Exhaust Valve Control Circuit/Open",
"P3462": "Cylinder 8 Exhaust Valve Control Performance",
"P3463": "Cylinder 8 Exhaust Valve Control Circuit Low",
"P3464": "Cylinder 8 Exhaust Valve Control Circuit High",
"P3465": "Cylinder 9 Deactivation/Intake Valve Control Circuit/Open",
"P3466": "Cylinder 9 Deactivation/Intake Valve Control Performance",
"P3467": "Cylinder 9 Deactivation/Intake Valve Control Circuit Low",
"P3468": "Cylinder 9 Deactivation/Intake Valve Control Circuit High",
"P3469": "Cylinder 9 Exhaust Valve Control Circuit/Open",
"P3470": "Cylinder 9 Exhaust Valve Control Performance",
"P3471": "Cylinder 9 Exhaust Valve Control Circuit Low",
"P3472": "Cylinder 9 Exhaust Valve Control Circuit High",
"P3473": "Cylinder 10 Deactivation/Intake Valve Control Circuit/Open",
"P3474": "Cylinder 10 Deactivation/Intake Valve Control Performance",
"P3475": "Cylinder 10 Deactivation/lntake Valve Control Circuit Low",
"P3476": "Cylinder 10 Deactivation/lntake Valve Control Circuit High",
"P3477": "Cylinder 10 Exhaust Valve Control Circuit/Open",
"P3478": "Cylinder 10 Exhaust Valve Control Performance",
"P3479": "Cylinder 10 Exhaust Valve Control Circuit Low",
"P3480": "Cylinder 10 Exhaust Valve Control Circuit High",
"P3481": "Cylinder 11 Deactivation/lntake Valve Control Circuit/Open",
"P3482": "Cylinder 11 Deactivation/lntake Valve Control Performance",
"P3483": "Cylinder 11 Deactivation/lntake Valve Control Circuit Low",
"P3484": "Cylinder 11 Deactivation/lntake Valve Control Circuit High",
"P3485": "Cylinder 11 Exhaust Valve Control Circuit/Open",
"P3486": "Cylinder 11 Exhaust Valve Control Performance",
"P3487": "Cylinder 11 Exhaust Valve Control Circuit Low",
"P3488": "Cylinder 11 Exhaust Valve Control Circuit High",
"P3489": "Cylinder 12 Deactivation/lntake Valve Control Circuit/Open",
"P3490": "Cylinder 12 Deactivation/lntake Valve Control Performance",
"P3491": "Cylinder 12 Deactivation/lntake Valve Control Circuit Low",
"P3492": "Cylinder 12 Deactivation/lntake Valve Control Circuit High",
"P3493": "Cylinder 12 Exhaust Valve Control Circuit/Open",
"P3494": "Cylinder 12 Exhaust Valve Control Performance",
"P3495": "Cylinder 12 Exhaust Valve Control Circuit Low",
"P3496": "Cylinder 12 Exhaust Valve Control Circuit High",
"P3497": "Cylinder Deactivation System",
"U0001" : "High Speed CAN Communication Bus" ,
"U0002" : "High Speed CAN Communication Bus (Performance)" ,
"U0003" : "High Speed CAN Communication Bus (Open)" ,
"U0004" : "High Speed CAN Communication Bus (Low)" ,
"U0005" : "High Speed CAN Communication Bus (High)" ,
"U0006" : "High Speed CAN Communication Bus (Open)" ,
"U0007" : "High Speed CAN Communication Bus (Low)" ,
"U0008" : "High Speed CAN Communication Bus (High)" ,
"U0009" : "High Speed CAN Communication Bus (shorted to Bus)" ,
"U0010" : "Medium Speed CAN Communication Bus" ,
"U0011" : "Medium Speed CAN Communication Bus (Performance)" ,
"U0012" : "Medium Speed CAN Communication Bus (Open)" ,
"U0013" : "Medium Speed CAN Communication Bus (Low)" ,
"U0014" : "Medium Speed CAN Communication Bus (High)" ,
"U0015" : "Medium Speed CAN Communication Bus (Open)" ,
"U0016" : "Medium Speed CAN Communication Bus (Low)" ,
"U0017" : "Medium Speed CAN Communication Bus (High)" ,
"U0018" : "Medium Speed CAN Communication Bus (shorted to Bus)" ,
"U0019" : "Low Speed CAN Communication Bus" ,
"U0020" : "Low Speed CAN Communication Bus (Performance)" ,
"U0021" : "Low Speed CAN Communication Bus (Open)" ,
"U0022" : "Low Speed CAN Communication Bus (Low)" ,
"U0023" : "Low Speed CAN Communication Bus (High)" ,
"U0024" : "Low Speed CAN Communication Bus (Open)" ,
"U0025" : "Low Speed CAN Communication Bus (Low)" ,
"U0026" : "Low Speed CAN Communication Bus (High)" ,
"U0027" : "Low Speed CAN Communication Bus (shorted to Bus)" ,
"U0028" : "Vehicle Communication Bus A" ,
"U0029" : "Vehicle Communication Bus A (Performance)" ,
"U0030" : "Vehicle Communication Bus A (Open)" ,
"U0031" : "Vehicle Communication Bus A (Low)" ,
"U0032" : "Vehicle Communication Bus A (High)" ,
"U0033" : "Vehicle Communication Bus A (Open)" ,
"U0034" : "Vehicle Communication Bus A (Low)" ,
"U0035" : "Vehicle Communication Bus A (High)" ,
"U0036" : "Vehicle Communication Bus A (shorted to Bus A)" ,
"U0037" : "Vehicle Communication Bus B" ,
"U0038" : "Vehicle Communication Bus B (Performance)" ,
"U0039" : "Vehicle Communication Bus B (Open)" ,
"U0040" : "Vehicle Communication Bus B (Low)" ,
"U0041" : "Vehicle Communication Bus B (High)" ,
"U0042" : "Vehicle Communication Bus B (Open)" ,
"U0043" : "Vehicle Communication Bus B (Low)" ,
"U0044" : "Vehicle Communication Bus B (High)" ,
"U0045" : "Vehicle Communication Bus B (shorted to Bus B)" ,
"U0046" : "Vehicle Communication Bus C" ,
"U0047" : "Vehicle Communication Bus C (Performance)" ,
"U0048" : "Vehicle Communication Bus C (Open)" ,
"U0049" : "Vehicle Communication Bus C (Low)" ,
"U0050" : "Vehicle Communication Bus C (High)" ,
"U0051" : "Vehicle Communication Bus C (Open)" ,
"U0052" : "Vehicle Communication Bus C (Low)" ,
"U0053" : "Vehicle Communication Bus C (High)" ,
"U0054" : "Vehicle Communication Bus C (shorted to Bus C)" ,
"U0055" : "Vehicle Communication Bus D" ,
"U0056" : "Vehicle Communication Bus D (Performance)" ,
"U0057" : "Vehicle Communication Bus D (Open)" ,
"U0058" : "Vehicle Communication Bus D (Low)" ,
"U0059" : "Vehicle Communication Bus D (High)" ,
"U0060" : "Vehicle Communication Bus D (Open)" ,
"U0061" : "Vehicle Communication Bus D (Low)" ,
"U0062" : "Vehicle Communication Bus D (High)" ,
"U0063" : "Vehicle Communication Bus D (shorted to Bus D)" ,
"U0064" : "Vehicle Communication Bus E" ,
"U0065" : "Vehicle Communication Bus E (Performance)" ,
"U0066" : "Vehicle Communication Bus E (Open)" ,
"U0067" : "Vehicle Communication Bus E (Low)" ,
"U0068" : "Vehicle Communication Bus E (High)" ,
"U0069" : "Vehicle Communication Bus E (Open)" ,
"U0070" : "Vehicle Communication Bus E (Low)" ,
"U0071" : "Vehicle Communication Bus E (High)" ,
"U0072" : "Vehicle Communication Bus E (shorted to Bus E)" ,
"U0073" : "Control Module Communication Bus Off" ,
"U0074" : "Reserved by J2012" ,
"U0075" : "Reserved by J2012" ,
"U0076" : "Reserved by J2012" ,
"U0077" : "Reserved by J2012" ,
"U0078" : "Reserved by J2012" ,
"U0079" : "Reserved by J2012" ,
"U0080" : "Reserved by J2012" ,
"U0081" : "Reserved by J2012" ,
"U0082" : "Reserved by J2012" ,
"U0083" : "Reserved by J2012" ,
"U0084" : "Reserved by J2012" ,
"U0085" : "Reserved by J2012" ,
"U0086" : "Reserved by J2012" ,
"U0087" : "Reserved by J2012" ,
"U0088" : "Reserved by J2012" ,
"U0089" : "Reserved by J2012" ,
"U0090" : "Reserved by J2012" ,
"U0091" : "Reserved by J2012" ,
"U0092" : "Reserved by J2012" ,
"U0093" : "Reserved by J2012" ,
"U0094" : "Reserved by J2012" ,
"U0095" : "Reserved by J2012" ,
"U0096" : "Reserved by J2012" ,
"U0097" : "Reserved by J2012" ,
"U0098" : "Reserved by J2012" ,
"U0099" : "Reserved by J2012" ,
"U0100" : "Lost Communication With ECM/PCM A" ,
"U0101" : "Lost Communication with TCM" ,
"U0102" : "Lost Communication with Transfer Case Control Module" ,
"U0103" : "Lost Communication With Gear Shift Module" ,
"U0104" : "Lost Communication With Cruise Control Module" ,
"U0105" : "Lost Communication With Fuel Injector Control Module" ,
"U0106" : "Lost Communication With Glow Plug Control Module" ,
"U0107" : "Lost Communication With Throttle Actuator Control Module" ,
"U0108" : "Lost Communication With Alternative Fuel Control Module" ,
"U0109" : "Lost Communication With Fuel Pump Control Module" ,
"U0110" : "Lost Communication With Drive Motor Control Module" ,
"U0111" : "Lost Communication With Battery Energy Control Module 'A'" ,
"U0112" : "Lost Communication With Battery Energy Control Module 'B'" ,
"U0113" : "Lost Communication With Emissions Critical Control Information" ,
"U0114" : "Lost Communication With Four-Wheel Drive Clutch Control Module" ,
"U0115" : "Lost Communication With ECM/PCM B" ,
"U0116" : "Reserved by J2012" ,
"U0117" : "Reserved by J2012" ,
"U0118" : "Reserved by J2012" ,
"U0119" : "Reserved by J2012" ,
"U0120" : "Reserved by J2012" ,
"U0121" : "Lost Communication With Anti-Lock Brake System (ABS) Control Module" ,
"U0122" : "Lost Communication With Vehicle Dynamics Control Module" ,
"U0123" : "Lost Communication With Yaw Rate Sensor Module" ,
"U0124" : "Lost Communication With Lateral Acceleration Sensor Module" ,
"U0125" : "Lost Communication With Multi-axis Acceleration Sensor Module" ,
"U0126" : "Lost Communication With Steering Angle Sensor Module" ,
"U0127" : "Lost Communication With Tire Pressure Monitor Module" ,
"U0128" : "Lost Communication With Park Brake Control Module" ,
"U0129" : "Lost Communication With Brake System Control Module" ,
"U0130" : "Lost Communication With Steering Effort Control Module" ,
"U0131" : "Lost Communication With Power Steering Control Module" ,
"U0132" : "Lost Communication With Ride Level Control Module" ,
"U0133" : "Reserved by J2012" ,
"U0134" : "Reserved by J2012" ,
"U0135" : "Reserved by J2012" ,
"U0136" : "Reserved by J2012" ,
"U0137" : "Reserved by J2012" ,
"U0138" : "Reserved by J2012" ,
"U0139" : "Reserved by J2012" ,
"U0140" : "Lost Communication With Body Control Module" ,
"U0141" : "Lost Communication With Body Control Module 'A'" ,
"U0142" : "Lost Communication With Body Control Module 'B'" ,
"U0143" : "Lost Communication With Body Control Module 'C'" ,
"U0144" : "Lost Communication With Body Control Module 'D'" ,
"U0145" : "Lost Communication With Body Control Module 'E'" ,
"U0146" : "Lost Communication With Gateway 'A'" ,
"U0147" : "Lost Communication With Gateway 'B'" ,
"U0148" : "Lost Communication With Gateway 'C'" ,
"U0149" : "Lost Communication With Gateway 'D'" ,
"U0150" : "Lost Communication With Gateway 'E'" ,
"U0151" : "Lost Communication With Restraints Control Module" ,
"U0152" : "Lost Communication With Side Restraints Control Module Left" ,
"U0153" : "Lost Communication With Side Restraints Control Module Right" ,
"U0154" : "Lost Communication With Restraints Occupant Sensing Control Module" ,
"U0155" : "Lost Communication With Instrument Panel Cluster (IPC) Control Module" ,
"U0156" : "Lost Communication With Information Center 'A'" ,
"U0157" : "Lost Communication With Information Center 'B'" ,
"U0158" : "Lost Communication With Head Up Display" ,
"U0159" : "Lost Communication With Parking Assist Control Module" ,
"U0160" : "Lost Communication With Audible Alert Control Module" ,
"U0161" : "Lost Communication With Compass Module" ,
"U0162" : "Lost Communication With Navigation Display Module" ,
"U0163" : "Lost Communication With Navigation Control Module" ,
"U0164" : "Lost Communication With HVAC Control Module" ,
"U0165" : "Lost Communication With HVAC Control Module Rear" ,
"U0166" : "Lost Communication With Auxiliary Heater Control Module" ,
"U0167" : "Lost Communication With Vehicle Immobilizer Control Module" ,
"U0168" : "Lost Communication With Vehicle Security Control Module" ,
"U0169" : "Lost Communication With Sunroof Control Module" ,
"U0170" : "Lost Communication With 'Restraints System Sensor A'" ,
"U0171" : "Lost Communication With 'Restraints System Sensor B'" ,
"U0172" : "Lost Communication With 'Restraints System Sensor C'" ,
"U0173" : "Lost Communication With 'Restraints System Sensor D'" ,
"U0174" : "Lost Communication With 'Restraints System Sensor E'" ,
"U0175" : "Lost Communication With 'Restraints System Sensor F'" ,
"U0176" : "Lost Communication With 'Restraints System Sensor G'" ,
"U0177" : "Lost Communication With 'Restraints System Sensor H'" ,
"U0178" : "Lost Communication With 'Restraints System Sensor I'" ,
"U0179" : "Lost Communication With 'Restraints System Sensor J'" ,
"U0180" : "Lost Communication With Automatic Lighting Control Module" ,
"U0181" : "Lost Communication With Headlamp Leveling Control Module" ,
"U0182" : "Lost Communication With Lighting Control Module Front" ,
"U0183" : "Lost Communication With Lighting Control Module Rear" ,
"U0184" : "Lost Communication With Radio" ,
"U0185" : "Lost Communication With Antenna Control Module" ,
"U0186" : "Lost Communication With Audio Amplifier" ,
"U0187" : "Lost Communication With Digital Disc Player/Changer Module 'A'" ,
"U0188" : "Lost Communication With Digital Disc Player/Changer Module 'B'" ,
"U0189" : "Lost Communication With Digital Disc Player/Changer Module 'C'" ,
"U0190" : "Lost Communication With Digital Disc Player/Changer Module 'D'" ,
"U0191" : "Lost Communication With Television" ,
"U0192" : "Lost Communication With Personal Computer" ,
"U0193" : "Lost Communication With 'Digital Audio Control Module A'" ,
"U0194" : "Lost Communication With 'Digital Audio Control Module B'" ,
"U0195" : "Lost Communication With Subscription Entertainment Receiver Module" ,
"U0196" : "Lost Communication With Rear Seat Entertainment Control Module" ,
"U0197" : "Lost Communication With Telephone Control Module" ,
"U0198" : "Lost Communication With Telematic Control Module" ,
"U0199" : "Lost Communication With 'Door Control Module A'" ,
"U0200" : "Lost Communication With 'Door Control Module B'" ,
"U0201" : "Lost Communication With 'Door Control Module C'" ,
"U0202" : "Lost Communication With 'Door Control Module D'" ,
"U0203" : "Lost Communication With 'Door Control Module E'" ,
"U0204" : "Lost Communication With 'Door Control Module F'" ,
"U0205" : "Lost Communication With 'Door Control Module G'" ,
"U0206" : "Lost Communication With Folding Top Control Module" ,
"U0207" : "Lost Communication With Moveable Roof Control Module" ,
"U0208" : "Lost Communication With 'Seat Control Module A'" ,
"U0209" : "Lost Communication With 'Seat Control Module B'" ,
"U0210" : "Lost Communication With 'Seat Control Module C'" ,
"U0211" : "Lost Communication With 'Seat Control Module D'" ,
"U0212" : "Lost Communication With Steering Column Control Module" ,
"U0213" : "Lost Communication With Mirror Control Module" ,
"U0214" : "Lost Communication With Remote Function Actuation" ,
"U0215" : "Lost Communication With 'Door Switch A'" ,
"U0216" : "Lost Communication With 'Door Switch B'" ,
"U0217" : "Lost Communication With 'Door Switch C'" ,
"U0218" : "Lost Communication With 'Door Switch D'" ,
"U0219" : "Lost Communication With 'Door Switch E'" ,
"U0220" : "Lost Communication With 'Door Switch F'" ,
"U0221" : "Lost Communication With 'Door Switch G'" ,
"U0222" : "Lost Communication With 'Door Window Motor A'" ,
"U0223" : "Lost Communication With 'Door Window Motor B'" ,
"U0224" : "Lost Communication With 'Door Window Motor C'" ,
"U0225" : "Lost Communication With 'Door Window Motor D'" ,
"U0226" : "Lost Communication With 'Door Window Motor E'" ,
"U0227" : "Lost Communication With 'Door Window Motor F'" ,
"U0228" : "Lost Communication With 'Door Window Motor G'" ,
"U0229" : "Lost Communication With Heated Steering Wheel Module" ,
"U0230" : "Lost Communication With Rear Gate Module" ,
"U0231" : "Lost Communication With Rain Sensing Module" ,
"U0232" : "Lost Communication With Side Obstacle Detection Control Module Left" ,
"U0233" : "Lost Communication With Side Obstacle Detection Control Module Right" ,
"U0234" : "Lost Communication With Convenience Recall Module" ,
"U0235" : "Lost Communication With Cruise Control Front Distance Range Sensor" ,
"U0300" : "Internal Control Module Software Incompatibility" ,
"U0301" : "Software Incompatibility with ECM/PCM" ,
"U0302" : "Software Incompatibility with Transmission Control Module" ,
"U0303" : "Software Incompatibility with Transfer Case Control Module" ,
"U0304" : "Software Incompatibility with Gear Shift Control Module" ,
"U0305" : "Software Incompatibility with Cruise Control Module" ,
"U0306" : "Software Incompatibility with Fuel Injector Control Module" ,
"U0307" : "Software Incompatibility with Glow Plug Control Module" ,
"U0308" : "Software Incompatibility with Throttle Actuator Control Module" ,
"U0309" : "Software Incompatibility with Alternative Fuel Control Module" ,
"U0310" : "Software Incompatibility with Fuel Pump Control Module" ,
"U0311" : "Software Incompatibility with Drive Motor Control Module" ,
"U0312" : "Software Incompatibility with Battery Energy Control Module A" ,
"U0313" : "Software Incompatibility with Battery Energy Control Module B" ,
"U0314" : "Software Incompatibility with Four-Wheel Drive Clutch Control Module" ,
"U0315" : "Software Incompatibility with Anti-Lock Brake System Control Module" ,
"U0316" : "Software Incompatibility with Vehicle Dynamics Control Module" ,
"U0317" : "Software Incompatibility with Park Brake Control Module" ,
"U0318" : "Software Incompatibility with Brake System Control Module" ,
"U0319" : "Software Incompatibility with Steering Effort Control Module" ,
"U0320" : "Software Incompatibility with Power Steering Control Module" ,
"U0321" : "Software Incompatibility with Ride Level Control Module" ,
"U0322" : "Software Incompatibility with Body Control Module" ,
"U0323" : "Software Incompatibility with Instrument Panel Control Module" ,
"U0324" : "Software Incompatibility with HVAC Control Module" ,
"U0325" : "Software Incompatibility with Auxiliary Heater Control Module" ,
"U0326" : "Software Incompatibility with Vehicle Immobilizer Control Module" ,
"U0327" : "Software Incompatibility with Vehicle Security Control Module" ,
"U0328" : "Software Incompatibility with Steering Angle Sensor Module" ,
"U0329" : "Software Incompatibility with Steering Column Control Module" ,
"U0330" : "Software Incompatibility with Tire Pressure Monitor Module" ,
"U0331" : "Software Incompatibility with Body Control Module 'A'" ,
"U0400" : "Invalid Data Received" ,
"U0401" : "Invalid Data Received From ECM/PCM" ,
"U0402" : "Invalid Data Received From Transmission Control Module" ,
"U0403" : "Invalid Data Received From Transfer Case Control Module" ,
"U0404" : "Invalid Data Received From Gear Shift Control Module" ,
"U0405" : "Invalid Data Received From Cruise Control Module" ,
"U0406" : "Invalid Data Received From Fuel Injector Control Module" ,
"U0407" : "Invalid Data Received From Glow Plug Control Module" ,
"U0408" : "Invalid Data Received From Throttle Actuator Control Module" ,
"U0409" : "Invalid Data Received From Alternative Fuel Control Module" ,
"U0410" : "Invalid Data Received From Fuel Pump Control Module" ,
"U0411" : "Invalid Data Received From Drive Motor Control Module" ,
"U0412" : "Invalid Data Received From Battery Energy Control Module A" ,
"U0413" : "Invalid Data Received From Battery Energy Control Module B" ,
"U0414" : "Invalid Data Received From Four-Wheel Drive Clutch Control Module" ,
"U0415" : "Invalid Data Received From Anti-Lock Brake System Control Module" ,
"U0416" : "Invalid Data Received From Vehicle Dynamics Control Module" ,
"U0417" : "Invalid Data Received From Park Brake Control Module" ,
"U0418" : "Invalid Data Received From Brake System Control Module" ,
"U0419" : "Invalid Data Received From Steering Effort Control Module" ,
"U0420" : "Invalid Data Received From Power Steering Control Module" ,
"U0421" : "Invalid Data Received From Ride Level Control Module" ,
"U0422" : "Invalid Data Received From Body Control Module" ,
"U0423" : "Invalid Data Received From Instrument Panel Control Module" ,
"U0424" : "Invalid Data Received From HVAC Control Module" ,
"U0425" : "Invalid Data Received From Auxiliary Heater Control Module" ,
"U0426" : "Invalid Data Received From Vehicle Immobilizer Control Module" ,
"U0427" : "Invalid Data Received From Vehicle Security Control Module" ,
"U0428" : "Invalid Data Received From Steering Angle Sensor Module" ,
"U0429" : "Invalid Data Received From Steering Column Control Module" ,
"U0430" : "Invalid Data Received From Tire Pressure Monitor Module" ,
"U0431" : "Invalid Data Received From Body Control Module 'A'"
}
# Mapping from an OBD-II P-code class pattern ("PnnXX") to the human-readable
# subsystem that class of diagnostic trouble codes belongs to.
# P0XXX are SAE-standardized codes; P1XXX are manufacturer-controlled codes.
# Fixed: four values misspelled "Metering" as "Merering".
pcode_classes = {
"P00XX": "Fuel and Air Metering and Auxiliary Emission Controls",
"P01XX": "Fuel and Air Metering",
"P02XX": "Fuel and Air Metering",
"P03XX": "Ignition System or Misfire",
"P04XX": "Auxiliary Emission Controls",
"P05XX": "Vehicle Speed, Idle Control, and Auxiliary Inputs",
"P06XX": "Computer and Auxiliary Outputs",
"P07XX": "Transmission",
"P08XX": "Transmission",
"P09XX": "Transmission",
"P0AXX": "Hybrid Propulsion",
"P10XX": "Manufacturer Controlled Fuel and Air Metering and Auxiliary Emission Controls",
"P11XX": "Manufacturer Controlled Fuel and Air Metering",
"P12XX": "Fuel and Air Metering",
"P13XX": "Ignition System or Misfire",
"P14XX": "Auxiliary Emission Controls",
"P15XX": "Vehicle Speed, Idle Control, and Auxiliary Inputs",
"P16XX": "Computer and Auxiliary Outputs",
"P17XX": "Transmission",
"P18XX": "Transmission",
"P19XX": "Transmission",
}
# Display labels for the OBD-II mode-01 readiness/status report, in output order:
# DTC count and MIL state first, then the continuous monitors (misfire, fuel
# system, comprehensive components), then the non-continuous monitors.
# Fixed: last label had a stray "C7" artifact ("EGR SystemC7:" -> "EGR System:").
ptest = [
    "DTCs:",
    "MIL:",
    # continuous monitors
    "Misfire:",
    "Fuel system:",
    "Components:",
    # non-continuous monitors
    "Catalyst:",
    "Heated Catalyst:",
    "Evaporative system:",
    "Secondary Air System:",
    "A/C Refrigerant:",
    "Oxygen Sensor:",
    "Oxygen Sensor Heater:",
    "EGR System:",
]
| gpl-2.0 |
enikki/otp | lib/asn1/test/asn1_SUITE_data/CommonDataTypes.py | 97 | 84084 | CommonDataTypes DEFINITIONS AUTOMATIC TAGS ::=
BEGIN
-- @prop dataType
-- @descr This type's only purpose is to avoid an OSS compiler warning: Duplicate PDU tag.
--        The enclosing module uses AUTOMATIC TAGS, so each CHOICE alternative is
--        assigned its own distinct context tag; wrapping every common data type as
--        one alternative here keeps the top-level PDU tags unique.
--        Each field is the wrapped type's name prefixed with "wrap".
--        NOTE(review): do not reorder alternatives — under AUTOMATIC TAGS the tag
--        values depend on the textual order.
-- @
CommonDataTypeWrapper ::= CHOICE
{
wrapAddAnalysisRejectReason AddAnalysisRejectReason,
wrapAddServiceToServiceProfileRejectReason AddServiceToServiceProfileRejectReason,
wrapAddUserIdentifiersRejectReason AddUserIdentifiersRejectReason,
wrapAdmissionRejectReason AdmissionRejectReason,
wrapAlertingUUIE AlertingUUIE,
wrapAllocateTransmissionPathRejectReason AllocateTransmissionPathRejectReason,
wrapAnalyseRejectReason AnalyseRejectReason,
wrapAvailabilityOfEquipment AvailabilityOfEquipment,
wrapBandwidth Bandwidth,
wrapBandwidthReducedInformation BandwidthReducedInformation,
wrapBandwidthReducedReason BandwidthReducedReason,
wrapBandwidthRejectReason BandwidthRejectReason,
wrapBasicCallCategories BasicCallCategories,
wrapBearerCapability BearerCapability,
wrapCallInformation CallInformation,
wrapCallModel CallModel,
wrapCallProceedingUUIE CallProceedingUUIE,
wrapCallReference CallReference,
wrapCallServices CallServices,
wrapCallState CallState,
wrapCallType CallType,
wrapCause Cause,
wrapCauseValue CauseValue,
wrapChangeServiceAndStatusRejectReason ChangeServiceAndStatusRejectReason,
wrapCheckServiceRejectReason CheckServiceRejectReason,
wrapCoding Coding,
wrapConferenceGoal ConferenceGoal,
wrapConferenceIdentifier ConferenceIdentifier,
wrapConnectTransmissionPathRejectReason ConnectTransmissionPathRejectReason,
wrapConnectUUIE ConnectUUIE,
wrapConnectionData ConnectionData,
wrapConnectionIdentifier ConnectionIdentifier,
wrapConnectionInformation ConnectionInformation,
wrapConnectionInformationOriginatingSide ConnectionInformationOriginatingSide,
wrapConnectionInformationTerminatingSide ConnectionInformationTerminatingSide,
wrapConnectionType ConnectionType,
wrapCreateEquipmentRepresentationRejectReason CreateEquipmentRepresentationRejectReason,
wrapCreateServiceAndStatusRejectReason CreateServiceAndStatusRejectReason,
wrapCreateServiceIdentifierRejectReason CreateServiceIdentifierRejectReason,
wrapDeallocateTransmissionPathRejectReason DeallocateTransmissionPathRejectReason,
wrapDetailedReasonAtom DetailedReasonAtom,
wrapDiagnostics Diagnostics,
wrapDisconnectTransmissionPathRejectReason DisconnectTransmissionPathRejectReason,
wrapDisengageReason DisengageReason,
wrapDisengageRejectReason DisengageRejectReason,
wrapDisplay Display,
wrapE164Identifier E164Identifier,
wrapEndToEndEndpointInformationServiceCallAcknowledge EndToEndEndpointInformationServiceCallAcknowledge,
wrapEndToEndEndpointInformationServiceCallActive EndToEndEndpointInformationServiceCallActive,
wrapEndToEndEndpointInformationServiceCallProgress EndToEndEndpointInformationServiceCallProgress,
wrapEndToEndEndpointInformationServiceCallSetup EndToEndEndpointInformationServiceCallSetup,
wrapEndToEndEndpointInformationServiceCallTermination EndToEndEndpointInformationServiceCallTermination,
wrapEndpointIdentifier EndpointIdentifier,
wrapEndpointRegistrationCategories EndpointRegistrationCategories,
wrapEndpointRegistrationRejectReason EndpointRegistrationRejectReason,
wrapEndpointType EndpointType,
wrapEndpointUnregistrationCategories EndpointUnregistrationCategories,
wrapEndpointUnregistrationRejectReason EndpointUnregistrationRejectReason,
wrapEquipmentAddressAN EquipmentAddressAN,
wrapEquipmentAddressLAN EquipmentAddressLAN,
wrapEquipmentRelatedInformation EquipmentRelatedInformation,
wrapEquipmentRelatedInformationIdentifier EquipmentRelatedInformationIdentifier,
wrapFacilityReason FacilityReason,
wrapFacilityUUIE FacilityUUIE,
wrapGatekeeperIdentifier GatekeeperIdentifier,
wrapGatekeeperInformation GatekeeperInformation,
wrapGatekeeperRejectReason GatekeeperRejectReason,
wrapGatewayInformation GatewayInformation,
wrapGetAnalysisRejectReason GetAnalysisRejectReason,
wrapGetEquipmentInformationRejectReason GetEquipmentInformationRejectReason,
wrapGetLANDataRejectReason GetLANDataRejectReason,
wrapGetPartyInformationRejectReason GetPartyInformationRejectReason,
wrapGetRejectReasonUser GetRejectReasonUser,
wrapGetServiceFromServiceProfileRejectReason GetServiceFromServiceProfileRejectReason,
wrapGetServiceProfileRejectReason GetServiceProfileRejectReason,
wrapGetServicesAndStatusRejectReason GetServicesAndStatusRejectReason,
wrapGetUserServiceInformationAndStatusRejectReason GetUserServiceInformationAndStatusRejectReason,
wrapH221NonStandard H221NonStandard,
wrapH310Information H310Information,
wrapH320Information H320Information,
wrapH321Information H321Information,
wrapH322Information H322Information,
wrapH323Information H323Information,
wrapH323InterfaceAddCallReferenceRejectReason H323InterfaceAddCallReferenceRejectReason,
wrapH323InterfaceAddCallRelatedDataRejectReason H323InterfaceAddCallRelatedDataRejectReason,
wrapH323InterfaceAddFixedTransportAddressDataRejectReason H323InterfaceAddFixedTransportAddressDataRejectReason,
wrapH323InterfaceAddKeysAndSetAttributesRejectReason H323InterfaceAddKeysAndSetAttributesRejectReason,
wrapH323InterfaceAdditionalKeys H323InterfaceAdditionalKeys,
wrapH323InterfaceAllocateResourceRejectReason H323InterfaceAllocateResourceRejectReason,
wrapH323InterfaceChangeKeysAndRelationsToUsersReject H323InterfaceChangeKeysAndRelationsToUsersReject,
wrapH323InterfaceCommonAttribute H323InterfaceCommonAttribute,
wrapH323InterfaceCommonAttributeIdentifier H323InterfaceCommonAttributeIdentifier,
wrapH323InterfaceCreateCallReferenceRejectReason H323InterfaceCreateCallReferenceRejectReason,
wrapH323InterfaceCreateRejectReason H323InterfaceCreateRejectReason,
wrapH323InterfaceDeallocateResourceRejectReason H323InterfaceDeallocateResourceRejectReason,
wrapH323InterfaceGetFixedTransportAddressDataRejectReason H323InterfaceGetFixedTransportAddressDataRejectReason,
wrapH323InterfaceGetOrRemoveCallRelatedDataRejectReason H323InterfaceGetOrRemoveCallRelatedDataRejectReason,
wrapH323InterfaceGetOrSetCommonRejectReason H323InterfaceGetOrSetCommonRejectReason,
wrapH323InterfaceGetOrSetInstanceRejectReason H323InterfaceGetOrSetInstanceRejectReason,
wrapH323InterfaceInstanceAttribute H323InterfaceInstanceAttribute,
wrapH323InterfaceInstanceAttributeIdentifier H323InterfaceInstanceAttributeIdentifier,
wrapH323InterfaceKey H323InterfaceKey,
wrapH323InterfaceKeyEndpointIdentifier H323InterfaceKeyEndpointIdentifier,
wrapH323InterfaceReduceBandwidthRejectReason H323InterfaceReduceBandwidthRejectReason,
wrapH323InterfaceRemoveCallReferenceRejectReason H323InterfaceRemoveCallReferenceRejectReason,
wrapH323InterfaceRemoveFixedTransportAddressDataRejectReason H323InterfaceRemoveFixedTransportAddressDataRejectReason,
wrapH323InterfaceRemoveKeysAndSetAttributesRejectReason H323InterfaceRemoveKeysAndSetAttributesRejectReason,
wrapH323InterfaceRemoveRejectReason H323InterfaceRemoveRejectReason,
wrapH324Information H324Information,
wrapHighLayerCompatibility HighLayerCompatibility,
wrapInterfaceRegistrationInformation InterfaceRegistrationInformation,
wrapLANAttribute LANAttribute,
wrapLANAttributeIdentifier LANAttributeIdentifier,
wrapLayer1ProtUserInfo Layer1ProtUserInfo,
wrapLocation Location,
wrapLocationRejectReason LocationRejectReason,
wrapLogicalConnectionPointIdentifier LogicalConnectionPointIdentifier,
wrapLowLayerCompatibility LowLayerCompatibility,
wrapMaximumNumberOfAllowedConnections MaximumNumberOfAllowedConnections,
wrapMaximumTotalBandwidth MaximumTotalBandwidth,
wrapMcuInformation McuInformation,
wrapNonStandardIdentifier NonStandardIdentifier,
wrapNonStandardMessage NonStandardMessage,
wrapNonStandardParameter NonStandardParameter,
wrapNumber Number,
wrapNumberOfTimesLANWasCrowded NumberOfTimesLANWasCrowded,
wrapNumberType NumberType,
wrapNumberingPlan NumberingPlan,
wrapObjectIdentifier ObjectIdentifier,
wrapPhysicalConnectionPointIdentifier PhysicalConnectionPointIdentifier,
wrapPid Pid,
wrapPreStringToRemoveInDestinationAddress PreStringToRemoveInDestinationAddress,
wrapProgressIndicator ProgressIndicator,
wrapProtocolIdentifier ProtocolIdentifier,
wrapQ931Timer301Value Q931Timer301Value,
wrapQ931Timer303Value Q931Timer303Value,
wrapQ954Details Q954Details,
wrapQseriesOptions QseriesOptions,
wrapRASMessageTimerValue RASMessageTimerValue,
wrapRTPSession RTPSession,
wrapRegistrationRejectReason RegistrationRejectReason,
wrapRegistrationStatus RegistrationStatus,
wrapRelationToEquipment RelationToEquipment,
wrapRelationToUser RelationToUser,
wrapReleaseCompleteReason ReleaseCompleteReason,
wrapReleaseCompleteUUIE ReleaseCompleteUUIE,
wrapReleaseInformation ReleaseInformation,
wrapRemoveAnalysisRejectReason RemoveAnalysisRejectReason,
wrapRemoveEquipmentRepresentationRejectReason RemoveEquipmentRepresentationRejectReason,
wrapRemoveServiceAndStatusRejectReason RemoveServiceAndStatusRejectReason,
wrapRemoveServiceFromServiceProfileRejectReason RemoveServiceFromServiceProfileRejectReason,
wrapRemoveServiceIdentifierRejectReason RemoveServiceIdentifierRejectReason,
wrapRepeatIndicator RepeatIndicator,
wrapRequestSeqNum RequestSeqNum,
wrapRequestedUserAndLinkedUserAreIdentical RequestedUserAndLinkedUserAreIdentical,
wrapServiceAndStatus ServiceAndStatus,
wrapServiceCallSetupRejectionInformation ServiceCallSetupRejectionInformation,
wrapServiceCallSetupRejectionReason ServiceCallSetupRejectionReason,
wrapServiceCallTerminationInformation ServiceCallTerminationInformation,
wrapServiceCallTerminationReason ServiceCallTerminationReason,
wrapServiceData ServiceData,
wrapServiceIdentifier ServiceIdentifier,
wrapServiceProfile ServiceProfile,
wrapSetEquipmentStatusRejectReason SetEquipmentStatusRejectReason,
wrapSetLANDataRejectReason SetLANDataRejectReason,
wrapSetUserAttributeData SetUserAttributeData,
wrapSetupUUIE SetupUUIE,
wrapStateOfEquipment StateOfEquipment,
wrapStateOfUser StateOfUser,
wrapStatusOfService StatusOfService,
wrapSubaddress Subaddress,
wrapSubaddressInformation SubaddressInformation,
wrapSubaddressType SubaddressType,
wrapSupportedProtocols SupportedProtocols,
wrapT120Information T120Information,
wrapTerminalInformation TerminalInformation,
wrapTerminationInitiatior TerminationInitiatior,
wrapTimeSlot TimeSlot,
wrapTransferCapability TransferCapability,
wrapTransferRate TransferRate,
wrapTransportAddress TransportAddress,
wrapTransportAddressInformation TransportAddressInformation,
wrapTransportChannelInformation TransportChannelInformation,
wrapTypeOfEquipment TypeOfEquipment,
wrapTypeOfFlowControl TypeOfFlowControl,
wrapTypeOfLAN TypeOfLAN,
wrapTypeOfRegistration TypeOfRegistration,
wrapTypeOfService TypeOfService,
wrapTypeOfUser TypeOfUser,
wrapUnknownMessageResponse UnknownMessageResponse,
wrapUnregistrationRejectReason UnregistrationRejectReason,
wrapUserAllocateResourceRejectReason UserAllocateResourceRejectReason,
wrapUserAttributeData UserAttributeData,
wrapUserAttributeIdentifier UserAttributeIdentifier,
wrapUserCreateRejectReason UserCreateRejectReason,
wrapUserDeallocateResourceRejectReason UserDeallocateResourceRejectReason,
wrapUserIdentifier UserIdentifier,
wrapUserIdentifierInformation UserIdentifierInformation,
wrapUserInformation UserInformation,
wrapUserInformationUUIE UserInformationUUIE,
wrapUserKey UserKey,
wrapUserOrEquipmentRelatedInformation UserOrEquipmentRelatedInformation,
wrapUserOrEquipmentRelatedInformationIdentifier UserOrEquipmentRelatedInformationIdentifier,
wrapUserRelatedInformation UserRelatedInformation,
wrapUserRelatedInformationIdentifier UserRelatedInformationIdentifier,
wrapUserRemoveRejectReason UserRemoveRejectReason,
wrapUserSetRejectReason UserSetRejectReason,
wrapUserSpecificInformation UserSpecificInformation,
wrapVendorIdentifier VendorIdentifier,
wrapVoiceInformation VoiceInformation,
-- extension marker: new alternatives may be appended after this point
-- without breaking existing decoders
...
}
-- ---------------------------------
--
-- AddAnalysisRejectReason
--
-- @prop dataType
--
-- @descr This parameter states the reason for the rejection.
--
-- @
--
-- ---------------------------------
AddAnalysisRejectReason ::= CHOICE
{
analysisTableEntryAlreadyExist NULL,
undefined NULL,
...
}
-- ---------------------------------
--
-- AddServiceToServiceProfileRejectReason
--
-- @prop dataType
--
-- @descr This parameter states the reason for the rejection.
--
-- @
--
-- ---------------------------------
AddServiceToServiceProfileRejectReason ::= CHOICE
{
keyNotValid NULL,
serviceAlreadyExist NULL,
undefined NULL,
...
}
-- ---------------------------------
--
-- AddUserIdentifiersRejectReason
--
-- @prop dataType
--
-- @descr This parameter states the reason for the rejection.
--
-- @
--
-- ---------------------------------
AddUserIdentifiersRejectReason ::= CHOICE
{
userIdentifierExist NULL,
undefined NULL,
...
}
-- ---------------------------------
--
-- AdmissionRejectReason
--
-- @prop dataType
--
-- @descr
--
-- @
--
-- ---------------------------------
AdmissionRejectReason ::= CHOICE
{
calledPartyNotRegistered NULL,
invalidPermission NULL,
requestDenied NULL,
undefinedReason NULL,
callerNotRegistered NULL,
routeCallToGatekeeper NULL,
invalidEndpointIdentifier NULL,
resourceUnavailable NULL,
...
}
-- ---------------------------------
--
-- AlertingUUIE
--
-- @prop dataType
--
-- @descr
--
-- @
--
-- ---------------------------------
AlertingUUIE ::= SEQUENCE
{
protocolIdentifier ProtocolIdentifier,
destinationEndpointType EndpointType, -- destinationInfo
destinationH245Address TransportAddress OPTIONAL, -- h245Address
...
}
-- ---------------------------------
--
-- AllocateTransmissionPathRejectReason
--
-- @prop dataType
--
-- @descr Reason for the rejection.
--
-- @
--
-- ---------------------------------
AllocateTransmissionPathRejectReason ::= CHOICE
{
calledUserNotAvailable NULL,
calledUserUnknown NULL,
permissionDenied NULL,
resourcesNotAvailable NULL,
undefined NULL,
...
}
-- ---------------------------------
--
-- AnalyseRejectReason
--
-- @prop dataType
--
-- @descr This parameter states the reason for the rejection.
--
-- @
--
-- ---------------------------------
AnalyseRejectReason ::= CHOICE
{
noMatchingEntryFound NULL,
undefined NULL,
...
}
-- ---------------------------------
--
-- AvailabilityOfEquipment
--
-- @prop dataType
--
-- @descr
--
-- @
--
-- ---------------------------------
AvailabilityOfEquipment ::= CHOICE
{
available NULL,
notAvailable NULL,
...
}
-- ---------------------------------
--
-- Bandwidth
--
-- @prop dataType
--
-- @descr States the bandwidth to be used in 100 bps.
--
-- @
--
-- ---------------------------------
Bandwidth ::= INTEGER ( 1.. 4294967295 )
-- ---------------------------------
--
-- BandwidthReducedInformation
--
-- @prop dataType
--
-- @descr States information related to the reduction of the bandwidth.
--
-- @
--
-- ---------------------------------
BandwidthReducedInformation ::= SEQUENCE
{
allocatedBandwidth Bandwidth,
bandwidthReducedReason BandwidthReducedReason,
...
}
-- ---------------------------------
--
-- BandwidthReducedReason
-- @prop dataType
-- @descr Reason for the rejection.
-- @
--
-- ---------------------------------
BandwidthReducedReason ::= CHOICE
{
bandwidthLimited NULL,
bandwidthAdaptedToOriginatingEndpoint NULL,
originBandwidthBarredDueToCategories NULL,
undefined NULL,
...
}
-- ---------------------------------
--
-- BandwidthRejectReason
--
-- @prop dataType
--
-- @descr
--
-- @
--
-- ---------------------------------
BandwidthRejectReason ::= CHOICE
{
notBound NULL,
invalidConferenceID NULL,
invalidPermission NULL,
insufficientResources NULL,
invalidRevision NULL,
undefinedReason NULL,
...
}
-- ---------------------------------
--
-- BasicCallCategories
--
-- @prop dataType
--
-- @descr Categories for the service basic call.
--
-- @
-- ---------------------------------
BasicCallCategories ::= SEQUENCE
{
... -- So far, no specific categories identified
}
-- ---------------------------------
--
-- BearerCapability
--
-- @prop dataType
--
-- @descr Origin: Q931
--
-- @
--
-- ---------------------------------
BearerCapability ::= SEQUENCE
{
transferCapability TransferCapability,
transferRate TransferRate,
layer1ProtUserInfo Layer1ProtUserInfo,
rateMultiplier INTEGER (0..127),
...
}
-- ---------------------------------
--
-- CallInformation
--
-- @prop dataType
--
-- @descr
--
-- @
--
-- ---------------------------------
CallInformation ::= SEQUENCE
{
nonStandardData NonStandardParameter OPTIONAL,
callReference CallReference, -- callReferenceValue
conferenceID ConferenceIdentifier,
originator BOOLEAN OPTIONAL,
audio SEQUENCE OF RTPSession OPTIONAL,
video SEQUENCE OF RTPSession OPTIONAL,
data SEQUENCE OF TransportChannelInformation OPTIONAL,
h245 TransportChannelInformation,
callSignaling TransportChannelInformation,
callType CallType,
bandwidth Bandwidth, -- bandWidth
callModel CallModel,
...
}
-- ---------------------------------
--
-- CallModel
--
-- @prop dataType
--
-- @descr Type of call model used, i.e. whether the call is routed via the gatekeeper or not
--
-- @
--
-- ---------------------------------
CallModel ::= CHOICE
{
gatekeeperRouted NULL,
direct NULL,
...
}
-- ---------------------------------
--
-- CallProceedingUUIE
--
-- @prop dataType
--
-- @descr
--
-- @
--
-- ---------------------------------
CallProceedingUUIE ::= SEQUENCE
{
protocolIdentifier ProtocolIdentifier,
destinationEndpointType EndpointType, -- destinationInfo
destinationH245Address TransportAddress OPTIONAL, -- h245Address
...
}
-- ---------------------------------
--
-- CallReference
--
-- @prop dataType
--
-- @descr states the call reference that identifies a specific call.
-- Origin: H.225.0 CallReferenceValue.
--
-- @
--
-- ---------------------------------
CallReference ::= INTEGER (0..65535)
-- ---------------------------------
--
-- CallServices
--
-- @prop dataType
--
-- @descr
--
-- @
--
-- ---------------------------------
CallServices ::= SEQUENCE
{
q932Full BOOLEAN,
q951Full BOOLEAN,
q952Full BOOLEAN,
q953Full BOOLEAN,
q955Full BOOLEAN,
q956Full BOOLEAN,
q957Full BOOLEAN,
q954Info Q954Details,
...
}
-- ---------------------------------
--
-- CallState
--
-- @prop dataType
--
-- @descr
--
-- @
--
-- ---------------------------------
CallState ::= CHOICE
{
null NULL,
callInit NULL,
overlapSending NULL,
outgoingCallProceeding NULL,
callDelivered NULL,
callPresent NULL,
callReceived NULL,
connectRequest NULL,
incomingCallProceeding NULL,
active NULL,
disconnectRequest NULL,
disconnectIndication NULL,
releaseRequest NULL,
facilityRequest NULL,
overlapReceiving NULL,
restartRequest NULL,
restart NULL,
...
}
-- ---------------------------------
--
-- CallType
--
-- @prop dataType
--
-- @descr
--
-- @
--
-- ---------------------------------
CallType ::= CHOICE
{
pointToPoint NULL,
oneToN NULL,
nToOne NULL,
nToN NULL,
...
}
-- ---------------------------------
--
-- Cause
--
-- @prop dataType
--
-- @descr Origin: Q931
--
-- @
--
-- ---------------------------------
Cause ::= SEQUENCE
{
coding Coding,
location Location,
value CauseValue,
diagnostics Diagnostics,
...
}
-- ---------------------------------
--
-- CauseValue
--
-- @prop dataType
--
-- @descr Origin: Q931
--
-- @
--
-- ---------------------------------
CauseValue ::= CHOICE
{
	unassignedNumber			NULL,	-- 1
	noRouteToSpecifiedTransitNetwork	NULL,	-- 2
	noRouteToDestination		NULL,	-- 3
	channelUnacceptable		NULL,	-- 6
	normalClearing			NULL,	-- 16
	userBusy				NULL,	-- 17
	noUserResponding			NULL,	-- 18
	noAnswereFromUser			NULL,	-- 19  NOTE(review): misspelling ("Answere") kept -- the identifier is part of the protocol interface; do not rename
	portableNotAvailable		NULL,	-- 20
	callRejected			NULL,	-- 21
	numberChanged			NULL,	-- 22
	destinationOutOfOrder		NULL,	-- 27
	invalidNumberFormat		NULL,	-- 28
	facilityRequestRejected		NULL,	-- 29
	responseToStatusEnquiry		NULL,	-- 30
	normalUnspecified			NULL,	-- 31
	noCircuitChannelAvailable		NULL,	-- 34
	networkOutOfOrder			NULL,	-- 38
	temporaryFailure			NULL,	-- 41
	switchingEquipmentCongestion	NULL,	-- 42
	accessInformationDiscarded	NULL,	-- 43
	requestedCircuitChannelNotAvailable	NULL,	-- 44
	resourceUnavailableUnspecified	NULL,	-- 47
	qualityOfServiceUnavailable	NULL,	-- 49
	notSubscribedToRequestedFacility	NULL,	-- 50
	bearerCapabilityNotAuthorized	NULL,	-- 57
	bearerCapabilityNotPresentlyAvailable	NULL,	-- 58
	serviceOrOptionNotAvailableUnspecified	NULL,	-- 63, 79
	bearerCapabilityNotImplemented	NULL,	-- 65
	channelTypeNotImplemented		NULL,	-- 66
	requestedFacilityNotImplemented	NULL,	-- 69
	onlyRestrictedDigitalInformationBcIsAvailable	NULL,	-- 70
	invalidCallReferenceValue		NULL,	-- 81
	incompatibleDestination		NULL,	-- 88
	invalidTransitNetworkSelection	NULL,	-- 91
	invalidMessageUnspecified		NULL,	-- 95
	mandatoryInformationElementIsMissing	NULL,	-- 96
	messageTypeNonexistingOrNotimplemented	NULL,	-- 97
	messageNotCompatibleOrImplemented	NULL,	-- 98
	informationElementNonExisting	NULL,	-- 99
	invalidInformationElementContents	NULL,	-- 100
	messageNotCompatibleWithCallState	NULL,	-- 101
	recoveryOnTimerExpiry		NULL,	-- 102
	protocolErrorUnspecified		NULL,	-- 111
	interworkingUnspecified		NULL,	-- 127
	...
}
-- ---------------------------------
--
-- ChangeServiceAndStatusRejectReason
--
-- @prop dataType
--
-- @descr Reason for the rejection.
--
-- @
--
-- ---------------------------------
ChangeServiceAndStatusRejectReason ::= CHOICE
{
identifierOfServiceNotKnown NULL,
userNotKnown NULL,
undefined NULL,
...
}
-- ---------------------------------
--
-- CheckServiceRejectReason
--
-- @prop dataType
--
-- @descr Reason for the rejection.
--
-- @
--
-- ---------------------------------
CheckServiceRejectReason ::= CHOICE
{
deniedDueToInteraction NULL,
deniedDueToCategories NULL,
undefined NULL,
userNotKnown NULL,
...
}
-- ---------------------------------
--
-- Coding
--
-- @prop dataType
--
-- @descr Origin: Q931
--
-- @
--
-- ---------------------------------
Coding ::= CHOICE
{
ccitt NULL,
ecma NULL,
national NULL,
network NULL,
...
}
-- ---------------------------------
--
-- ConferenceGoal
--
-- @prop dataType
--
-- @descr Type of call setup desire
--
-- @
--
-- ---------------------------------
ConferenceGoal ::= CHOICE
{
create NULL,
join NULL,
invite NULL,
...
}
-- ---------------------------------
--
-- ConferenceIdentifier
--
-- @prop dataType
--
-- @descr Identifier of a conference (presumably the H.225.0 conferenceID,
--        cf. the "-- conferenceID" mappings in ConnectUUIE/FacilityUUIE -- confirm).
--
-- @
--
-- ---------------------------------
ConferenceIdentifier ::= OCTET STRING (SIZE (16))
-- ---------------------------------
--
-- ConnectTransmissionPathRejectReason
--
-- @prop dataType
--
-- @descr Reason for the rejection.
--
-- @
--
-- ---------------------------------
ConnectTransmissionPathRejectReason ::= CHOICE
{
resourcesNotAllocated NULL,
switchFailure NULL,
undefined NULL,
...
}
-- ---------------------------------
--
-- ConnectUUIE
--
-- @prop dataType
--
-- @descr
--
-- @
--
-- ---------------------------------
ConnectUUIE ::= SEQUENCE
{
protocolIdentifier ProtocolIdentifier,
destinationH245Address TransportAddress OPTIONAL, -- h245Address
destinationEndpointType EndpointType, -- destinationInfo
conferenceIdentifier ConferenceIdentifier, -- conferenceID
...
}
-- ---------------------------------
--
-- ConnectionData
--
-- @prop dataType
--
-- @descr This parameter holds connection data that are specific for
-- certain types of Equipments.
-- @
--
-- ---------------------------------
ConnectionData ::= CHOICE
{
timeSlotInformation SEQUENCE OF TimeSlot,
...
}
-- ---------------------------------
--
-- ConnectionIdentifier
--
-- @prop dataType
--
-- @descr Identifier to the connection handler instance.
--
-- @
--
-- ---------------------------------
ConnectionIdentifier ::= ObjectIdentifier
-- ---------------------------------
--
-- ConnectionInformation
--
-- @prop dataType
--
-- @descr This parameter specifies information that is of interest for
--        the functionality handled by component Connection Handler.
-- @
--
-- ---------------------------------
ConnectionInformation ::= SEQUENCE
{
logicalConnectionPointIdentifier LogicalConnectionPointIdentifier,
connectionData ConnectionData OPTIONAL,
...
}
-- ---------------------------------
--
-- ConnectionInformationOriginatingSide
--
-- @prop dataType
--
-- @descr Contains connection information that shall be used for the originating side of the connection.
--
-- @
--
-- ---------------------------------
ConnectionInformationOriginatingSide ::= SEQUENCE
{
bandwidth Bandwidth,
callType CallType,
originatorConnectionInformation ConnectionInformation,
terminatorConnectionInformation ConnectionInformation,
...
}
-- ---------------------------------
--
-- ConnectionInformationTerminatingSide
--
-- @prop dataType
--
-- @descr Contains connection information that shall be used for the terminating side of the connection.
--
-- @
--
-- ---------------------------------
ConnectionInformationTerminatingSide ::= SEQUENCE
{
connectionIdentifier ConnectionIdentifier,
originatorConnectionInformation ConnectionInformation,
...
}
-- ---------------------------------
--
-- ConnectionType
--
-- @prop dataType
--
-- @descr States the type of connection.
--
-- @
--
-- ---------------------------------
ConnectionType ::= CHOICE
{
pointToPoint NULL,
oneToN NULL,
nToOne NULL,
nToN NULL,
...
}
-- ---------------------------------
--
-- CreateEquipmentRepresentationRejectReason
--
-- @prop dataType
--
-- @descr States the reason for the rejection.
--
-- @
--
-- ---------------------------------
CreateEquipmentRepresentationRejectReason ::= CHOICE
{
equipmentRepresentationAlreadyExist NULL,
undefined NULL,
...
}
-- ---------------------------------
--
-- CreateServiceAndStatusRejectReason
--
-- @prop dataType
--
-- @descr Reason for the rejection.
--
-- @
--
-- ---------------------------------
CreateServiceAndStatusRejectReason ::= CHOICE
{
undefined NULL,
...
}
-- ---------------------------------
--
-- CreateServiceIdentifierRejectReason
--
-- @prop dataType
--
-- @descr Reason for the rejection.
--
-- @
--
-- ---------------------------------
CreateServiceIdentifierRejectReason ::= CHOICE
{
keyNotKnown NULL,
undefined NULL,
...
}
-- ---------------------------------
--
-- DeallocateTransmissionPathRejectReason
--
-- @prop dataType
--
-- @descr Reason for the rejection.
--
-- @
--
-- ---------------------------------
DeallocateTransmissionPathRejectReason ::= CHOICE
{
resourcesNotAllocated NULL,
undefined NULL,
...
}
-- ---------------------------------
--
-- DetailedReasonAtom
--
-- @prop dataType
-- @descr This data type indicates the release information of a forced drop
-- during a call.
-- @
--
-- ---------------------------------
DetailedReasonAtom ::= CHOICE
{
internalDataMissmatch NULL,
destinationUserIdentifierNotKnown NULL,
rejectedDueToCategories NULL,
rejectedDueToResources NULL,
failedToOpenDestinationCallSignallingPort NULL,
theRequestedServiceIsNotSupported NULL,
undefined NULL,
...
}
-- ---------------------------------
--
-- Diagnostics
--
-- @prop dataType
--
-- @descr Origin: Q931
--
-- @
--
-- ---------------------------------
Diagnostics ::= INTEGER(1..127)
-- ---------------------------------
--
-- DisconnectTransmissionPathRejectReason
--
-- @prop dataType
--
-- @descr Reason for the rejection.
--
-- @
--
-- ---------------------------------
DisconnectTransmissionPathRejectReason ::= CHOICE
{
resourcesNotAllocated NULL,
switchFailure NULL,
switchNotConnected NULL,
undefined NULL,
...
}
-- ---------------------------------
--
-- DisengageReason
--
-- @prop dataType
-- @descr the reason why a change was requested by the gatekeeper or the terminal.
-- @
-- ---------------------------------
DisengageReason ::= CHOICE
{
forcedDrop NULL,
normalDrop NULL,
undefinedReason NULL,
...
}
-- ---------------------------------
--
-- DisengageRejectReason
--
-- @prop dataType
--
-- @descr
--
-- @
--
-- ---------------------------------
DisengageRejectReason ::= CHOICE
{
notRegistered NULL,
requestToDropOther NULL,
...
}
-- ---------------------------------
--
-- Display
--
-- @prop dataType
--
-- @descr Origin: Q931
--
-- @
--
-- ---------------------------------
Display ::= OCTET STRING (SIZE(1..82))
-- ---------------------------------
--
-- E164Identifier
--
-- @prop dataType
--
-- @descr Identifier for the user identifier of the type E.164.
--
-- @
--
-- ---------------------------------
E164Identifier ::= IA5String (SIZE (1..128)) (FROM ("0123456789#*,"))
-- ---------------------------------
--
-- EndToEndEndpointInformationServiceCallAcknowledge
--
-- @prop dataType
--
-- @descr Information that shall be sent end to end.
--
-- @
--
-- ---------------------------------
EndToEndEndpointInformationServiceCallAcknowledge ::= SEQUENCE
{
bearerCapability BearerCapability OPTIONAL,
highLayerCompatibility HighLayerCompatibility OPTIONAL,
progressIndicator ProgressIndicator OPTIONAL,
userToUserQ931Information UserInformation OPTIONAL,
userToUserH323AcknowledgeInformation AlertingUUIE OPTIONAL,
...
}
-- ---------------------------------
--
-- EndToEndEndpointInformationServiceCallActive
--
-- @prop dataType
--
-- @descr Information that shall be sent end to end.
--
-- @
--
-- ---------------------------------
EndToEndEndpointInformationServiceCallActive ::= SEQUENCE
{
bearerCapability BearerCapability OPTIONAL,
highLayerCompatibility HighLayerCompatibility OPTIONAL,
lowLayerCompatibility LowLayerCompatibility OPTIONAL,
progressIndicator ProgressIndicator OPTIONAL,
userToUserQ931Information UserInformation OPTIONAL,
userToUserH323ActiveInformation ConnectUUIE OPTIONAL,
...
}
-- ---------------------------------
--
-- EndToEndEndpointInformationServiceCallProgress
--
-- @prop dataType
--
-- @descr Information that shall be sent end to end.
--
-- @
--
-- ---------------------------------
EndToEndEndpointInformationServiceCallProgress ::=SEQUENCE
{
cause Cause OPTIONAL,
highLayerCompatibility HighLayerCompatibility OPTIONAL,
progressIndicator ProgressIndicator OPTIONAL,
userToUserQ931Information UserInformation OPTIONAL,
...
}
-- ---------------------------------
--
-- EndToEndEndpointInformationServiceCallSetup
--
-- @prop dataType
--
-- @descr Information that shall be sent end to end.
--
-- @
--
-- ---------------------------------
EndToEndEndpointInformationServiceCallSetup ::=SEQUENCE
{
bearerCapability BearerCapability OPTIONAL,
calledNumber Number OPTIONAL,
calledSubaddress Subaddress OPTIONAL,
callingNumber Number OPTIONAL,
callingSubaddress Subaddress OPTIONAL,
highLayerCompatibility HighLayerCompatibility OPTIONAL,
lowLayerCompatibility LowLayerCompatibility OPTIONAL,
progressIndicator ProgressIndicator OPTIONAL,
repeatIndicator RepeatIndicator OPTIONAL,
userToUserQ931Information UserInformation OPTIONAL,
userToUserH323SetupInformation SetupUUIE OPTIONAL,
...
}
-- ---------------------------------
--
-- EndToEndEndpointInformationServiceCallTermination
--
-- @prop dataType
--
-- @descr Information that shall be sent end to end.
--
-- @
--
-- ---------------------------------
EndToEndEndpointInformationServiceCallTermination ::=SEQUENCE
{
cause Cause OPTIONAL,
progressIndicator ProgressIndicator OPTIONAL,
userToUserQ931Information UserInformation OPTIONAL,
userToUserH323TerminationInformation ReleaseCompleteUUIE OPTIONAL,
...
}
-- ---------------------------------
--
-- EndpointIdentifier
--
-- @prop dataType
--
--
-- @
--
-- ---------------------------------
EndpointIdentifier ::= BMPString (SIZE(1..128)) -- change from SIZE(128)
-- ---------------------------------
--
-- EndpointRegistrationCategories
--
-- @prop dataType
--
-- @descr Categories for the service endpoint registration.
--
-- @
-- ---------------------------------
EndpointRegistrationCategories ::= SEQUENCE
{
... -- So far, no specific categories identified
}
-- ---------------------------------
--
-- EndpointRegistrationRejectReason
--
-- @prop dataType
--
--
-- @
--
-- ---------------------------------
EndpointRegistrationRejectReason ::= CHOICE
{
attemptToChangeEndpoint NULL,
requestedUserNotKnown NULL,
endpointTypeNotKnown NULL,
undefined NULL,
...
}
-- ---------------------------------
--
-- EndpointType
--
-- @prop dataType
--
-- @descr
--
-- @
--
-- ---------------------------------
EndpointType ::= SEQUENCE
{
nonStandardData NonStandardParameter OPTIONAL,
vendor VendorIdentifier OPTIONAL,
gatekeeper GatekeeperInformation OPTIONAL,
gateway GatewayInformation OPTIONAL,
mcu McuInformation OPTIONAL,
terminal TerminalInformation OPTIONAL,
mc BOOLEAN,
undefinedNode BOOLEAN,
...
}
-- ---------------------------------
--
-- EndpointUnregistrationCategories
--
-- @prop dataType
--
-- @descr Categories for the service endpoint unregistration.
--
-- @
-- ---------------------------------
EndpointUnregistrationCategories ::= SEQUENCE
{
... -- So far, no specific categories identified
}
-- ---------------------------------
--
-- EndpointUnregistrationRejectReason
--
-- @prop dataType
--
-- @descr
--
-- @
--
-- ---------------------------------
EndpointUnregistrationRejectReason ::= CHOICE
{
permissionDenied NULL,
userNotKnown NULL,
undefined NULL,
...
}
-- ---------------------------------
--
-- EquipmentAddressAN
--
-- @prop dataType
--
-- @descr States the address for a certain equipment connected
-- to the Access Node.
-- @
--
-- ---------------------------------
EquipmentAddressAN ::= SEQUENCE
{
--TBD by SEA,
...
}
-- ---------------------------------
--
-- EquipmentAddressLAN
--
-- @prop dataType
--
-- @descr States the transport address for a certain equipment
--
-- @
--
-- ---------------------------------
EquipmentAddressLAN ::= SEQUENCE
{
transportAddresses SEQUENCE OF TransportAddress,
...
}
-- ---------------------------------
--
-- EquipmentRelatedInformation
--
-- @prop dataType
--
-- @descr Contains the retreived data.
--
-- @
--
-- ---------------------------------
EquipmentRelatedInformation ::= CHOICE
{
logicalConnectionPointIdentifier LogicalConnectionPointIdentifier,
registrationStatus RegistrationStatus,
stateOfEquipment StateOfEquipment,
typeOfEquipment TypeOfEquipment,
...
}
-- ---------------------------------
--
-- EquipmentRelatedInformationIdentifier
--
--
-- @prop dataType
--
-- @descr This parameter specifies different types of data
-- that are specific to a certain equipment.
--
-- @
-- ---------------------------------
EquipmentRelatedInformationIdentifier ::= CHOICE
{
logicalConnectionPointIdentifier NULL,
registrationStatus NULL,
stateOfEquipment NULL,
typeOfEquipment NULL,
...
}
-- ---------------------------------
--
-- FacilityReason
--
-- @prop dataType
--
--
-- @
--
-- ---------------------------------
FacilityReason ::= CHOICE
{
routeCallToGatekeeper NULL,
callForwarded NULL,
routeCallToMC NULL,
undefinedReason NULL,
...
}
-- ---------------------------------
--
-- FacilityUUIE
--
-- @prop dataType
--
--
-- @
--
-- ---------------------------------
FacilityUUIE ::= SEQUENCE
{
protocolIdentifier ProtocolIdentifier,
alternativeH245Address TransportAddress OPTIONAL, -- alternativeAddress
alternativeUserIdentifierInformation UserIdentifierInformation OPTIONAL, -- alternativeAliasAddress
conferenceIdentifier ConferenceIdentifier OPTIONAL, -- conferenceID
facilityReason FacilityReason, -- reason
...
}
-- ---------------------------------
--
-- GatekeeperIdentifier
--
-- @prop dataType
--
-- @descr
--
-- @
--
-- ---------------------------------
GatekeeperIdentifier ::= BMPString (SIZE(1..128))
-- ---------------------------------
--
-- GatekeeperInformation
--
-- @prop dataType
--
-- @descr
--
-- @
--
-- ---------------------------------
GatekeeperInformation ::= SEQUENCE
{
nonStandardData NonStandardParameter OPTIONAL,
...
}
-- ---------------------------------
--
-- GatekeeperRejectReason
--
-- @prop dataType
--
-- @descr
--
-- @
--
-- ---------------------------------
GatekeeperRejectReason ::= CHOICE
{
resourceUnavailable NULL,
terminalExcluded NULL,
invalidRevision NULL,
undefinedReason NULL,
...
}
-- ---------------------------------
--
-- GatewayInformation
--
-- @prop dataType
--
-- @descr
--
-- @
--
-- ---------------------------------
GatewayInformation ::= SEQUENCE
{
protocol SEQUENCE OF SupportedProtocols OPTIONAL,
nonStandardData NonStandardParameter OPTIONAL,
...
}
-- ---------------------------------
--
-- GetAnalysisRejectReason
--
-- @prop dataType
--
-- @descr This parameter states the reason for the rejection.
--
-- @
--
-- ---------------------------------
GetAnalysisRejectReason ::= CHOICE
{
noDataStored NULL,
undefined NULL,
...
}
-- ---------------------------------
--
-- GetEquipmentInformationRejectReason
--
-- @prop dataType
--
-- @descr Reason for the rejection.
--
-- @
--
-- ---------------------------------
GetEquipmentInformationRejectReason ::= CHOICE
{
equipmentUnknown NULL,
undefined NULL,
...
}
-- ---------------------------------
--
-- GetLANDataRejectReason
--
-- @prop dataType
--
-- @descr States the reason for the rejection.
--
-- @
--
-- ---------------------------------
GetLANDataRejectReason ::= CHOICE
{
noDataStored NULL,
undefined NULL,
...
}
-- ---------------------------------
--
-- GetPartyInformationRejectReason
--
-- @prop dataType
--
-- @descr Reason for the rejection.
--
-- @
--
-- ---------------------------------
GetPartyInformationRejectReason ::= CHOICE
{
noEquipmentAvailable NULL,
userNotKnown NULL,
undefined NULL,
...
}
-- ---------------------------------
--
-- GetRejectReasonUser
--
-- @prop dataType
--
-- @descr This parameter states the reason for the rejection.
--
-- @
--
-- ---------------------------------
GetRejectReasonUser ::= CHOICE
{
keyNotKnown NULL,
undefined NULL,
...
}
-- ---------------------------------
--
-- GetServiceFromServiceProfileRejectReason
--
-- @prop dataType
--
-- @descr This parameter states the reason for the rejection.
--
-- @
--
-- ---------------------------------
GetServiceFromServiceProfileRejectReason ::= CHOICE
{
keyNotValid NULL,
serviceDoNotExist NULL,
undefined NULL,
...
}
-- ---------------------------------
--
-- GetServiceProfileRejectReason
--
-- @prop dataType
--
-- @descr
--
-- @
--
-- ---------------------------------
GetServiceProfileRejectReason ::= CHOICE
{
userNotKnown NULL,
undefined NULL,
...
}
-- ---------------------------------
--
-- GetServicesAndStatusRejectReason
--
-- @prop dataType
--
-- @descr Reason for the rejection.
--
-- @
--
-- ---------------------------------
GetServicesAndStatusRejectReason ::= CHOICE
{
userNotKnown NULL,
undefined NULL,
...
}
-- ---------------------------------
--
-- GetUserServiceInformationAndStatusRejectReason
--
-- @prop dataType
--
-- @descr Reason for the rejection.
--
-- @
--
-- ---------------------------------
GetUserServiceInformationAndStatusRejectReason ::= CHOICE
{
undefined NULL,
userNotKnown NULL,
...
}
-- ---------------------------------
--
-- H221NonStandard
-- @prop dataType
--
-- @descr Gives non standard information about the standard protocol H.221.
-- @
--
-- ---------------------------------
H221NonStandard ::= SEQUENCE
{
	t35CountryCode		INTEGER(0..255),
	t35Extension		INTEGER(0..255),
	manufacturerCode		INTEGER(0..65535),
	...
}
-- ---------------------------------
--
-- H310Information
-- @prop dataType
-- @descr Gives detailed information about the standard protocol H.310.
-- @
--
-- ---------------------------------
H310Information ::= SEQUENCE
{
nonStandardData NonStandardParameter OPTIONAL,
...
}
-- ---------------------------------
--
-- H320Information
-- @prop dataType
--
-- @descr Gives detailed information about the standard protocol H.320.
-- @
--
-- ---------------------------------
H320Information ::= SEQUENCE
{
nonStandardData NonStandardParameter OPTIONAL,
...
}
-- ---------------------------------
--
-- H321Information
--
-- @prop dataType
-- @descr Gives detailed information about the standard protocol H.321.
-- @
--
-- ---------------------------------
H321Information ::= SEQUENCE
{
nonStandardData NonStandardParameter OPTIONAL,
...
}
-- ---------------------------------
--
-- H322Information
--
-- @prop dataType
-- @descr Gives detailed information about the standard protocol H.322.
-- @
--
-- ---------------------------------
H322Information ::= SEQUENCE
{
nonStandardData NonStandardParameter OPTIONAL,
...
}
-- ---------------------------------
--
-- H323Information
--
-- @prop dataType
-- @descr Gives detailed information about the standard protocol H.323.
-- @
--
-- ---------------------------------
H323Information ::= SEQUENCE
{
nonStandardData NonStandardParameter OPTIONAL,
...
}
-- ---------------------------------
--
-- H323InterfaceAddCallReferenceRejectReason
--
-- @prop dataType
-- @descr Reason for the rejection.
-- @
--
-- ---------------------------------
H323InterfaceAddCallReferenceRejectReason ::= CHOICE
{
keyNotValid NULL,
requestedCallReferenceAlreadyInUse NULL,
undefined NULL,
...
}
-- ---------------------------------
--
-- H323InterfaceAddCallRelatedDataRejectReason
--
-- @prop dataType
-- @descr Reason for the rejection.
-- @
--
-- ---------------------------------
H323InterfaceAddCallRelatedDataRejectReason ::= CHOICE
{
callReferenceNotValid NULL,
keyNotValid NULL,
callRelatedDataAlredyStored NULL,
undefined NULL,
...
}
-- ---------------------------------
--
-- H323InterfaceAddFixedTransportAddressDataRejectReason
--
-- @prop dataType
-- @descr Reason for the rejection.
-- @
--
-- ---------------------------------
H323InterfaceAddFixedTransportAddressDataRejectReason ::= CHOICE
{
fixedTransportAddressDataAlredyStored NULL,
undefined NULL,
...
}
-- ---------------------------------
--
-- H323InterfaceAddKeysAndSetAttributesRejectReason
--
-- @prop dataType
--
-- @descr This parameter states the reason for the rejection.
--
-- @
--
-- ---------------------------------
H323InterfaceAddKeysAndSetAttributesRejectReason ::= CHOICE
{
existingKeyNotValid NULL,
newKeyAlreadyExists NULL,
newKeyNotValid NULL,
undefined NULL,
...
}
-- ---------------------------------
--
-- H323InterfaceAdditionalKeys
--
-- @prop dataType
-- @descr Additional keys for an instance of the type H.323Interface.
-- @
-- ---------------------------------
H323InterfaceAdditionalKeys ::= SEQUENCE
{
endpointCallSignallingAddresses SEQUENCE OF TransportAddress,
endpointRASAddresses SEQUENCE OF TransportAddress,
...
}
-- ---------------------------------
--
-- H323InterfaceAllocateResourceRejectReason
--
-- @prop dataType
-- @descr Reason for the rejection.
-- @
--
-- ---------------------------------
H323InterfaceAllocateResourceRejectReason ::= CHOICE
{
	callReferenceNotValid	NULL,
	keyNotValid		NULL,
	resourceNotAvailable	NULL,
	undefined		NULL,
	...
}
-- ---------------------------------
--
-- H323InterfaceChangeKeysAndRelationsToUsersReject
--
-- @prop dataType
--
-- @descr This parameter states the reason for the rejection.
--
-- @
--
-- ---------------------------------
H323InterfaceChangeKeysAndRelationsToUsersReject ::= CHOICE
{
	firstKeyNotValid	NULL,
	secondKeyNotValid	NULL,
	undefined		NULL,
	...
}
-- ---------------------------------
--
-- H323InterfaceCommonAttribute
--
-- @prop dataType
--
-- @descr This parameter contains the attributes which holds data
--        that are common for all objects of the type H.323Interface.
--
-- @
--
-- ---------------------------------
H323InterfaceCommonAttribute ::= CHOICE
{
	gatekeeperCallSignallingAddressData	CHOICE
	{
		gatekeeperCallSignallingAddresses	SEQUENCE OF TransportAddress,
		undefined				NULL,
		...
	},
	gatekeeperRASAddressInformation		CHOICE
	{
		gatekeeperRASAddressData	SEQUENCE
		{
			multicastRASAddress	TransportAddress,
			gatekeeperRASAddress	TransportAddress,
			...
		},
		undefined			NULL,
		...
	},
	q931Timer301Value			Q931Timer301Value,
	q931Timer303Value			Q931Timer303Value,
	rasMessageTimerValue			RASMessageTimerValue,
	...
}
-- ---------------------------------
--
-- H323InterfaceCommonAttributeIdentifier
--
-- @prop dataType
--
-- @descr This parameter contains the attribute identifiers of the
--        attributes which holds data that are common for all objects
--        of the type H.323Interface.
--        NOTE(review): alternatives mirror H323InterfaceCommonAttribute.
--
-- @
--
-- ---------------------------------
H323InterfaceCommonAttributeIdentifier ::= CHOICE
{
	gatekeeperCallSignallingAddresses	NULL,
	gatekeeperRASAddress			NULL,
	q931Timer301Value			NULL,
	q931Timer303Value			NULL,
	rasMessageTimerValue			NULL,
	...
}
-- ---------------------------------
--
-- H323InterfaceCreateCallReferenceRejectReason
--
-- @prop dataType
-- @descr Reason for the rejection.
-- @
-- ---------------------------------
H323InterfaceCreateCallReferenceRejectReason ::= CHOICE
{
	keyNotValid			NULL,
	noCallReferenceAvailable	NULL,
	undefined			NULL,
	...
}
-- ---------------------------------
--
-- H323InterfaceCreateRejectReason
--
-- @prop dataType
--
-- @descr This parameter states the reason for the rejection.
--
-- @
--
-- ---------------------------------
H323InterfaceCreateRejectReason ::= CHOICE
{
	keyAlreadyInUse	NULL,
	undefined	NULL,
	...
}
-- ---------------------------------
--
-- H323InterfaceDeallocateResourceRejectReason
--
-- @prop dataType
-- @descr Reason for the rejection.
-- @
-- ---------------------------------
H323InterfaceDeallocateResourceRejectReason ::= CHOICE
{
	resourceNotAllocated	NULL,
	callReferenceNotValid	NULL,
	keyNotValid		NULL,
	undefined		NULL,
	...
}
-- ---------------------------------
--
-- H323InterfaceGetFixedTransportAddressDataRejectReason
--
-- @prop dataType
-- @descr Reason for the rejection.
-- @
-- ---------------------------------
H323InterfaceGetFixedTransportAddressDataRejectReason ::= CHOICE
{
	noDataStoredForThisTransportAddress	NULL,
	noFixedTransportAddressDataStored	NULL,
	undefined				NULL,
	...
}
-- ---------------------------------
--
-- H323InterfaceGetOrRemoveCallRelatedDataRejectReason
--
-- @prop dataType
-- @descr Reason for the rejection.
-- @
--
-- ---------------------------------
H323InterfaceGetOrRemoveCallRelatedDataRejectReason ::= CHOICE
{
	callReferenceNotValid	NULL,
	keyNotValid		NULL,
	noCallRelatedDataStored	NULL,
	undefined		NULL,
	...
}
-- ---------------------------------
--
-- H323InterfaceGetOrSetCommonRejectReason
--
-- @prop dataType
--
-- @descr This parameter states the reason for the rejection.
--
-- @
--
-- ---------------------------------
H323InterfaceGetOrSetCommonRejectReason ::= CHOICE
{
	undefined	NULL,
	...
}
-- ---------------------------------
--
-- H323InterfaceGetOrSetInstanceRejectReason
--
-- @prop dataType
--
-- @descr This parameter states the reason for the rejection.
--
-- @
--
-- ---------------------------------
H323InterfaceGetOrSetInstanceRejectReason ::= CHOICE
{
	keyNotValid	NULL,
	undefined	NULL,
	...
}
-- ---------------------------------
--
-- H323InterfaceInstanceAttribute
--
-- @prop dataType
--
-- @descr This parameter contains the attributes which holds data
--        that are specific for a h323Interface object.
--
-- @
--
-- ---------------------------------
H323InterfaceInstanceAttribute ::= CHOICE
{
	endpointCallSignallingAddresses		SEQUENCE OF TransportAddress,
	-- NOTE(review): "Ras" casing differs from "endpointRASAddresses" used in
	-- H323InterfaceInstanceAttributeIdentifier below; kept as is since renaming
	-- would change the abstract syntax. Confirm which spelling callers expect.
	endpointRasAddresses			SEQUENCE OF TransportAddress,
	registrationStatus			RegistrationStatus,
	gatekeeperCallSignallingAddress		TransportAddress,
	maximumTotalBandwidthForInterface	Bandwidth,
	preStringsToRemoveInDestinationAddress	SEQUENCE OF PreStringToRemoveInDestinationAddress,
	relationToH2250CallSignalling		Pid,
	relationToUser				RelationToUser,
	typeOfEquipment				TypeOfEquipment,
	...
}
-- ---------------------------------
--
-- H323InterfaceInstanceAttributeIdentifier
--
-- @prop dataType
--
-- @descr This parameter contains the attribute identifiers of the
--        attributes which holds data that are specific for a
--        h323Interface object.
--
-- @
--
-- ---------------------------------
H323InterfaceInstanceAttributeIdentifier ::= CHOICE
{
	endpointCallSignallingAddresses		NULL,
	endpointRASAddresses			NULL,
	registrationStatus			NULL,
	gatekeeperCallSignallingAddress		NULL,
	maximumTotalBandwidthForInterface	NULL,
	preStringsToRemoveInDestinationAddress	NULL,
	relationToH2250CallSignalling		NULL,
	relationToUser				NULL,
	typeOfEquipment				NULL,
	...
}
-- ---------------------------------
--
-- H323InterfaceKey
--
-- @prop dataType
-- @descr Allowed keys for an instance of the type H.323Interface.
-- @
-- ---------------------------------
H323InterfaceKey ::= CHOICE
{
	endpointIdentifier		EndpointIdentifier,
	endpointCallSignallingAddresses	SEQUENCE OF TransportAddress,
	endpointRASAddresses		SEQUENCE OF TransportAddress,
	...
}
-- ---------------------------------
--
-- H323InterfaceKeyEndpointIdentifier
--
-- @descr Endpoint identifier used as a key for an instance of the type H.323Interface.
--
-- ---------------------------------
H323InterfaceKeyEndpointIdentifier ::= SEQUENCE
{
	endpointIdentifier	EndpointIdentifier,
	...
}
-- ---------------------------------
--
-- H323InterfaceReduceBandwidthRejectReason
--
-- @prop dataType
-- @descr Reason for the rejection.
-- @
--
-- ---------------------------------
H323InterfaceReduceBandwidthRejectReason ::= CHOICE
{
	bandwidthNotAllocated				NULL,
	callReferenceNotValid				NULL,
	keyNotValid					NULL,
	newBandwidthHigherThanAllocatedBandwidth	NULL,
	undefined					NULL,
	...
}
-- ---------------------------------
--
-- H323InterfaceRemoveCallReferenceRejectReason
--
-- @prop dataType
-- @descr Reason for the rejection.
-- @
--
-- ---------------------------------
H323InterfaceRemoveCallReferenceRejectReason ::= CHOICE
{
	callReferenceNotStored	NULL,
	keyNotValid		NULL,
	undefined		NULL,
	...
}
-- ---------------------------------
--
-- H323InterfaceRemoveFixedTransportAddressDataRejectReason
-- @prop dataType
-- @descr Reason for the rejection.
-- @
-- ---------------------------------
H323InterfaceRemoveFixedTransportAddressDataRejectReason ::= CHOICE
{
	noDataStoredForThisTransportAddress	NULL,
	undefined				NULL,
	...
}
-- ---------------------------------
--
-- H323InterfaceRemoveKeysAndSetAttributesRejectReason
--
-- @prop dataType
--
-- @descr This parameter states the reason for the rejection.
--
-- @
--
-- ---------------------------------
H323InterfaceRemoveKeysAndSetAttributesRejectReason ::= CHOICE
{
	keysNotValid	NULL,
	undefined	NULL,
	...
}
-- ---------------------------------
--
-- H323InterfaceRemoveRejectReason
--
-- @prop dataType
--
-- @descr This parameter states the reason for the rejection.
--
-- @
--
-- ---------------------------------
H323InterfaceRemoveRejectReason ::= CHOICE
{
	keyNotValid		NULL,
	serviceInProgress	NULL,
	undefined		NULL,
	...
}
-- ---------------------------------
--
-- H324Information
-- @prop dataType
--
-- @descr Gives detailed information about the standard protocol H.324.
-- @
--
-- ---------------------------------
H324Information ::= SEQUENCE
{
	nonStandardData	NonStandardParameter OPTIONAL,
	...
}
-- @prop dataType
-- @descr Origin: Q931
-- @
HighLayerCompatibility ::= SEQUENCE
{
	...
}
-- ---------------------------------
--
-- InterfaceRegistrationInformation
-- @prop dataType
--
-- @descr This parameter specifies the current registration status of an
--        endpoint's registration request.
-- @
--
-- ---------------------------------
InterfaceRegistrationInformation ::= SEQUENCE
{
	isInterfaceRegistered	BOOLEAN,
	relationToH323User	EndpointIdentifier OPTIONAL,
	...
}
-- ---------------------------------
--
-- LANAttribute
--
-- @prop dataType
-- @descr This parameter contains a LAN attribute value.
-- @
--
-- ---------------------------------
LANAttribute ::= CHOICE
{
	maximumTotalBandwidth			MaximumTotalBandwidth,
	maximumNumberOfAllowedConnections	MaximumNumberOfAllowedConnections,
	numberOfTimesLANWasCrowded		NumberOfTimesLANWasCrowded,
	typeOfFlowControl			TypeOfFlowControl,
	typeOfLAN				TypeOfLAN,
	...
}
-- ---------------------------------
--
-- LANAttributeIdentifier
--
-- @prop dataType
-- @descr This parameter contains a LAN attribute identifier.
--        NOTE(review): alternatives mirror LANAttribute.
-- @
--
-- ---------------------------------
LANAttributeIdentifier ::= CHOICE
{
	maximumTotalBandwidth			NULL,
	maximumNumberOfAllowedConnections	NULL,
	numberOfTimesLANWasCrowded		NULL,
	typeOfFlowControl			NULL,
	typeOfLAN				NULL,
	...
}
-- @prop dataType
-- @descr Origin: Q931
-- @
Layer1ProtUserInfo ::= CHOICE
{
	g711u-law		NULL,
	g711a-law		NULL,
	h323VidephoneCall	NULL,	-- the meaning of "5" in H323
	h221Andh242		NULL,	-- the meaning of "5" in Q931
	...
}
-- @prop dataType
-- @descr Origin: Q931
-- @
Location ::= CHOICE
{
	user			NULL,
	localPrivateNetwork	NULL,
	localPublicNetwork	NULL,
	transitNetwork		NULL,
	remotePublicNetwork	NULL,
	remotePrivateNetwork	NULL,
	internationalNetwork	NULL,
	beyondInterworkingPoint	NULL,
	...
}
-- ---------------------------------
--
-- LocationRejectReason
-- @prop dataType
--
-- @descr Reason for rejecting a location request.
-- @
-- ---------------------------------
LocationRejectReason ::= CHOICE
{
	notRegistered		NULL,
	invalidPermission	NULL,
	requestDenied		NULL,
	undefinedReason		NULL,
	...
}
-- ---------------------------------
--
-- LogicalConnectionPointIdentifier
--
-- @prop dataType
-- @descr Identifier of the logical connection point.
-- @
--
-- ---------------------------------
LogicalConnectionPointIdentifier ::= INTEGER (0..65535)
--
-- Created by :
-- Creation date :
-- Modified by :
-- Modification date :
-- Version :
--
-- @prop dataType
-- @descr origin Q931
-- @
-- NOTE(review): empty SEQUENCE with no extension marker, unlike
-- HighLayerCompatibility above which is extensible; confirm this is intended.
LowLayerCompatibility ::= SEQUENCE
{
}
-- ---------------------------------
--
-- MaximumNumberOfAllowedConnections
--
-- @prop dataType
-- @descr States the maximum number of allowed connections.
-- @
--
-- ---------------------------------
MaximumNumberOfAllowedConnections ::= CHOICE
{
	maximumNumberOfAllowedConnectionsValue	INTEGER ( 0.. 999999999),
	undefined				NULL,
	...
}
-- ---------------------------------
--
-- MaximumTotalBandwidth
-- @prop dataType
-- @descr States the maximum total bandwidth.
-- @
-- ---------------------------------
MaximumTotalBandwidth ::= CHOICE
{
	maximumTotalBandwidthValue	Bandwidth,
	undefined			NULL,
	...
}
-- ---------------------------------
--
-- McuInformation
-- @prop dataType
--
-- @descr Gives detailed information about the endpoint type, MCU.
-- @
-- ---------------------------------
McuInformation ::= SEQUENCE
{
	nonStandardData	NonStandardParameter OPTIONAL,
	...
}
-- ---------------------------------
--
-- NonStandardIdentifier
-- @prop dataType
--
-- @descr Identifies a non-standard extension, either by OID or by H.221 code.
-- @
--
-- ---------------------------------
NonStandardIdentifier ::= CHOICE
{
	object		OBJECT IDENTIFIER,
	h221NonStandard	H221NonStandard,
	...
}
-- ---------------------------------
--
-- NonStandardMessage
-- @prop dataType
--
-- @descr Message carrying only non-standard data.
-- @
--
-- ---------------------------------
NonStandardMessage ::= SEQUENCE
{
	requestSeqNum	RequestSeqNum,
	nonStandardData	NonStandardParameter,
	...
}
-- ---------------------------------
--
-- NonStandardParameter
--
-- @prop dataType
-- @
-- ---------------------------------
NonStandardParameter ::= SEQUENCE
{
	nonStandardIdentifier	NonStandardIdentifier,
	data			OCTET STRING,
	...
}
-- @prop dataType
-- @descr Origin: Q931
-- @
Number ::= SEQUENCE
{
	type		NumberType,
	indicator	NumberingPlan,
	number		IA5String (FROM ("0123456789#*")),
	...
}
-- ---------------------------------
--
-- NumberOfTimesLANWasCrowded
-- @prop dataType
--
-- @descr States the number of times the LAN has been crowded,
--        i.e. the endpoints have released the initiated call due to
--        heavy load in the LAN.
-- @
--
-- ---------------------------------
NumberOfTimesLANWasCrowded ::= CHOICE
{
	numberOfTimesLANWasCrowdedValue	INTEGER ( 0.. 999999999),
	undefined			NULL,
	...
}
-- @prop dataType
-- @descr Origin: Q931
-- @
NumberType ::= CHOICE
{
	unknown		NULL,
	international	NULL,
	national	NULL,
	network		NULL,
	local		NULL,
	abbreviated	NULL,
	...
}
-- @prop dataType
-- @descr Origin: Q931
-- @
NumberingPlan ::= CHOICE
{
	unknown		NULL,
	e164		NULL,
	data		NULL,
	telex		NULL,
	national	NULL,
	private		NULL,
	...
}
-- ---------------------------------
--
-- ObjectIdentifier
--
-- @prop dataType
-- @descr An identifier to a certain instance of an object.
-- @
--
-- ---------------------------------
ObjectIdentifier ::= OCTET STRING
-- ---------------------------------
--
-- PhysicalConnectionPointIdentifier
--
-- @prop dataType
-- @descr Contains data that identifies a specific equipment instance.
-- @
--
-- ---------------------------------
PhysicalConnectionPointIdentifier ::= CHOICE
{
	equipmentAN	EquipmentAddressAN,	-- Equipment connected to the Access Node.
	equipmentLAN	EquipmentAddressLAN,	-- Equipment connected to the LAN.
	...
}
-- ---------------------------------
--
-- Pid
-- @prop dataType
--
-- @descr A process identifier.
-- @
--
-- ---------------------------------
Pid ::= ObjectIdentifier
-- ---------------------------------
--
-- PreStringToRemoveInDestinationAddress
--
-- @prop dataType
--
-- @descr A pre-string that shall be removed when sending the destination address.
--
-- @
--
-- ---------------------------------
PreStringToRemoveInDestinationAddress ::= CHOICE
{
	e164	IA5String (SIZE (1..128)) (FROM ("0123456789,")),
	h323	BMPString (SIZE (1..256)),
	-- h323 is Basic ISO/IEC 10646-1 (Unicode)
	...
}
-- @prop dataType
-- @descr Origin: Q931
-- @
-- NOTE(review): empty, non-extensible SEQUENCE; confirm this is intended.
ProgressIndicator ::= SEQUENCE
{
}
-- ---------------------------------
--
-- ProtocolIdentifier
--
-- @prop dataType
-- @descr Identifies the protocol (and version) in use.
-- @
--
-- ---------------------------------
ProtocolIdentifier ::= OBJECT IDENTIFIER
-- ---------------------------------
--
-- Q931Timer301Value
-- @prop dataType
--
-- @descr States the Q931 timer 301 value to be used in milliseconds.
-- @
--
-- ---------------------------------
Q931Timer301Value ::= INTEGER ( 180000.. 360000 )
-- ---------------------------------
--
-- Q931Timer303Value
--
-- @prop dataType
-- @descr States the Q931 timer 303 value to be used in milliseconds.
-- @
--
-- ---------------------------------
Q931Timer303Value ::= INTEGER ( 1000.. 10000 )
-- ---------------------------------
--
-- Q954Details
--
-- @prop dataType
-- @descr Q.954 supplementary-service support flags.
-- @
--
-- ---------------------------------
Q954Details ::= SEQUENCE
{
	conferenceCalling	BOOLEAN,
	threePartyService	BOOLEAN,
	...
}
-- ---------------------------------
--
-- QseriesOptions
--
-- @prop dataType
-- @
--
-- ---------------------------------
QseriesOptions ::=SEQUENCE
{
	q932Full	BOOLEAN,
	q951Full	BOOLEAN,
	q952Full	BOOLEAN,
	q953Full	BOOLEAN,
	q955Full	BOOLEAN,
	q956Full	BOOLEAN,
	q957Full	BOOLEAN,
	q954Info	Q954Details,
	...
}
-- ---------------------------------
--
-- RASMessageTimerValue
--
-- @prop dataType
-- @descr States the RAS message timer value to be used in milliseconds.
-- @
--
-- ---------------------------------
RASMessageTimerValue ::= INTEGER ( 1000.. 10000 )
-- ---------------------------------
--
-- RTPSession
--
-- @prop dataType
-- @
--
-- ---------------------------------
RTPSession ::= SEQUENCE
{
	rtpAddress		TransportChannelInformation,
	rtcpAddress		TransportChannelInformation,
	cname			PrintableString,
	ssrc			INTEGER (1.. 134217727),	-- change from 4294967295 for erl 4.2
	sessionId		INTEGER (1..255),
	associatedSessionIds	SEQUENCE OF INTEGER (1..255),
	...
}
-- ---------------------------------
--
-- RegistrationRejectReason
--
-- @prop dataType
-- @descr Specifies the registration reject reason that are valid
--        in the H.225.0 message RegistrationReject
-- @ --
-- ---------------------------------
RegistrationRejectReason ::= CHOICE
{
	discoveryRequired		NULL,
	invalidRevision			NULL,
	invalidCallSignalAddress	NULL,
	invalidRasAddress		NULL,
	duplicateAlias			UserIdentifierInformation,
	invalidTerminalType		NULL,
	undefinedReason			NULL,
	transportNotSupported		NULL,
	...
}
-- ---------------------------------
--
-- RegistrationStatus
--
-- @prop dataType
-- @
--
-- ---------------------------------
RegistrationStatus ::= CHOICE
{
	notRegistered	NULL,
	registered	NULL,
	...
}
-- ---------------------------------
--
-- RelationToEquipment
--
-- @prop dataType
-- @descr Relation to the architecture component Equipment.
-- @
--
-- ---------------------------------
RelationToEquipment ::= SEQUENCE
{
	relationToUser	RelationToUser,
	typeOfEquipment	TypeOfEquipment,
	...
}
-- ---------------------------------
--
-- RelationToUser
--
-- @prop dataType
-- @descr Relation to the architecture component User.
-- @
--
-- ---------------------------------
RelationToUser ::= BMPString (SIZE(1..128))
-- ---------------------------------
--
-- ReleaseCompleteReason
--
-- @prop dataType
-- @descr Reason carried in a release complete.
-- @
--
-- ---------------------------------
ReleaseCompleteReason ::= CHOICE
{
	noBandwidth		NULL,
	gatekeeperResources	NULL,
	unreachableDestination	NULL,
	destinationRejection	NULL,
	invalidRevision		NULL,
	noPermission		NULL,
	unreachableGatekeeper	NULL,
	gatewayResources	NULL,
	badFormatAddress	NULL,
	adaptiveBusy		NULL,
	inConf			NULL,
	undefinedReason		NULL,
	...
}
-- ---------------------------------
--
-- ReleaseCompleteUUIE
-- @prop dataType
--
-- @
-- ---------------------------------
ReleaseCompleteUUIE ::= SEQUENCE
{
	protocolIdentifier	ProtocolIdentifier,
	releaseCompleteReason	ReleaseCompleteReason OPTIONAL,	-- reason
	...
}
-- ---------------------------------
--
-- ReleaseInformation
--
-- @prop dataType
-- @descr This data type is used to transfer the reason for the
--        rejection or release.
-- @
--
-- ---------------------------------
ReleaseInformation ::= CHOICE
{
	forcedDrop	DetailedReasonAtom,
	normalDrop	NULL,
	...
}
-- ---------------------------------
--
-- RemoveAnalysisRejectReason
--
-- @prop dataType
--
-- @descr This parameter states the reason for the rejection.
--
-- @
--
-- ---------------------------------
RemoveAnalysisRejectReason ::= CHOICE
{
	analysisTableEntryNotFound	NULL,
	undefined			NULL,
	...
}
-- ---------------------------------
--
-- RemoveEquipmentRepresentationRejectReason
--
-- @prop dataType
-- @descr The reason for the rejection.
-- @
--
-- ---------------------------------
RemoveEquipmentRepresentationRejectReason ::= CHOICE
{
	invalidInputData			NULL,
	equipmentRepresentationDoesNotExist	NULL,
	other					NULL,
	...
}
-- ---------------------------------
--
-- RemoveServiceAndStatusRejectReason
--
-- @prop dataType
-- @descr Reason for the rejection.
-- @
--
-- ---------------------------------
RemoveServiceAndStatusRejectReason ::= CHOICE
{
	identifierOfServiceNotKnown	NULL,
	undefined			NULL,
	...
}
-- ---------------------------------
--
-- RemoveServiceFromServiceProfileRejectReason
--
-- @prop dataType
--
-- @descr This parameter states the reason for the rejection.
--
-- @
--
-- ---------------------------------
RemoveServiceFromServiceProfileRejectReason ::= CHOICE
{
	keyNotValid		NULL,
	serviceDoNotExist	NULL,
	undefined		NULL,
	...
}
-- ---------------------------------
--
-- RemoveServiceIdentifierRejectReason
--
-- @prop dataType
-- @descr Reason for the rejection.
-- @
-- ---------------------------------
RemoveServiceIdentifierRejectReason ::= CHOICE
{
	keyNotKnown			NULL,
	serviceIdentifierDoNotExist	NULL,
	undefined			NULL,
	...
}
--
-- Created by :
-- Creation date :
-- Modified by :
-- Modification date :
-- Version :
--
-- @prop dataType
-- @
-- NOTE(review): empty, non-extensible SEQUENCE; confirm this is intended.
RepeatIndicator ::= SEQUENCE
{
}
-- ---------------------------------
--
-- RequestSeqNum
--
-- @prop dataType
-- @descr Sequence number correlating a request with its response.
-- @
-- ---------------------------------
RequestSeqNum ::= INTEGER (1..65535)
-- ---------------------------------
--
-- RequestedUserAndLinkedUserAreIdentical
--
-- @prop dataType
-- @descr This parameter indicates if the requested user and the user
--        linked to the requested endpoint are identical, not identical
--        or if this is undefined.
-- @
--
-- ---------------------------------
RequestedUserAndLinkedUserAreIdentical ::= CHOICE
{
	yes		NULL,
	no		NULL,
	undefined	NULL,
	...
}
-- ---------------------------------
--
-- ServiceAndStatus
-- @prop dataType
--
-- @descr Information of a service and its state.
-- @
--
-- ---------------------------------
ServiceAndStatus ::= SEQUENCE
{
	typeOfService	TypeOfService,
	status		StatusOfService,
	...
}
-- ---------------------------------
--
-- ServiceCallSetupRejectionInformation
--
-- @prop dataType
-- @descr Information related to the call setup rejection.
-- @
--
-- ---------------------------------
ServiceCallSetupRejectionInformation ::= SEQUENCE
{
	-- NOTE(review): "Initiatior" misspelling kept; the type name is referenced elsewhere.
	terminationInitiatior	TerminationInitiatior,
	terminationReason	ServiceCallSetupRejectionReason,
	...
}
-- ---------------------------------
--
-- ServiceCallSetupRejectionReason
--
-- @prop dataType
-- @descr Reason for rejection.
-- @
-- ---------------------------------
ServiceCallSetupRejectionReason ::= CHOICE
{
	calledUserBusy			NULL,
	calledUserNotAvailable		NULL,
	destinationOutOfOrder		NULL,
	requestedServiceBarred		NULL,
	requestedServiceNotAvailable	NULL,
	requestedServiceNotSubscribed	NULL,
	resourceUnavailable		NULL,
	temporaryFailure		NULL,
	unassignedUserIdentifier	NULL,
	undefined			NULL,
	...
}
-- ---------------------------------
--
-- ServiceCallTerminationInformation
-- @prop dataType
--
-- @descr States information related to the termination.
-- @
--
-- ---------------------------------
ServiceCallTerminationInformation ::= SEQUENCE
{
	terminationInitiation	TerminationInitiatior,
	terminationReason	ServiceCallTerminationReason,
	...
}
-- ---------------------------------
--
-- ServiceCallTerminationReason
--
-- @prop dataType
-- @descr Reason for termination.
-- @
--
-- ---------------------------------
ServiceCallTerminationReason ::= CHOICE
{
	noAnswerFromCalledUser	NULL,
	normalTermination	NULL,
	resourceUnavailable	NULL,
	temporaryFailure	NULL,
	undefined		NULL,
	...
}
-- ---------------------------------
--
-- ServiceData
--
-- @prop dataType
-- @descr Contains the identified services and theirs categories
-- @
-- ---------------------------------
ServiceData ::= CHOICE
{
	basicCall		BasicCallCategories,
	endpointRegistration	EndpointRegistrationCategories,
	endpointUnregistration	EndpointUnregistrationCategories,
	...
}
-- @prop dataType
-- @descr Identifier of a service instance.
-- @
--
ServiceIdentifier ::= INTEGER
-- ---------------------------------
--
-- ServiceProfile
--
-- @prop dataType
-- @descr Contains services and data related to the services.
-- @
-- ---------------------------------
ServiceProfile ::= SEQUENCE
{
	serviceDataInformation	SEQUENCE OF ServiceData OPTIONAL,
	...
}
-- ---------------------------------
--
-- SetEquipmentStatusRejectReason
--
-- @prop dataType
--
-- @descr Reason for the rejection.
-- @
--
-- ---------------------------------
SetEquipmentStatusRejectReason ::= CHOICE
{
	userNotKnown	NULL,
	undefined	NULL,
	...
}
-- ---------------------------------
--
-- SetLANDataRejectReason
--
-- @prop dataType
-- @descr The reason for the rejection.
-- @
--
-- ---------------------------------
SetLANDataRejectReason ::= CHOICE
{
	invalidInputData	NULL,
	other			NULL,
	...
}
-- ---------------------------------
--
-- SetUserAttributeData
--
-- @prop dataType
--
-- @descr This parameter contains an User attribute value.
--
-- @
--
-- ---------------------------------
SetUserAttributeData ::= CHOICE
{
	maximumTotalBandwidth		Bandwidth,
	maximumBandwidthPerService	Bandwidth,
	stateOfUser			StateOfUser,
	typeOfUser			TypeOfUser,
	...
}
-- ---------------------------------
--
-- SetupUUIE
-- @prop dataType
-- @
--
-- ---------------------------------
SetupUUIE ::= SEQUENCE
{
	protocolIdentifier				ProtocolIdentifier,
	sourceH245Address				TransportAddress OPTIONAL,		-- h245Address
	sourceUserIdentifierInformation			UserIdentifierInformation OPTIONAL,	-- sourceAddress
	sourceEndpointType				EndpointType,				-- sourceInfo
	destinationUserIdentifierInformation		UserIdentifierInformation OPTIONAL,	-- destinationAddress
	destinationCallSignallingAddress		TransportAddress OPTIONAL,		-- destCallSignalAddress
	destinationExtraUserIdentifierInformation	UserIdentifierInformation OPTIONAL,	-- destExtraCallInfo
	destinationExtraCallReference			SEQUENCE OF CallReference OPTIONAL,	-- destExtraCRV
	activeMC					BOOLEAN,
	conferenceIdentifier				ConferenceIdentifier,			-- conferenceID
	conferenceGoal					ConferenceGoal,
	callServices					CallServices OPTIONAL,
	callType					CallType,
	...
}
-- ---------------------------------
--
-- StateOfEquipment
--
-- @prop dataType
-- @descr States the state of the equipment.
-- @
--
-- ---------------------------------
StateOfEquipment ::= CHOICE
{
	blocked		NULL,	-- Equipment is blocked
	busy		NULL,	-- Equipment is busy, no more calls possible for moment
	available	NULL,	-- Equipment has reported itself as present and is ready for actions
	unregistered	NULL,	-- Equipment is not present
	...
}
-- ---------------------------------
--
-- StateOfUser
--
-- @prop dataType
-- @descr This parameter specifies the state of the user.
-- @
-- ---------------------------------
StateOfUser ::= CHOICE
{
	absent	NULL,
	present	NULL,
	...
}
-- ---------------------------------
--
-- StatusOfService
--
-- @prop dataType
-- @descr States the state of the service.
-- @
--
-- ---------------------------------
StatusOfService ::= CHOICE
{
	acknowledge	NULL,
	active		NULL,
	-- NOTE(review): "initiatied" misspelling kept; the identifier is referenced elsewhere.
	initiatied	NULL,
	...
}
-- @prop dataType
-- @descr Origin: Q931
-- @
Subaddress ::= SEQUENCE
{
	type		SubaddressType,
	indicator	BOOLEAN,
	address		SubaddressInformation,
	...
}
-- @prop dataType
-- @descr Origin: Q931
-- @
SubaddressInformation ::= OCTET STRING (SIZE(1..23))
-- @prop dataType
-- @descr Origin: Q931
-- @
SubaddressType ::= CHOICE
{
	nsap	NULL,
	user	NULL,
	...
}
-- ---------------------------------
--
-- SupportedProtocols
--
-- @prop dataType
-- @descr Gives detailed information about protocols that are
--        supported by the stated endpoint.
-- @
-- ---------------------------------
SupportedProtocols ::= CHOICE
{
	nonStandardData	NonStandardParameter,
	h310		H310Information,
	h320		H320Information,
	h321		H321Information,
	h322		H322Information,
	h323		H323Information,
	h324		H324Information,
	voice		VoiceInformation,
	t120Only	T120Information,
	...
}
-- ---------------------------------
--
-- T120Information
--
-- @prop dataType
-- @descr Gives detailed information about the standard protocol T.120
-- @
-- ---------------------------------
T120Information ::= SEQUENCE
{
	nonStandardData	NonStandardParameter OPTIONAL,
	...
}
-- ---------------------------------
--
-- TerminalInformation
-- @prop dataType
--
-- @
--
-- ---------------------------------
TerminalInformation ::= SEQUENCE
{
	nonStandardData	NonStandardParameter OPTIONAL,
	...
}
-- ---------------------------------
--
-- TerminationInitiatior
--
-- @prop dataType
-- @descr States who initiated the termination.
--        NOTE(review): "Initiatior" misspelling kept; the type name is referenced elsewhere.
-- @
--
-- ---------------------------------
TerminationInitiatior ::= CHOICE
{
	endpoint	NULL,
	serviceNode	NULL,
	...
}
-- ---------------------------------
--
-- TimeSlot
--
-- @prop dataType
-- @descr This parameter contains the identity of the time slot used
--        for the connection.
-- @
--
-- ---------------------------------
TimeSlot ::= INTEGER
-- @prop dataType
-- @descr Origin: Q931
-- @
TransferCapability ::= CHOICE
{
	speech					NULL,
	unrestrictedDigital			NULL,
	restrictedDigital			NULL,
	audio3point1kHz				NULL,
	unrestrictedWithTonesAndAnnouncements	NULL,
	video					NULL,
	...
}
-- @prop dataType
-- @descr Origin: Q931
-- @
TransferRate ::= CHOICE
{
	packedMode	NULL,
	r64kbps		NULL,
	r2x64kbps	NULL,
	r384kbps	NULL,
	r1536kbps	NULL,
	r1920kbps	NULL,
	multirate	NULL,
	...
}
-- ---------------------------------
--
-- TransportAddress
--
-- @prop dataType
-- @descr The transport address.
-- @
--
-- ---------------------------------
TransportAddress ::= CHOICE
{
	-- NOTE(review): ipV4Address is not extensible while ipV6Address is; confirm intended.
	ipV4Address	SEQUENCE
	{
		ip	OCTET STRING ( SIZE (4) ),
		port	INTEGER ( 0..65535 )
	},
	ipV6Address	SEQUENCE
	{
		ip	OCTET STRING ( SIZE (16) ),
		port	INTEGER ( 0..65535 ),
		...
	},
	...
}
-- ---------------------------------
--
-- TransportAddressInformation
--
-- @prop dataType
-- @descr sequence of TransportAdress
-- @
-- ---------------------------------
TransportAddressInformation ::= SEQUENCE OF TransportAddress
-- ---------------------------------
--
-- TransportChannelInformation
--
-- @prop dataType
-- @
--
-- ---------------------------------
TransportChannelInformation ::= SEQUENCE
{
	sendAddress	TransportAddress OPTIONAL,
	recvAddress	TransportAddress OPTIONAL,
	...
}
-- ---------------------------------
--
-- TypeOfEquipment
--
-- @prop dataType
-- @descr Type of equipment.
-- @
--
-- ---------------------------------
TypeOfEquipment ::= CHOICE
{
	cordlessTerminal	NULL,
	h323Terminal		NULL,
	h323Gateway		NULL,
	isdnTerminal		NULL,
	...
}
-- ---------------------------------
--
-- TypeOfFlowControl
--
-- @prop dataType
-- @descr This parameter specifies the type of flow control used in the LAN.
-- @
--
-- ---------------------------------
TypeOfFlowControl ::= CHOICE
{
	isa			NULL,
	priorityOutputRouting	NULL,
	other			NULL,
	undefined		NULL,
	...
}
-- ---------------------------------
--
-- TypeOfLAN
--
-- @prop dataType
-- @descr This parameter specifies the type of LAN.
-- @
--
-- ---------------------------------
TypeOfLAN ::= CHOICE
{
	ethernet	NULL,
	tokenRing	NULL,
	other		NULL,
	undefined	NULL,
	...
}
-- ---------------------------------
--
-- TypeOfRegistration
--
-- @prop dataType
-- @descr Type of registration.
-- @
--
-- ---------------------------------
TypeOfRegistration ::= CHOICE
{
	changeOfUser	NULL,
	noChangeOfUser	NULL,
	...
}
-- ---------------------------------
--
-- TypeOfService
--
-- @prop dataType
-- @descr Type of service.
-- @
--
-- ---------------------------------
TypeOfService ::= CHOICE
{
	basicCall		NULL,
	endpointRegistration	NULL,
	endpointUnregistration	NULL,
	...
}
-- ---------------------------------
--
-- TypeOfUser
--
-- @prop dataType
-- @descr Type of user.
-- @
--
-- ---------------------------------
TypeOfUser ::= CHOICE
{
	human	NULL,
	network	NULL,
	...
}
-- ---------------------------------
--
-- UnknownMessageResponse
--
-- @prop dataType
-- @descr Response returned for an unrecognized message.
-- @
--
-- ---------------------------------
UnknownMessageResponse ::= SEQUENCE
{
	requestSeqNum	RequestSeqNum,
	...
}
-- ---------------------------------
--
-- UnregistrationRejectReason
--
-- @prop dataType
-- @descr Reason for rejecting an unregistration request.
-- @
--
-- ---------------------------------
UnregistrationRejectReason ::= CHOICE
{
	notCurrentlyRegistered	NULL,
	callInProgress		NULL,
	undefinedReason		NULL,
	...
}
-- ---------------------------------
--
-- UserAllocateResourceRejectReason
--
-- @prop dataType
-- @descr Reason for the rejection.
-- @
--
-- ---------------------------------
UserAllocateResourceRejectReason ::= CHOICE
{
	keyNotValid		NULL,
	resourceNotAvailable	NULL,
	serviceIdentifierExist	NULL,
	undefined		NULL,
	...
}
-- ---------------------------------
--
-- UserAttributeData
--
-- @prop dataType
--
-- @descr This parameter contains an User attribute value.
--
-- @
--
-- ---------------------------------
UserAttributeData ::= CHOICE
{
	maximumTotalBandwidth		Bandwidth,
	maximumBandwidthPerService	Bandwidth,
	relationToEquipment		SEQUENCE OF RelationToEquipment,
	stateOfUser			StateOfUser,
	typeOfUser			TypeOfUser,
	userIdentifierInformation	SEQUENCE OF UserIdentifier,
	...
}
-- ---------------------------------
--
-- UserAttributeIdentifier
--
-- @prop dataType
--
-- @descr This parameter contains User attribute identifiers.
--        NOTE(review): alternatives mirror UserAttributeData.
--
-- @
--
-- ---------------------------------
UserAttributeIdentifier ::= CHOICE
{
	maximumTotalBandwidth		NULL,
	maximumBandwidthPerService	NULL,
	relationToEquipment		NULL,
	stateOfUser			NULL,
	typeOfUser			NULL,
	userIdentifierInformation	NULL,
	...
}
-- ---------------------------------
--
-- UserCreateRejectReason
--
-- @prop dataType
--
-- @descr This parameter states the reason for the rejection.
--
-- @
--
-- ---------------------------------
UserCreateRejectReason ::= CHOICE
{
	userIdentifierAlreadyExist	NULL,
	undefined			NULL,
	...
}
-- ---------------------------------
--
-- UserDeallocateResourceRejectReason
-- @prop dataType
--
-- @descr Reason for the rejection.
-- @
--
-- ---------------------------------
UserDeallocateResourceRejectReason ::= CHOICE
{
	resourceNotAllocated		NULL,
	serviceIdentifierNotValid	NULL,
	userNotExist			NULL,
	undefined			NULL,
	...
}
-- ---------------------------------
--
-- UserIdentifier
--
-- @prop dataType
-- @descr The identifier of the User.
-- @
--
-- ---------------------------------
UserIdentifier ::= CHOICE
{
e164 E164Identifier,
h323 BMPString (SIZE (1..256)),
-- h323 is Basic ISO/IEC 10646-1 (Unicode)
...
}
-- ---------------------------------
--
-- UserIdentifierInformation
--
-- @prop dataType
-- @descr sequence of UserIdentifier
-- @
--
-- ---------------------------------
UserIdentifierInformation ::= SEQUENCE OF UserIdentifier
--
-- Created by :
-- Creation date :
-- Modified by :
-- Modification date :
-- Version :
--
-- @prop dataType
-- @
UserInformation ::= OCTET STRING (SIZE(1..131))
-- ---------------------------------
--
-- UserInformationUUIE
--
-- @prop dataType
-- @ --
-- ---------------------------------
UserInformationUUIE ::= SEQUENCE
{
protocolIdentifier ProtocolIdentifier,
...
}
-- ---------------------------------
--
-- UserKey
--
-- @prop dataType
-- @descr Unique key for a certain user.
-- @
-- ---------------------------------
UserKey ::= CHOICE
{
relationToUser RelationToUser,
userIdentifierInformation SEQUENCE OF UserIdentifier,
...
}
-- ---------------------------------
--
-- UserOrEquipmentRelatedInformation
--
-- @prop dataType
-- @descr This parameter specifies the type of information.
-- @
-- ---------------------------------
UserOrEquipmentRelatedInformation ::= CHOICE
{
userRelatedInformation SEQUENCE OF UserRelatedInformation,
equipmentRelatedInformation SEQUENCE OF EquipmentRelatedInformation,
...
}
-- ---------------------------------
--
-- UserOrEquipmentRelatedInformationIdentifier
--
-- @prop dataType
-- @descr This parameter specifies the type of information identifiers.
-- @
-- ---------------------------------
UserOrEquipmentRelatedInformationIdentifier ::= CHOICE
{
userRelatedInformationIdentifiers SEQUENCE OF UserRelatedInformationIdentifier,
equipmentRelatedInformationIdentifiers SEQUENCE OF EquipmentRelatedInformationIdentifier,
...
}
-- ---------------------------------
--
-- UserRelatedInformation
--
-- @prop dataType
-- @descr This parameter specifies different types of data
-- that are related to the user.
-- @
--
-- ---------------------------------
UserRelatedInformation ::= CHOICE
{
numberOfEquipments INTEGER,
stateOfUser StateOfUser,
typeOfUser TypeOfUser,
...
}
-- ---------------------------------
--
-- UserRelatedInformationIdentifier
--
--
-- @prop dataType
--
-- @descr This parameter specifies different types of data
-- that are specific to a certain user.
--
-- @
-- ---------------------------------
UserRelatedInformationIdentifier ::= CHOICE
{
numberOfEquipments NULL,
stateOfUser NULL,
typeOfUser NULL,
...
}
-- ---------------------------------
--
-- UserRemoveRejectReason
--
-- @prop dataType
--
-- @descr This parameter states the reason for the rejection.
--
-- @
--
-- ---------------------------------
UserRemoveRejectReason ::= CHOICE
{
keyNotValid NULL,
serviceInProgress NULL,
undefined NULL,
...
}
-- ---------------------------------
--
-- UserSetRejectReason
--
-- @prop dataType
--
-- @descr This parameter states the reason for the rejection.
--
-- @
--
-- ---------------------------------
UserSetRejectReason ::= CHOICE
{
keyNotValid NULL,
undefined NULL,
...
}
-- ---------------------------------
--
-- UserSpecificInformation
--
-- @descr This parameter specifies different types of data
-- that are specific to the user.
-- @
--
-- ---------------------------------
UserSpecificInformation ::= CHOICE
{
userRelatedInformation SEQUENCE OF UserRelatedInformation,
equipmentRelatedInformation SEQUENCE OF EquipmentRelatedInformation,
...
}
-- ---------------------------------
--
-- VendorIdentifier
--
-- @prop dataType
-- @
--
-- ---------------------------------
VendorIdentifier ::= SEQUENCE
{
vendor H221NonStandard,
productId OCTET STRING (SIZE(1..256)) OPTIONAL,
versionId OCTET STRING (SIZE(1..256)) OPTIONAL,
...
}
-- ---------------------------------
--
-- VoiceInformation
--
-- @prop dataType
-- @descr
-- @
--
-- ---------------------------------
VoiceInformation ::= SEQUENCE
{
nonStandardData NonStandardParameter OPTIONAL,
...
}
END
| apache-2.0 |
cartwheelweb/packaginator | apps/feeds/tests/data.py | 8 | 6264 | from grid.models import Grid
from django.contrib.auth.models import Group, User, Permission
from package.models import Category, PackageExample, Package
from grid.models import Element, Feature, GridPackage
from core.tests import datautil
def load():
    """Populate fixture data for the feeds tests.

    Idempotently creates (via ``get_or_create``) one category, four
    packages, two grids, the grid/package links, two features, one
    element, a moderator group with delete permissions, four users of
    increasing privilege, and one package example.
    """
    # --- category and packages -------------------------------------------
    category, created = Category.objects.get_or_create(
        pk=1,
        slug=u'apps',
        title=u'App',
        description=u'Small components used to build projects.',
    )
    package1, created = Package.objects.get_or_create(
        pk=1,
        category=category,
        repo_watchers=0,
        title=u'Testability',
        pypi_url='',
        participants=u'malcomt,jacobian',
        pypi_downloads=0,
        repo_url=u'https://github.com/pydanny/django-la-facebook',
        repo_commits=0,
        repo_forks=0,
        slug=u'testability',
        repo_description=u'Increase your testing ability with this steroid free supplement.',
    )
    package2, created = Package.objects.get_or_create(
        pk=2,
        category=category,
        repo_watchers=0,
        title=u'Supertester',
        pypi_url='',
        participants=u'thetestman',
        pypi_downloads=0,
        repo_url=u'https://github.com/pydanny/django-uni-form',
        repo_commits=0,
        repo_forks=0,
        slug=u'supertester',
        repo_description=u'Test everything under the sun with one command!',
    )
    package3, created = Package.objects.get_or_create(
        pk=3,
        category=category,
        repo_watchers=0,
        title=u'Serious Testing',
        pypi_url='',
        participants=u'pydanny',
        pypi_downloads=0,
        repo_url=u'https://github.com/cartwheelweb/packaginator',
        repo_commits=0,
        repo_forks=0,
        slug=u'serious-testing',
        repo_description=u'Make testing as painless as waxing your legs.',
    )
    # package4 deliberately has no GridPackage link (see its description).
    package4, created = Package.objects.get_or_create(
        pk=4,
        category=category,
        repo_watchers=0,
        title=u'Another Test',
        pypi_url='',
        participants=u'pydanny',
        pypi_downloads=0,
        repo_url=u'https://github.com/djangopackages/djangopackages',
        repo_commits=0,
        repo_forks=0,
        slug=u'another-test',
        repo_description=u'Yet another test package, with no grid affiliation.',
    )
    # --- grids and grid/package links -------------------------------------
    grid1, created = Grid.objects.get_or_create(
        pk=1,
        description=u'A grid for testing.',
        title=u'Testing',
        is_locked=False,
        slug=u'testing',
    )
    grid2, created = Grid.objects.get_or_create(
        pk=2,
        description=u'Another grid for testing.',
        title=u'Another Testing',
        is_locked=False,
        slug=u'another-testing',
    )
    gridpackage1, created = GridPackage.objects.get_or_create(
        pk=1,
        package=package1,
        grid=grid1,
    )
    # NOTE(review): gridpackage2 links the same (package1, grid1) pair as
    # gridpackage1 — presumably intentional duplicate test data; confirm.
    gridpackage2, created = GridPackage.objects.get_or_create(
        pk=2,
        package=package1,
        grid=grid1,
    )
    gridpackage3, created = GridPackage.objects.get_or_create(
        pk=3,
        package=package3,
        grid=grid1,
    )
    gridpackage4, created = GridPackage.objects.get_or_create(
        pk=4,
        package=package3,
        grid=grid2,
    )
    gridpackage5, created = GridPackage.objects.get_or_create(
        pk=5,
        package=package2,
        grid=grid1,
    )
    # --- features and one element ------------------------------------------
    feature1, created = Feature.objects.get_or_create(
        pk=1,
        title=u'Has tests?',
        grid=grid1,
        description=u'Does this package come with tests?',
    )
    feature2, created = Feature.objects.get_or_create(
        pk=2,
        title=u'Coolness?',
        grid=grid1,
        description=u'Is this package cool?',
    )
    element, created = Element.objects.get_or_create(
        pk=1,
        text=u'Yes',
        feature=feature1,
        grid_package=gridpackage1,
    )
    # --- groups and users ----------------------------------------------------
    group1, created = Group.objects.get_or_create(
        pk=1,
        name=u'Moderators',
        #permissions=[[u'delete_gridpackage', u'grid', u'gridpackage'], [u'delete_feature', u'grid', u'feature']],
    )
    # Permissions cannot be passed to get_or_create; assign them explicitly.
    group1.permissions.clear()
    group1.permissions = [
        Permission.objects.get(codename='delete_gridpackage'),
        Permission.objects.get(codename='delete_feature')
    ]
    # user1: plain user, no group.
    user1, created = User.objects.get_or_create(
        pk=1,
        username=u'user',
        first_name='',
        last_name='',
        is_active=True,
        is_superuser=False,
        is_staff=False,
        last_login=u'2010-01-01 12:00:00',
        password=u'sha1$644c9$347f3dd85fb609a5745ebe33d0791929bf08f22e',
        email='',
        date_joined=u'2010-01-01 12:00:00',
    )
    # user2: member of the Moderators group (assigned below).
    user2, created = User.objects.get_or_create(
        pk=2,
        username=u'cleaner',
        first_name='',
        last_name='',
        is_active=True,
        is_superuser=False,
        is_staff=False,
        last_login=u'2010-01-01 12:00:00',
        #groups=[group1],
        password=u'sha1$e6fe2$78b744e21cddb39117997709218f4c6db4e91894',
        email='',
        date_joined=u'2010-01-01 12:00:00',
    )
    user2.groups = [group1]
    # user3: staff but not superuser.
    user3, created = User.objects.get_or_create(
        pk=3,
        username=u'staff',
        first_name='',
        last_name='',
        is_active=True,
        is_superuser=False,
        is_staff=True,
        last_login=u'2010-01-01 12:00:00',
        password=u'sha1$8894d$c4814980edd6778f0ab1632c4270673c0fd40efe',
        email='',
        date_joined=u'2010-01-01 12:00:00',
    )
    # user4: superuser.
    user4, created = User.objects.get_or_create(
        pk=4,
        username=u'admin',
        first_name='',
        last_name='',
        is_active=True,
        is_superuser=True,
        is_staff=True,
        last_login=u'2010-01-01 12:00:00',
        password=u'sha1$52c7f$59b4f64ffca593e6abd23f90fd1f95cf71c367a4',
        email='',
        date_joined=u'2010-01-01 12:00:00',
    )
    packageexample, created = PackageExample.objects.get_or_create(
        pk=1,
        package=package1,
        url=u'http://www.example.com/',
        active=True,
        title=u'www.example.com',
    )
    # Presumably re-syncs auto-increment sequences after the explicit pks
    # used above — confirm against core.tests.datautil.
    datautil.reset_sequences(Grid, Group, User, Permission, Category, PackageExample,
        Package, Element, Feature, GridPackage)
| mit |
laurent-george/bokeh | examples/plotting/server/selection_histogram.py | 42 | 4001 | # The plot server must be running
# Go to http://localhost:5006/bokeh to view this plot
import numpy as np
from bokeh.models import BoxSelectTool, LassoSelectTool, Paragraph
from bokeh.plotting import (
curdoc, cursession, figure, output_server, show, hplot, vplot
)
# create three normal population samples with different parameters
N1 = 2000
N2 = 5000
N3 = 1000
x1 = np.random.normal(loc=5.0, size=N1) * 100
y1 = np.random.normal(loc=10.0, size=N1) * 10
x2 = np.random.normal(loc=5.0, size=N2) * 50
y2 = np.random.normal(loc=5.0, size=N2) * 10
x3 = np.random.normal(loc=55.0, size=N3) * 10
y3 = np.random.normal(loc=4.0, size=N3) * 10
x = np.concatenate((x1, x2, x3))
y = np.concatenate((y1, y2, y3))
all_inds = np.arange(len(x1) + len(x2) + len(x3))
output_server("selection_histogram")
TOOLS="pan,wheel_zoom,box_select,lasso_select"
# create the scatter plot
p = figure(tools=TOOLS, plot_width=600, plot_height=600, title=None, min_border=10, min_border_left=50)
p.scatter(x, y, size=3, color="#3A5785", alpha=0.6, name="scatter")
renderer = p.select(dict(name="scatter"))
scatter_ds = renderer[0].data_source
box_select_tool = p.select(dict(type=BoxSelectTool))
box_select_tool.select_every_mousemove = False
lasso_select_tool = p.select(dict(type=LassoSelectTool))
lasso_select_tool.select_every_mousemove = False
# create the horizontal histogram
hhist, hedges = np.histogram(x, bins=20)
hzeros = np.zeros(len(hedges)-1)
hmax = max(hhist)*1.1
ph = figure(toolbar_location=None, plot_width=p.plot_width, plot_height=200, x_range=p.x_range,
y_range=(-hmax, hmax), title=None, min_border=10, min_border_left=50)
ph.quad(bottom=0, left=hedges[:-1], right=hedges[1:], top=hhist, color="white", line_color="#3A5785")
ph.quad(bottom=0, left=hedges[:-1], right=hedges[1:], top=hzeros, color="#3A5785", alpha=0.5, line_color=None, name="hhist")
ph.quad(bottom=0, left=hedges[:-1], right=hedges[1:], top=hzeros, color="#3A5785", alpha=0.1, line_color=None, name="hhist2")
ph.xgrid.grid_line_color = None
ph_source = ph.select(dict(name="hhist"))[0].data_source
ph_source2 = ph.select(dict(name="hhist2"))[0].data_source
# create the vertical histogram
vhist, vedges = np.histogram(y, bins=20)
vzeros = np.zeros(len(vedges)-1)
vmax = max(vhist)*1.1
# need to adjust for toolbar height, unfortunately
th = 42
pv = figure(toolbar_location=None, plot_width=200, plot_height=p.plot_height+th-10, x_range=(-vmax, vmax),
y_range=p.y_range, title=None, min_border=10, min_border_top=th)
pv.quad(left=0, bottom=vedges[:-1], top=vedges[1:], right=vhist, color="white", line_color="#3A5785")
pv.quad(left=0, bottom=vedges[:-1], top=vedges[1:], right=vzeros, color="#3A5785", alpha=0.5, line_color=None, name="vhist")
pv.quad(left=0, bottom=vedges[:-1], top=vedges[1:], right=vzeros, color="#3A5785", alpha=0.1, line_color=None, name="vhist2")
pv.ygrid.grid_line_color = None
pv_source = pv.select(dict(name="vhist"))[0].data_source
pv_source2 = pv.select(dict(name="vhist2"))[0].data_source
# set up callbacks
def on_selection_change(obj, attr, old, new):
    """Update the marginal histograms when the scatter selection changes.

    ``new['1d']['indices']`` holds the indices of the selected points.
    With no selection (or everything selected) the overlay histograms are
    zeroed; otherwise the selected points fill the solid overlays and the
    unselected points fill the faint complementary overlays.
    """
    inds = np.array(new['1d']['indices'])
    if len(inds) == 0 or len(inds) == len(x):
        hhist = hzeros
        vhist = vzeros
        hhist2 = hzeros
        vhist2 = vzeros
    else:
        hhist, _ = np.histogram(x[inds], bins=hedges)
        vhist, _ = np.histogram(y[inds], bins=vedges)
        # Use builtin ``bool``: the ``np.bool`` alias was deprecated in
        # NumPy 1.20 and removed in NumPy 1.24.
        negative_inds = np.ones_like(x, dtype=bool)
        negative_inds[inds] = False
        hhist2, _ = np.histogram(x[negative_inds], bins=hedges)
        vhist2, _ = np.histogram(y[negative_inds], bins=vedges)
    ph_source.data["top"] = hhist
    pv_source.data["right"] = vhist
    # Complementary histograms are drawn downward/leftward, hence negated.
    ph_source2.data["top"] = -hhist2
    pv_source2.data["right"] = -vhist2
    cursession().store_objects(ph_source, pv_source, ph_source2, pv_source2)
scatter_ds.on_change('selected', on_selection_change)
layout = vplot(hplot(p, pv), hplot(ph, Paragraph()))
show(layout)
cursession().poll_document(curdoc(), 0.05)
| bsd-3-clause |
daspots/dasapp | lib/werkzeug/local.py | 107 | 14553 | # -*- coding: utf-8 -*-
"""
werkzeug.local
~~~~~~~~~~~~~~
This module implements context-local objects.
:copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
import copy
from functools import update_wrapper
from werkzeug.wsgi import ClosingIterator
from werkzeug._compat import PY2, implements_bool
# since each thread has its own greenlet we can just use those as identifiers
# for the context. If greenlets are not available we fall back to the
# current thread ident depending on where it is.
try:
from greenlet import getcurrent as get_ident
except ImportError:
try:
from thread import get_ident
except ImportError:
from _thread import get_ident
def release_local(local):
    """Forget everything stored in *local* for the current context.

    This lets locals be used without a :class:`LocalManager`::

        >>> loc = Local()
        >>> loc.foo = 42
        >>> release_local(loc)
        >>> hasattr(loc, 'foo')
        False

    Both :class:`Local` and :class:`LocalStack` objects can be released
    this way.  Data that is only reachable through a proxy cannot be
    released; keep a reference to the underlying local object for that.

    .. versionadded:: 0.6.1
    """
    local.__release_local__()
class Local(object):
    """Namespace whose attributes are isolated per context.

    All state lives in ``__storage__``, a dict keyed by the value of
    ``__ident_func__()`` (thread or greenlet identifier).  The two slots
    are written with ``object.__setattr__`` because the ``__setattr__``
    defined below stores into the context dict instead.
    """
    __slots__ = ('__storage__', '__ident_func__')
    def __init__(self):
        object.__setattr__(self, '__storage__', {})
        object.__setattr__(self, '__ident_func__', get_ident)
    def __iter__(self):
        # (ident, attribute-dict) pairs for every stored context.
        return iter(self.__storage__.items())
    def __call__(self, proxy):
        """Return a :class:`LocalProxy` for attribute *proxy* on this local."""
        return LocalProxy(self, proxy)
    def __release_local__(self):
        # Drop everything bound to the current context, if anything.
        self.__storage__.pop(self.__ident_func__(), None)
    def __getattr__(self, name):
        context = self.__storage__.get(self.__ident_func__())
        if context is None or name not in context:
            raise AttributeError(name)
        return context[name]
    def __setattr__(self, name, value):
        self.__storage__.setdefault(self.__ident_func__(), {})[name] = value
    def __delattr__(self, name):
        context = self.__storage__.get(self.__ident_func__())
        if context is None or name not in context:
            raise AttributeError(name)
        del context[name]
class LocalStack(object):
    """A :class:`Local` that keeps a stack of objects per context.

    Example::

        >>> ls = LocalStack()
        >>> ls.push(42)
        >>> ls.top
        42
        >>> ls.push(23)
        >>> ls.top
        23
        >>> ls.pop()
        23
        >>> ls.top
        42

    Pop every item when you are done with it; once the stack becomes
    empty the underlying local is released for the current context.  A
    :class:`LocalManager` or :func:`release_local` can force-release it.
    Calling the stack object itself returns a proxy that resolves to the
    topmost item.

    .. versionadded:: 0.6.1
    """
    def __init__(self):
        self._local = Local()
    def __release_local__(self):
        self._local.__release_local__()
    def _get__ident_func__(self):
        return self._local.__ident_func__
    def _set__ident_func__(self, value):
        # Bypass Local.__setattr__, which would store into the context dict.
        object.__setattr__(self._local, '__ident_func__', value)
    __ident_func__ = property(_get__ident_func__, _set__ident_func__)
    del _get__ident_func__, _set__ident_func__
    def __call__(self):
        def _lookup():
            top = self.top
            if top is None:
                raise RuntimeError('object unbound')
            return top
        return LocalProxy(_lookup)
    def push(self, obj):
        """Push *obj* onto the stack and return the stack list."""
        stack = getattr(self._local, 'stack', None)
        if stack is None:
            stack = self._local.stack = []
        stack.append(obj)
        return stack
    def pop(self):
        """Remove and return the topmost item, or ``None`` if empty."""
        stack = getattr(self._local, 'stack', None)
        if stack is None:
            return None
        if len(stack) == 1:
            # Last item: release the whole context rather than leaving an
            # empty list bound to it.
            release_local(self._local)
            return stack[-1]
        return stack.pop()
    @property
    def top(self):
        """The topmost item on the stack, or ``None`` when empty."""
        stack = getattr(self._local, 'stack', None)
        if stack:
            return stack[-1]
        return None
class LocalManager(object):
    """Local objects cannot manage themselves. For that you need a local
    manager. You can pass a local manager multiple locals or add them later
    by appending them to `manager.locals`. Every time the manager cleans up,
    it will clean up all the data left in the locals for this context.
    The `ident_func` parameter can be added to override the default ident
    function for the wrapped locals.
    .. versionchanged:: 0.6.1
        Instead of a manager the :func:`release_local` function can be used
        as well.
    .. versionchanged:: 0.7
        `ident_func` was added.
    """
    def __init__(self, locals=None, ident_func=None):
        # Accept nothing, a single Local, or any iterable of locals.
        if locals is None:
            self.locals = []
        elif isinstance(locals, Local):
            self.locals = [locals]
        else:
            self.locals = list(locals)
        if ident_func is not None:
            self.ident_func = ident_func
            # Propagate the custom ident function to every wrapped local;
            # object.__setattr__ bypasses Local's data-storing __setattr__.
            for local in self.locals:
                object.__setattr__(local, '__ident_func__', ident_func)
        else:
            self.ident_func = get_ident
    def get_ident(self):
        """Return the context identifier the local objects use internally for
        this context. You cannot override this method to change the behavior
        but use it to link other context local objects (such as SQLAlchemy's
        scoped sessions) to the Werkzeug locals.
        .. versionchanged:: 0.7
            You can pass a different ident function to the local manager that
            will then be propagated to all the locals passed to the
            constructor.
        """
        return self.ident_func()
    def cleanup(self):
        """Manually clean up the data in the locals for this context. Call
        this at the end of the request or use `make_middleware()`.
        """
        for local in self.locals:
            release_local(local)
    def make_middleware(self, app):
        """Wrap a WSGI application so that cleaning up happens after
        request end.
        """
        def application(environ, start_response):
            # ClosingIterator invokes self.cleanup once the response
            # iterable is exhausted or closed.
            return ClosingIterator(app(environ, start_response), self.cleanup)
        return application
    def middleware(self, func):
        """Like `make_middleware` but for decorating functions.
        Example usage::
            @manager.middleware
            def application(environ, start_response):
                ...
        The difference to `make_middleware` is that the function passed
        will have all the arguments copied from the inner application
        (name, docstring, module).
        """
        return update_wrapper(self.make_middleware(func), func)
    def __repr__(self):
        return '<%s storages: %d>' % (
            self.__class__.__name__,
            len(self.locals)
        )
@implements_bool
class LocalProxy(object):
    """Acts as a proxy for a werkzeug local. Forwards all operations to
    a proxied object. The only operations not supported for forwarding
    are right handed operands and any kind of assignment.
    Example usage::
        from werkzeug.local import Local
        l = Local()
        # these are proxies
        request = l('request')
        user = l('user')
        from werkzeug.local import LocalStack
        _response_local = LocalStack()
        # this is a proxy
        response = _response_local()
    Whenever something is bound to l.user / l.request the proxy objects
    will forward all operations. If no object is bound a :exc:`RuntimeError`
    will be raised.
    To create proxies to :class:`Local` or :class:`LocalStack` objects,
    call the object as shown above. If you want to have a proxy to an
    object looked up by a function, you can (as of Werkzeug 0.6.1) pass
    a function to the :class:`LocalProxy` constructor::
        session = LocalProxy(lambda: get_current_request().session)
    .. versionchanged:: 0.6.1
        The class can be instantiated with a callable as well now.
    """
    __slots__ = ('__local', '__dict__', '__name__', '__wrapped__')
    def __init__(self, local, name=None):
        # object.__setattr__ is required because the __setattr__ defined
        # below forwards to the proxied object; '_LocalProxy__local' is the
        # name-mangled spelling of the private '__local' slot.
        object.__setattr__(self, '_LocalProxy__local', local)
        object.__setattr__(self, '__name__', name)
        if callable(local) and not hasattr(local, '__release_local__'):
            # "local" is a callable that is not an instance of Local or
            # LocalManager: mark it as a wrapped function.
            object.__setattr__(self, '__wrapped__', local)
    def _get_current_object(self):
        """Return the current object. This is useful if you want the real
        object behind the proxy at a time for performance reasons or because
        you want to pass the object into a different context.
        """
        if not hasattr(self.__local, '__release_local__'):
            # Plain callable: calling it yields the current object.
            return self.__local()
        try:
            return getattr(self.__local, self.__name__)
        except AttributeError:
            raise RuntimeError('no object bound to %s' % self.__name__)
    @property
    def __dict__(self):
        try:
            return self._get_current_object().__dict__
        except RuntimeError:
            # An unbound proxy pretends to have no __dict__ at all.
            raise AttributeError('__dict__')
    def __repr__(self):
        try:
            obj = self._get_current_object()
        except RuntimeError:
            return '<%s unbound>' % self.__class__.__name__
        return repr(obj)
    def __bool__(self):
        try:
            return bool(self._get_current_object())
        except RuntimeError:
            # An unbound proxy is simply falsy rather than an error.
            return False
    def __unicode__(self):
        try:
            return unicode(self._get_current_object())  # noqa
        except RuntimeError:
            return repr(self)
    def __dir__(self):
        try:
            return dir(self._get_current_object())
        except RuntimeError:
            return []
    def __getattr__(self, name):
        if name == '__members__':
            # Backwards compatibility with the legacy __members__ protocol.
            return dir(self._get_current_object())
        return getattr(self._get_current_object(), name)
    def __setitem__(self, key, value):
        self._get_current_object()[key] = value
    def __delitem__(self, key):
        del self._get_current_object()[key]
    if PY2:
        # Slice dunders only exist on Python 2.
        __getslice__ = lambda x, i, j: x._get_current_object()[i:j]
        def __setslice__(self, i, j, seq):
            self._get_current_object()[i:j] = seq
        def __delslice__(self, i, j):
            del self._get_current_object()[i:j]
    # Everything below forwards operators/special methods to the current
    # object; lambdas are used so the lookup happens freshly on every call.
    __setattr__ = lambda x, n, v: setattr(x._get_current_object(), n, v)
    __delattr__ = lambda x, n: delattr(x._get_current_object(), n)
    __str__ = lambda x: str(x._get_current_object())
    __lt__ = lambda x, o: x._get_current_object() < o
    __le__ = lambda x, o: x._get_current_object() <= o
    __eq__ = lambda x, o: x._get_current_object() == o
    __ne__ = lambda x, o: x._get_current_object() != o
    __gt__ = lambda x, o: x._get_current_object() > o
    __ge__ = lambda x, o: x._get_current_object() >= o
    __cmp__ = lambda x, o: cmp(x._get_current_object(), o) # noqa
    __hash__ = lambda x: hash(x._get_current_object())
    __call__ = lambda x, *a, **kw: x._get_current_object()(*a, **kw)
    __len__ = lambda x: len(x._get_current_object())
    __getitem__ = lambda x, i: x._get_current_object()[i]
    __iter__ = lambda x: iter(x._get_current_object())
    __contains__ = lambda x, i: i in x._get_current_object()
    __add__ = lambda x, o: x._get_current_object() + o
    __sub__ = lambda x, o: x._get_current_object() - o
    __mul__ = lambda x, o: x._get_current_object() * o
    __floordiv__ = lambda x, o: x._get_current_object() // o
    __mod__ = lambda x, o: x._get_current_object() % o
    __divmod__ = lambda x, o: x._get_current_object().__divmod__(o)
    __pow__ = lambda x, o: x._get_current_object() ** o
    __lshift__ = lambda x, o: x._get_current_object() << o
    __rshift__ = lambda x, o: x._get_current_object() >> o
    __and__ = lambda x, o: x._get_current_object() & o
    __xor__ = lambda x, o: x._get_current_object() ^ o
    __or__ = lambda x, o: x._get_current_object() | o
    __div__ = lambda x, o: x._get_current_object().__div__(o)
    __truediv__ = lambda x, o: x._get_current_object().__truediv__(o)
    __neg__ = lambda x: -(x._get_current_object())
    __pos__ = lambda x: +(x._get_current_object())
    __abs__ = lambda x: abs(x._get_current_object())
    __invert__ = lambda x: ~(x._get_current_object())
    __complex__ = lambda x: complex(x._get_current_object())
    __int__ = lambda x: int(x._get_current_object())
    __long__ = lambda x: long(x._get_current_object()) # noqa
    __float__ = lambda x: float(x._get_current_object())
    __oct__ = lambda x: oct(x._get_current_object())
    __hex__ = lambda x: hex(x._get_current_object())
    __index__ = lambda x: x._get_current_object().__index__()
    __coerce__ = lambda x, o: x._get_current_object().__coerce__(x, o)
    __enter__ = lambda x: x._get_current_object().__enter__()
    __exit__ = lambda x, *a, **kw: x._get_current_object().__exit__(*a, **kw)
    __radd__ = lambda x, o: o + x._get_current_object()
    __rsub__ = lambda x, o: o - x._get_current_object()
    __rmul__ = lambda x, o: o * x._get_current_object()
    __rdiv__ = lambda x, o: o / x._get_current_object()
    if PY2:
        __rtruediv__ = lambda x, o: x._get_current_object().__rtruediv__(o)
    else:
        # On Python 3 / is true division already.
        __rtruediv__ = __rdiv__
    __rfloordiv__ = lambda x, o: o // x._get_current_object()
    __rmod__ = lambda x, o: o % x._get_current_object()
    __rdivmod__ = lambda x, o: x._get_current_object().__rdivmod__(o)
    __copy__ = lambda x: copy.copy(x._get_current_object())
    __deepcopy__ = lambda x, memo: copy.deepcopy(x._get_current_object(), memo)
| mit |
srickardti/openthread | tools/harness-automation/cases/ed_6_4_1.py | 18 | 1869 | #!/usr/bin/env python
#
# Copyright (c) 2016, The OpenThread Authors.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
import unittest
from autothreadharness.harness_case import HarnessCase
class ED_6_4_1(HarnessCase):
    """Thread harness certification case 6.4.1, run with the DUT as End Device."""
    # Device-under-test role for this case.
    role = HarnessCase.ROLE_ED
    # Harness case identifier (space-separated form expected by the harness).
    case = '6 4 1'
    # Number of golden (reference) devices the topology requires.
    golden_devices_required = 1
    def on_dialog(self, dialog, title):
        # No harness dialogs need automated handling for this case.
        pass
if __name__ == '__main__':
unittest.main()
| bsd-3-clause |
mayankcu/Django-social | venv/Lib/site-packages/django/contrib/admindocs/views.py | 77 | 15064 | import inspect
import os
import re
from django import template
from django.template import RequestContext
from django.conf import settings
from django.contrib.admin.views.decorators import staff_member_required
from django.db import models
from django.shortcuts import render_to_response
from django.core.exceptions import ImproperlyConfigured, ViewDoesNotExist
from django.http import Http404
from django.core import urlresolvers
from django.contrib.admindocs import utils
from django.contrib.sites.models import Site
from django.utils.importlib import import_module
from django.utils.translation import ugettext as _
from django.utils.safestring import mark_safe
# Exclude methods starting with these strings from documentation
MODEL_METHODS_EXCLUDE = ('_', 'add_', 'delete', 'save', 'set_')
class GenericSite(object):
    """Fallback stand-in for ``Site`` when the sites framework is not installed."""
    domain = 'example.com'
    name = 'my site'
@staff_member_required
def doc_index(request):
    # Landing page for admindocs; shows a help page when the optional
    # docutils dependency is missing.
    if not utils.docutils_is_available:
        return missing_docutils_page(request)
    return render_to_response('admin_doc/index.html', {
        'root_path': urlresolvers.reverse('admin:index'),
    }, context_instance=RequestContext(request))
@staff_member_required
def bookmarklets(request):
    """Render the admindocs bookmarklets page.

    Builds an absolute URL back to the admin index (scheme taken from the
    current request) for the bookmarklet JavaScript to use.
    """
    admin_root = urlresolvers.reverse('admin:index')
    # Conditional expression replaces the fragile ``cond and a or b`` idiom.
    scheme = 'https' if request.is_secure() else 'http'
    return render_to_response('admin_doc/bookmarklets.html', {
        'root_path': admin_root,
        'admin_url': mark_safe("%s://%s%s" % (scheme, request.get_host(), admin_root)),
    }, context_instance=RequestContext(request))
@staff_member_required
def template_tag_index(request):
    """List every registered template tag together with its parsed docstring."""
    if not utils.docutils_is_available:
        return missing_docutils_page(request)
    load_all_installed_template_libraries()
    tags = []
    libraries = [(None, lib) for lib in template.builtins]
    libraries.extend(template.libraries.items())
    for module_name, library in libraries:
        # Builtin libraries are not loaded via {% load %}; they carry no name.
        tag_library = None if library in template.builtins else module_name.split('.')[-1]
        for tag_name, tag_func in library.tags.items():
            title, body, metadata = utils.parse_docstring(tag_func.__doc__)
            ref = _('tag:') + tag_name
            if title:
                title = utils.parse_rst(title, 'tag', ref)
            if body:
                body = utils.parse_rst(body, 'tag', ref)
            for key in metadata:
                metadata[key] = utils.parse_rst(metadata[key], 'tag', ref)
            tags.append({
                'name': tag_name,
                'title': title,
                'body': body,
                'meta': metadata,
                'library': tag_library,
            })
    return render_to_response('admin_doc/template_tag_index.html', {
        'root_path': urlresolvers.reverse('admin:index'),
        'tags': tags
    }, context_instance=RequestContext(request))
@staff_member_required
def template_filter_index(request):
    """List every registered template filter together with its parsed docstring."""
    if not utils.docutils_is_available:
        return missing_docutils_page(request)
    load_all_installed_template_libraries()
    filters = []
    libraries = [(None, lib) for lib in template.builtins]
    libraries.extend(template.libraries.items())
    for module_name, library in libraries:
        # Builtin libraries are not loaded via {% load %}; they carry no name.
        tag_library = None if library in template.builtins else module_name.split('.')[-1]
        for filter_name, filter_func in library.filters.items():
            title, body, metadata = utils.parse_docstring(filter_func.__doc__)
            ref = _('filter:') + filter_name
            if title:
                title = utils.parse_rst(title, 'filter', ref)
            if body:
                body = utils.parse_rst(body, 'filter', ref)
            for key in metadata:
                metadata[key] = utils.parse_rst(metadata[key], 'filter', ref)
            filters.append({
                'name': filter_name,
                'title': title,
                'body': body,
                'meta': metadata,
                'library': tag_library,
            })
    return render_to_response('admin_doc/template_filter_index.html', {
        'root_path': urlresolvers.reverse('admin:index'),
        'filters': filters
    }, context_instance=RequestContext(request))
@staff_member_required
def view_index(request):
    # Lists every URL-reachable view for each settings module under admin
    # control (ADMIN_FOR allows one admin to document several sites).
    if not utils.docutils_is_available:
        return missing_docutils_page(request)
    if settings.ADMIN_FOR:
        settings_modules = [import_module(m) for m in settings.ADMIN_FOR]
    else:
        settings_modules = [settings]
    views = []
    for settings_mod in settings_modules:
        urlconf = import_module(settings_mod.ROOT_URLCONF)
        view_functions = extract_views_from_urlpatterns(urlconf.urlpatterns)
        if Site._meta.installed:
            site_obj = Site.objects.get(pk=settings_mod.SITE_ID)
        else:
            # Sites framework unavailable: fall back to the generic stand-in.
            site_obj = GenericSite()
        for (func, regex) in view_functions:
            views.append({
                'full_name': '%s.%s' % (func.__module__, getattr(func, '__name__', func.__class__.__name__)),
                'site_id': settings_mod.SITE_ID,
                'site': site_obj,
                'url': simplify_regex(regex),
            })
    return render_to_response('admin_doc/view_index.html', {
        'root_path': urlresolvers.reverse('admin:index'),
        'views': views
    }, context_instance=RequestContext(request))
@staff_member_required
def view_detail(request, view):
    """
    Render the documentation page for a single view.

    ``view`` is the dotted path of the view callable; raises Http404 when it
    cannot be imported or looked up.
    """
    if not utils.docutils_is_available:
        return missing_docutils_page(request)
    mod, func = urlresolvers.get_mod_func(view)
    try:
        view_func = getattr(import_module(mod), func)
    except (ImportError, AttributeError):
        raise Http404
    title, body, metadata = utils.parse_docstring(view_func.__doc__)
    if title:
        title = utils.parse_rst(title, 'view', _('view:') + view)
    if body:
        body = utils.parse_rst(body, 'view', _('view:') + view)
    for key in metadata:
        # Fixed: previously passed 'model' as the parse_rst context here,
        # inconsistent with the 'view' context (and 'view:' label) used for
        # the title and body above.
        metadata[key] = utils.parse_rst(metadata[key], 'view', _('view:') + view)
    return render_to_response('admin_doc/view_detail.html', {
        'root_path': urlresolvers.reverse('admin:index'),
        'name': view,
        'summary': title,
        'body': body,
        'meta': metadata,
    }, context_instance=RequestContext(request))
@staff_member_required
def model_index(request):
    """Render an index page listing the options (``_meta``) of every installed model."""
    if not utils.docutils_is_available:
        return missing_docutils_page(request)
    model_options = [model._meta for model in models.get_models()]
    return render_to_response('admin_doc/model_index.html', {
        'root_path': urlresolvers.reverse('admin:index'),
        'models': model_options,
    }, context_instance=RequestContext(request))
@staff_member_required
def model_detail(request, app_label, model_name):
    """
    Render the documentation page for a single model: its concrete fields,
    many-to-many accessors, zero-argument methods and related-object
    accessors, each with an RST-parsed description.
    """
    if not utils.docutils_is_available:
        return missing_docutils_page(request)
    # Get the model class.
    try:
        app_mod = models.get_app(app_label)
    except ImproperlyConfigured:
        raise Http404(_("App %r not found") % app_label)
    model = None
    # model_name arrives lowercased from the URLconf, so compare
    # case-insensitively against each model's object_name.
    for m in models.get_models(app_mod):
        if m._meta.object_name.lower() == model_name:
            model = m
            break
    if model is None:
        raise Http404(_("Model %(model_name)r not found in app %(app_label)r") % {'model_name': model_name, 'app_label': app_label})
    opts = model._meta
    # Gather fields/field descriptions.
    fields = []
    for field in opts.fields:
        # ForeignKey is a special case since the field will actually be a
        # descriptor that returns the other object
        if isinstance(field, models.ForeignKey):
            data_type = field.rel.to.__name__
            # NOTE(review): this rebinds the app_label *parameter* for the
            # remainder of the function; apparently harmless here since the
            # parameter is no longer needed, but worth confirming.
            app_label = field.rel.to._meta.app_label
            verbose = utils.parse_rst((_("the related `%(app_label)s.%(data_type)s` object") % {'app_label': app_label, 'data_type': data_type}), 'model', _('model:') + data_type)
        else:
            data_type = get_readable_field_data_type(field)
            verbose = field.verbose_name
        fields.append({
            'name': field.name,
            'data_type': data_type,
            'verbose': verbose,
            'help_text': field.help_text,
        })
    # Gather many-to-many fields.
    # Each m2m field is documented through its two queryset accessors
    # (``<name>.all`` and ``<name>.count``).
    for field in opts.many_to_many:
        data_type = field.rel.to.__name__
        app_label = field.rel.to._meta.app_label
        verbose = _("related `%(app_label)s.%(object_name)s` objects") % {'app_label': app_label, 'object_name': data_type}
        fields.append({
            'name': "%s.all" % field.name,
            "data_type": 'List',
            'verbose': utils.parse_rst(_("all %s") % verbose , 'model', _('model:') + opts.module_name),
        })
        fields.append({
            'name' : "%s.count" % field.name,
            'data_type' : 'Integer',
            'verbose' : utils.parse_rst(_("number of %s") % verbose , 'model', _('model:') + opts.module_name),
        })
    # Gather model methods.
    for func_name, func in model.__dict__.items():
        # Only document plain instance methods that take no arguments
        # beyond ``self`` (argspec of length 1).
        if (inspect.isfunction(func) and len(inspect.getargspec(func)[0]) == 1):
            try:
                # StopIteration is (ab)used as a "skip this method" signal
                # when the name matches one of the excluded prefixes.
                for exclude in MODEL_METHODS_EXCLUDE:
                    if func_name.startswith(exclude):
                        raise StopIteration
            except StopIteration:
                continue
            verbose = func.__doc__
            if verbose:
                verbose = utils.parse_rst(utils.trim_docstring(verbose), 'model', _('model:') + opts.module_name)
            fields.append({
                'name': func_name,
                'data_type': get_return_data_type(func_name),
                'verbose': verbose,
            })
    # Gather related objects
    # (reverse FK and reverse m2m relations, documented via their accessors).
    for rel in opts.get_all_related_objects() + opts.get_all_related_many_to_many_objects():
        verbose = _("related `%(app_label)s.%(object_name)s` objects") % {'app_label': rel.opts.app_label, 'object_name': rel.opts.object_name}
        accessor = rel.get_accessor_name()
        fields.append({
            'name' : "%s.all" % accessor,
            'data_type' : 'List',
            'verbose' : utils.parse_rst(_("all %s") % verbose , 'model', _('model:') + opts.module_name),
        })
        fields.append({
            'name' : "%s.count" % accessor,
            'data_type' : 'Integer',
            'verbose' : utils.parse_rst(_("number of %s") % verbose , 'model', _('model:') + opts.module_name),
        })
    return render_to_response('admin_doc/model_detail.html', {
        'root_path': urlresolvers.reverse('admin:index'),
        'name': '%s.%s' % (opts.app_label, opts.object_name),
        'summary': _("Fields on %s objects") % opts.object_name,
        'description': model.__doc__,
        'fields': fields,
    }, context_instance=RequestContext(request))
@staff_member_required
def template_detail(request, template):
    """
    Render, for each site listed in ADMIN_FOR, the candidate file locations
    for the given template name, whether each exists, and (lazily) its
    contents.
    """
    templates = []
    for site_settings_module in settings.ADMIN_FOR:
        settings_mod = import_module(site_settings_module)
        if Site._meta.installed:
            site_obj = Site.objects.get(pk=settings_mod.SITE_ID)
        else:
            site_obj = GenericSite()
        for dir in settings_mod.TEMPLATE_DIRS:
            template_file = os.path.join(dir, template)
            templates.append({
                'file': template_file,
                'exists': os.path.exists(template_file),
                # Bug fix: bind template_file as a default argument.  The
                # original lambda closed over the loop variable, so every
                # entry lazily read the contents of the *last* template path
                # visited (classic late-binding closure bug).
                'contents': lambda path=template_file: os.path.exists(path) and open(path).read() or '',
                'site_id': settings_mod.SITE_ID,
                'site': site_obj,
                'order': list(settings_mod.TEMPLATE_DIRS).index(dir),
            })
    return render_to_response('admin_doc/template_detail.html', {
        'root_path': urlresolvers.reverse('admin:index'),
        'name': template,
        'templates': templates,
    }, context_instance=RequestContext(request))
####################
# Helper functions #
####################
def missing_docutils_page(request):
    """Display an error message for people without docutils"""
    # Rendered without a RequestContext: the page is static and should not
    # depend on any context processors.
    return render_to_response('admin_doc/missing_docutils.html')
def load_all_installed_template_libraries():
    """
    Load/register every template tag library from installed apps.

    Libraries that fail to import are skipped silently on purpose: this is
    only used to populate the documentation index, which should not crash
    because a single broken third-party library cannot be loaded.
    """
    for module_name in template.get_templatetags_modules():
        mod = import_module(module_name)
        try:
            # Every importable .py file next to the templatetags module is a
            # candidate library; p[0].isalpha() skips private/dunder files.
            libraries = [
                os.path.splitext(p)[0]
                for p in os.listdir(os.path.dirname(mod.__file__))
                if p.endswith('.py') and p[0].isalpha()
            ]
        except OSError:
            libraries = []
        for library_name in libraries:
            try:
                # get_library() registers the library as a side effect; the
                # return value is not needed (the original bound it to an
                # unused variable, along with an unused exception binding).
                template.get_library(library_name)
            except template.InvalidTemplateLibrary:
                pass
def get_return_data_type(func_name):
    """Guess a human-readable return type from a ``get_*`` method name."""
    if func_name.startswith('get_'):
        if func_name.endswith('_list'):
            return 'List'
        if func_name.endswith('_count'):
            return 'Integer'
    # Anything else: no guess.
    return ''
def get_readable_field_data_type(field):
    """
    Return the description for a given field type, if it exists.

    Field descriptions may contain ``%(name)s`` format placeholders, which
    are interpolated against the field's own attributes (``field.__dict__``)
    before being returned.
    """
    description_template = field.description
    return description_template % field.__dict__
def extract_views_from_urlpatterns(urlpatterns, base=''):
    """
    Return a list of views from a list of urlpatterns.

    Each object in the returned list is a two-tuple: (view_func, regex),
    where the regex is the concatenation of every enclosing pattern.
    """
    views = []
    for entry in urlpatterns:
        if hasattr(entry, 'url_patterns'):
            # An include()d URLconf: recurse, prefixing the enclosing regex.
            try:
                sub_patterns = entry.url_patterns
            except ImportError:
                continue
            views.extend(
                extract_views_from_urlpatterns(sub_patterns,
                                               base + entry.regex.pattern))
        elif hasattr(entry, 'callback'):
            try:
                views.append((entry.callback, base + entry.regex.pattern))
            except ViewDoesNotExist:
                continue
        else:
            raise TypeError(_("%s does not appear to be a urlpattern object") % entry)
    return views
# Matches a named group and captures just its "<name>" part, e.g.
# "(?P<slug>[\w-]+)" -> group(1) == "<slug>".
named_group_matcher = re.compile(r'\(\?P(<\w+>).+?\)')
# Matches any (remaining) parenthesized group, non-greedily.
non_named_group_matcher = re.compile(r'\(.*?\)')


def simplify_regex(pattern):
    r"""
    Clean up urlpattern regexes into something somewhat readable by Mere
    Humans: turns something like
    "^(?P<sport_slug>\w+)/athletes/(?P<athlete_slug>\w+)/$"
    into "<sport_slug>/athletes/<athlete_slug>/".
    """
    # Named groups become "<name>"; anonymous groups become "<var>".
    pattern = named_group_matcher.sub(lambda match: match.group(1), pattern)
    pattern = non_named_group_matcher.sub("<var>", pattern)
    # Strip leftover regex metacharacters; the order mirrors the original
    # replacement chain ("//" is collapsed only after "?" removal).
    for old, new in (('^', ''), ('$', ''), ('?', ''), ('//', '/'), ('\\', '')):
        pattern = pattern.replace(old, new)
    if not pattern.startswith('/'):
        pattern = '/' + pattern
    return pattern
| bsd-3-clause |
pablodanielrey/ws | SilegWS/ladon/SilegWS/SilegData.py | 1 | 5480 | # -*- coding: utf-8 -*-
import inject
from connection.connection import Connection
class SilegData:
    """Read-only queries over the Sileg academic staffing database.

    Every public method returns its result set as a list of plain dicts.
    Connections come from the injected ``Connection`` pool and are always
    returned to it, even on error.

    Security fix: caller-supplied values are no longer interpolated into the
    SQL text with ``str.format`` (which allowed SQL injection in this web
    service); they are passed as query parameters instead.
    NOTE(review): assumes a ``%s``-paramstyle driver such as psycopg2, which
    the PostgreSQL-style SQL and dict-like rows suggest -- confirm.
    """

    # Connection pool, injected by the application container.
    conn = inject.attr(Connection)

    def _fetchall(self, sql, params=None):
        """Execute ``sql`` and fetch every row as a dict.

        ``params`` (optional tuple) is handed to ``cursor.execute`` so the
        driver performs safe parameter substitution.  The cursor is closed
        and the connection returned to the pool in all cases.
        """
        con = self.conn.get()
        try:
            cur = con.cursor()
            try:
                if params is None:
                    cur.execute(sql)
                else:
                    cur.execute(sql, params)
                return [dict(row) for row in cur.fetchall()]
            finally:
                cur.close()
        finally:
            self.conn.put(con)

    def getListadoDpto(self):
        """Return departments (name, id) that have active teaching designations."""
        sql = """
            SELECT dpto_nombre, dpto_id
            FROM (
                SELECT desig_fecha_baja, desig_catxmat_id
                FROM designacion_docente
            ) AS dd
            LEFT JOIN (
                catedras_x_materia LEFT JOIN materia ON (materia.materia_id = catedras_x_materia.catxmat_materia_id)
            ) ON (catedras_x_materia.catxmat_id = dd.desig_catxmat_id)
            LEFT JOIN (
                SELECT dpto_id, dpto_nombre
                FROM departamento WHERE NOT (
                    upper(dpto_nombre) LIKE 'C. U.%'
                    OR upper(dpto_nombre) LIKE 'C.U.%'
                )
            ) AS dpto ON (materia_dpto_id = dpto.dpto_id)
            WHERE ( desig_fecha_baja IS NULL )
            AND materia_nombre IS NOT NULL
            GROUP BY dpto_nombre, dpto_id
        """
        return self._fetchall(sql)

    def getListadoMateriasDpto(self, idDpto):
        """Return the subjects (name, id) of one department, alphabetically."""
        # Literal '%' signs are doubled because the query now carries
        # parameters and the driver would otherwise try to interpret them.
        sql = """
            SELECT materia_nombre, materia_id
            FROM (
                SELECT desig_fecha_baja, desig_catxmat_id
                FROM designacion_docente
            ) AS dd
            LEFT JOIN (
                catedras_x_materia LEFT JOIN materia ON (materia.materia_id = catedras_x_materia.catxmat_materia_id)
            ) ON (catedras_x_materia.catxmat_id = dd.desig_catxmat_id)
            LEFT JOIN (
                SELECT dpto_id
                FROM departamento
                WHERE NOT (
                    upper(dpto_nombre) LIKE 'C. U.%%'
                    OR upper(dpto_nombre) LIKE 'C.U.%%'
                )
            ) AS dpto ON (materia_dpto_id = dpto.dpto_id)
            WHERE dpto_id = %s
            AND ( desig_fecha_baja IS NULL ) AND materia_nombre IS NOT NULL
            GROUP BY materia_nombre, materia_id
            ORDER BY materia_nombre ASC
        """
        return self._fetchall(sql, (idDpto,))

    def getListadoMaterias(self):
        """Return every subject (name, id) with active designations, alphabetically."""
        sql = """
            SELECT materia_nombre, materia_id
            FROM (
                SELECT desig_fecha_baja, desig_catxmat_id
                FROM designacion_docente ) AS dd
            LEFT JOIN (
                catedras_x_materia
                LEFT JOIN materia ON (materia.materia_id = catedras_x_materia.catxmat_materia_id)
            ) ON (catedras_x_materia.catxmat_id = dd.desig_catxmat_id)
            LEFT JOIN (
                SELECT dpto_id
                FROM departamento
                WHERE NOT (
                    UPPER(dpto_nombre) LIKE 'C. U.%'
                    OR UPPER(dpto_nombre) LIKE 'C.U.%'
                )
            ) AS dpto ON (materia_dpto_id = dpto.dpto_id)
            WHERE ( desig_fecha_baja IS NULL )
            AND materia_nombre IS NOT NULL
            GROUP BY materia_nombre, materia_id
            ORDER BY materia_nombre ASC
        """
        return self._fetchall(sql)

    def getCatedras(self, idMateria):
        """Return the chairs (catedras) attached to one subject."""
        sql = """
            SELECT catxmat_id, catedra_nombre
            FROM catedras_x_materia cm
            INNER JOIN catedra c ON cm.catxmat_catedra_id = c.catedra_id
            WHERE cm.catxmat_materia_id = %s
        """
        return self._fetchall(sql, (idMateria,))

    def getCuerpoDocente(self, idCatedra):
        """Return the active teaching staff of one chair.

        Excludes designations that were terminated (desig_fecha_baja set)
        or that currently have an open leave (licencia) record.
        """
        sql = """
            SELECT p.pers_id, e.empleado_id, upper(p.pers_apellidos) AS apellido,
                upper(p.pers_nombres) nombre, upper(tc.tipocargo_nombre) AS tipocargo_nombre,
                tg.titgenerico_abrev, tpe.titposemp_titulo_abrev, desig_observaciones, pers_nrodoc, tipodoc_descripcion
            FROM designacion_docente dd
            LEFT JOIN tipo_cargo tc ON dd.desig_tipocargo_id = tc.tipocargo_id
            LEFT JOIN empleado e ON dd.desig_empleado_id = e.empleado_id
            LEFT JOIN persona p ON p.pers_id = e.empleado_pers_id
            LEFT JOIN tipo_documento ON (tipodoc_id = pers_tipodoc_id)
            LEFT JOIN titulo_grado_empleado tge ON tge.titgrademp_empleado_id = e.empleado_id
            LEFT JOIN titulo_postgrado_empleado tpe ON tpe.titposemp_empleado_id = e.empleado_id
            LEFT JOIN titulo_especifico te ON te.titesp_id = tge.titgrademp_esp_id
            LEFT JOIN titulo_generico tg ON tg.titgenerico_id = te.titesp_gen_id
            WHERE dd.desig_catxmat_id = %s
            AND dd.desig_fecha_baja IS NULL
            AND dd.desig_id NOT IN (
                SELECT licencia_designacion_id
                FROM licencia
                WHERE licencia_fecha_hasta > now()
            )
            ORDER BY tc.tipocargo_orden, apellido
        """
        return self._fetchall(sql, (idCatedra,))

    def getCuerpoDocenteCargo(self, idCatedra, cargo):
        """Return the active staff of one chair whose position matches ``cargo``.

        The match is a case-insensitive substring search, now performed with
        a parameterized LIKE pattern instead of string interpolation.
        """
        sql = """
            SELECT p.pers_id, e.empleado_id, upper(p.pers_apellidos) AS apellido,
                upper(p.pers_nombres) nombre, upper(tc.tipocargo_nombre) AS tipocargo_nombre,
                tg.titgenerico_abrev, tpe.titposemp_titulo_abrev, desig_observaciones, pers_nrodoc, tipodoc_descripcion
            FROM designacion_docente dd
            LEFT JOIN tipo_cargo tc ON dd.desig_tipocargo_id = tc.tipocargo_id
            LEFT JOIN empleado e ON dd.desig_empleado_id = e.empleado_id
            LEFT JOIN persona p ON p.pers_id = e.empleado_pers_id
            LEFT JOIN tipo_documento ON (tipodoc_id = pers_tipodoc_id)
            LEFT JOIN titulo_grado_empleado tge ON tge.titgrademp_empleado_id = e.empleado_id
            LEFT JOIN titulo_postgrado_empleado tpe ON tpe.titposemp_empleado_id = e.empleado_id
            LEFT JOIN titulo_especifico te ON te.titesp_id = tge.titgrademp_esp_id
            LEFT JOIN titulo_generico tg ON tg.titgenerico_id = te.titesp_gen_id
            WHERE dd.desig_catxmat_id = %s
            AND upper(tc.tipocargo_nombre) LIKE %s
            AND dd.desig_fecha_baja IS NULL
            AND dd.desig_id NOT IN (
                SELECT licencia_designacion_id
                FROM licencia
                WHERE licencia_fecha_hasta > now()
            )
            ORDER BY tc.tipocargo_orden, apellido
        """
        return self._fetchall(sql, (idCatedra, '%' + cargo.upper() + '%'))
| lgpl-3.0 |
home-assistant/home-assistant | homeassistant/components/toon/const.py | 2 | 16331 | """Constants for the Toon integration."""
from datetime import datetime, timedelta
from homeassistant.components.binary_sensor import (
DEVICE_CLASS_CONNECTIVITY,
DEVICE_CLASS_PROBLEM,
)
from homeassistant.components.sensor import (
ATTR_LAST_RESET,
ATTR_STATE_CLASS,
DEVICE_CLASS_ENERGY,
DEVICE_CLASS_POWER,
DEVICE_CLASS_TEMPERATURE,
STATE_CLASS_MEASUREMENT,
)
from homeassistant.const import (
ATTR_DEVICE_CLASS,
ATTR_ICON,
ATTR_NAME,
ATTR_UNIT_OF_MEASUREMENT,
ENERGY_KILO_WATT_HOUR,
PERCENTAGE,
POWER_WATT,
TEMP_CELSIUS,
)
# Integration domain: key used for config entries and hass.data storage.
DOMAIN = "toon"
# Config-entry / flow keys.
CONF_AGREEMENT = "agreement"
CONF_AGREEMENT_ID = "agreement_id"
CONF_CLOUDHOOK_URL = "cloudhook_url"
CONF_MIGRATE = "migrate"
# Polling interval for the data update coordinator.
DEFAULT_SCAN_INTERVAL = timedelta(seconds=300)
# Temperature bounds (presumably for the climate entity -- confirm at usage sites).
DEFAULT_MAX_TEMP = 30.0
DEFAULT_MIN_TEMP = 6.0
# Units not provided by homeassistant.const.
CURRENCY_EUR = "EUR"
VOLUME_CM3 = "CM3"
VOLUME_M3 = "M3"
VOLUME_LHOUR = "L/H"
VOLUME_LMIN = "L/MIN"
# Keys used inside the entity-description dicts below.
ATTR_DEFAULT_ENABLED = "default_enabled"
ATTR_INVERTED = "inverted"
ATTR_MEASUREMENT = "measurement"
ATTR_SECTION = "section"
# Descriptions of the binary sensors exposed by this integration.  Each value
# carries the attributes the binary_sensor platform uses to build an entity:
# display name, Toon data section/measurement to read, device class, icon,
# whether the reported state is inverted, and whether the entity is enabled
# by default in the entity registry.
BINARY_SENSOR_ENTITIES = {
    "thermostat_info_boiler_connected_None": {
        ATTR_NAME: "Boiler Module Connection",
        ATTR_SECTION: "thermostat",
        ATTR_MEASUREMENT: "boiler_module_connected",
        ATTR_INVERTED: False,
        ATTR_DEVICE_CLASS: DEVICE_CLASS_CONNECTIVITY,
        ATTR_ICON: None,
        ATTR_DEFAULT_ENABLED: False,
    },
    "thermostat_info_burner_info_1": {
        ATTR_NAME: "Boiler Heating",
        ATTR_SECTION: "thermostat",
        ATTR_MEASUREMENT: "heating",
        ATTR_INVERTED: False,
        ATTR_DEVICE_CLASS: None,
        ATTR_ICON: "mdi:fire",
        ATTR_DEFAULT_ENABLED: False,
    },
    "thermostat_info_burner_info_2": {
        ATTR_NAME: "Hot Tap Water",
        ATTR_SECTION: "thermostat",
        ATTR_MEASUREMENT: "hot_tapwater",
        ATTR_INVERTED: False,
        ATTR_DEVICE_CLASS: None,
        ATTR_ICON: "mdi:water-pump",
        ATTR_DEFAULT_ENABLED: True,
    },
    "thermostat_info_burner_info_3": {
        ATTR_NAME: "Boiler Preheating",
        ATTR_SECTION: "thermostat",
        ATTR_MEASUREMENT: "pre_heating",
        ATTR_INVERTED: False,
        ATTR_DEVICE_CLASS: None,
        ATTR_ICON: "mdi:fire",
        ATTR_DEFAULT_ENABLED: False,
    },
    "thermostat_info_burner_info_None": {
        ATTR_NAME: "Boiler Burner",
        ATTR_SECTION: "thermostat",
        ATTR_MEASUREMENT: "burner",
        ATTR_INVERTED: False,
        ATTR_DEVICE_CLASS: None,
        ATTR_ICON: "mdi:fire",
        ATTR_DEFAULT_ENABLED: True,
    },
    "thermostat_info_error_found_255": {
        ATTR_NAME: "Boiler Status",
        ATTR_SECTION: "thermostat",
        ATTR_MEASUREMENT: "error_found",
        ATTR_INVERTED: False,
        ATTR_DEVICE_CLASS: DEVICE_CLASS_PROBLEM,
        ATTR_ICON: "mdi:alert",
        ATTR_DEFAULT_ENABLED: True,
    },
    "thermostat_info_ot_communication_error_0": {
        ATTR_NAME: "OpenTherm Connection",
        ATTR_SECTION: "thermostat",
        ATTR_MEASUREMENT: "opentherm_communication_error",
        ATTR_INVERTED: False,
        ATTR_DEVICE_CLASS: DEVICE_CLASS_PROBLEM,
        ATTR_ICON: "mdi:check-network-outline",
        ATTR_DEFAULT_ENABLED: False,
    },
    "thermostat_program_overridden": {
        ATTR_NAME: "Thermostat Program Override",
        ATTR_SECTION: "thermostat",
        ATTR_MEASUREMENT: "program_overridden",
        ATTR_INVERTED: False,
        ATTR_DEVICE_CLASS: None,
        ATTR_ICON: "mdi:gesture-tap",
        ATTR_DEFAULT_ENABLED: True,
    },
}
def _sensor_description(
    name,
    section,
    measurement,
    unit,
    device_class=None,
    icon=None,
    default_enabled=False,
    state_class=None,
    last_reset=None,
):
    """Build one sensor-entity description dict.

    Only name, section, measurement and unit differ for every sensor; the
    remaining attributes default to the values most entries share.
    """
    return {
        ATTR_NAME: name,
        ATTR_SECTION: section,
        ATTR_MEASUREMENT: measurement,
        ATTR_UNIT_OF_MEASUREMENT: unit,
        ATTR_DEVICE_CLASS: device_class,
        ATTR_ICON: icon,
        ATTR_DEFAULT_ENABLED: default_enabled,
        ATTR_STATE_CLASS: state_class,
        ATTR_LAST_RESET: last_reset,
    }


# Descriptions of the sensors exposed by this integration, keyed by the Toon
# API value they read.  Fixed: the "solar_produced" entry previously set
# ATTR_STATE_CLASS to ATTR_MEASUREMENT (the dict *key* constant) instead of
# STATE_CLASS_MEASUREMENT; both happen to equal "measurement", so runtime
# behavior is unchanged, but the intended constant is now used.
SENSOR_ENTITIES = {
    "current_display_temperature": _sensor_description(
        "Temperature", "thermostat", "current_display_temperature",
        TEMP_CELSIUS, device_class=DEVICE_CLASS_TEMPERATURE,
        state_class=STATE_CLASS_MEASUREMENT,
    ),
    "gas_average": _sensor_description(
        "Average Gas Usage", "gas_usage", "average", VOLUME_CM3,
        icon="mdi:gas-cylinder", default_enabled=True,
    ),
    "gas_average_daily": _sensor_description(
        "Average Daily Gas Usage", "gas_usage", "day_average", VOLUME_M3,
        icon="mdi:gas-cylinder",
    ),
    "gas_daily_usage": _sensor_description(
        "Gas Usage Today", "gas_usage", "day_usage", VOLUME_M3,
        icon="mdi:gas-cylinder", default_enabled=True,
    ),
    "gas_daily_cost": _sensor_description(
        "Gas Cost Today", "gas_usage", "day_cost", CURRENCY_EUR,
        icon="mdi:gas-cylinder", default_enabled=True,
    ),
    "gas_meter_reading": _sensor_description(
        "Gas Meter", "gas_usage", "meter", VOLUME_M3,
        icon="mdi:gas-cylinder", state_class=STATE_CLASS_MEASUREMENT,
        last_reset=datetime.fromtimestamp(0),
    ),
    "gas_value": _sensor_description(
        "Current Gas Usage", "gas_usage", "current", VOLUME_CM3,
        icon="mdi:gas-cylinder", default_enabled=True,
    ),
    "power_average": _sensor_description(
        "Average Power Usage", "power_usage", "average", POWER_WATT,
        device_class=DEVICE_CLASS_POWER,
    ),
    "power_average_daily": _sensor_description(
        "Average Daily Energy Usage", "power_usage", "day_average",
        ENERGY_KILO_WATT_HOUR, device_class=DEVICE_CLASS_ENERGY,
    ),
    "power_daily_cost": _sensor_description(
        "Energy Cost Today", "power_usage", "day_cost", CURRENCY_EUR,
        icon="mdi:power-plug", default_enabled=True,
    ),
    "power_daily_value": _sensor_description(
        "Energy Usage Today", "power_usage", "day_usage",
        ENERGY_KILO_WATT_HOUR, device_class=DEVICE_CLASS_ENERGY,
        default_enabled=True,
    ),
    "power_meter_reading": _sensor_description(
        "Electricity Meter Feed IN Tariff 1", "power_usage", "meter_high",
        ENERGY_KILO_WATT_HOUR, device_class=DEVICE_CLASS_ENERGY,
        state_class=STATE_CLASS_MEASUREMENT,
        last_reset=datetime.fromtimestamp(0),
    ),
    "power_meter_reading_low": _sensor_description(
        "Electricity Meter Feed IN Tariff 2", "power_usage", "meter_low",
        ENERGY_KILO_WATT_HOUR, device_class=DEVICE_CLASS_ENERGY,
        state_class=STATE_CLASS_MEASUREMENT,
        last_reset=datetime.fromtimestamp(0),
    ),
    "power_value": _sensor_description(
        "Current Power Usage", "power_usage", "current", POWER_WATT,
        device_class=DEVICE_CLASS_POWER, default_enabled=True,
        state_class=STATE_CLASS_MEASUREMENT,
    ),
    "solar_meter_reading_produced": _sensor_description(
        "Electricity Meter Feed OUT Tariff 1", "power_usage",
        "meter_produced_high", ENERGY_KILO_WATT_HOUR,
        device_class=DEVICE_CLASS_ENERGY, state_class=STATE_CLASS_MEASUREMENT,
        last_reset=datetime.fromtimestamp(0),
    ),
    "solar_meter_reading_low_produced": _sensor_description(
        "Electricity Meter Feed OUT Tariff 2", "power_usage",
        "meter_produced_low", ENERGY_KILO_WATT_HOUR,
        device_class=DEVICE_CLASS_ENERGY, state_class=STATE_CLASS_MEASUREMENT,
        last_reset=datetime.fromtimestamp(0),
    ),
    "solar_value": _sensor_description(
        "Current Solar Power Production", "power_usage", "current_solar",
        POWER_WATT, device_class=DEVICE_CLASS_POWER, default_enabled=True,
        state_class=STATE_CLASS_MEASUREMENT,
    ),
    "solar_maximum": _sensor_description(
        "Max Solar Power Production Today", "power_usage", "day_max_solar",
        POWER_WATT, device_class=DEVICE_CLASS_POWER, default_enabled=True,
    ),
    "solar_produced": _sensor_description(
        "Solar Power Production to Grid", "power_usage", "current_produced",
        POWER_WATT, device_class=DEVICE_CLASS_POWER, default_enabled=True,
        state_class=STATE_CLASS_MEASUREMENT,
    ),
    "power_usage_day_produced_solar": _sensor_description(
        "Solar Energy Produced Today", "power_usage", "day_produced_solar",
        ENERGY_KILO_WATT_HOUR, device_class=DEVICE_CLASS_ENERGY,
        default_enabled=True,
    ),
    "power_usage_day_to_grid_usage": _sensor_description(
        "Energy Produced To Grid Today", "power_usage", "day_to_grid_usage",
        ENERGY_KILO_WATT_HOUR, device_class=DEVICE_CLASS_ENERGY,
    ),
    "power_usage_day_from_grid_usage": _sensor_description(
        "Energy Usage From Grid Today", "power_usage", "day_from_grid_usage",
        ENERGY_KILO_WATT_HOUR, device_class=DEVICE_CLASS_ENERGY,
    ),
    "solar_average_produced": _sensor_description(
        "Average Solar Power Production to Grid", "power_usage",
        "average_produced", POWER_WATT, device_class=DEVICE_CLASS_POWER,
    ),
    "thermostat_info_current_modulation_level": _sensor_description(
        "Boiler Modulation Level", "thermostat", "current_modulation_level",
        PERCENTAGE, icon="mdi:percent", state_class=STATE_CLASS_MEASUREMENT,
    ),
    "power_usage_current_covered_by_solar": _sensor_description(
        "Current Power Usage Covered By Solar", "power_usage",
        "current_covered_by_solar", PERCENTAGE, icon="mdi:solar-power",
        default_enabled=True, state_class=STATE_CLASS_MEASUREMENT,
    ),
    "water_average": _sensor_description(
        "Average Water Usage", "water_usage", "average", VOLUME_LMIN,
        icon="mdi:water",
    ),
    "water_average_daily": _sensor_description(
        "Average Daily Water Usage", "water_usage", "day_average", VOLUME_M3,
        icon="mdi:water",
    ),
    "water_daily_usage": _sensor_description(
        "Water Usage Today", "water_usage", "day_usage", VOLUME_M3,
        icon="mdi:water",
    ),
    "water_meter_reading": _sensor_description(
        "Water Meter", "water_usage", "meter", VOLUME_M3, icon="mdi:water",
        state_class=STATE_CLASS_MEASUREMENT,
        last_reset=datetime.fromtimestamp(0),
    ),
    "water_value": _sensor_description(
        "Current Water Usage", "water_usage", "current", VOLUME_LMIN,
        icon="mdi:water-pump", state_class=STATE_CLASS_MEASUREMENT,
    ),
    "water_daily_cost": _sensor_description(
        "Water Cost Today", "water_usage", "day_cost", CURRENCY_EUR,
        icon="mdi:water-pump",
    ),
}
# Descriptions of the switches exposed by this integration; same attribute
# scheme as BINARY_SENSOR_ENTITIES (name, section/measurement read from the
# Toon API, inversion flag, icon, default-enabled flag).
SWITCH_ENTITIES = {
    "thermostat_holiday_mode": {
        ATTR_NAME: "Holiday Mode",
        ATTR_SECTION: "thermostat",
        ATTR_MEASUREMENT: "holiday_mode",
        ATTR_INVERTED: False,
        ATTR_ICON: "mdi:airport",
        ATTR_DEFAULT_ENABLED: True,
    },
    "thermostat_program": {
        ATTR_NAME: "Thermostat Program",
        ATTR_SECTION: "thermostat",
        ATTR_MEASUREMENT: "program",
        ATTR_INVERTED: False,
        ATTR_ICON: "mdi:calendar-clock",
        ATTR_DEFAULT_ENABLED: True,
    },
}
| apache-2.0 |
Cadair/astropy-helpers | astropy_helpers/git_helpers.py | 3 | 6495 | # Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Utilities for retrieving revision information from a project's git repository.
"""
# Do not remove the following comment; it is used by
# astropy_helpers.version_helpers to determine the beginning of the code in
# this module
# BEGIN
import locale
import os
import subprocess
import warnings
__all__ = ['get_git_devstr']
def _decode_stdio(stream):
try:
stdio_encoding = locale.getdefaultlocale()[1] or 'utf-8'
except ValueError:
stdio_encoding = 'utf-8'
try:
text = stream.decode(stdio_encoding)
except UnicodeDecodeError:
# Final fallback
text = stream.decode('latin1')
return text
def update_git_devstr(version, path=None):
    """
    Updates the git revision string if and only if the path is being imported
    directly from a git working copy.  This ensures that the revision number
    in the version string is accurate.
    """
    try:
        # Quick probe: returns '' when the path is not inside a git repo.
        repo_sha = get_git_devstr(sha=True, show_warning=False, path=path)
    except OSError:
        return version

    if not repo_sha:
        # Probably not in git, so just pass the version through silently.
        return version

    if 'dev' not in version:
        # Already the true/release version; nothing to update.
        return version

    # Rebuild the ".dev" suffix from the current revision count.
    version_base = version.split('.dev', 1)[0]
    revision_count = get_git_devstr(sha=False, show_warning=False, path=path)
    return version_base + '.dev' + revision_count
def get_git_devstr(sha=False, show_warning=True, path=None):
    """
    Determines the number of revisions in this repository.

    Parameters
    ----------
    sha : bool
        If True, the full SHA1 hash will be returned. Otherwise, the total
        count of commits in the repository will be used as a "revision
        number".

    show_warning : bool
        If True, issue a warning if git returns an error code, otherwise errors
        pass silently.

    path : str or None
        If a string, specifies the directory to look in to find the git
        repository.  If `None`, the current working directory is used, and must
        be the root of the git repository.
        If given a filename it uses the directory containing that file.

    Returns
    -------
    devversion : str
        Either a string with the revision number (if `sha` is False), the
        SHA1 hash of the current commit (if `sha` is True), or an empty string
        if git version info could not be identified.
    """
    if path is None:
        path = os.getcwd()
    if not os.path.isdir(path):
        path = os.path.abspath(os.path.dirname(path))
    if sha:
        # Faster for getting just the hash of HEAD
        cmd = ['rev-parse', 'HEAD']
    else:
        cmd = ['rev-list', '--count', 'HEAD']
    # Run ``git <cmd>`` in *path*; returns (returncode, stdout, stderr).
    # A returncode of None means git itself could not be executed.
    def run_git(cmd):
        try:
            p = subprocess.Popen(['git'] + cmd, cwd=path,
                                 stdout=subprocess.PIPE,
                                 stderr=subprocess.PIPE,
                                 stdin=subprocess.PIPE)
            stdout, stderr = p.communicate()
        except OSError as e:
            if show_warning:
                warnings.warn('Error running git: ' + str(e))
            return (None, b'', b'')
        # 128: not inside a git repository tree.
        if p.returncode == 128:
            if show_warning:
                warnings.warn('No git repository present at {0!r}! Using '
                              'default dev version.'.format(path))
            return (p.returncode, b'', b'')
        # 129: git did not understand a command option (old git).
        if p.returncode == 129:
            if show_warning:
                warnings.warn('Your git looks old (does it support {0}?); '
                              'consider upgrading to v1.7.2 or '
                              'later.'.format(cmd[0]))
            return (p.returncode, stdout, stderr)
        elif p.returncode != 0:
            if show_warning:
                warnings.warn('Git failed while determining revision '
                              'count: {0}'.format(_decode_stdio(stderr)))
            return (p.returncode, stdout, stderr)
        return p.returncode, stdout, stderr
    returncode, stdout, stderr = run_git(cmd)
    if not sha and returncode == 128:
        # git returns 128 if the command is not run from within a git
        # repository tree. In this case, a warning is produced above but we
        # return the default dev version of '0'.
        return '0'
    elif not sha and returncode == 129:
        # git returns 129 if a command option failed to parse; in
        # particular this could happen in git versions older than 1.7.2
        # where the --count option is not supported
        # Also use --abbrev-commit and --abbrev=0 to display the minimum
        # number of characters needed per-commit (rather than the full hash)
        cmd = ['rev-list', '--abbrev-commit', '--abbrev=0', 'HEAD']
        returncode, stdout, stderr = run_git(cmd)
        # Fall back on the old method of getting all revisions and counting
        # the lines
        if returncode == 0:
            return str(stdout.count(b'\n'))
        else:
            return ''
    elif sha:
        # A full SHA1 hash is 40 hex characters.
        return _decode_stdio(stdout)[:40]
    else:
        return _decode_stdio(stdout).strip()
# This function is tested but it is only ever executed within a subprocess when
# creating a fake package, so it doesn't get picked up by coverage metrics.
def _get_repo_path(pathname, levels=None):  # pragma: no cover
    """Locate the root of the git repository containing ``pathname``.

    Walks upward from ``pathname`` (a file or directory) looking for a
    ``.git`` entry.  When ``levels`` is given, at most ``levels`` parent
    directories above the starting directory are examined (``levels=0``
    means only the starting directory itself is checked).

    Returns the repository root as an absolute path, or `None` if the
    given path does not exist or no enclosing git repository was found.
    """
    if os.path.isfile(pathname):
        candidate = os.path.abspath(os.path.dirname(pathname))
    elif os.path.isdir(pathname):
        candidate = os.path.abspath(pathname)
    else:
        return None

    depth = 0
    while levels is None or depth <= levels:
        if os.path.exists(os.path.join(candidate, '.git')):
            return candidate
        depth += 1
        parent = os.path.dirname(candidate)
        if parent == candidate:
            # Reached the filesystem root; nowhere further to climb.
            break
        candidate = parent
    return None
| bsd-3-clause |
hzy001/ansible | lib/ansible/plugins/lookup/__init__.py | 79 | 1905 | # (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
__all__ = ['LookupBase']
class LookupBase:
    """Common base class for lookup plugins.

    Keeps references to the loader/templar supplied by the plugin loader
    and provides a handful of small list/dict manipulation helpers that
    concrete lookups share.
    """

    def __init__(self, loader=None, templar=None, **kwargs):
        self._loader = loader
        self._templar = templar
        self._display = display

    def get_basedir(self, variables):
        """Return the role path when executing inside a role, otherwise
        fall back to the loader's base directory."""
        if 'role_path' in variables:
            return variables['role_path']
        return self._loader.get_basedir()

    def _flatten(self, terms):
        """Flatten exactly one level of nesting: lists/tuples found in
        *terms* are spliced into the result, scalars are kept as-is."""
        flat = []
        for term in terms:
            if isinstance(term, (list, tuple)):
                flat.extend(term)
            else:
                flat.append(term)
        return flat

    def _combine(self, a, b):
        """Cartesian product of *a* and *b*, each pair flattened one level."""
        return [self._flatten([x, y]) for x in a for y in b]

    def _flatten_hash_to_list(self, terms):
        """Convert a mapping into a list of ``{'key': k, 'value': v}`` dicts."""
        return [{'key': key, 'value': terms[key]} for key in terms]
| gpl-3.0 |
luiscarlosgph/nas | env/lib/python2.7/site-packages/django/contrib/gis/db/backends/oracle/models.py | 86 | 2268 | """
The GeometryColumns and SpatialRefSys models for the Oracle spatial
backend.
It should be noted that Oracle Spatial does not have database tables
named according to the OGC standard, so the closest analogs are used.
For example, the `USER_SDO_GEOM_METADATA` is used for the GeometryColumns
model and the `SDO_COORD_REF_SYS` is used for the SpatialRefSys model.
"""
from django.contrib.gis.db import models
from django.contrib.gis.db.backends.base import SpatialRefSysMixin
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class OracleGeometryColumns(models.Model):
    "Maps to the Oracle USER_SDO_GEOM_METADATA table."
    # Feature table and geometry column this metadata row describes.
    table_name = models.CharField(max_length=32)
    column_name = models.CharField(max_length=1024)
    # NOTE(review): srid is declared as the primary key here although the
    # underlying view is keyed by (table_name, column_name) in Oracle --
    # Django requires *some* primary key on every model; confirm intent.
    srid = models.IntegerField(primary_key=True)

    # TODO: Add support for `diminfo` column (type MDSYS.SDO_DIM_ARRAY).

    class Meta:
        app_label = 'gis'
        db_table = 'USER_SDO_GEOM_METADATA'
        # Unmanaged: the table belongs to Oracle Spatial; Django must not
        # create or migrate it.
        managed = False

    @classmethod
    def table_name_col(cls):
        """
        Returns the name of the metadata column used to store the feature table
        name.
        """
        return 'table_name'

    @classmethod
    def geom_col_name(cls):
        """
        Returns the name of the metadata column used to store the feature
        geometry column.
        """
        return 'column_name'

    def __str__(self):
        # Human-readable identification used in admin/debug output.
        return '%s - %s (SRID: %s)' % (self.table_name, self.column_name, self.srid)
class OracleSpatialRefSys(models.Model, SpatialRefSysMixin):
    "Maps to the Oracle MDSYS.CS_SRS table."
    # Coordinate system name and spatial reference identifiers as stored
    # by Oracle; srid is the table's primary key.
    cs_name = models.CharField(max_length=68)
    srid = models.IntegerField(primary_key=True)
    auth_srid = models.IntegerField()
    auth_name = models.CharField(max_length=256)
    # Well-known text representation of the coordinate system.
    wktext = models.CharField(max_length=2046)

    # Optional geometry representing the bounds of this coordinate
    # system. By default, all are NULL in the table.
    cs_bounds = models.PolygonField(null=True)
    objects = models.GeoManager()

    class Meta:
        app_label = 'gis'
        db_table = 'CS_SRS'
        # Unmanaged: this system table is owned by Oracle, not migrations.
        managed = False

    @property
    def wkt(self):
        """WKT string for this reference system (SpatialRefSysMixin hook)."""
        return self.wktext

    @classmethod
    def wkt_col(cls):
        """Name of the model field holding the WKT (SpatialRefSysMixin hook)."""
        return 'wktext'
| mit |
VanirAOSP/external_chromium_org | chrome/common/extensions/docs/server2/content_providers_test.py | 23 | 5685 | #!/usr/bin/env python
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import json
import unittest
from compiled_file_system import CompiledFileSystem
from content_providers import ContentProviders
from extensions_paths import EXTENSIONS
from object_store_creator import ObjectStoreCreator
from test_file_system import TestFileSystem
from test_util import DisableLogging
# Fixture: the parsed form of content_providers.json used by these tests.
# Each key names a content provider; 'serveFrom' is the URL prefix it is
# mounted at (empty string = site root), and the 'chromium'/'github' entry
# says which backing file system -- and directory within it -- the content
# comes from.
_CONTENT_PROVIDERS = {
  'apples': {
    'chromium': {
      'dir': 'chrome/common/extensions/apples'
    },
    'serveFrom': 'apples-dir',
  },
  'bananas': {
    # Empty serveFrom makes this the catch-all provider at the URL root.
    'serveFrom': '',
    'chromium': {
      'dir': 'chrome/common/extensions'
    },
  },
  'github-provider': {
    'serveFrom': 'gh',
    'github': {
      'dir': 'chrome/common/extensions',
      'owner': 'GoogleChrome',
      'repo': 'hello-world',
    },
  },
  'github-provider-with-dir': {
    'serveFrom': 'gh2',
    'github': {
      'dir': 'chrome/common/extensions/tomatoes/are/a',
      'owner': 'SomeOwner',
      'repo': 'some-repo',
    },
  },
  'tomatoes': {
    'serveFrom': 'tomatoes-dir/are/a',
    'chromium': {
      'dir': 'chrome/common/extensions/tomatoes/are/a'
    },
  },
}

# Fixture: fake file-system layout (rooted at EXTENSIONS) that the providers
# above serve files out of.  content_providers.json itself is embedded so
# ContentProviders can locate its own configuration.
_FILE_SYSTEM_DATA = {
  'docs': {
    'templates': {
      'json': {
        'content_providers.json': json.dumps(_CONTENT_PROVIDERS),
      },
    },
  },
  'apples': {
    'gala.txt': 'gala apples',
    'green': {
      'granny smith.txt': 'granny smith apples',
    },
  },
  'tomatoes': {
    'are': {
      'a': {
        'vegetable.txt': 'no they aren\'t',
        'fruit': {
          'cherry.txt': 'cherry tomatoes',
        },
      },
    },
  },
}
class _MockGithubFileSystemProvider(object):
  '''Test double for GithubFileSystemProvider.

  Always hands back the file system it was constructed with, while
  recording each (owner, repo) pair passed to Create so that tests can
  inspect the calls via GetAndReset.
  '''

  def __init__(self, file_system):
    self._file_system = file_system
    self._calls = []

  def Create(self, owner, repo):
    # Record the request, then serve the canned file system.
    self._calls.append((owner, repo))
    return self._file_system

  def GetAndReset(self):
    # Hand the recorded calls to the caller and start a fresh log.
    recorded, self._calls = self._calls, []
    return recorded
class ContentProvidersTest(unittest.TestCase):
  '''Exercises ContentProviders lookup by name and by serveFrom prefix,
  for both chromium- and github-backed providers, against the fixtures in
  _CONTENT_PROVIDERS / _FILE_SYSTEM_DATA.'''

  def setUp(self):
    test_file_system = TestFileSystem(_FILE_SYSTEM_DATA, relative_to=EXTENSIONS)
    self._github_fs_provider = _MockGithubFileSystemProvider(test_file_system)
    self._content_providers = ContentProviders(
        CompiledFileSystem.Factory(ObjectStoreCreator.ForTest()),
        test_file_system,
        self._github_fs_provider)

  def testSimpleRootPath(self):
    # 'apples' is rooted at .../extensions/apples, so paths are relative
    # to that directory.
    provider = self._content_providers.GetByName('apples')
    self.assertEqual(
        'gala apples',
        provider.GetContentAndType('gala.txt').Get().content)
    self.assertEqual(
        'granny smith apples',
        provider.GetContentAndType('green/granny smith.txt').Get().content)

  def testComplexRootPath(self):
    # 'tomatoes' is rooted several directories deep.
    provider = self._content_providers.GetByName('tomatoes')
    self.assertEqual(
        'no they aren\'t',
        provider.GetContentAndType('vegetable.txt').Get().content)
    self.assertEqual(
        'cherry tomatoes',
        provider.GetContentAndType('fruit/cherry.txt').Get().content)

  def testParentRootPath(self):
    # 'bananas' is rooted at the extensions dir itself, so subdirectories
    # of other providers are reachable through it.
    provider = self._content_providers.GetByName('bananas')
    self.assertEqual(
        'gala apples',
        provider.GetContentAndType('apples/gala.txt').Get().content)

  def testSimpleServlet(self):
    # GetByServeFrom returns (provider, remaining-path-after-prefix).
    provider, path = self._content_providers.GetByServeFrom('apples-dir')
    self.assertEqual('apples', provider.name)
    self.assertEqual('', path)
    provider, path = self._content_providers.GetByServeFrom(
        'apples-dir/are/forever')
    self.assertEqual('apples', provider.name)
    self.assertEqual('are/forever', path)

  def testComplexServlet(self):
    provider, path = self._content_providers.GetByServeFrom(
        'tomatoes-dir/are/a')
    self.assertEqual('tomatoes', provider.name)
    self.assertEqual('', path)
    provider, path = self._content_providers.GetByServeFrom(
        'tomatoes-dir/are/a/fruit/they/are')
    self.assertEqual('tomatoes', provider.name)
    self.assertEqual('fruit/they/are', path)

  def testEmptyStringServlet(self):
    # URLs that match no other serveFrom prefix fall back to the provider
    # mounted at '' ('bananas'), with the whole URL as the path.
    provider, path = self._content_providers.GetByServeFrom('tomatoes-dir/are')
    self.assertEqual('bananas', provider.name)
    self.assertEqual('tomatoes-dir/are', path)
    provider, path = self._content_providers.GetByServeFrom('')
    self.assertEqual('bananas', provider.name)
    self.assertEqual('', path)

  @DisableLogging('error')
  def testProviderNotFound(self):
    self.assertEqual(None, self._content_providers.GetByName('cabbages'))

  def testGithubContentProvider(self):
    # The github provider must be created lazily with the configured
    # owner/repo; the mock records that Create call.
    provider, path = self._content_providers.GetByServeFrom(
        'gh/apples/green/granny smith.txt')
    self.assertEqual('github-provider', provider.name)
    self.assertEqual('apples/green/granny smith.txt', path)
    self.assertEqual([('GoogleChrome', 'hello-world')],
                     self._github_fs_provider.GetAndReset())
    self.assertEqual(
        'granny smith apples',
        provider.GetContentAndType(path).Get().content)

  def testGithubContentProviderWithDir(self):
    provider, path = self._content_providers.GetByServeFrom(
        'gh2/fruit/cherry.txt')
    self.assertEqual('github-provider-with-dir', provider.name)
    self.assertEqual('fruit/cherry.txt', path)
    self.assertEqual([('SomeOwner', 'some-repo')],
                     self._github_fs_provider.GetAndReset())
    self.assertEqual(
        'cherry tomatoes',
        provider.GetContentAndType(path).Get().content)
if __name__ == '__main__':
unittest.main()
| bsd-3-clause |
VirtusLab/ansible-modules-extras | packaging/language/maven_artifact.py | 13 | 13501 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (c) 2014, Chris Schmidt <chris.schmidt () contrastsecurity.com>
#
# Built using https://github.com/hamnis/useful-scripts/blob/master/python/download-maven-artifact
# as a reference and starting point.
#
# This module is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software. If not, see <http://www.gnu.org/licenses/>.
__author__ = 'cschmidt'
from lxml import etree
import os
import hashlib
import sys
DOCUMENTATION = '''
---
module: maven_artifact
short_description: Downloads an Artifact from a Maven Repository
version_added: "2.0"
description:
- Downloads an artifact from a maven repository given the maven coordinates provided to the module. Can retrieve
- snapshots or release versions of the artifact and will resolve the latest available version if one is not
- available.
author: "Chris Schmidt (@chrisisbeef)"
requirements:
- "python >= 2.6"
- lxml
options:
group_id:
description:
- The Maven groupId coordinate
required: true
artifact_id:
description:
- The maven artifactId coordinate
required: true
version:
description:
- The maven version coordinate
required: false
default: latest
classifier:
description:
- The maven classifier coordinate
required: false
default: null
extension:
description:
- The maven type/extension coordinate
required: false
default: jar
repository_url:
description:
- The URL of the Maven Repository to download from
required: false
default: http://repo1.maven.org/maven2
username:
description:
- The username to authenticate as to the Maven Repository
required: false
default: null
password:
description:
- The password to authenticate with to the Maven Repository
required: false
default: null
dest:
description:
- The path where the artifact should be written to
required: true
default: false
state:
description:
- The desired state of the artifact
required: true
default: present
choices: [present,absent]
validate_certs:
description:
- If C(no), SSL certificates will not be validated. This should only be set to C(no) when no other option exists.
required: false
default: 'yes'
choices: ['yes', 'no']
version_added: "1.9.3"
'''
EXAMPLES = '''
# Download the latest version of the JUnit framework artifact from Maven Central
- maven_artifact: group_id=junit artifact_id=junit dest=/tmp/junit-latest.jar
# Download JUnit 4.11 from Maven Central
- maven_artifact: group_id=junit artifact_id=junit version=4.11 dest=/tmp/junit-4.11.jar
# Download an artifact from a private repository requiring authentication
- maven_artifact: group_id=com.company artifact_id=library-name repository_url=https://repo.company.com/maven username=user password=pass dest=/tmp/library-name-latest.jar
# Download a WAR File to the Tomcat webapps directory to be deployed
- maven_artifact: group_id=com.company artifact_id=web-app extension=war repository_url=https://repo.company.com/maven dest=/var/lib/tomcat7/webapps/web-app.war
'''
class Artifact(object):
    """A set of Maven artifact coordinates.

    Mirrors the ``groupId:artifactId[:type[:classifier]]:version`` notation;
    ``extension`` (the Maven "type") defaults to ``jar`` and ``classifier``
    is optional.
    """

    def __init__(self, group_id, artifact_id, version, classifier=None, extension='jar'):
        if not group_id:
            raise ValueError("group_id must be set")
        if not artifact_id:
            raise ValueError("artifact_id must be set")
        self.group_id = group_id
        self.artifact_id = artifact_id
        self.version = version
        self.classifier = classifier
        # An empty/None extension falls back to the Maven default of "jar".
        self.extension = extension if extension else "jar"

    def is_snapshot(self):
        """Whether the version denotes a mutable SNAPSHOT artifact."""
        return self.version and self.version.endswith("SNAPSHOT")

    def path(self, with_version=True):
        """Repository-relative directory, e.g. ``com/acme/foo/1.0``."""
        base = "%s/%s" % (self.group_id.replace(".", "/"), self.artifact_id)
        if with_version and self.version:
            return "%s/%s" % (base, self.version)
        return base

    def _generate_filename(self):
        # Default file name: artifactId[-classifier].extension (no version).
        if self.classifier:
            return "%s-%s.%s" % (self.artifact_id, self.classifier, self.extension)
        return "%s.%s" % (self.artifact_id, self.extension)

    def get_filename(self, filename=None):
        """Resolve the destination file name.

        No name -> the generated default; an existing directory -> the
        default name inside that directory; anything else is used verbatim.
        """
        if not filename:
            return self._generate_filename()
        if os.path.isdir(filename):
            return os.path.join(filename, self._generate_filename())
        return filename

    def __str__(self):
        # Emit the shortest unambiguous coordinate form: the type is only
        # included when it is non-default or a classifier forces it.
        coords = [self.group_id, self.artifact_id]
        if self.classifier:
            coords += [self.extension, self.classifier]
        elif self.extension != "jar":
            coords.append(self.extension)
        coords.append(self.version)
        return ":".join("%s" % c for c in coords)

    @staticmethod
    def parse(input):
        """Parse ``group:artifact[:type[:classifier]]:version`` into an
        Artifact; returns None when fewer than three parts are given."""
        parts = input.split(":")
        if len(parts) < 3:
            return None
        group, artifact = parts[0], parts[1]
        version = parts[-1]
        extension = None
        classifier = None
        if len(parts) == 4:
            extension = parts[2]
        elif len(parts) == 5:
            extension = parts[2]
            classifier = parts[3]
        return Artifact(group, artifact, version, classifier, extension)
class MavenDownloader:
    """Resolves and downloads artifacts from a Maven repository over HTTP,
    using the Ansible module's fetch_url for all network access."""

    def __init__(self, module, base="http://repo1.maven.org/maven2"):
        # module: the AnsibleModule, needed for fetch_url and its params.
        self.module = module
        # Normalize away a trailing slash so URL joins below stay clean.
        if base.endswith("/"):
            base = base.rstrip("/")
        self.base = base
        self.user_agent = "Maven Artifact Downloader/1.0"

    def _find_latest_version_available(self, artifact):
        """Return the newest version listed in the artifact's
        maven-metadata.xml, or None if the list is empty."""
        path = "/%s/maven-metadata.xml" % (artifact.path(False))
        xml = self._request(self.base + path, "Failed to download maven-metadata.xml", lambda r: etree.parse(r))
        # Last <version> entry in the metadata is the most recent.
        v = xml.xpath("/metadata/versioning/versions/version[last()]/text()")
        if v:
            return v[0]

    def find_uri_for_artifact(self, artifact):
        """Resolve the concrete download URI; for SNAPSHOTs this expands
        the version to the timestamped build recorded in the metadata."""
        if artifact.is_snapshot():
            path = "/%s/maven-metadata.xml" % (artifact.path())
            xml = self._request(self.base + path, "Failed to download maven-metadata.xml", lambda r: etree.parse(r))
            timestamp = xml.xpath("/metadata/versioning/snapshot/timestamp/text()")[0]
            buildNumber = xml.xpath("/metadata/versioning/snapshot/buildNumber/text()")[0]
            return self._uri_for_artifact(artifact, artifact.version.replace("SNAPSHOT", timestamp + "-" + buildNumber))
        else:
            return self._uri_for_artifact(artifact)

    def _uri_for_artifact(self, artifact, version=None):
        """Build the file URI for (artifact, version); SNAPSHOT artifacts
        must supply the resolved timestamped version explicitly."""
        if artifact.is_snapshot() and not version:
            raise ValueError("Expected uniqueversion for snapshot artifact " + str(artifact))
        elif not artifact.is_snapshot():
            version = artifact.version
        if artifact.classifier:
            return self.base + "/" + artifact.path() + "/" + artifact.artifact_id + "-" + version + "-" + artifact.classifier + "." + artifact.extension
        return self.base + "/" + artifact.path() + "/" + artifact.artifact_id + "-" + version + "." + artifact.extension

    def _request(self, url, failmsg, f):
        """Fetch *url* and feed the response to *f*; raises ValueError with
        *failmsg* on any non-200 status."""
        # Hack to add parameters in the way that fetch_url expects
        self.module.params['url_username'] = self.module.params.get('username', '')
        self.module.params['url_password'] = self.module.params.get('password', '')
        self.module.params['http_agent'] = self.module.params.get('user_agent', None)
        response, info = fetch_url(self.module, url)
        if info['status'] != 200:
            raise ValueError(failmsg + " because of " + info['msg'] + "for URL " + url)
        else:
            return f(response)

    def download(self, artifact, filename=None):
        """Download *artifact* to *filename*.

        A 'latest'/empty version is first resolved via the repository
        metadata.  Returns True when the file is present and matches the
        remote MD5 (either already on disk or freshly downloaded).
        """
        filename = artifact.get_filename(filename)
        if not artifact.version or artifact.version == "latest":
            artifact = Artifact(artifact.group_id, artifact.artifact_id, self._find_latest_version_available(artifact),
                                artifact.classifier, artifact.extension)

        url = self.find_uri_for_artifact(artifact)
        # Skip the download entirely when the local copy's MD5 matches.
        if not self.verify_md5(filename, url + ".md5"):
            response = self._request(url, "Failed to download artifact " + str(artifact), lambda r: r)
            if response:
                # NOTE(review): opened in text mode 'w' for binary content --
                # correct only under Python 2's str semantics; verify.
                with open(filename, 'w') as f:
                    # f.write(response.read())
                    self._write_chunks(response, f, report_hook=self.chunk_report)
                return True
            else:
                return False
        else:
            return True

    def chunk_report(self, bytes_so_far, chunk_size, total_size):
        """Progress hook: prints a carriage-returned percentage line."""
        percent = float(bytes_so_far) / total_size
        percent = round(percent * 100, 2)
        sys.stdout.write("Downloaded %d of %d bytes (%0.2f%%)\r" %
                         (bytes_so_far, total_size, percent))
        if bytes_so_far >= total_size:
            sys.stdout.write('\n')

    def _write_chunks(self, response, file, chunk_size=8192, report_hook=None):
        """Stream *response* into *file* in chunk_size pieces, invoking
        report_hook after each chunk; returns the byte count written."""
        total_size = response.info().getheader('Content-Length').strip()
        total_size = int(total_size)
        bytes_so_far = 0

        while 1:
            chunk = response.read(chunk_size)
            bytes_so_far += len(chunk)
            if not chunk:
                break
            file.write(chunk)
            if report_hook:
                report_hook(bytes_so_far, chunk_size, total_size)
        return bytes_so_far

    def verify_md5(self, file, remote_md5):
        """Compare the local file's MD5 against the repository's .md5
        sidecar; False when the file is missing or the digests differ.
        (MD5 is used for integrity only -- it is what Maven repos publish.)"""
        if not os.path.exists(file):
            return False
        else:
            local_md5 = self._local_md5(file)
            remote = self._request(remote_md5, "Failed to download MD5", lambda r: r.read())
            return local_md5 == remote

    def _local_md5(self, file):
        """Hex MD5 digest of *file*, read in 8 KiB chunks."""
        md5 = hashlib.md5()
        with open(file, 'rb') as f:
            # NOTE(review): the '' sentinel matches Python 2; on Python 3 a
            # binary file yields b'' and this would loop forever -- confirm
            # the targeted interpreter.
            for chunk in iter(lambda: f.read(8192), ''):
                md5.update(chunk)
        return md5.hexdigest()
def main():
    """Entry point: parse module arguments, resolve the artifact, and
    download it unless a matching copy already exists at dest."""
    module = AnsibleModule(
        argument_spec = dict(
            group_id = dict(default=None),
            artifact_id = dict(default=None),
            version = dict(default="latest"),
            classifier = dict(default=None),
            extension = dict(default='jar'),
            repository_url = dict(default=None),
            username = dict(default=None),
            password = dict(default=None),
            state = dict(default="present", choices=["present","absent"]), # TODO - Implement a "latest" state
            dest = dict(type="path", default=None),
            validate_certs = dict(required=False, default=True, type='bool'),
        )
    )

    group_id = module.params["group_id"]
    artifact_id = module.params["artifact_id"]
    version = module.params["version"]
    classifier = module.params["classifier"]
    extension = module.params["extension"]
    repository_url = module.params["repository_url"]
    repository_username = module.params["username"]
    repository_password = module.params["password"]
    state = module.params["state"]
    dest = module.params["dest"]

    if not repository_url:
        repository_url = "http://repo1.maven.org/maven2"

    #downloader = MavenDownloader(module, repository_url, repository_username, repository_password)
    downloader = MavenDownloader(module, repository_url)

    try:
        artifact = Artifact(group_id, artifact_id, version, classifier, extension)
    except ValueError as e:
        # Invalid coordinates (missing group/artifact id).
        module.fail_json(msg=e.args[0])

    prev_state = "absent"
    # A directory dest gets an explicit file name appended.
    # NOTE(review): a literal 'latest' version ends up in this file name --
    # it is not resolved to the concrete version first; confirm intent.
    if os.path.isdir(dest):
        dest = dest + "/" + artifact_id + "-" + version + "." + extension
    if os.path.lexists(dest):
        # Release artifacts are immutable, so existence alone is enough;
        # SNAPSHOTs must additionally match the remote MD5.
        if not artifact.is_snapshot():
            prev_state = "present"
        elif downloader.verify_md5(dest, downloader.find_uri_for_artifact(artifact) + '.md5'):
            prev_state = "present"
    else:
        # Ensure the parent directory exists before downloading.
        path = os.path.dirname(dest)
        if not os.path.exists(path):
            os.makedirs(path)

    if prev_state == "present":
        # Idempotent no-op: the requested artifact is already in place.
        module.exit_json(dest=dest, state=state, changed=False)

    try:
        if downloader.download(artifact, dest):
            module.exit_json(state=state, dest=dest, group_id=group_id, artifact_id=artifact_id, version=version, classifier=classifier, extension=extension, repository_url=repository_url, changed=True)
        else:
            module.fail_json(msg="Unable to download the artifact")
    except ValueError as e:
        module.fail_json(msg=e.args[0])
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.urls import *
if __name__ == '__main__':
main()
| gpl-3.0 |
armersong/oauthlib | tests/oauth2/rfc6749/test_parameters.py | 10 | 12277 | from __future__ import absolute_import, unicode_literals
from mock import patch
from ...unittest import TestCase
from oauthlib.oauth2.rfc6749.parameters import *
from oauthlib.oauth2.rfc6749.errors import *
from oauthlib import signals
# time.time is pinned to 1000 so expires_at (= time + expires_in) is
# deterministic: 1000 + 3600 = 4600 in the expected dicts below.
@patch('time.time', new=lambda: 1000)
class ParameterTests(TestCase):
    """Tests for RFC 6749 parameter helpers: grant URI construction, token
    request bodies, and parsing/validation of authorization, implicit,
    JSON and form-encoded token responses."""

    # --- authorization request fixtures -----------------------------------
    state = 'xyz'
    auth_base = {
        'uri': 'https://server.example.com/authorize',
        'client_id': 's6BhdRkqt3',
        'redirect_uri': 'https://client.example.com/cb',
        'state': state,
        'scope': 'photos'
    }
    list_scope = ['list', 'of', 'scopes']
    auth_grant = {'response_type': 'code'}
    auth_grant_list_scope = {}
    auth_implicit = {'response_type': 'token', 'extra': 'extra'}
    auth_implicit_list_scope = {}

    def setUp(self):
        # The *_list_scope variants share auth_base but carry a list scope.
        self.auth_grant.update(self.auth_base)
        self.auth_implicit.update(self.auth_base)
        self.auth_grant_list_scope.update(self.auth_grant)
        self.auth_grant_list_scope['scope'] = self.list_scope
        self.auth_implicit_list_scope.update(self.auth_implicit)
        self.auth_implicit_list_scope['scope'] = self.list_scope

    # Expected URIs produced by prepare_grant_uri for the fixtures above.
    auth_base_uri = ('https://server.example.com/authorize?response_type={0}'
                     '&client_id=s6BhdRkqt3&redirect_uri=https%3A%2F%2F'
                     'client.example.com%2Fcb&scope={1}&state={2}{3}')

    auth_grant_uri = auth_base_uri.format('code', 'photos', state, '')
    auth_grant_uri_list_scope = auth_base_uri.format('code', 'list+of+scopes', state, '')
    auth_implicit_uri = auth_base_uri.format('token', 'photos', state, '&extra=extra')
    auth_implicit_uri_list_scope = auth_base_uri.format('token', 'list+of+scopes', state, '&extra=extra')

    # --- token request fixtures -------------------------------------------
    grant_body = {
        'grant_type': 'authorization_code',
        'code': 'SplxlOBeZQQYbYS6WxSbIA',
        'redirect_uri': 'https://client.example.com/cb'
    }
    grant_body_scope = {'scope': 'photos'}
    grant_body_list_scope = {'scope': list_scope}
    auth_grant_body = ('grant_type=authorization_code&'
                       'code=SplxlOBeZQQYbYS6WxSbIA&'
                       'redirect_uri=https%3A%2F%2Fclient.example.com%2Fcb')
    auth_grant_body_scope = auth_grant_body + '&scope=photos'
    auth_grant_body_list_scope = auth_grant_body + '&scope=list+of+scopes'

    pwd_body = {
        'grant_type': 'password',
        'username': 'johndoe',
        'password': 'A3ddj3w'
    }
    password_body = 'grant_type=password&username=johndoe&password=A3ddj3w'

    cred_grant = {'grant_type': 'client_credentials'}
    cred_body = 'grant_type=client_credentials'

    # --- authorization code response fixtures ------------------------------
    grant_response = 'https://client.example.com/cb?code=SplxlOBeZQQYbYS6WxSbIA&state=xyz'
    grant_dict = {'code': 'SplxlOBeZQQYbYS6WxSbIA', 'state': state}

    error_nocode = 'https://client.example.com/cb?state=xyz'
    error_nostate = 'https://client.example.com/cb?code=SplxlOBeZQQYbYS6WxSbIA'
    error_wrongstate = 'https://client.example.com/cb?code=SplxlOBeZQQYbYS6WxSbIA&state=abc'
    error_response = 'https://client.example.com/cb?error=access_denied&state=xyz'

    # --- implicit grant response fixtures (token in the URI fragment) ------
    implicit_base = 'https://example.com/cb#access_token=2YotnFZFEjr1zCsicMWpAA&scope=abc&'
    implicit_response = implicit_base + 'state={0}&token_type=example&expires_in=3600'.format(state)
    implicit_notype = implicit_base + 'state={0}&expires_in=3600'.format(state)
    implicit_wrongstate = implicit_base + 'state={0}&token_type=exampleexpires_in=3600'.format('invalid')
    implicit_nostate = implicit_base + 'token_type=example&expires_in=3600'
    implicit_notoken = 'https://example.com/cb#state=xyz&token_type=example&expires_in=3600'

    implicit_dict = {
        'access_token': '2YotnFZFEjr1zCsicMWpAA',
        'state': state,
        'token_type': 'example',
        'expires_in': '3600',
        'expires_at': 4600,
        'scope': ['abc']
    }

    # --- JSON token response fixtures --------------------------------------
    json_response = ('{ "access_token": "2YotnFZFEjr1zCsicMWpAA",'
                     ' "token_type": "example",'
                     ' "expires_in": 3600,'
                     ' "refresh_token": "tGzv3JOkF0XG5Qx2TlKWIA",'
                     ' "example_parameter": "example_value",'
                     ' "scope":"abc def"}')

    json_response_noscope = ('{ "access_token": "2YotnFZFEjr1zCsicMWpAA",'
                             ' "token_type": "example",'
                             ' "expires_in": 3600,'
                             ' "refresh_token": "tGzv3JOkF0XG5Qx2TlKWIA",'
                             ' "example_parameter": "example_value" }')

    json_error = '{ "error": "access_denied" }'

    json_notoken = ('{ "token_type": "example",'
                    ' "expires_in": 3600,'
                    ' "refresh_token": "tGzv3JOkF0XG5Qx2TlKWIA",'
                    ' "example_parameter": "example_value" }')

    json_notype = ('{  "access_token": "2YotnFZFEjr1zCsicMWpAA",'
                   ' "expires_in": 3600,'
                   ' "refresh_token": "tGzv3JOkF0XG5Qx2TlKWIA",'
                   ' "example_parameter": "example_value" }')

    json_expires = ('{ "access_token": "2YotnFZFEjr1zCsicMWpAA",'
                    ' "token_type": "example",'
                    ' "expires": 3600,'
                    ' "refresh_token": "tGzv3JOkF0XG5Qx2TlKWIA",'
                    ' "example_parameter": "example_value",'
                    ' "scope":"abc def"}')

    json_dict = {
        'access_token': '2YotnFZFEjr1zCsicMWpAA',
        'token_type': 'example',
        'expires_in': 3600,
        'expires_at': 4600,
        'refresh_token': 'tGzv3JOkF0XG5Qx2TlKWIA',
        'example_parameter': 'example_value',
        'scope': ['abc', 'def']
    }

    json_noscope_dict = {
        'access_token': '2YotnFZFEjr1zCsicMWpAA',
        'token_type': 'example',
        'expires_in': 3600,
        'expires_at': 4600,
        'refresh_token': 'tGzv3JOkF0XG5Qx2TlKWIA',
        'example_parameter': 'example_value'
    }

    json_notype_dict = {
        'access_token': '2YotnFZFEjr1zCsicMWpAA',
        'expires_in': 3600,
        'expires_at': 4600,
        'refresh_token': 'tGzv3JOkF0XG5Qx2TlKWIA',
        'example_parameter': 'example_value',
    }

    # --- form-encoded token response fixtures ------------------------------
    url_encoded_response = ('access_token=2YotnFZFEjr1zCsicMWpAA'
                            '&token_type=example'
                            '&expires_in=3600'
                            '&refresh_token=tGzv3JOkF0XG5Qx2TlKWIA'
                            '&example_parameter=example_value'
                            '&scope=abc def')

    url_encoded_error = 'error=access_denied'

    url_encoded_notoken = ('token_type=example'
                           '&expires_in=3600'
                           '&refresh_token=tGzv3JOkF0XG5Qx2TlKWIA'
                           '&example_parameter=example_value')

    def test_prepare_grant_uri(self):
        """Verify correct authorization URI construction."""
        self.assertURLEqual(prepare_grant_uri(**self.auth_grant), self.auth_grant_uri)
        self.assertURLEqual(prepare_grant_uri(**self.auth_grant_list_scope), self.auth_grant_uri_list_scope)
        self.assertURLEqual(prepare_grant_uri(**self.auth_implicit), self.auth_implicit_uri)
        self.assertURLEqual(prepare_grant_uri(**self.auth_implicit_list_scope), self.auth_implicit_uri_list_scope)

    def test_prepare_token_request(self):
        """Verify correct access token request body construction."""
        self.assertFormBodyEqual(prepare_token_request(**self.grant_body), self.auth_grant_body)
        self.assertFormBodyEqual(prepare_token_request(**self.pwd_body), self.password_body)
        self.assertFormBodyEqual(prepare_token_request(**self.cred_grant), self.cred_body)

    def test_grant_response(self):
        """Verify correct parameter parsing and validation for auth code responses."""
        params = parse_authorization_code_response(self.grant_response)
        self.assertEqual(params, self.grant_dict)
        params = parse_authorization_code_response(self.grant_response, state=self.state)
        self.assertEqual(params, self.grant_dict)

        self.assertRaises(MissingCodeError, parse_authorization_code_response,
                self.error_nocode)
        self.assertRaises(MissingCodeError, parse_authorization_code_response,
                self.error_response)
        self.assertRaises(MismatchingStateError, parse_authorization_code_response,
                self.error_nostate, state=self.state)
        self.assertRaises(MismatchingStateError, parse_authorization_code_response,
                self.error_wrongstate, state=self.state)

    def test_implicit_token_response(self):
        """Verify correct parameter parsing and validation for implicit responses."""
        self.assertEqual(parse_implicit_response(self.implicit_response),
                self.implicit_dict)
        self.assertRaises(MissingTokenError, parse_implicit_response,
                self.implicit_notoken)
        self.assertRaises(ValueError, parse_implicit_response,
                self.implicit_nostate, state=self.state)
        self.assertRaises(ValueError, parse_implicit_response,
                self.implicit_wrongstate, state=self.state)

    def test_json_token_response(self):
        """Verify correct parameter parsing and validation for token responses. """
        self.assertEqual(parse_token_response(self.json_response), self.json_dict)
        self.assertRaises(AccessDeniedError, parse_token_response, self.json_error)
        self.assertRaises(MissingTokenError, parse_token_response, self.json_notoken)

        self.assertEqual(parse_token_response(self.json_response_noscope,
            scope=['all', 'the', 'scopes']), self.json_noscope_dict)

        # With OAUTHLIB_RELAX_TOKEN_SCOPE set, a scope mismatch emits the
        # scope_changed signal instead of raising; record and verify it.
        scope_changes_recorded = []
        def record_scope_change(sender, message, old, new):
            scope_changes_recorded.append((message, old, new))

        os.environ['OAUTHLIB_RELAX_TOKEN_SCOPE'] = '1'
        signals.scope_changed.connect(record_scope_change)
        try:
            parse_token_response(self.json_response, scope='aaa')
            self.assertEqual(len(scope_changes_recorded), 1)
            message, old, new = scope_changes_recorded[0]
            for scope in new + old:
                self.assertIn(scope, message)
            self.assertEqual(old, ['aaa'])
            self.assertEqual(set(new), set(['abc', 'def']))
        finally:
            signals.scope_changed.disconnect(record_scope_change)
        del os.environ['OAUTHLIB_RELAX_TOKEN_SCOPE']

    def test_json_token_notype(self):
        """Verify strict token type parsing only when configured. """
        self.assertEqual(parse_token_response(self.json_notype), self.json_notype_dict)
        try:
            os.environ['OAUTHLIB_STRICT_TOKEN_TYPE'] = '1'
            self.assertRaises(MissingTokenTypeError, parse_token_response, self.json_notype)
        finally:
            del os.environ['OAUTHLIB_STRICT_TOKEN_TYPE']

    def test_url_encoded_token_response(self):
        """Verify fallback parameter parsing and validation for token responses. """
        self.assertEqual(parse_token_response(self.url_encoded_response), self.json_dict)
        self.assertRaises(AccessDeniedError, parse_token_response, self.url_encoded_error)
        self.assertRaises(MissingTokenError, parse_token_response, self.url_encoded_notoken)

        # Same relaxed-scope signal check as in test_json_token_response,
        # but for the form-encoded fallback parser.
        scope_changes_recorded = []
        def record_scope_change(sender, message, old, new):
            scope_changes_recorded.append((message, old, new))

        os.environ['OAUTHLIB_RELAX_TOKEN_SCOPE'] = '1'
        signals.scope_changed.connect(record_scope_change)
        try:
            token = parse_token_response(self.url_encoded_response, scope='aaa')
            self.assertEqual(len(scope_changes_recorded), 1)
            message, old, new = scope_changes_recorded[0]
            for scope in new + old:
                self.assertIn(scope, message)
            self.assertEqual(old, ['aaa'])
            self.assertEqual(set(new), set(['abc', 'def']))
        finally:
            signals.scope_changed.disconnect(record_scope_change)
        del os.environ['OAUTHLIB_RELAX_TOKEN_SCOPE']

    def test_token_response_with_expires(self):
        """Verify fallback for alternate spelling of expires_in. """
        self.assertEqual(parse_token_response(self.json_expires), self.json_dict)
| bsd-3-clause |
cecep-edu/edx-platform | docs/shared/conf.py | 158 | 10580 | # -*- coding: utf-8 -*-
#
# getting_started documentation build configuration file, created by
# sphinx-quickstart on Tue Apr 16 11:19:12 2013.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# -----------------------------------------------------------------------------
# Common config
#
# This file is imported by the different project conf.py files (in
# course_authors/, data/, and developers/). It includes configuration options
# common to all three.
#
# -----------------------------------------------------------------------------
import os
# Absolute path of the directory containing this configuration file; all
# doc-related paths below are resolved against it.
BASEDIR = os.path.dirname(os.path.abspath(__file__))
def add_base(paths):
    """Return absolute paths built by joining each element onto BASEDIR.

    paths: an iterable of path fragments relative to this directory.

    Note: the results are absolute paths rooted at BASEDIR, not paths
    "relative to BASEDIR" as the old docstring claimed.
    """
    return [os.path.join(BASEDIR, p) for p in paths]
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.intersphinx', 'sphinx.ext.coverage', 'sphinx.ext.pngmath', 'sphinx.ext.mathjax', 'sphinx.ext.ifconfig']
# Add any paths that contain templates here, relative to this directory.
templates_path = add_base(['_templates'])
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'edX'
copyright = u'2013, EdX Doc Team'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.1'
# The full version, including alpha/beta/rc tags.
release = '0.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<Studio> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
#html_static_path = add_base(['_static'])
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'edxdoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
(
'index',
'getting_started.tex',
u'edX Studio Documentation',
u'EdX Doc Team',
'manual',
),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'getting_started', u'getting_started Documentation',
[u'EdX Doc Team'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(
'index',
'getting_started',
u'getting_started Documentation',
u'EdX Doc Team',
'getting_started',
'One line description of project.',
'Miscellaneous',
),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
# -- Options for Epub output ---------------------------------------------------
# Bibliographic Dublin Core info.
epub_title = u'getting_started'
epub_author = u'EdX Doc Team'
epub_publisher = u'EdX Doc Team'
epub_copyright = u'2013, EdX Doc Team'
# The language of the text. It defaults to the language option
# or en if the language is not set.
#epub_language = ''
# The scheme of the identifier. Typical schemes are ISBN or URL.
#epub_scheme = ''
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#epub_identifier = ''
# A unique identification for the text.
#epub_uid = ''
# A tuple containing the cover image and cover page html template filenames.
#epub_cover = ()
# A sequence of (type, uri, title) tuples for the guide element of content.opf.
#epub_guide = ()
# HTML files that should be inserted before the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_pre_files = []
# HTML files shat should be inserted after the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_post_files = []
# A list of files that should not be packed into the epub file.
#epub_exclude_files = []
# The depth of the table of contents in toc.ncx.
#epub_tocdepth = 3
# Allow duplicate toc entries.
#epub_tocdup = True
# Fix unsupported image types using the PIL.
#epub_fix_images = False
# Scale large images.
#epub_max_image_width = 0
# If 'no', URL addresses will not be shown.
#epub_show_urls = 'inline'
# If false, no index is generated.
#epub_use_index = True
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'http://docs.python.org/': None}
| agpl-3.0 |
virajprabhu/oppia | core/counters.py | 30 | 3121 | # coding: utf-8
#
# Copyright 2014 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Services for performance counters."""
__author__ = 'Sean Lip'
class PerfCounter(object):
    """A numeric counter that lives only in the current process.

    Counter values are not shared or aggregated across application
    instances.
    """
    # TODO(sll): Add aggregation across instances.

    def __init__(self, name, description):
        """Create a counter and register it globally.

        Raises Exception if a counter with this name is already registered.
        """
        if name in Registry._counters:
            raise Exception('Counter %s already exists.' % name)
        self._name = name
        self._description = description
        self._value = 0
        # Registration makes the counter discoverable via Registry.
        Registry._counters[self.name] = self

    @property
    def name(self):
        """The unique name under which this counter is registered."""
        return self._name

    @property
    def description(self):
        """A human-readable description of what is being counted."""
        return self._description

    @property
    def value(self):
        """The counter's current numeric value."""
        return self._value

    def inc(self, increment=1):
        """Increments the counter value by a given increment."""
        self._value = self._value + increment
class Registry(object):
    """Registry of all counters created in this process."""

    # Maps counter name -> PerfCounter instance; populated as a side effect
    # of PerfCounter.__init__.
    _counters = {}

    @classmethod
    def get_all_counters(cls):
        """Return every registered counter object."""
        return cls._counters.values()
# ---------------------------------------------------------------------------
# Module-level counters.  Each PerfCounter registers itself with Registry at
# construction time, so importing this module defines the full set below.
# ---------------------------------------------------------------------------

# Memcache read activity.
MEMCACHE_HIT = PerfCounter(
    'memcache-hit',
    'Number of times an object was found in memcache')
MEMCACHE_MISS = PerfCounter(
    'memcache-miss',
    'Number of times an object was not found in memcache')
# Memcache write and delete activity.
MEMCACHE_SET_SUCCESS = PerfCounter(
    'memcache-set-success',
    'Number of times an object was successfully put in memcache')
MEMCACHE_SET_FAILURE = PerfCounter(
    'memcache-set-failure',
    'Number of times an object failed to be put in memcache')
MEMCACHE_DELETE_SUCCESS = PerfCounter(
    'memcache-delete-success',
    'Number of times an object was successfully deleted from memcache')
MEMCACHE_DELETE_MISSING = PerfCounter(
    'memcache-delete-missing',
    'Number of attempts to delete a non-existent object from memcache')
MEMCACHE_DELETE_FAILURE = PerfCounter(
    'memcache-delete-failure',
    'Number of times an object failed to be deleted from memcache')
# Response accounting, split by payload type (time totals plus counts).
HTML_RESPONSE_TIME_SECS = PerfCounter(
    'html-response-time-secs',
    'Total processing time for all HTML responses, in seconds')
HTML_RESPONSE_COUNT = PerfCounter(
    'html-response-count',
    'Number of times a HTML response was sent out')
JSON_RESPONSE_TIME_SECS = PerfCounter(
    'json-response-time-secs',
    'Total processing time for all JSON responses, in seconds')
JSON_RESPONSE_COUNT = PerfCounter(
    'json-response-count',
    'Number of times a JSON response was sent out')
# Outgoing email.
EMAILS_SENT = PerfCounter(
    'emails-sent',
    'Number of times a call to send_mail() was made')
| apache-2.0 |
JulyKikuAkita/PythonPrac | cs15211/MaximumWidthRamp.py | 1 | 3437 | __source__ = 'https://leetcode.com/problems/maximum-width-ramp/'
# Time: O()
# Space: O()
#
# Description: Leetcode # 962. Maximum Width Ramp
#
# Given an array A of integers, a ramp is a tuple (i, j) for which i < j and A[i] <= A[j].
# The width of such a ramp is j - i.
#
# Find the maximum width of a ramp in A. If one doesn't exist, return 0.
#
# Example 1:
#
# Input: [6,0,8,2,1,5]
# Output: 4
# Explanation:
# The maximum width ramp is achieved at (i, j) = (1, 5): A[1] = 0 and A[5] = 5.
#
# Example 2:
#
# Input: [9,8,1,0,1,9,4,0,4,1]
# Output: 7
# Explanation:
# The maximum width ramp is achieved at (i, j) = (2, 9): A[2] = 1 and A[9] = 1.
#
# Note:
#
# 2 <= A.length <= 50000
# 0 <= A[i] <= 50000
#
import unittest
# 260ms 40.40%
class Solution(object):
    def maxWidthRamp(self, A):
        """
        :type A: List[int]
        :rtype: int

        Visit indices in order of their value (Python's sort is stable, so
        equal values keep index order).  For each visited index, the widest
        ramp ending there pairs it with the smallest index seen so far.
        """
        best = 0
        smallest_seen = float('inf')
        for idx in sorted(range(len(A)), key=lambda j: A[j]):
            width = idx - smallest_seen
            if width > best:
                best = width
            if idx < smallest_seen:
                smallest_seen = idx
        return best
class TestMethods(unittest.TestCase):
    """Minimal sanity-check suite for this exercise file."""
    def test_Local(self):
        # Placeholder assertion so the file always has one runnable test.
        self.assertEqual(1, 1)
if __name__ == '__main__':
unittest.main()
Java = '''
# Thought: https://leetcode.com/problems/maximum-width-ramp/solution/
#
Approach 1: Sort
Complexity Analysis
Time Complexity: O(NLogN), where N is the length of A.
Space Complexity: O(N), depending on the implementation of the sorting function.
# 160ms 40.70%
class Solution {
public int maxWidthRamp(int[] A) {
int N = A.length;
Integer[] B = new Integer[N];
for (int i = 0; i < N; ++i) B[i] = i;
Arrays.sort(B, (i, j) -> ((Integer) A[i]).compareTo(A[j]));
int ans = 0;
int m = N;
for (int i: B) {
ans = Math.max(ans, i - m);
m = Math.min(m, i);
}
return ans;
}
}
Approach 2: Binary Search Candidates
Complexity Analysis
Time Complexity: O(NlogN), where N is the length of A.
Space Complexity: O(N)
# 38ms 73.26%
import java.awt.Point;
class Solution {
public int maxWidthRamp(int[] A) {
int N = A.length;
int ans = 0;
List<Point> candidates = new ArrayList();
candidates.add(new Point(A[N-1], N-1));
// candidates: i's decreasing, by increasing value of A[i]
for (int i = N-2; i >= 0; --i) {
// Find largest j in candidates with A[j] >= A[i]
int lo = 0, hi = candidates.size();
while (lo < hi) {
int mi = lo + (hi - lo) / 2;
if (candidates.get(mi).x < A[i])
lo = mi + 1;
else
hi = mi;
}
if (lo < candidates.size()) {
int j = candidates.get(lo).y;
ans = Math.max(ans, j - i);
} else {
candidates.add(new Point(A[i], i));
}
}
return ans;
}
}
# 12ms 94.64%
class Solution {
public int maxWidthRamp(int[] A) {
int[] s = new int[A.length];
int ptr = 0;
int res = 0, n = A.length;
for (int i = 0; i < n; ++i){
if (ptr == 0 || A[s[ptr-1]] > A[i]) s[ptr++] = i;
}
for (int i = n - 1; i > res; --i) {
while (ptr != 0 && A[s[ptr-1]] <= A[i]) {
res = Math.max(res, i - s[--ptr]);
}
}
return res;
}
}
'''
| apache-2.0 |
Zhongqilong/mykbengineer | kbe/res/scripts/common/Lib/tkinter/__init__.py | 67 | 163513 | """Wrapper functions for Tcl/Tk.
Tkinter provides classes which allow the display, positioning and
control of widgets. Toplevel widgets are Tk and Toplevel. Other
widgets are Frame, Label, Entry, Text, Canvas, Button, Radiobutton,
Checkbutton, Scale, Listbox, Scrollbar, OptionMenu, Spinbox
LabelFrame and PanedWindow.
Properties of the widgets are specified with keyword arguments.
Keyword arguments have the same name as the corresponding resource
under Tk.
Widgets are positioned with one of the geometry managers Place, Pack
or Grid. These managers can be called with methods place, pack, grid
available in every Widget.
Actions are bound to events by resources (e.g. keyword argument
command) or with the method bind.
Example (Hello, World):
import tkinter
from tkinter.constants import *
tk = tkinter.Tk()
frame = tkinter.Frame(tk, relief=RIDGE, borderwidth=2)
frame.pack(fill=BOTH,expand=1)
label = tkinter.Label(frame, text="Hello, World")
label.pack(fill=X, expand=1)
button = tkinter.Button(frame,text="Exit",command=tk.destroy)
button.pack(side=BOTTOM)
tk.mainloop()
"""
import sys
if sys.platform == "win32":
# Attempt to configure Tcl/Tk without requiring PATH
from tkinter import _fix
import _tkinter # If this fails your Python may not be configured for Tk
TclError = _tkinter.TclError
from tkinter.constants import *
import re
wantobjects = 1
TkVersion = float(_tkinter.TK_VERSION)
TclVersion = float(_tkinter.TCL_VERSION)
READABLE = _tkinter.READABLE
WRITABLE = _tkinter.WRITABLE
EXCEPTION = _tkinter.EXCEPTION
_magic_re = re.compile(r'([\\{}])')
_space_re = re.compile(r'([\s])', re.ASCII)
def _join(value):
    """Internal helper: Tcl-quote each element of *value* and join the
    results with single spaces, forming one Tcl list body."""
    return ' '.join(_stringify(item) for item in value)
def _stringify(value):
    """Internal function.

    Convert a Python value into a single Tcl word, adding brace or
    backslash quoting as required by Tcl list syntax.
    """
    if isinstance(value, (list, tuple)):
        if len(value) == 1:
            value = _stringify(value[0])
            # A result that already starts with '{' must be wrapped again so
            # Tcl does not re-parse it as a nested list.
            if value[0] == '{':
                value = '{%s}' % value
        else:
            # Multi-element sequences become a brace-quoted sub-list.
            value = '{%s}' % _join(value)
    else:
        value = str(value)
        if not value:
            # The empty string is spelled {} as a Tcl word.
            value = '{}'
        elif _magic_re.search(value):
            # add '\' before special characters and spaces
            value = _magic_re.sub(r'\\\1', value)
            value = _space_re.sub(r'\\\1', value)
        elif value[0] == '"' or _space_re.search(value):
            # Words containing whitespace, or starting with '"', get
            # brace-quoted instead of backslash-escaped.
            value = '{%s}' % value
    return value
def _flatten(seq):
"""Internal function."""
res = ()
for item in seq:
if isinstance(item, (tuple, list)):
res = res + _flatten(item)
elif item is not None:
res = res + (item,)
return res
try: _flatten = _tkinter._flatten
except AttributeError: pass
def _cnfmerge(cnfs):
    """Internal function.

    Merge a configuration spec -- a dict, None, a string, or an arbitrarily
    nested sequence of dicts -- into a single dict (dicts/None/str pass
    through unchanged).
    """
    if isinstance(cnfs, dict):
        return cnfs
    elif isinstance(cnfs, (type(None), str)):
        return cnfs
    else:
        cnf = {}
        for c in _flatten(cnfs):
            try:
                cnf.update(c)
            except (AttributeError, TypeError) as msg:
                # Historical fallback for mapping-like objects that
                # dict.update() cannot consume directly: copy item by item.
                print("_cnfmerge: fallback due to:", msg)
                for k, v in c.items():
                    cnf[k] = v
        return cnf
try: _cnfmerge = _tkinter._cnfmerge
except AttributeError: pass
def _splitdict(tk, v, cut_minus=True, conv=None):
"""Return a properly formatted dict built from Tcl list pairs.
If cut_minus is True, the supposed '-' prefix will be removed from
keys. If conv is specified, it is used to convert values.
Tcl list is expected to contain an even number of elements.
"""
t = tk.splitlist(v)
if len(t) % 2:
raise RuntimeError('Tcl list representing a dict is expected '
'to contain an even number of elements')
it = iter(t)
dict = {}
for key, value in zip(it, it):
key = str(key)
if cut_minus and key[0] == '-':
key = key[1:]
if conv:
value = conv(value)
dict[key] = value
return dict
class Event:
    """Container for the properties of an event.

    The Tk event loop creates an instance of this class and passes it as
    the single argument to any callback registered with bind, bind_all,
    bind_class, or tag_bind.  Event types generating instances:

    KeyPress, KeyRelease - keyboard events
    ButtonPress, ButtonRelease, Motion, Enter, Leave, MouseWheel - mouse events
    Visibility, Unmap, Map, Expose, FocusIn, FocusOut, Circulate,
    Colormap, Gravity, Reparent, Property, Destroy, Activate,
    Deactivate - window events

    Attributes (in parentheses: the event types for which the attribute
    is valid):

    serial - serial number of event
    num - mouse button pressed (ButtonPress, ButtonRelease)
    focus - whether the window has the focus (Enter, Leave)
    height - height of the exposed window (Configure, Expose)
    width - width of the exposed window (Configure, Expose)
    keycode - keycode of the pressed key (KeyPress, KeyRelease)
    state - state of the event as a number (ButtonPress, ButtonRelease,
            Enter, KeyPress, KeyRelease, Leave, Motion)
            or as a string (Visibility)
    time - when the event occurred
    x - x-position of the mouse
    y - y-position of the mouse
    x_root - x-position of the mouse on the screen
            (ButtonPress, ButtonRelease, KeyPress, KeyRelease, Motion)
    y_root - y-position of the mouse on the screen
            (ButtonPress, ButtonRelease, KeyPress, KeyRelease, Motion)
    char - pressed character (KeyPress, KeyRelease)
    send_event - see X/Windows documentation
    keysym - keysym of the event as a string (KeyPress, KeyRelease)
    keysym_num - keysym of the event as a number (KeyPress, KeyRelease)
    type - type of the event as a number
    widget - widget in which the event occurred
    delta - delta of wheel movement (MouseWheel)
    """
    pass
_support_default_root = 1
_default_root = None
def NoDefaultRoot():
    """Inhibit setting of default root window.

    Call this function to inhibit that the first instance of
    Tk is used for windows without an explicit parent window.
    """
    global _support_default_root
    _support_default_root = 0
    global _default_root
    _default_root = None
    # Remove the module-level binding entirely so a later implicit use of a
    # default root fails with NameError instead of silently seeing None.
    del _default_root
def _tkerror(err):
"""Internal function."""
pass
def _exit(code=0):
"""Internal function. Calling it will raise the exception SystemExit."""
try:
code = int(code)
except ValueError:
pass
raise SystemExit(code)
_varnum = 0
class Variable:
    """Class to define value holders for e.g. buttons.

    Subclasses StringVar, IntVar, DoubleVar, BooleanVar are specializations
    that constrain the type of the value returned from get()."""
    # Class-level defaults; _tk stays None until __init__ binds a master,
    # which lets __del__ detect a half-constructed instance.
    _default = ""
    _tk = None
    _tclCommands = None
    def __init__(self, master=None, value=None, name=None):
        """Construct a variable

        MASTER can be given as master widget.
        VALUE is an optional value (defaults to "")
        NAME is an optional Tcl name (defaults to PY_VARnum).

        If NAME matches an existing variable and VALUE is omitted
        then the existing value is retained.
        """
        # check for type of NAME parameter to override weird error message
        # raised from Modules/_tkinter.c:SetVar like:
        # TypeError: setvar() takes exactly 3 arguments (2 given)
        if name is not None and not isinstance(name, str):
            raise TypeError("name must be a string")
        global _varnum
        if not master:
            master = _default_root
        self._root = master._root()
        self._tk = master.tk
        if name:
            self._name = name
        else:
            # Generate a process-unique Tcl variable name.
            self._name = 'PY_VAR' + repr(_varnum)
            _varnum += 1
        if value is not None:
            self.initialize(value)
        elif not self._tk.getboolean(self._tk.call("info", "exists", self._name)):
            # Named variable does not yet exist on the Tcl side: seed it
            # with the subclass default.
            self.initialize(self._default)
    def __del__(self):
        """Unset the variable in Tcl."""
        if self._tk is None:
            # __init__ never completed; nothing to clean up.
            return
        if self._tk.getboolean(self._tk.call("info", "exists", self._name)):
            self._tk.globalunsetvar(self._name)
        if self._tclCommands is not None:
            for name in self._tclCommands:
                #print '- Tkinter: deleted command', name
                self._tk.deletecommand(name)
            self._tclCommands = None
    def __str__(self):
        """Return the name of the variable in Tcl."""
        return self._name
    def set(self, value):
        """Set the variable to VALUE."""
        return self._tk.globalsetvar(self._name, value)
    # Alias: __init__ calls initialize() so subclasses may override initial
    # assignment separately from ordinary set().
    initialize = set
    def get(self):
        """Return value of variable."""
        return self._tk.globalgetvar(self._name)
    def trace_variable(self, mode, callback):
        """Define a trace callback for the variable.

        MODE is one of "r", "w", "u" for read, write, undefine.
        CALLBACK must be a function which is called when
        the variable is read, written or undefined.

        Return the name of the callback.
        """
        f = CallWrapper(callback, None, self).__call__
        cbname = repr(id(f))
        try:
            callback = callback.__func__
        except AttributeError:
            pass
        try:
            # Append the function name for readability in Tcl-level traces.
            cbname = cbname + callback.__name__
        except AttributeError:
            pass
        self._tk.createcommand(cbname, f)
        if self._tclCommands is None:
            self._tclCommands = []
        self._tclCommands.append(cbname)
        self._tk.call("trace", "variable", self._name, mode, cbname)
        return cbname
    trace = trace_variable
    def trace_vdelete(self, mode, cbname):
        """Delete the trace callback for a variable.

        MODE is one of "r", "w", "u" for read, write, undefine.
        CBNAME is the name of the callback returned from trace_variable or trace.
        """
        self._tk.call("trace", "vdelete", self._name, mode, cbname)
        self._tk.deletecommand(cbname)
        # The name may not be tracked here (e.g. registered elsewhere);
        # tolerate that.
        try:
            self._tclCommands.remove(cbname)
        except ValueError:
            pass
    def trace_vinfo(self):
        """Return all trace callback information."""
        return [self._tk.split(x) for x in self._tk.splitlist(
            self._tk.call("trace", "vinfo", self._name))]
    def __eq__(self, other):
        """Comparison for equality (==).

        Note: if the Variable's master matters to behavior
        also compare self._master == other._master
        """
        return self.__class__.__name__ == other.__class__.__name__ \
            and self._name == other._name
class StringVar(Variable):
    """A Variable whose get() always yields a str."""
    _default = ""

    def __init__(self, master=None, value=None, name=None):
        """Create a string-valued Tcl variable.

        MASTER is the owning widget (defaults to the root window).
        VALUE is the initial value (defaults to "").
        NAME is an explicit Tcl variable name; when it names an existing
        variable and VALUE is omitted, the current value is kept.
        """
        Variable.__init__(self, master, value, name)

    def get(self):
        """Return the variable's current value as a string."""
        raw = self._tk.globalgetvar(self._name)
        return raw if isinstance(raw, str) else str(raw)
class IntVar(Variable):
    """A Variable whose get() yields an int."""
    _default = 0

    def __init__(self, master=None, value=None, name=None):
        """Create an integer-valued Tcl variable.

        MASTER is the owning widget (defaults to the root window).
        VALUE is the initial value (defaults to 0).
        NAME is an explicit Tcl variable name; when it names an existing
        variable and VALUE is omitted, the current value is kept.
        """
        Variable.__init__(self, master, value, name)

    def get(self):
        """Return the variable's current value as an integer."""
        raw = self._tk.globalgetvar(self._name)
        return getint(raw)
class DoubleVar(Variable):
    """A Variable whose get() yields a float."""
    _default = 0.0

    def __init__(self, master=None, value=None, name=None):
        """Create a float-valued Tcl variable.

        MASTER is the owning widget (defaults to the root window).
        VALUE is the initial value (defaults to 0.0).
        NAME is an explicit Tcl variable name; when it names an existing
        variable and VALUE is omitted, the current value is kept.
        """
        Variable.__init__(self, master, value, name)

    def get(self):
        """Return the variable's current value as a float."""
        raw = self._tk.globalgetvar(self._name)
        return getdouble(raw)
class BooleanVar(Variable):
    """A Variable whose get() yields a bool."""
    _default = False

    def __init__(self, master=None, value=None, name=None):
        """Create a boolean-valued Tcl variable.

        MASTER is the owning widget (defaults to the root window).
        VALUE is the initial value (defaults to False).
        NAME is an explicit Tcl variable name; when it names an existing
        variable and VALUE is omitted, the current value is kept.
        """
        Variable.__init__(self, master, value, name)

    def get(self):
        """Return the variable's current value as a bool.

        Raises ValueError if the underlying Tcl value is not a valid
        boolean literal.
        """
        raw = self._tk.globalgetvar(self._name)
        try:
            return self._tk.getboolean(raw)
        except TclError:
            raise ValueError("invalid literal for getboolean()")
def mainloop(n=0):
    """Run the main loop of Tcl."""
    # Delegates to the default root's interpreter; a Tk() instance must have
    # been created first or _default_root is still None.
    _default_root.tk.mainloop(n)
# Module-level conversion helpers; getint/getdouble are plain aliases for
# the builtin constructors.
getint = int
getdouble = float
def getboolean(s):
    """Convert true and false to integer values 1 and 0."""
    try:
        return _default_root.tk.getboolean(s)
    except TclError:
        # Re-raise as ValueError so callers need not know about Tcl errors.
        raise ValueError("invalid literal for getboolean()")
# Methods defined on both toplevel and interior widgets
class Misc:
"""Internal class.
Base class which defines methods common for interior widgets."""
# XXX font command?
_tclCommands = None
    def destroy(self):
        """Internal function.

        Delete all Tcl commands created for
        this widget in the Tcl interpreter."""
        if self._tclCommands is not None:
            for name in self._tclCommands:
                #print '- Tkinter: deleted command', name
                self.tk.deletecommand(name)
            # Drop the bookkeeping list so a second destroy() is a no-op.
            self._tclCommands = None
    def deletecommand(self, name):
        """Internal function.

        Delete the Tcl command provided in NAME."""
        #print '- Tkinter: deleted command', name
        self.tk.deletecommand(name)
        # NAME may not be in our bookkeeping list (e.g. commands registered
        # elsewhere); tolerate that case.
        try:
            self._tclCommands.remove(name)
        except ValueError:
            pass
def tk_strictMotif(self, boolean=None):
"""Set Tcl internal variable, whether the look and feel
should adhere to Motif.
A parameter of 1 means adhere to Motif (e.g. no color
change if mouse passes over slider).
Returns the set value."""
return self.tk.getboolean(self.tk.call(
'set', 'tk_strictMotif', boolean))
def tk_bisque(self):
"""Change the color scheme to light brown as used in Tk 3.6 and before."""
self.tk.call('tk_bisque')
def tk_setPalette(self, *args, **kw):
"""Set a new color scheme for all widget elements.
A single color as argument will cause that all colors of Tk
widget elements are derived from this.
Alternatively several keyword parameters and its associated
colors can be given. The following keywords are valid:
activeBackground, foreground, selectColor,
activeForeground, highlightBackground, selectBackground,
background, highlightColor, selectForeground,
disabledForeground, insertBackground, troughColor."""
self.tk.call(('tk_setPalette',)
+ _flatten(args) + _flatten(list(kw.items())))
def tk_menuBar(self, *args):
"""Do not use. Needed in Tk 3.6 and earlier."""
# obsolete since Tk 4.0
import warnings
warnings.warn('tk_menuBar() does nothing and will be removed in 3.6',
DeprecationWarning, stacklevel=2)
def wait_variable(self, name='PY_VAR'):
"""Wait until the variable is modified.
A parameter of type IntVar, StringVar, DoubleVar or
BooleanVar must be given."""
self.tk.call('tkwait', 'variable', name)
waitvar = wait_variable # XXX b/w compat
def wait_window(self, window=None):
"""Wait until a WIDGET is destroyed.
If no parameter is given self is used."""
if window is None:
window = self
self.tk.call('tkwait', 'window', window._w)
def wait_visibility(self, window=None):
"""Wait until the visibility of a WIDGET changes
(e.g. it appears).
If no parameter is given self is used."""
if window is None:
window = self
self.tk.call('tkwait', 'visibility', window._w)
def setvar(self, name='PY_VAR', value='1'):
"""Set Tcl variable NAME to VALUE."""
self.tk.setvar(name, value)
def getvar(self, name='PY_VAR'):
"""Return value of Tcl variable NAME."""
return self.tk.getvar(name)
getint = int
getdouble = float
def getboolean(self, s):
"""Return a boolean value for Tcl boolean values true and false given as parameter."""
try:
return self.tk.getboolean(s)
except TclError:
raise ValueError("invalid literal for getboolean()")
def focus_set(self):
"""Direct input focus to this widget.
If the application currently does not have the focus
this widget will get the focus if the application gets
the focus through the window manager."""
self.tk.call('focus', self._w)
focus = focus_set # XXX b/w compat?
def focus_force(self):
"""Direct input focus to this widget even if the
application does not have the focus. Use with
caution!"""
self.tk.call('focus', '-force', self._w)
def focus_get(self):
"""Return the widget which has currently the focus in the
application.
Use focus_displayof to allow working with several
displays. Return None if application does not have
the focus."""
name = self.tk.call('focus')
if name == 'none' or not name: return None
return self._nametowidget(name)
def focus_displayof(self):
"""Return the widget which has currently the focus on the
display where this widget is located.
Return None if the application does not have the focus."""
name = self.tk.call('focus', '-displayof', self._w)
if name == 'none' or not name: return None
return self._nametowidget(name)
def focus_lastfor(self):
"""Return the widget which would have the focus if top level
for this widget gets the focus from the window manager."""
name = self.tk.call('focus', '-lastfor', self._w)
if name == 'none' or not name: return None
return self._nametowidget(name)
    def tk_focusFollowsMouse(self):
        """The widget under mouse will get automatically focus. Can not
        be disabled easily."""
        self.tk.call('tk_focusFollowsMouse')
    def tk_focusNext(self):
        """Return the next widget in the focus order which follows
        widget which has currently the focus.

        The focus order first goes to the next child, then to
        the children of the child recursively and then to the
        next sibling which is higher in the stacking order. A
        widget is omitted if it has the takefocus resource set
        to 0."""
        name = self.tk.call('tk_focusNext', self._w)
        if not name: return None
        return self._nametowidget(name)
    def tk_focusPrev(self):
        """Return previous widget in the focus order. See tk_focusNext for details."""
        name = self.tk.call('tk_focusPrev', self._w)
        if not name: return None
        return self._nametowidget(name)
    def after(self, ms, func=None, *args):
        """Call function once after given time.

        MS specifies the time in milliseconds. FUNC gives the
        function which shall be called. Additional parameters
        are given as parameters to the function call. Return
        identifier to cancel scheduling with after_cancel."""
        if not func:
            # No callback given: just wait inside Tcl for MS milliseconds.
            # I'd rather use time.sleep(ms*0.001)
            self.tk.call('after', ms)
        else:
            def callit():
                # NB: 'name' is bound below, before Tcl can invoke callit.
                try:
                    func(*args)
                finally:
                    try:
                        # One-shot timer: drop the Tcl command once fired.
                        self.deletecommand(name)
                    except TclError:
                        pass
            callit.__name__ = func.__name__
            name = self._register(callit)
            return self.tk.call('after', ms, name)
def after_idle(self, func, *args):
"""Call FUNC once if the Tcl main loop has no event to
process.
Return an identifier to cancel the scheduling with
after_cancel."""
return self.after('idle', func, *args)
    def after_cancel(self, id):
        """Cancel scheduling of function identified with ID.

        Identifier returned by after or after_idle must be
        given as first parameter."""
        # Also delete the Python command registered for this timer, if any;
        # a TclError here just means there was nothing to look up.
        try:
            data = self.tk.call('after', 'info', id)
            # In Tk 8.3, splitlist returns: (script, type)
            # In Tk 8.4, splitlist may return (script, type) or (script,)
            script = self.tk.splitlist(data)[0]
            self.deletecommand(script)
        except TclError:
            pass
        self.tk.call('after', 'cancel', id)
def bell(self, displayof=0):
"""Ring a display's bell."""
self.tk.call(('bell',) + self._displayof(displayof))
    # Clipboard handling:
    def clipboard_get(self, **kw):
        """Retrieve data from the clipboard on window's display.

        The window keyword defaults to the root window of the Tkinter
        application.

        The type keyword specifies the form in which the data is
        to be returned and should be an atom name such as STRING
        or FILE_NAME. Type defaults to STRING, except on X11, where the default
        is to try UTF8_STRING and fall back to STRING.

        This command is equivalent to:

        selection_get(CLIPBOARD)
        """
        if 'type' not in kw and self._windowingsystem == 'x11':
            try:
                kw['type'] = 'UTF8_STRING'
                return self.tk.call(('clipboard', 'get') + self._options(kw))
            except TclError:
                # UTF8_STRING not available; retry below with the default
                # STRING type.
                del kw['type']
        return self.tk.call(('clipboard', 'get') + self._options(kw))
def clipboard_clear(self, **kw):
"""Clear the data in the Tk clipboard.
A widget specified for the optional displayof keyword
argument specifies the target display."""
if 'displayof' not in kw: kw['displayof'] = self._w
self.tk.call(('clipboard', 'clear') + self._options(kw))
    def clipboard_append(self, string, **kw):
        """Append STRING to the Tk clipboard.

        A widget specified at the optional displayof keyword
        argument specifies the target display. The clipboard
        can be retrieved with selection_get."""
        if 'displayof' not in kw: kw['displayof'] = self._w
        # '--' terminates option parsing so STRING may start with '-'.
        self.tk.call(('clipboard', 'append') + self._options(kw)
              + ('--', string))
    # XXX grab current w/o window argument
    def grab_current(self):
        """Return widget which has currently the grab in this application
        or None."""
        name = self.tk.call('grab', 'current', self._w)
        if not name: return None
        return self._nametowidget(name)
    def grab_release(self):
        """Release grab for this widget if currently set."""
        self.tk.call('grab', 'release', self._w)
    def grab_set(self):
        """Set grab for this widget.

        A grab directs all events to this and descendant
        widgets in the application."""
        self.tk.call('grab', 'set', self._w)
    def grab_set_global(self):
        """Set global grab for this widget.

        A global grab directs all events to this and
        descendant widgets on the display. Use with caution -
        other applications do not get events anymore."""
        self.tk.call('grab', 'set', '-global', self._w)
    def grab_status(self):
        """Return None, "local" or "global" if this widget has
        no, a local or a global grab."""
        status = self.tk.call('grab', 'status', self._w)
        if status == 'none': status = None
        return status
    def option_add(self, pattern, value, priority = None):
        """Set a VALUE (second parameter) for an option
        PATTERN (first parameter).

        An optional third parameter gives the numeric priority
        (defaults to 80)."""
        self.tk.call('option', 'add', pattern, value, priority)
    def option_clear(self):
        """Clear the option database.

        It will be reloaded if option_add is called."""
        self.tk.call('option', 'clear')
    def option_get(self, name, className):
        """Return the value for an option NAME for this widget
        with CLASSNAME.

        Values with higher priority override lower values."""
        return self.tk.call('option', 'get', self._w, name, className)
    def option_readfile(self, fileName, priority = None):
        """Read file FILENAME into the option database.

        An optional second parameter gives the numeric
        priority."""
        self.tk.call('option', 'readfile', fileName, priority)
    def selection_clear(self, **kw):
        """Clear the current X selection."""
        if 'displayof' not in kw: kw['displayof'] = self._w
        self.tk.call(('selection', 'clear') + self._options(kw))
    def selection_get(self, **kw):
        """Return the contents of the current X selection.

        A keyword parameter selection specifies the name of
        the selection and defaults to PRIMARY.  A keyword
        parameter displayof specifies a widget on the display
        to use. A keyword parameter type specifies the form of data to be
        fetched, defaulting to STRING except on X11, where UTF8_STRING is tried
        before STRING."""
        if 'displayof' not in kw: kw['displayof'] = self._w
        if 'type' not in kw and self._windowingsystem == 'x11':
            try:
                kw['type'] = 'UTF8_STRING'
                return self.tk.call(('selection', 'get') + self._options(kw))
            except TclError:
                # Selection owner does not provide UTF8_STRING; fall back
                # to the default STRING type below.
                del kw['type']
        return self.tk.call(('selection', 'get') + self._options(kw))
    def selection_handle(self, command, **kw):
        """Specify a function COMMAND to call if the X
        selection owned by this widget is queried by another
        application.

        This function must return the contents of the
        selection. The function will be called with the
        arguments OFFSET and LENGTH which allows the chunking
        of very long selections. The following keyword
        parameters can be provided:
        selection - name of the selection (default PRIMARY),
        type - type of the selection (e.g. STRING, FILE_NAME)."""
        # COMMAND is exposed to Tcl as a registered command name.
        name = self._register(command)
        self.tk.call(('selection', 'handle') + self._options(kw)
              + (self._w, name))
    def selection_own(self, **kw):
        """Become owner of X selection.

        A keyword parameter selection specifies the name of
        the selection (default PRIMARY)."""
        self.tk.call(('selection', 'own') +
                     self._options(kw) + (self._w,))
    def selection_own_get(self, **kw):
        """Return owner of X selection.

        The following keyword parameter can
        be provided:
        selection - name of the selection (default PRIMARY),
        type - type of the selection (e.g. STRING, FILE_NAME)."""
        if 'displayof' not in kw: kw['displayof'] = self._w
        name = self.tk.call(('selection', 'own') + self._options(kw))
        if not name: return None
        return self._nametowidget(name)
    def send(self, interp, cmd, *args):
        """Send Tcl command CMD to different interpreter INTERP to be executed."""
        return self.tk.call(('send', interp, cmd) + args)
    def lower(self, belowThis=None):
        """Lower this widget in the stacking order."""
        self.tk.call('lower', self._w, belowThis)
    # Named tkraise because "raise" is a reserved word in Python.
    def tkraise(self, aboveThis=None):
        """Raise this widget in the stacking order."""
        self.tk.call('raise', self._w, aboveThis)
    lift = tkraise
    def colormodel(self, value=None):
        """Useless. Not implemented in Tk."""
        return self.tk.call('tk', 'colormodel', self._w, value)
    def winfo_atom(self, name, displayof=0):
        """Return the integer identifier of the atom NAME."""
        args = ('winfo', 'atom') + self._displayof(displayof) + (name,)
        return getint(self.tk.call(args))
    def winfo_atomname(self, id, displayof=0):
        """Return the name of the atom with identifier ID."""
        args = ('winfo', 'atomname') \
               + self._displayof(displayof) + (id,)
        return self.tk.call(args)
    def winfo_cells(self):
        """Return the number of cells in the colormap for this widget."""
        return getint(
            self.tk.call('winfo', 'cells', self._w))
def winfo_children(self):
"""Return a list of all widgets which are children of this widget."""
result = []
for child in self.tk.splitlist(
self.tk.call('winfo', 'children', self._w)):
try:
# Tcl sometimes returns extra windows, e.g. for
# menus; those need to be skipped
result.append(self._nametowidget(child))
except KeyError:
pass
return result
    def winfo_class(self):
        """Return window class name of this widget."""
        return self.tk.call('winfo', 'class', self._w)
    def winfo_colormapfull(self):
        """Return True if at the last color request the colormap was full."""
        return self.tk.getboolean(
            self.tk.call('winfo', 'colormapfull', self._w))
    def winfo_containing(self, rootX, rootY, displayof=0):
        """Return the widget which is at the root coordinates ROOTX, ROOTY,
        or None if there is none."""
        args = ('winfo', 'containing') \
               + self._displayof(displayof) + (rootX, rootY)
        name = self.tk.call(args)
        if not name: return None
        return self._nametowidget(name)
    def winfo_depth(self):
        """Return the number of bits per pixel."""
        return getint(self.tk.call('winfo', 'depth', self._w))
    def winfo_exists(self):
        """Return true if this widget exists."""
        return getint(
            self.tk.call('winfo', 'exists', self._w))
    def winfo_fpixels(self, number):
        """Return the number of pixels for the given distance NUMBER
        (e.g. "3c") as float."""
        return getdouble(self.tk.call(
            'winfo', 'fpixels', self._w, number))
    def winfo_geometry(self):
        """Return geometry string for this widget in the form "widthxheight+X+Y"."""
        return self.tk.call('winfo', 'geometry', self._w)
    def winfo_height(self):
        """Return height of this widget."""
        return getint(
            self.tk.call('winfo', 'height', self._w))
    def winfo_id(self):
        """Return identifier ID for this widget."""
        # NOTE(review): uses self.tk.getint rather than the module-level
        # getint used by the other winfo wrappers — presumably deliberate;
        # confirm before unifying.
        return self.tk.getint(
            self.tk.call('winfo', 'id', self._w))
    def winfo_interps(self, displayof=0):
        """Return the name of all Tcl interpreters for this display."""
        args = ('winfo', 'interps') + self._displayof(displayof)
        return self.tk.splitlist(self.tk.call(args))
    def winfo_ismapped(self):
        """Return true if this widget is mapped."""
        return getint(
            self.tk.call('winfo', 'ismapped', self._w))
    def winfo_manager(self):
        """Return the window manager name for this widget."""
        return self.tk.call('winfo', 'manager', self._w)
    # Thin winfo wrappers: name/path introspection and size requests.
    def winfo_name(self):
        """Return the name of this widget."""
        return self.tk.call('winfo', 'name', self._w)
    def winfo_parent(self):
        """Return the name of the parent of this widget."""
        return self.tk.call('winfo', 'parent', self._w)
    def winfo_pathname(self, id, displayof=0):
        """Return the pathname of the widget given by ID."""
        args = ('winfo', 'pathname') \
               + self._displayof(displayof) + (id,)
        return self.tk.call(args)
    def winfo_pixels(self, number):
        """Rounded integer value of winfo_fpixels."""
        return getint(
            self.tk.call('winfo', 'pixels', self._w, number))
    def winfo_pointerx(self):
        """Return the x coordinate of the pointer on the root window."""
        return getint(
            self.tk.call('winfo', 'pointerx', self._w))
    def winfo_pointerxy(self):
        """Return a tuple of x and y coordinates of the pointer on the root window."""
        return self._getints(
            self.tk.call('winfo', 'pointerxy', self._w))
    def winfo_pointery(self):
        """Return the y coordinate of the pointer on the root window."""
        return getint(
            self.tk.call('winfo', 'pointery', self._w))
    def winfo_reqheight(self):
        """Return requested height of this widget."""
        return getint(
            self.tk.call('winfo', 'reqheight', self._w))
    def winfo_reqwidth(self):
        """Return requested width of this widget."""
        return getint(
            self.tk.call('winfo', 'reqwidth', self._w))
    def winfo_rgb(self, color):
        """Return tuple of decimal values for red, green, blue for
        COLOR in this widget."""
        return self._getints(
            self.tk.call('winfo', 'rgb', self._w, color))
    def winfo_rootx(self):
        """Return x coordinate of upper left corner of this widget on the
        root window."""
        return getint(
            self.tk.call('winfo', 'rootx', self._w))
    def winfo_rooty(self):
        """Return y coordinate of upper left corner of this widget on the
        root window."""
        return getint(
            self.tk.call('winfo', 'rooty', self._w))
    def winfo_screen(self):
        """Return the screen name of this widget."""
        return self.tk.call('winfo', 'screen', self._w)
    def winfo_screencells(self):
        """Return the number of the cells in the colormap of the screen
        of this widget."""
        return getint(
            self.tk.call('winfo', 'screencells', self._w))
    def winfo_screendepth(self):
        """Return the number of bits per pixel of the root window of the
        screen of this widget."""
        return getint(
            self.tk.call('winfo', 'screendepth', self._w))
    def winfo_screenheight(self):
        """Return the height of the screen of this widget in pixels."""
        return getint(
            self.tk.call('winfo', 'screenheight', self._w))
    def winfo_screenmmheight(self):
        """Return the height of the screen of this widget in mm."""
        return getint(
            self.tk.call('winfo', 'screenmmheight', self._w))
    def winfo_screenmmwidth(self):
        """Return the width of the screen of this widget in mm."""
        return getint(
            self.tk.call('winfo', 'screenmmwidth', self._w))
    def winfo_screenvisual(self):
        """Return one of the strings directcolor, grayscale, pseudocolor,
        staticcolor, staticgray, or truecolor for the default
        colormodel of this screen."""
        return self.tk.call('winfo', 'screenvisual', self._w)
    def winfo_screenwidth(self):
        """Return the width of the screen of this widget in pixels."""
        return getint(
            self.tk.call('winfo', 'screenwidth', self._w))
    def winfo_server(self):
        """Return information of the X-Server of the screen of this widget in
        the form "XmajorRminor vendor vendorVersion"."""
        return self.tk.call('winfo', 'server', self._w)
    def winfo_toplevel(self):
        """Return the toplevel widget of this widget."""
        return self._nametowidget(self.tk.call(
            'winfo', 'toplevel', self._w))
    def winfo_viewable(self):
        """Return true if the widget and all its higher ancestors are mapped."""
        return getint(
            self.tk.call('winfo', 'viewable', self._w))
    def winfo_visual(self):
        """Return one of the strings directcolor, grayscale, pseudocolor,
        staticcolor, staticgray, or truecolor for the
        colormodel of this widget."""
        return self.tk.call('winfo', 'visual', self._w)
    def winfo_visualid(self):
        """Return the X identifier for the visual for this widget."""
        return self.tk.call('winfo', 'visualid', self._w)
    def winfo_visualsavailable(self, includeids=0):
        """Return a list of all visuals available for the screen
        of this widget.

        Each item in the list consists of a visual name (see winfo_visual), a
        depth and if INCLUDEIDS=1 is given also the X identifier."""
        data = self.tk.split(
            self.tk.call('winfo', 'visualsavailable', self._w,
                         includeids and 'includeids' or None))
        if isinstance(data, str):
            # A single visual comes back as one string; normalize to a list.
            data = [self.tk.split(data)]
        return [self.__winfo_parseitem(x) for x in data]
    def __winfo_parseitem(self, t):
        """Internal function.  Keep the visual name and convert the
        remaining numeric fields of one entry to ints."""
        return t[:1] + tuple(map(self.__winfo_getint, t[1:]))
    def __winfo_getint(self, x):
        """Internal function."""
        # Base 0: accepts plain decimal as well as 0x/0o prefixed values.
        return int(x, 0)
    # Virtual-root and geometry queries.
    def winfo_vrootheight(self):
        """Return the height of the virtual root window associated with this
        widget in pixels. If there is no virtual root window return the
        height of the screen."""
        return getint(
            self.tk.call('winfo', 'vrootheight', self._w))
    def winfo_vrootwidth(self):
        """Return the width of the virtual root window associated with this
        widget in pixel. If there is no virtual root window return the
        width of the screen."""
        return getint(
            self.tk.call('winfo', 'vrootwidth', self._w))
    def winfo_vrootx(self):
        """Return the x offset of the virtual root relative to the root
        window of the screen of this widget."""
        return getint(
            self.tk.call('winfo', 'vrootx', self._w))
    def winfo_vrooty(self):
        """Return the y offset of the virtual root relative to the root
        window of the screen of this widget."""
        return getint(
            self.tk.call('winfo', 'vrooty', self._w))
    def winfo_width(self):
        """Return the width of this widget."""
        return getint(
            self.tk.call('winfo', 'width', self._w))
    def winfo_x(self):
        """Return the x coordinate of the upper left corner of this widget
        in the parent."""
        return getint(
            self.tk.call('winfo', 'x', self._w))
    def winfo_y(self):
        """Return the y coordinate of the upper left corner of this widget
        in the parent."""
        return getint(
            self.tk.call('winfo', 'y', self._w))
    def update(self):
        """Enter event loop until all pending events have been processed by Tcl."""
        self.tk.call('update')
    def update_idletasks(self):
        """Enter event loop until all idle callbacks have been called.

        This will update the display of windows but not process
        events caused by the user."""
        self.tk.call('update', 'idletasks')
def bindtags(self, tagList=None):
"""Set or get the list of bindtags for this widget.
With no argument return the list of all bindtags associated with
this widget. With a list of strings as argument the bindtags are
set to this list. The bindtags determine in which order events are
processed (see bind)."""
if tagList is None:
return self.tk.splitlist(
self.tk.call('bindtags', self._w))
else:
self.tk.call('bindtags', self._w, tagList)
    def _bind(self, what, sequence, func, add, needcleanup=1):
        """Internal function.  Common implementation of bind/bind_all/
        bind_class/unbind; WHAT is the leading Tcl command tuple,
        e.g. ('bind', window_path)."""
        if isinstance(func, str):
            # FUNC is already a Tcl script: install it verbatim.
            self.tk.call(what + (sequence, func))
        elif func:
            funcid = self._register(func, self._substitute,
                        needcleanup)
            # Generated script: '+' prepends instead of replacing; the
            # handler returning the string "break" stops event processing.
            cmd = ('%sif {"[%s %s]" == "break"} break\n'
                   %
                   (add and '+' or '',
                    funcid, self._subst_format_str))
            self.tk.call(what + (sequence, cmd))
            return funcid
        elif sequence:
            # Query: return the script bound to SEQUENCE.
            return self.tk.call(what + (sequence,))
        else:
            # Query: return all bound sequences.
            return self.tk.splitlist(self.tk.call(what))
    def bind(self, sequence=None, func=None, add=None):
        """Bind to this widget at event SEQUENCE a call to function FUNC.

        SEQUENCE is a string of concatenated event
        patterns. An event pattern is of the form
        <MODIFIER-MODIFIER-TYPE-DETAIL> where MODIFIER is one
        of Control, Mod2, M2, Shift, Mod3, M3, Lock, Mod4, M4,
        Button1, B1, Mod5, M5 Button2, B2, Meta, M, Button3,
        B3, Alt, Button4, B4, Double, Button5, B5 Triple,
        Mod1, M1. TYPE is one of Activate, Enter, Map,
        ButtonPress, Button, Expose, Motion, ButtonRelease
        FocusIn, MouseWheel, Circulate, FocusOut, Property,
        Colormap, Gravity Reparent, Configure, KeyPress, Key,
        Unmap, Deactivate, KeyRelease Visibility, Destroy,
        Leave and DETAIL is the button number for ButtonPress,
        ButtonRelease and DETAIL is the Keysym for KeyPress and
        KeyRelease. Examples are
        <Control-Button-1> for pressing Control and mouse button 1 or
        <Alt-A> for pressing A and the Alt key (KeyPress can be omitted).

        An event pattern can also be a virtual event of the form
        <<AString>> where AString can be arbitrary. This
        event can be generated by event_generate.
        If events are concatenated they must appear shortly
        after each other.

        FUNC will be called if the event sequence occurs with an
        instance of Event as argument. If the return value of FUNC is
        "break" no further bound function is invoked.

        An additional boolean parameter ADD specifies whether FUNC will
        be called additionally to the other bound function or whether
        it will replace the previous function.

        Bind will return an identifier to allow deletion of the bound function with
        unbind without memory leak.

        If FUNC or SEQUENCE is omitted the bound function or list
        of bound events are returned."""
        return self._bind(('bind', self._w), sequence, func, add)
    def unbind(self, sequence, funcid=None):
        """Unbind for this widget for event SEQUENCE the
        function identified with FUNCID."""
        # NOTE(review): this installs an empty script, which clears the
        # whole SEQUENCE binding, not just FUNCID's handler — confirm
        # against Tk's bind(n) before relying on per-function removal.
        self.tk.call('bind', self._w, sequence, '')
        if funcid:
            self.deletecommand(funcid)
    def bind_all(self, sequence=None, func=None, add=None):
        """Bind to all widgets at an event SEQUENCE a call to function FUNC.

        An additional boolean parameter ADD specifies whether FUNC will
        be called additionally to the other bound function or whether
        it will replace the previous function. See bind for the return value."""
        return self._bind(('bind', 'all'), sequence, func, add, 0)
    def unbind_all(self, sequence):
        """Unbind for all widgets for event SEQUENCE all functions."""
        self.tk.call('bind', 'all' , sequence, '')
    def bind_class(self, className, sequence=None, func=None, add=None):
        """Bind to widgets with bindtag CLASSNAME at event
        SEQUENCE a call of function FUNC.

        An additional
        boolean parameter ADD specifies whether FUNC will be
        called additionally to the other bound function or
        whether it will replace the previous function. See bind for
        the return value."""
        return self._bind(('bind', className), sequence, func, add, 0)
    def unbind_class(self, className, sequence):
        """Unbind for all widgets with bindtag CLASSNAME for event SEQUENCE
        all functions."""
        self.tk.call('bind', className , sequence, '')
    def mainloop(self, n=0):
        """Call the mainloop of Tk.

        N is passed through to the underlying tk.mainloop call."""
        self.tk.mainloop(n)
    def quit(self):
        """Quit the Tcl interpreter. All widgets will be destroyed."""
        self.tk.quit()
    def _getints(self, string):
        """Internal function.  Convert a Tcl list STRING into a tuple of
        ints; an empty value yields None."""
        if string:
            return tuple(map(getint, self.tk.splitlist(string)))
    def _getdoubles(self, string):
        """Internal function.  Convert a Tcl list STRING into a tuple of
        floats; an empty value yields None."""
        if string:
            return tuple(map(getdouble, self.tk.splitlist(string)))
    def _getboolean(self, string):
        """Internal function.  Convert a Tcl boolean STRING via
        tk.getboolean; an empty value yields None."""
        if string:
            return self.tk.getboolean(string)
    def _displayof(self, displayof):
        """Internal function.  Build a ('-displayof', window) pair:
        () when DISPLAYOF is false, this widget when it is None."""
        if displayof:
            return ('-displayof', displayof)
        if displayof is None:
            return ('-displayof', self._w)
        return ()
    @property
    def _windowingsystem(self):
        """Internal property: the Tk windowing system name (e.g. 'x11'),
        cached on the root widget after the first lookup."""
        try:
            return self._root()._windowingsystem_cached
        except AttributeError:
            # First access: ask Tcl once, then memoize on the root.
            ws = self._root()._windowingsystem_cached = \
                self.tk.call('tk', 'windowingsystem')
            return ws
    def _options(self, cnf, kw = None):
        """Internal function.  Flatten the option dict CNF (merged with KW)
        into a Tcl argument tuple ('-option', value, ...)."""
        if kw:
            cnf = _cnfmerge((cnf, kw))
        else:
            cnf = _cnfmerge(cnf)
        res = ()
        for k, v in cnf.items():
            if v is not None:
                # A trailing underscore lets callers spell options that
                # collide with Python keywords (e.g. 'in_').
                if k[-1] == '_': k = k[:-1]
                if callable(v):
                    # Callbacks are registered as named Tcl commands.
                    v = self._register(v)
                elif isinstance(v, (tuple, list)):
                    nv = []
                    for item in v:
                        if isinstance(item, int):
                            nv.append(str(item))
                        elif isinstance(item, str):
                            nv.append(_stringify(item))
                        else:
                            break
                    else:
                        # Only join when every element converted cleanly;
                        # otherwise pass the sequence through unchanged.
                        v = ' '.join(nv)
                res = res + ('-'+k, v)
        return res
def nametowidget(self, name):
"""Return the Tkinter instance of a widget identified by
its Tcl name NAME."""
name = str(name).split('.')
w = self
if not name[0]:
w = w._root()
name = name[1:]
for n in name:
if not n:
break
w = w.children[n]
return w
_nametowidget = nametowidget
    def _register(self, func, subst=None, needcleanup=1):
        """Return a newly created Tcl function. If this
        function is called, the Python function FUNC will
        be executed. An optional function SUBST can
        be given which will be executed before FUNC."""
        f = CallWrapper(func, subst, self).__call__
        # Command name: id of the wrapper plus FUNC's name (when it has
        # one) to ease debugging.
        name = repr(id(f))
        try:
            func = func.__func__
        except AttributeError:
            pass
        try:
            name = name + func.__name__
        except AttributeError:
            pass
        self.tk.createcommand(name, f)
        if needcleanup:
            # Remember the command so it can be removed later via
            # deletecommand.
            if self._tclCommands is None:
                self._tclCommands = []
            self._tclCommands.append(name)
        return name
    register = _register
    def _root(self):
        """Internal function.  Walk up the master chain to the root widget."""
        w = self
        while w.master: w = w.master
        return w
_subst_format = ('%#', '%b', '%f', '%h', '%k',
'%s', '%t', '%w', '%x', '%y',
'%A', '%E', '%K', '%N', '%W', '%T', '%X', '%Y', '%D')
_subst_format_str = " ".join(_subst_format)
    def _substitute(self, *args):
        """Internal function.  Convert the Tcl percent-substitution values
        in ARGS (matching _subst_format) into a one-tuple holding an
        Event instance; pass ARGS through unchanged on length mismatch."""
        if len(args) != len(self._subst_format): return args
        getboolean = self.tk.getboolean
        getint = int
        def getint_event(s):
            """Tk changed behavior in 8.4.2, returning "??" rather more often."""
            try:
                return int(s)
            except ValueError:
                return s
        nsign, b, f, h, k, s, t, w, x, y, A, E, K, N, W, T, X, Y, D = args
        # Missing: (a, c, d, m, o, v, B, R)
        e = Event()
        # serial field: valid for all events
        # number of button: ButtonPress and ButtonRelease events only
        # height field: Configure, ConfigureRequest, Create,
        # ResizeRequest, and Expose events only
        # keycode field: KeyPress and KeyRelease events only
        # time field: "valid for events that contain a time field"
        # width field: Configure, ConfigureRequest, Create, ResizeRequest,
        # and Expose events only
        # x field: "valid for events that contain a x field"
        # y field: "valid for events that contain a y field"
        # keysym as decimal: KeyPress and KeyRelease events only
        # x_root, y_root fields: ButtonPress, ButtonRelease, KeyPress,
        # KeyRelease,and Motion events
        e.serial = getint(nsign)
        e.num = getint_event(b)
        try: e.focus = getboolean(f)
        except TclError: pass
        e.height = getint_event(h)
        e.keycode = getint_event(k)
        e.state = getint_event(s)
        e.time = getint_event(t)
        e.width = getint_event(w)
        e.x = getint_event(x)
        e.y = getint_event(y)
        e.char = A
        try: e.send_event = getboolean(E)
        except TclError: pass
        e.keysym = K
        e.keysym_num = getint_event(N)
        e.type = T
        try:
            e.widget = self._nametowidget(W)
        except KeyError:
            # No Tkinter widget for this path; keep the raw name.
            e.widget = W
        e.x_root = getint_event(X)
        e.y_root = getint_event(Y)
        try:
            e.delta = getint(D)
        except ValueError:
            e.delta = 0
        return (e,)
    def _report_exception(self):
        """Internal function.  Forward the pending exception to the root
        widget's report_callback_exception hook."""
        exc, val, tb = sys.exc_info()
        root = self._root()
        root.report_callback_exception(exc, val, tb)
    def _getconfigure(self, *args):
        """Call Tcl configure command and return the result as a dict
        mapping option name (leading '-' stripped) to its info tuple."""
        cnf = {}
        for x in self.tk.splitlist(self.tk.call(*args)):
            x = self.tk.splitlist(x)
            cnf[x[0][1:]] = (x[0][1:],) + x[1:]
        return cnf
    def _getconfigure1(self, *args):
        # Single-option variant of _getconfigure: one info tuple, with the
        # leading '-' stripped from the option name.
        x = self.tk.splitlist(self.tk.call(*args))
        return (x[0][1:],) + x[1:]
    def _configure(self, cmd, cnf, kw):
        """Internal function.  Shared implementation for configure-style
        methods: query all options (CNF None), query one option (CNF a
        string), or set options (CNF a dict and/or KW)."""
        if kw:
            cnf = _cnfmerge((cnf, kw))
        elif cnf:
            cnf = _cnfmerge(cnf)
        if cnf is None:
            return self._getconfigure(_flatten((self._w, cmd)))
        if isinstance(cnf, str):
            return self._getconfigure1(_flatten((self._w, cmd, '-'+cnf)))
        self.tk.call(_flatten((self._w, cmd)) + self._options(cnf))
    # These used to be defined in Widget:
    def configure(self, cnf=None, **kw):
        """Configure resources of a widget.

        The values for resources are specified as keyword
        arguments. To get an overview about
        the allowed keyword arguments call the method keys.
        """
        return self._configure('configure', cnf, kw)
    config = configure
    def cget(self, key):
        """Return the resource value for a KEY given as string."""
        return self.tk.call(self._w, 'cget', '-' + key)
    __getitem__ = cget
    def __setitem__(self, key, value):
        # widget[key] = value is shorthand for configure(key=value).
        self.configure({key: value})
    def keys(self):
        """Return a list of all resource names of this widget."""
        # Option names come back with a leading '-', which is stripped.
        return [x[0][1:] for x in
                self.tk.splitlist(self.tk.call(self._w, 'configure'))]
    def __str__(self):
        """Return the window path name of this widget."""
        return self._w
    # Pack methods that apply to the master
    _noarg_ = ['_noarg_']  # sentinel: distinguishes "no argument" from any real value
    def pack_propagate(self, flag=_noarg_):
        """Set or get the status for propagation of geometry information.

        A boolean argument specifies whether the geometry information
        of the slaves will determine the size of this widget. If no argument
        is given the current setting will be returned.
        """
        if flag is Misc._noarg_:
            return self._getboolean(self.tk.call(
                'pack', 'propagate', self._w))
        else:
            self.tk.call('pack', 'propagate', self._w, flag)
    propagate = pack_propagate
    def pack_slaves(self):
        """Return a list of all slaves of this widget
        in its packing order."""
        return [self._nametowidget(x) for x in
                self.tk.splitlist(
                   self.tk.call('pack', 'slaves', self._w))]
    slaves = pack_slaves
    # Place method that applies to the master
    def place_slaves(self):
        """Return a list of all slaves of this widget
        in its packing order."""
        return [self._nametowidget(x) for x in
                self.tk.splitlist(
                    self.tk.call(
                        'place', 'slaves', self._w))]
    # Grid methods that apply to the master
    def grid_anchor(self, anchor=None): # new in Tk 8.5
        """The anchor value controls how to place the grid within the
        master when no row/column has any weight.

        The default anchor is nw."""
        self.tk.call('grid', 'anchor', self._w, anchor)
    anchor = grid_anchor
def grid_bbox(self, column=None, row=None, col2=None, row2=None):
"""Return a tuple of integer coordinates for the bounding
box of this widget controlled by the geometry manager grid.
If COLUMN, ROW is given the bounding box applies from
the cell with row and column 0 to the specified
cell. If COL2 and ROW2 are given the bounding box
starts at that cell.
The returned integers specify the offset of the upper left
corner in the master widget and the width and height.
"""
args = ('grid', 'bbox', self._w)
if column is not None and row is not None:
args = args + (column, row)
if col2 is not None and row2 is not None:
args = args + (col2, row2)
return self._getints(self.tk.call(*args)) or None
bbox = grid_bbox
    def _gridconvvalue(self, value):
        # Convert a grid option value returned by Tcl: numeric-looking
        # strings become int/float, empty strings become None, anything
        # else is passed through unchanged.
        if isinstance(value, (str, _tkinter.Tcl_Obj)):
            try:
                svalue = str(value)
                if not svalue:
                    return None
                elif '.' in svalue:
                    return getdouble(svalue)
                else:
                    return getint(svalue)
            except ValueError:
                pass
        return value
    def _grid_configure(self, command, index, cnf, kw):
        """Internal function.  Shared implementation of
        grid_rowconfigure and grid_columnconfigure."""
        if isinstance(cnf, str) and not kw:
            # Bare option name: normalize 'weight_'/'weight' to '-weight'
            # so that single option is queried.
            if cnf[-1:] == '_':
                cnf = cnf[:-1]
            if cnf[:1] != '-':
                cnf = '-'+cnf
            options = (cnf,)
        else:
            options = self._options(cnf, kw)
        if not options:
            # No options at all: return every option for INDEX as a dict.
            return _splitdict(
                self.tk,
                self.tk.call('grid', command, self._w, index),
                conv=self._gridconvvalue)
        res = self.tk.call(
              ('grid', command, self._w, index)
              + options)
        if len(options) == 1:
            # Single-option query: unwrap to the converted value.
            return self._gridconvvalue(res)
    def grid_columnconfigure(self, index, cnf={}, **kw):
        """Configure column INDEX of a grid.

        Valid resources are minsize (minimum size of the column),
        weight (how much does additional space propagate to this column)
        and pad (how much space to let additionally)."""
        return self._grid_configure('columnconfigure', index, cnf, kw)
    columnconfigure = grid_columnconfigure
    def grid_location(self, x, y):
        """Return a tuple of column and row which identify the cell
        at which the pixel at position X and Y inside the master
        widget is located."""
        return self._getints(
            self.tk.call(
                'grid', 'location', self._w, x, y)) or None
    def grid_propagate(self, flag=_noarg_):
        """Set or get the status for propagation of geometry information.

        A boolean argument specifies whether the geometry information
        of the slaves will determine the size of this widget. If no argument
        is given, the current setting will be returned.
        """
        if flag is Misc._noarg_:
            return self._getboolean(self.tk.call(
                'grid', 'propagate', self._w))
        else:
            self.tk.call('grid', 'propagate', self._w, flag)
    def grid_rowconfigure(self, index, cnf={}, **kw):
        """Configure row INDEX of a grid.

        Valid resources are minsize (minimum size of the row),
        weight (how much does additional space propagate to this row)
        and pad (how much space to let additionally)."""
        return self._grid_configure('rowconfigure', index, cnf, kw)
    rowconfigure = grid_rowconfigure
    def grid_size(self):
        """Return a tuple of the number of column and rows in the grid."""
        return self._getints(
            self.tk.call('grid', 'size', self._w)) or None
    size = grid_size
def grid_slaves(self, row=None, column=None):
"""Return a list of all slaves of this widget
in its packing order."""
args = ()
if row is not None:
args = args + ('-row', row)
if column is not None:
args = args + ('-column', column)
return [self._nametowidget(x) for x in
self.tk.splitlist(self.tk.call(
('grid', 'slaves', self._w) + args))]
# Support for the "event" command, new in Tk 4.2.
# By Case Roole.
def event_add(self, virtual, *sequences):
"""Bind a virtual event VIRTUAL (of the form <<Name>>)
to an event SEQUENCE such that the virtual event is triggered
whenever SEQUENCE occurs."""
args = ('event', 'add', virtual) + sequences
self.tk.call(args)
def event_delete(self, virtual, *sequences):
"""Unbind a virtual event VIRTUAL from SEQUENCE."""
args = ('event', 'delete', virtual) + sequences
self.tk.call(args)
def event_generate(self, sequence, **kw):
"""Generate an event SEQUENCE. Additional
keyword arguments specify parameter of the event
(e.g. x, y, rootx, rooty)."""
args = ('event', 'generate', self._w, sequence)
for k, v in kw.items():
args = args + ('-%s' % k, str(v))
self.tk.call(args)
    def event_info(self, virtual=None):
        """Return a list of all virtual events or the information
        about the SEQUENCE bound to the virtual event VIRTUAL."""
        return self.tk.splitlist(
            self.tk.call('event', 'info', virtual))
    # Image related commands
    def image_names(self):
        """Return a list of all existing image names."""
        return self.tk.splitlist(self.tk.call('image', 'names'))
    def image_types(self):
        """Return a list of all available image types (e.g. photo bitmap)."""
        return self.tk.splitlist(self.tk.call('image', 'types'))
class CallWrapper:
    """Internal class. Wraps a Python callback so it can be invoked from
    Tcl, e.g. after an event occurred."""

    def __init__(self, func, subst, widget):
        """Remember FUNC, SUBST and WIDGET for later invocation."""
        self.func = func
        self.subst = subst
        self.widget = widget

    def __call__(self, *args):
        """Run the substitution function SUBST on ARGS (if one was given),
        then call FUNC with the result. Errors other than SystemExit are
        reported through the widget rather than propagated to Tcl."""
        try:
            if self.subst:
                args = self.subst(*args)
            return self.func(*args)
        except SystemExit:
            raise
        except:
            self.widget._report_exception()
class XView:
    """Mix-in class for querying and changing the horizontal position
    of a widget's window."""

    def xview(self, *args):
        """Query and change the horizontal position of the view."""
        result = self.tk.call(self._w, 'xview', *args)
        if args:
            return None
        return self._getdoubles(result)

    def xview_moveto(self, fraction):
        """Adjust the view so that FRACTION of the total width of the
        canvas is off-screen to the left."""
        self.tk.call(self._w, 'xview', 'moveto', fraction)

    def xview_scroll(self, number, what):
        """Shift the x-view by NUMBER steps, where WHAT is "units" or
        "pages"."""
        self.tk.call(self._w, 'xview', 'scroll', number, what)
class YView:
    """Mix-in class for querying and changing the vertical position
    of a widget's window."""

    def yview(self, *args):
        """Query and change the vertical position of the view."""
        result = self.tk.call(self._w, 'yview', *args)
        if args:
            return None
        return self._getdoubles(result)

    def yview_moveto(self, fraction):
        """Adjust the view so that FRACTION of the total height of the
        canvas is off-screen to the top."""
        self.tk.call(self._w, 'yview', 'moveto', fraction)

    def yview_scroll(self, number, what):
        """Shift the y-view by NUMBER steps, where WHAT is "units" or
        "pages"."""
        self.tk.call(self._w, 'yview', 'scroll', number, what)
class Wm:
    """Provides functions for the communication with the window manager.

    Every method simply forwards to the Tcl ``wm`` command; passing None
    for an optional argument makes Tcl treat it as absent, so most methods
    double as getters when called without arguments."""

    def wm_aspect(self,
                  minNumer=None, minDenom=None,
                  maxNumer=None, maxDenom=None):
        """Instruct the window manager to set the aspect ratio (width/height)
        of this widget to be between MINNUMER/MINDENOM and MAXNUMER/MAXDENOM. Return a tuple
        of the actual values if no argument is given."""
        return self._getints(
            self.tk.call('wm', 'aspect', self._w,
                         minNumer, minDenom,
                         maxNumer, maxDenom))
    aspect = wm_aspect

    def wm_attributes(self, *args):
        """This subcommand returns or sets platform specific attributes.

        The first form returns a list of the platform specific flags and
        their values. The second form returns the value for the specific
        option. The third form sets one or more of the values. The values
        are as follows:

        On Windows, -disabled gets or sets whether the window is in a
        disabled state. -toolwindow gets or sets the style of the window
        to toolwindow (as defined in the MSDN). -topmost gets or sets
        whether this is a topmost window (displays above all other
        windows).

        On Macintosh, XXXXX

        On Unix, there are currently no special attribute values.
        """
        args = ('wm', 'attributes', self._w) + args
        return self.tk.call(args)
    attributes = wm_attributes

    def wm_client(self, name=None):
        """Store NAME in WM_CLIENT_MACHINE property of this widget. Return
        current value."""
        return self.tk.call('wm', 'client', self._w, name)
    client = wm_client

    def wm_colormapwindows(self, *wlist):
        """Store list of window names (WLIST) into WM_COLORMAPWINDOWS property
        of this widget. This list contains windows whose colormaps differ from their
        parents. Return current list of widgets if WLIST is empty."""
        if len(wlist) > 1:
            wlist = (wlist,) # Tk needs a list of windows here
        args = ('wm', 'colormapwindows', self._w) + wlist
        if wlist:
            self.tk.call(args)
        else:
            return [self._nametowidget(x)
                    for x in self.tk.splitlist(self.tk.call(args))]
    colormapwindows = wm_colormapwindows

    def wm_command(self, value=None):
        """Store VALUE in WM_COMMAND property. It is the command
        which shall be used to invoke the application. Return current
        command if VALUE is None."""
        return self.tk.call('wm', 'command', self._w, value)
    command = wm_command

    def wm_deiconify(self):
        """Deiconify this widget. If it was never mapped it will not be mapped.
        On Windows it will raise this widget and give it the focus."""
        return self.tk.call('wm', 'deiconify', self._w)
    deiconify = wm_deiconify

    def wm_focusmodel(self, model=None):
        """Set focus model to MODEL. "active" means that this widget will claim
        the focus itself, "passive" means that the window manager shall give
        the focus. Return current focus model if MODEL is None."""
        return self.tk.call('wm', 'focusmodel', self._w, model)
    focusmodel = wm_focusmodel

    def wm_forget(self, window): # new in Tk 8.5
        """The window will be unmapped from the screen and will no longer
        be managed by wm. toplevel windows will be treated like frame
        windows once they are no longer managed by wm, however, the menu
        option configuration will be remembered and the menus will return
        once the widget is managed again."""
        self.tk.call('wm', 'forget', window)
    forget = wm_forget

    def wm_frame(self):
        """Return identifier for decorative frame of this widget if present."""
        return self.tk.call('wm', 'frame', self._w)
    frame = wm_frame

    def wm_geometry(self, newGeometry=None):
        """Set geometry to NEWGEOMETRY of the form =widthxheight+x+y. Return
        current value if None is given."""
        return self.tk.call('wm', 'geometry', self._w, newGeometry)
    geometry = wm_geometry

    def wm_grid(self,
                baseWidth=None, baseHeight=None,
                widthInc=None, heightInc=None):
        """Instruct the window manager that this widget shall only be
        resized on grid boundaries. WIDTHINC and HEIGHTINC are the width and
        height of a grid unit in pixels. BASEWIDTH and BASEHEIGHT are the
        number of grid units requested in Tk_GeometryRequest."""
        return self._getints(self.tk.call(
            'wm', 'grid', self._w,
            baseWidth, baseHeight, widthInc, heightInc))
    grid = wm_grid

    def wm_group(self, pathName=None):
        """Set the group leader widgets for related widgets to PATHNAME. Return
        the group leader of this widget if None is given."""
        return self.tk.call('wm', 'group', self._w, pathName)
    group = wm_group

    def wm_iconbitmap(self, bitmap=None, default=None):
        """Set bitmap for the iconified widget to BITMAP. Return
        the bitmap if None is given.

        Under Windows, the DEFAULT parameter can be used to set the icon
        for the widget and any descendents that don't have an icon set
        explicitly. DEFAULT can be the relative path to a .ico file
        (example: root.iconbitmap(default='myicon.ico') ). See Tk
        documentation for more information."""
        # DEFAULT takes precedence over BITMAP when both are supplied.
        if default:
            return self.tk.call('wm', 'iconbitmap', self._w, '-default', default)
        else:
            return self.tk.call('wm', 'iconbitmap', self._w, bitmap)
    iconbitmap = wm_iconbitmap

    def wm_iconify(self):
        """Display widget as icon."""
        return self.tk.call('wm', 'iconify', self._w)
    iconify = wm_iconify

    def wm_iconmask(self, bitmap=None):
        """Set mask for the icon bitmap of this widget. Return the
        mask if None is given."""
        return self.tk.call('wm', 'iconmask', self._w, bitmap)
    iconmask = wm_iconmask

    def wm_iconname(self, newName=None):
        """Set the name of the icon for this widget. Return the name if
        None is given."""
        return self.tk.call('wm', 'iconname', self._w, newName)
    iconname = wm_iconname

    def wm_iconphoto(self, default=False, *args): # new in Tk 8.5
        """Sets the titlebar icon for this window based on the named photo
        images passed through args. If default is True, this is applied to
        all future created toplevels as well.

        The data in the images is taken as a snapshot at the time of
        invocation. If the images are later changed, this is not reflected
        to the titlebar icons. Multiple images are accepted to allow
        different image sizes to be provided. The window manager may scale
        provided icons to an appropriate size.

        On Windows, the images are packed into a Windows icon structure.
        This will override an icon specified to wm_iconbitmap, and vice
        versa.

        On X, the images are arranged into the _NET_WM_ICON X property,
        which most modern window managers support. An icon specified by
        wm_iconbitmap may exist simultaneously.

        On Macintosh, this currently does nothing."""
        if default:
            self.tk.call('wm', 'iconphoto', self._w, "-default", *args)
        else:
            self.tk.call('wm', 'iconphoto', self._w, *args)
    iconphoto = wm_iconphoto

    def wm_iconposition(self, x=None, y=None):
        """Set the position of the icon of this widget to X and Y. Return
        a tuple of the current values of X and Y if None is given."""
        return self._getints(self.tk.call(
            'wm', 'iconposition', self._w, x, y))
    iconposition = wm_iconposition

    def wm_iconwindow(self, pathName=None):
        """Set widget PATHNAME to be displayed instead of icon. Return the current
        value if None is given."""
        return self.tk.call('wm', 'iconwindow', self._w, pathName)
    iconwindow = wm_iconwindow

    def wm_manage(self, widget): # new in Tk 8.5
        """The widget specified will become a stand alone top-level window.
        The window will be decorated with the window managers title bar,
        etc."""
        self.tk.call('wm', 'manage', widget)
    manage = wm_manage

    def wm_maxsize(self, width=None, height=None):
        """Set max WIDTH and HEIGHT for this widget. If the window is gridded
        the values are given in grid units. Return the current values if None
        is given."""
        return self._getints(self.tk.call(
            'wm', 'maxsize', self._w, width, height))
    maxsize = wm_maxsize

    def wm_minsize(self, width=None, height=None):
        """Set min WIDTH and HEIGHT for this widget. If the window is gridded
        the values are given in grid units. Return the current values if None
        is given."""
        return self._getints(self.tk.call(
            'wm', 'minsize', self._w, width, height))
    minsize = wm_minsize

    def wm_overrideredirect(self, boolean=None):
        """Instruct the window manager to ignore this widget
        if BOOLEAN is given with 1. Return the current value if None
        is given."""
        return self._getboolean(self.tk.call(
            'wm', 'overrideredirect', self._w, boolean))
    overrideredirect = wm_overrideredirect

    def wm_positionfrom(self, who=None):
        """Instruct the window manager that the position of this widget shall
        be defined by the user if WHO is "user", and by its own policy if WHO is
        "program"."""
        return self.tk.call('wm', 'positionfrom', self._w, who)
    positionfrom = wm_positionfrom

    def wm_protocol(self, name=None, func=None):
        """Bind function FUNC to command NAME for this widget.
        Return the function bound to NAME if None is given. NAME could be
        e.g. "WM_SAVE_YOURSELF" or "WM_DELETE_WINDOW"."""
        # A Python callable must be registered as a Tcl command first.
        if callable(func):
            command = self._register(func)
        else:
            command = func
        return self.tk.call(
            'wm', 'protocol', self._w, name, command)
    protocol = wm_protocol

    def wm_resizable(self, width=None, height=None):
        """Instruct the window manager whether this width can be resized
        in WIDTH or HEIGHT. Both values are boolean values."""
        return self.tk.call('wm', 'resizable', self._w, width, height)
    resizable = wm_resizable

    def wm_sizefrom(self, who=None):
        """Instruct the window manager that the size of this widget shall
        be defined by the user if WHO is "user", and by its own policy if WHO is
        "program"."""
        return self.tk.call('wm', 'sizefrom', self._w, who)
    sizefrom = wm_sizefrom

    def wm_state(self, newstate=None):
        """Query or set the state of this widget as one of normal, icon,
        iconic (see wm_iconwindow), withdrawn, or zoomed (Windows only)."""
        return self.tk.call('wm', 'state', self._w, newstate)
    state = wm_state

    def wm_title(self, string=None):
        """Set the title of this widget."""
        return self.tk.call('wm', 'title', self._w, string)
    title = wm_title

    def wm_transient(self, master=None):
        """Instruct the window manager that this widget is transient
        with regard to widget MASTER."""
        return self.tk.call('wm', 'transient', self._w, master)
    transient = wm_transient

    def wm_withdraw(self):
        """Withdraw this widget from the screen such that it is unmapped
        and forgotten by the window manager. Re-draw it with wm_deiconify."""
        return self.tk.call('wm', 'withdraw', self._w)
    withdraw = wm_withdraw
class Tk(Misc, Wm):
    """Toplevel widget of Tk which represents mostly the main window
    of an application. It has an associated Tcl interpreter."""

    _w = '.'  # the Tk root window is always named '.' in Tcl

    def __init__(self, screenName=None, baseName=None, className='Tk',
                 useTk=1, sync=0, use=None):
        """Return a new Toplevel widget on screen SCREENNAME. A new Tcl interpreter will
        be created. BASENAME will be used for the identification of the profile file (see
        readprofile).
        It is constructed from sys.argv[0] without extensions if None is given. CLASSNAME
        is the name of the widget class."""
        self.master = None
        self.children = {}
        self._tkloaded = 0
        # to avoid recursions in the getattr code in case of failure, we
        # ensure that self.tk is always _something_.
        self.tk = None
        if baseName is None:
            import os
            baseName = os.path.basename(sys.argv[0])
            baseName, ext = os.path.splitext(baseName)
            # Only the standard Python extensions are stripped; anything
            # else stays part of the profile base name.
            if ext not in ('.py', '.pyc', '.pyo'):
                baseName = baseName + ext
        interactive = 0
        self.tk = _tkinter.create(screenName, baseName, className, interactive, wantobjects, useTk, sync, use)
        if useTk:
            self._loadtk()
        if not sys.flags.ignore_environment:
            # Issue #16248: Honor the -E flag to avoid code injection.
            self.readprofile(baseName, className)

    def loadtk(self):
        """Load Tk into the interpreter, if that has not happened yet."""
        if not self._tkloaded:
            self.tk.loadtk()
            self._loadtk()

    def _loadtk(self):
        """Internal function. Finish Tk initialisation: check library
        versions, register the core Tcl commands and install the default
        WM_DELETE_WINDOW handler."""
        self._tkloaded = 1
        global _default_root
        # Version sanity checks
        tk_version = self.tk.getvar('tk_version')
        if tk_version != _tkinter.TK_VERSION:
            raise RuntimeError("tk.h version (%s) doesn't match libtk.a version (%s)"
                               % (_tkinter.TK_VERSION, tk_version))
        # Under unknown circumstances, tcl_version gets coerced to float
        tcl_version = str(self.tk.getvar('tcl_version'))
        if tcl_version != _tkinter.TCL_VERSION:
            raise RuntimeError("tcl.h version (%s) doesn't match libtcl.a version (%s)" \
                % (_tkinter.TCL_VERSION, tcl_version))
        if TkVersion < 4.0:
            raise RuntimeError("Tk 4.0 or higher is required; found Tk %s"
                               % str(TkVersion))
        # Create and register the tkerror and exit commands
        # We need to inline parts of _register here, _register
        # would register differently-named commands.
        if self._tclCommands is None:
            self._tclCommands = []
        self.tk.createcommand('tkerror', _tkerror)
        self.tk.createcommand('exit', _exit)
        self._tclCommands.append('tkerror')
        self._tclCommands.append('exit')
        if _support_default_root and not _default_root:
            _default_root = self
        self.protocol("WM_DELETE_WINDOW", self.destroy)

    def destroy(self):
        """Destroy this and all descendants widgets. This will
        end the application of this Tcl interpreter."""
        for c in list(self.children.values()): c.destroy()
        self.tk.call('destroy', self._w)
        Misc.destroy(self)
        global _default_root
        if _support_default_root and _default_root is self:
            _default_root = None

    def readprofile(self, baseName, className):
        """Internal function. It reads BASENAME.tcl and CLASSNAME.tcl into
        the Tcl Interpreter and calls exec on the contents of BASENAME.py and
        CLASSNAME.py if such a file exists in the home directory.

        NOTE(review): the .py profiles are executed with exec() and the .tcl
        profiles are sourced into the interpreter, so only trusted files
        should live at these locations."""
        import os
        if 'HOME' in os.environ: home = os.environ['HOME']
        else: home = os.curdir
        class_tcl = os.path.join(home, '.%s.tcl' % className)
        class_py = os.path.join(home, '.%s.py' % className)
        base_tcl = os.path.join(home, '.%s.tcl' % baseName)
        base_py = os.path.join(home, '.%s.py' % baseName)
        dir = {'self': self}
        exec('from tkinter import *', dir)
        if os.path.isfile(class_tcl):
            self.tk.call('source', class_tcl)
        if os.path.isfile(class_py):
            exec(open(class_py).read(), dir)
        if os.path.isfile(base_tcl):
            self.tk.call('source', base_tcl)
        if os.path.isfile(base_py):
            exec(open(base_py).read(), dir)

    def report_callback_exception(self, exc, val, tb):
        """Report callback exception on sys.stderr.

        Applications may want to override this internal function, and
        should when sys.stderr is None."""
        import traceback
        print("Exception in Tkinter callback", file=sys.stderr)
        # Mirror the interpreter's convention for interactive debugging.
        sys.last_type = exc
        sys.last_value = val
        sys.last_traceback = tb
        traceback.print_exception(exc, val, tb)

    def __getattr__(self, attr):
        "Delegate attribute access to the interpreter object"
        return getattr(self.tk, attr)
# Ideally, the classes Pack, Place and Grid disappear, the
# pack/place/grid methods are defined on the Widget class, and
# everybody uses w.pack_whatever(...) instead of Pack.whatever(w,
# ...), with pack(), place() and grid() being short for
# pack_configure(), place_configure() and grid_columnconfigure(), and
# forget() being short for pack_forget(). As a practical matter, I'm
# afraid that there is too much code out there that may be using the
# Pack, Place or Grid class, so I leave them intact -- but only as
# backwards compatibility features. Also note that those methods that
# take a master as argument (e.g. pack_propagate) have been moved to
# the Misc class (which now incorporates all methods common between
# toplevel and interior widgets). Again, for compatibility, these are
# copied into the Pack, Place or Grid class.
def Tcl(screenName=None, baseName=None, className='Tk', useTk=0):
    """Return a Tk instance that, by default (useTk=0), does not
    initialize the Tk subsystem — i.e. a plain Tcl interpreter."""
    return Tk(screenName, baseName, className, useTk)
class Pack:
    """Geometry manager Pack.

    Base class providing the pack_* methods for every widget."""

    def pack_configure(self, cnf={}, **kw):
        """Pack a widget in the parent widget. Use as options:
        after=widget - pack it after you have packed widget
        anchor=NSEW (or subset) - position widget according to
                                  given direction
        before=widget - pack it before you will pack widget
        expand=bool - expand widget if parent size grows
        fill=NONE or X or Y or BOTH - fill widget if widget grows
        in=master - use master to contain this widget
        in_=master - see 'in' option description
        ipadx=amount - add internal padding in x direction
        ipady=amount - add internal padding in y direction
        padx=amount - add padding in x direction
        pady=amount - add padding in y direction
        side=TOP or BOTTOM or LEFT or RIGHT - where to add this widget.
        """
        options = self._options(cnf, kw)
        self.tk.call(('pack', 'configure', self._w) + options)
    pack = configure = config = pack_configure

    def pack_forget(self):
        """Unmap this widget and do not use it for the packing order."""
        self.tk.call('pack', 'forget', self._w)
    forget = pack_forget

    def pack_info(self):
        """Return information about the packing options
        for this widget."""
        info = _splitdict(self.tk, self.tk.call('pack', 'info', self._w))
        if 'in' in info:
            info['in'] = self.nametowidget(info['in'])
        return info
    info = pack_info
    propagate = pack_propagate = Misc.pack_propagate
    slaves = pack_slaves = Misc.pack_slaves
class Place:
    """Geometry manager Place.

    Base class providing the place_* methods for every widget."""

    def place_configure(self, cnf={}, **kw):
        """Place a widget in the parent widget. Use as options:
        in=master - master relative to which the widget is placed
        in_=master - see 'in' option description
        x=amount - locate anchor of this widget at position x of master
        y=amount - locate anchor of this widget at position y of master
        relx=amount - locate anchor of this widget between 0.0 and 1.0
                      relative to width of master (1.0 is right edge)
        rely=amount - locate anchor of this widget between 0.0 and 1.0
                      relative to height of master (1.0 is bottom edge)
        anchor=NSEW (or subset) - position anchor according to given direction
        width=amount - width of this widget in pixel
        height=amount - height of this widget in pixel
        relwidth=amount - width of this widget between 0.0 and 1.0
                          relative to width of master (1.0 is the same width
                          as the master)
        relheight=amount - height of this widget between 0.0 and 1.0
                           relative to height of master (1.0 is the same
                           height as the master)
        bordermode="inside" or "outside" - whether to take border width of
                                           master widget into account
        """
        options = self._options(cnf, kw)
        self.tk.call(('place', 'configure', self._w) + options)
    place = configure = config = place_configure

    def place_forget(self):
        """Unmap this widget."""
        self.tk.call('place', 'forget', self._w)
    forget = place_forget

    def place_info(self):
        """Return information about the placing options
        for this widget."""
        info = _splitdict(self.tk, self.tk.call('place', 'info', self._w))
        if 'in' in info:
            info['in'] = self.nametowidget(info['in'])
        return info
    info = place_info
    slaves = place_slaves = Misc.place_slaves
class Grid:
    """Geometry manager Grid.

    Base class providing the grid_* methods for every widget."""
    # Thanks to Masazumi Yoshikawa (yosikawa@isi.edu)

    def grid_configure(self, cnf={}, **kw):
        """Position a widget in the parent widget in a grid. Use as options:
        column=number - use cell identified with given column (starting with 0)
        columnspan=number - this widget will span several columns
        in=master - use master to contain this widget
        in_=master - see 'in' option description
        ipadx=amount - add internal padding in x direction
        ipady=amount - add internal padding in y direction
        padx=amount - add padding in x direction
        pady=amount - add padding in y direction
        row=number - use cell identified with given row (starting with 0)
        rowspan=number - this widget will span several rows
        sticky=NSEW - if cell is larger on which sides will this
                      widget stick to the cell boundary
        """
        options = self._options(cnf, kw)
        self.tk.call(('grid', 'configure', self._w) + options)
    grid = configure = config = grid_configure
    bbox = grid_bbox = Misc.grid_bbox
    columnconfigure = grid_columnconfigure = Misc.grid_columnconfigure

    def grid_forget(self):
        """Unmap this widget."""
        self.tk.call('grid', 'forget', self._w)
    forget = grid_forget

    def grid_remove(self):
        """Unmap this widget but remember the grid options."""
        self.tk.call('grid', 'remove', self._w)

    def grid_info(self):
        """Return information about the options
        for positioning this widget in a grid."""
        info = _splitdict(self.tk, self.tk.call('grid', 'info', self._w))
        if 'in' in info:
            info['in'] = self.nametowidget(info['in'])
        return info
    info = grid_info
    location = grid_location = Misc.grid_location
    propagate = grid_propagate = Misc.grid_propagate
    rowconfigure = grid_rowconfigure = Misc.grid_rowconfigure
    size = grid_size = Misc.grid_size
    slaves = grid_slaves = Misc.grid_slaves
class BaseWidget(Misc):
    """Internal class. Common creation/destruction machinery for all widgets."""

    def _setup(self, master, cnf):
        """Internal function. Sets up information about children."""
        if _support_default_root:
            global _default_root
            if not master:
                # No explicit master: fall back to (and lazily create)
                # the process-wide default root window.
                if not _default_root:
                    _default_root = Tk()
                master = _default_root
        self.master = master
        self.tk = master.tk
        name = None
        if 'name' in cnf:
            name = cnf['name']
            del cnf['name']
        if not name:
            # No explicit name: derive a unique one from the object's id.
            name = repr(id(self))
        self._name = name
        if master._w=='.':
            self._w = '.' + name
        else:
            self._w = master._w + '.' + name
        self.children = {}
        # A name collision destroys the previous widget of that name.
        if self._name in self.master.children:
            self.master.children[self._name].destroy()
        self.master.children[self._name] = self

    def __init__(self, master, widgetName, cnf={}, kw={}, extra=()):
        """Construct a widget with the parent widget MASTER, a name WIDGETNAME
        and appropriate options.

        NOTE(review): CNF may be mutated in place (keys are deleted below);
        the mutable {} defaults are shared but never written to directly —
        preserved as-is for backward compatibility."""
        if kw:
            cnf = _cnfmerge((cnf, kw))
        self.widgetName = widgetName
        BaseWidget._setup(self, master, cnf)
        if self._tclCommands is None:
            self._tclCommands = []
        # Options keyed by a class object are deferred configuration hooks:
        # strip them from cnf and invoke k.configure(self, v) after creation.
        classes = [(k, v) for k, v in cnf.items() if isinstance(k, type)]
        for k, v in classes:
            del cnf[k]
        self.tk.call(
            (widgetName, self._w) + extra + self._options(cnf))
        for k, v in classes:
            k.configure(self, v)

    def destroy(self):
        """Destroy this and all descendants widgets."""
        for c in list(self.children.values()): c.destroy()
        self.tk.call('destroy', self._w)
        if self._name in self.master.children:
            del self.master.children[self._name]
        Misc.destroy(self)

    def _do(self, name, args=()):
        # XXX Obsolete -- better use self.tk.call directly!
        return self.tk.call((self._w, name) + args)
class Widget(BaseWidget, Pack, Place, Grid):
    """Internal class.

    Base class for a widget which can be positioned with the geometry
    managers Pack, Place or Grid."""
    pass
class Toplevel(BaseWidget, Wm):
    """Toplevel widget, e.g. for dialogs."""

    def __init__(self, master=None, cnf={}, **kw):
        """Construct a toplevel widget with the parent MASTER.

        Valid resource names: background, bd, bg, borderwidth, class,
        colormap, container, cursor, height, highlightbackground,
        highlightcolor, highlightthickness, menu, relief, screen, takefocus,
        use, visual, width."""
        if kw:
            cnf = _cnfmerge((cnf, kw))
        extra = ()
        # These options must be passed on the Tcl creation command line
        # rather than configured afterwards; pull them out of cnf.
        for wmkey in ['screen', 'class_', 'class', 'visual',
                      'colormap']:
            if wmkey in cnf:
                val = cnf[wmkey]
                # TBD: a hack needed because some keys
                # are not valid as keyword arguments
                if wmkey[-1] == '_': opt = '-'+wmkey[:-1]
                else: opt = '-'+wmkey
                extra = extra + (opt, val)
                del cnf[wmkey]
        BaseWidget.__init__(self, master, 'toplevel', cnf, {}, extra)
        root = self._root()
        # Inherit icon name and title from the root window by default.
        self.iconname(root.iconname())
        self.title(root.title())
        self.protocol("WM_DELETE_WINDOW", self.destroy)
class Button(Widget):
    """Button widget."""

    def __init__(self, master=None, cnf={}, **kw):
        """Construct a button widget with the parent MASTER.

        STANDARD OPTIONS

            activebackground, activeforeground, anchor,
            background, bitmap, borderwidth, cursor,
            disabledforeground, font, foreground
            highlightbackground, highlightcolor,
            highlightthickness, image, justify,
            padx, pady, relief, repeatdelay,
            repeatinterval, takefocus, text,
            textvariable, underline, wraplength

        WIDGET-SPECIFIC OPTIONS

            command, compound, default, height,
            overrelief, state, width
        """
        Widget.__init__(self, master, 'button', cnf, kw)

    # The tkButton* helpers below invoke the corresponding internal Tk
    # binding procedures directly; they are rarely needed by applications.
    def tkButtonEnter(self, *dummy):
        self.tk.call('tkButtonEnter', self._w)

    def tkButtonLeave(self, *dummy):
        self.tk.call('tkButtonLeave', self._w)

    def tkButtonDown(self, *dummy):
        self.tk.call('tkButtonDown', self._w)

    def tkButtonUp(self, *dummy):
        self.tk.call('tkButtonUp', self._w)

    def tkButtonInvoke(self, *dummy):
        self.tk.call('tkButtonInvoke', self._w)

    def flash(self):
        """Flash the button.

        This is accomplished by redisplaying
        the button several times, alternating between active and
        normal colors. At the end of the flash the button is left
        in the same normal/active state as when the command was
        invoked. This command is ignored if the button's state is
        disabled.
        """
        self.tk.call(self._w, 'flash')

    def invoke(self):
        """Invoke the command associated with the button.

        The return value is the return value from the command,
        or an empty string if there is no command associated with
        the button. This command is ignored if the button's state
        is disabled.
        """
        return self.tk.call(self._w, 'invoke')
class Canvas(Widget, XView, YView):
"""Canvas widget to display graphical elements like lines or text."""
def __init__(self, master=None, cnf={}, **kw):
    """Construct a canvas widget with the parent MASTER.

    Valid resource names: background, bd, bg, borderwidth, closeenough,
    confine, cursor, height, highlightbackground, highlightcolor,
    highlightthickness, insertbackground, insertborderwidth,
    insertofftime, insertontime, insertwidth, offset, relief,
    scrollregion, selectbackground, selectborderwidth, selectforeground,
    state, takefocus, width, xscrollcommand, xscrollincrement,
    yscrollcommand, yscrollincrement."""
    Widget.__init__(self, master, 'canvas', cnf, kw)
def addtag(self, *args):
    """Internal function. Forward an 'addtag' subcommand to Tcl."""
    command = (self._w, 'addtag') + args
    self.tk.call(command)
def addtag_above(self, newtag, tagOrId):
    """Add tag NEWTAG to all items above TAGORID in the display list."""
    self.addtag(newtag, 'above', tagOrId)
def addtag_all(self, newtag):
    """Add tag NEWTAG to all items on the canvas."""
    self.addtag(newtag, 'all')
def addtag_below(self, newtag, tagOrId):
    """Add tag NEWTAG to all items below TAGORID in the display list."""
    self.addtag(newtag, 'below', tagOrId)
def addtag_closest(self, newtag, x, y, halo=None, start=None):
    """Add tag NEWTAG to the item closest to pixel X, Y.

    If several match, the top-most is taken. All items closer than HALO
    are considered overlapping (all are closest). If START is specified
    the next item below this tag is taken."""
    self.addtag(newtag, 'closest', x, y, halo, start)
def addtag_enclosed(self, newtag, x1, y1, x2, y2):
    """Add tag NEWTAG to all items completely inside the rectangle
    defined by X1,Y1,X2,Y2."""
    self.addtag(newtag, 'enclosed', x1, y1, x2, y2)
def addtag_overlapping(self, newtag, x1, y1, x2, y2):
    """Add tag NEWTAG to all items which overlap (or are inside) the
    rectangle defined by X1,Y1,X2,Y2."""
    self.addtag(newtag, 'overlapping', x1, y1, x2, y2)
def addtag_withtag(self, newtag, tagOrId):
    """Add tag NEWTAG to all items identified by TAGORID."""
    self.addtag(newtag, 'withtag', tagOrId)
def bbox(self, *args):
    """Return a tuple of X1,Y1,X2,Y2 coordinates for a rectangle
    which encloses all items with tags specified as arguments.

    Returns None when no matching item has a bounding box."""
    return self._getints(
        self.tk.call((self._w, 'bbox') + args)) or None
def tag_unbind(self, tagOrId, sequence, funcid=None):
    """Unbind for all items with TAGORID for event SEQUENCE the
    function identified with FUNCID."""
    self.tk.call(self._w, 'bind', tagOrId, sequence, '')
    if funcid:
        # Also delete the Tcl command registered for the Python callback.
        self.deletecommand(funcid)
def tag_bind(self, tagOrId, sequence=None, func=None, add=None):
    """Bind to all items with TAGORID at event SEQUENCE a call to function FUNC.

    An additional boolean parameter ADD specifies whether FUNC will be
    called additionally to the other bound function or whether it will
    replace the previous function. See bind for the return value."""
    return self._bind((self._w, 'bind', tagOrId),
                      sequence, func, add)
def canvasx(self, screenx, gridspacing=None):
    """Return the canvas x coordinate (a float) of pixel position SCREENX,
    rounded to the nearest multiple of GRIDSPACING units if given."""
    return getdouble(self.tk.call(
        self._w, 'canvasx', screenx, gridspacing))
def canvasy(self, screeny, gridspacing=None):
    """Return the canvas y coordinate (a float) of pixel position SCREENY,
    rounded to the nearest multiple of GRIDSPACING units if given."""
    return getdouble(self.tk.call(
        self._w, 'canvasy', screeny, gridspacing))
def coords(self, *args):
    """Return a list of float coordinates for the item given in ARGS."""
    # XXX Should use _flatten on args
    return [getdouble(x) for x in
            self.tk.splitlist(
                self.tk.call((self._w, 'coords') + args))]
def _create(self, itemType, args, kw): # Args: (val, val, ..., cnf={})
    """Internal function. Create a canvas item of type ITEMTYPE.

    ARGS holds the coordinates, optionally followed by a trailing
    configuration dict/tuple; KW supplies further options. Returns the
    integer id of the new item."""
    args = _flatten(args)
    cnf = args[-1]
    # A trailing dict (or option tuple) is configuration, not a coordinate.
    if isinstance(cnf, (dict, tuple)):
        args = args[:-1]
    else:
        cnf = {}
    return getint(self.tk.call(
        self._w, 'create', itemType,
        *(args + self._options(cnf, kw))))
def create_arc(self, *args, **kw):
    """Create arc shaped region with coordinates x1,y1,x2,y2.
    Returns the integer id of the new item."""
    return self._create('arc', args, kw)
def create_bitmap(self, *args, **kw):
    """Create bitmap with coordinates x1,y1.
    Returns the integer id of the new item."""
    return self._create('bitmap', args, kw)
def create_image(self, *args, **kw):
    """Create image item with coordinates x1,y1.
    Returns the integer id of the new item."""
    return self._create('image', args, kw)
def create_line(self, *args, **kw):
    """Create line with coordinates x1,y1,...,xn,yn.
    Returns the integer id of the new item."""
    return self._create('line', args, kw)
def create_oval(self, *args, **kw):
    """Create oval with coordinates x1,y1,x2,y2.
    Returns the integer id of the new item."""
    return self._create('oval', args, kw)
def create_polygon(self, *args, **kw):
    """Create polygon with coordinates x1,y1,...,xn,yn.
    Returns the integer id of the new item."""
    return self._create('polygon', args, kw)
def create_rectangle(self, *args, **kw):
    """Create rectangle with coordinates x1,y1,x2,y2.
    Returns the integer id of the new item."""
    return self._create('rectangle', args, kw)
def create_text(self, *args, **kw):
    """Create text with coordinates x1,y1.
    Returns the integer id of the new item."""
    return self._create('text', args, kw)
def create_window(self, *args, **kw):
    """Create window with coordinates x1,y1,x2,y2.
    Returns the integer id of the new item."""
    return self._create('window', args, kw)
def dchars(self, *args):
    """Delete characters of text items identified by tag or id in ARGS (possibly
    several times) from FIRST to LAST character (including)."""
    self.tk.call((self._w, 'dchars') + args)
def delete(self, *args):
    """Delete every item identified by any of the tags or ids in ARGS."""
    command = (self._w, 'delete') + args
    self.tk.call(command)
def dtag(self, *args):
    """Delete tag or id given as last arguments in ARGS from items
    identified by first argument in ARGS."""
    self.tk.call((self._w, 'dtag') + args)
def find(self, *args):
    """Internal function. Run a 'find' subcommand and return a tuple of
    matching item ids (an empty tuple when nothing matches)."""
    return self._getints(
        self.tk.call((self._w, 'find') + args)) or ()
def find_above(self, tagOrId):
    """Return the ids of items above TAGORID in the display list."""
    return self.find('above', tagOrId)
def find_all(self):
    """Return the ids of all items on the canvas."""
    return self.find('all')
def find_below(self, tagOrId):
    """Return the ids of all items below TAGORID in the display list."""
    return self.find('below', tagOrId)
def find_closest(self, x, y, halo=None, start=None):
    """Return the item closest to pixel X, Y.

    If several match, the top-most is taken. All items closer than HALO
    are considered overlapping (all are closest). If START is specified
    the next item below this tag is taken."""
    return self.find('closest', x, y, halo, start)
    def find_enclosed(self, x1, y1, x2, y2):
        """Return all items completely enclosed in the rectangle defined
        by X1,Y1,X2,Y2."""
        return self.find('enclosed', x1, y1, x2, y2)
    def find_overlapping(self, x1, y1, x2, y2):
        """Return all items which overlap the rectangle
        defined by X1,Y1,X2,Y2."""
        return self.find('overlapping', x1, y1, x2, y2)
    def find_withtag(self, tagOrId):
        """Return all items with TAGORID."""
        return self.find('withtag', tagOrId)
    def focus(self, *args):
        """Set focus to the first item specified in ARGS.

        Returns the result of the underlying Tk 'focus' command."""
        return self.tk.call((self._w, 'focus') + args)
    def gettags(self, *args):
        """Return the tags associated with the first item specified in ARGS
        as a tuple."""
        return self.tk.splitlist(
            self.tk.call((self._w, 'gettags') + args))
    def icursor(self, *args):
        """Set cursor at position POS in the item identified by TAGORID.
        In ARGS TAGORID must come first."""
        self.tk.call((self._w, 'icursor') + args)
    def index(self, *args):
        """Return position of cursor as integer in item specified in ARGS."""
        return getint(self.tk.call((self._w, 'index') + args))
    def insert(self, *args):
        """Insert TEXT in item TAGORID at position POS. ARGS must
        be TAGORID POS TEXT."""
        self.tk.call((self._w, 'insert') + args)
def itemcget(self, tagOrId, option):
"""Return the resource value for an OPTION for item TAGORID."""
return self.tk.call(
(self._w, 'itemcget') + (tagOrId, '-'+option))
    def itemconfigure(self, tagOrId, cnf=None, **kw):
        """Configure resources of an item TAGORID.

        The values for resources are specified as keyword
        arguments. To get an overview about
        the allowed keyword arguments call the method without arguments.
        """
        return self._configure(('itemconfigure', tagOrId), cnf, kw)
    itemconfig = itemconfigure  # backward-compatible alias
    # lower, tkraise/lift hide Misc.lower, Misc.tkraise/lift,
    # so the preferred name for them is tag_lower, tag_raise
    # (similar to tag_bind, and similar to the Text widget);
    # unfortunately can't delete the old ones yet (maybe in 1.6)
    def tag_lower(self, *args):
        """Lower an item TAGORID given in ARGS
        (optionally below another item)."""
        self.tk.call((self._w, 'lower') + args)
    lower = tag_lower  # old name, kept for compatibility (see note above)
    def move(self, *args):
        """Move an item TAGORID given in ARGS."""
        self.tk.call((self._w, 'move') + args)
    def postscript(self, cnf={}, **kw):
        """Print the contents of the canvas to a postscript
        file. Valid options: colormap, colormode, file, fontmap,
        height, pageanchor, pageheight, pagewidth, pagex, pagey,
        rotate, width, x, y."""
        return self.tk.call((self._w, 'postscript') +
                    self._options(cnf, kw))
    def tag_raise(self, *args):
        """Raise an item TAGORID given in ARGS
        (optionally above another item)."""
        self.tk.call((self._w, 'raise') + args)
    lift = tkraise = tag_raise  # old names, kept for compatibility
    def scale(self, *args):
        """Scale item TAGORID with XORIGIN, YORIGIN, XSCALE, YSCALE."""
        self.tk.call((self._w, 'scale') + args)
    def scan_mark(self, x, y):
        """Remember the current X, Y coordinates for use by scan_dragto."""
        self.tk.call(self._w, 'scan', 'mark', x, y)
    def scan_dragto(self, x, y, gain=10):
        """Adjust the view of the canvas to GAIN times the
        difference between X and Y and the coordinates given in
        scan_mark."""
        self.tk.call(self._w, 'scan', 'dragto', x, y, gain)
    def select_adjust(self, tagOrId, index):
        """Adjust the end of the selection near the cursor of an item TAGORID to INDEX."""
        self.tk.call(self._w, 'select', 'adjust', tagOrId, index)
    def select_clear(self):
        """Clear the selection if it is in this widget."""
        self.tk.call(self._w, 'select', 'clear')
    def select_from(self, tagOrId, index):
        """Set the fixed end of a selection in item TAGORID to INDEX."""
        self.tk.call(self._w, 'select', 'from', tagOrId, index)
    def select_item(self):
        """Return the item which has the selection, or None."""
        return self.tk.call(self._w, 'select', 'item') or None
    def select_to(self, tagOrId, index):
        """Set the variable end of a selection in item TAGORID to INDEX."""
        self.tk.call(self._w, 'select', 'to', tagOrId, index)
    def type(self, tagOrId):
        """Return the type of the item TAGORID, or None.

        NOTE: shadows the builtin type() within this class."""
        return self.tk.call(self._w, 'type', tagOrId) or None
class Checkbutton(Widget):
    """Checkbutton widget which is either in on- or off-state."""
    def __init__(self, master=None, cnf={}, **kw):
        """Construct a checkbutton widget with the parent MASTER.

        Valid resource names: activebackground, activeforeground, anchor,
        background, bd, bg, bitmap, borderwidth, command, cursor,
        disabledforeground, fg, font, foreground, height,
        highlightbackground, highlightcolor, highlightthickness, image,
        indicatoron, justify, offvalue, onvalue, padx, pady, relief,
        selectcolor, selectimage, state, takefocus, text, textvariable,
        underline, variable, width, wraplength."""
        Widget.__init__(self, master, 'checkbutton', cnf, kw)
    def deselect(self):
        """Put the button in off-state."""
        self.tk.call(self._w, 'deselect')
    def flash(self):
        """Flash the button."""
        self.tk.call(self._w, 'flash')
    def invoke(self):
        """Toggle the button and invoke a command if given as resource.

        Returns the result of the invoked command."""
        return self.tk.call(self._w, 'invoke')
    def select(self):
        """Put the button in on-state."""
        self.tk.call(self._w, 'select')
    def toggle(self):
        """Toggle the button."""
        self.tk.call(self._w, 'toggle')
class Entry(Widget, XView):
    """Entry widget which allows displaying simple text."""
    def __init__(self, master=None, cnf={}, **kw):
        """Construct an entry widget with the parent MASTER.

        Valid resource names: background, bd, bg, borderwidth, cursor,
        exportselection, fg, font, foreground, highlightbackground,
        highlightcolor, highlightthickness, insertbackground,
        insertborderwidth, insertofftime, insertontime, insertwidth,
        invalidcommand, invcmd, justify, relief, selectbackground,
        selectborderwidth, selectforeground, show, state, takefocus,
        textvariable, validate, validatecommand, vcmd, width,
        xscrollcommand."""
        Widget.__init__(self, master, 'entry', cnf, kw)
    def delete(self, first, last=None):
        """Delete text from FIRST to LAST (not included)."""
        self.tk.call(self._w, 'delete', first, last)
    def get(self):
        """Return the text."""
        return self.tk.call(self._w, 'get')
    def icursor(self, index):
        """Insert cursor at INDEX."""
        self.tk.call(self._w, 'icursor', index)
    def index(self, index):
        """Return position of cursor."""
        return getint(self.tk.call(
            self._w, 'index', index))
    def insert(self, index, string):
        """Insert STRING at INDEX."""
        self.tk.call(self._w, 'insert', index, string)
    def scan_mark(self, x):
        """Remember the current X coordinate for use by scan_dragto."""
        self.tk.call(self._w, 'scan', 'mark', x)
    def scan_dragto(self, x):
        """Adjust the view of the entry to 10 times the
        difference between X and the coordinate given in
        scan_mark."""
        self.tk.call(self._w, 'scan', 'dragto', x)
    def selection_adjust(self, index):
        """Adjust the end of the selection near the cursor to INDEX."""
        self.tk.call(self._w, 'selection', 'adjust', index)
    select_adjust = selection_adjust
    def selection_clear(self):
        """Clear the selection if it is in this widget."""
        self.tk.call(self._w, 'selection', 'clear')
    select_clear = selection_clear
    def selection_from(self, index):
        """Set the fixed end of a selection to INDEX."""
        self.tk.call(self._w, 'selection', 'from', index)
    select_from = selection_from
    def selection_present(self):
        """Return True if there are characters selected in the entry, False
        otherwise."""
        return self.tk.getboolean(
            self.tk.call(self._w, 'selection', 'present'))
    select_present = selection_present
    def selection_range(self, start, end):
        """Set the selection from START to END (not included)."""
        self.tk.call(self._w, 'selection', 'range', start, end)
    select_range = selection_range
    def selection_to(self, index):
        """Set the variable end of a selection to INDEX."""
        self.tk.call(self._w, 'selection', 'to', index)
    select_to = selection_to
class Frame(Widget):
    """Frame widget which may contain other widgets and can have a 3D border."""
    def __init__(self, master=None, cnf={}, **kw):
        """Construct a frame widget with the parent MASTER.

        Valid resource names: background, bd, bg, borderwidth, class,
        colormap, container, cursor, height, highlightbackground,
        highlightcolor, highlightthickness, relief, takefocus, visual, width."""
        cnf = _cnfmerge((cnf, kw))
        extra = ()
        # The widget class must be passed as the -class creation option and
        # removed from cnf; 'class_' (the keyword-safe spelling) wins over
        # 'class' when both are present.
        for key in ('class_', 'class'):
            if key in cnf:
                extra = ('-class', cnf.pop(key))
                break
        Widget.__init__(self, master, 'frame', cnf, {}, extra)
class Label(Widget):
    """Label widget which can display text and bitmaps."""
    def __init__(self, master=None, cnf={}, **kw):
        """Construct a label widget with the parent MASTER.

        STANDARD OPTIONS

            activebackground, activeforeground, anchor,
            background, bitmap, borderwidth, cursor,
            disabledforeground, font, foreground,
            highlightbackground, highlightcolor,
            highlightthickness, image, justify,
            padx, pady, relief, takefocus, text,
            textvariable, underline, wraplength

        WIDGET-SPECIFIC OPTIONS

            height, state, width
        """
        Widget.__init__(self, master, 'label', cnf, kw)
class Listbox(Widget, XView, YView):
    """Listbox widget which can display a list of strings."""
    def __init__(self, master=None, cnf={}, **kw):
        """Construct a listbox widget with the parent MASTER.

        Valid resource names: background, bd, bg, borderwidth, cursor,
        exportselection, fg, font, foreground, height, highlightbackground,
        highlightcolor, highlightthickness, relief, selectbackground,
        selectborderwidth, selectforeground, selectmode, setgrid, takefocus,
        width, xscrollcommand, yscrollcommand, listvariable."""
        Widget.__init__(self, master, 'listbox', cnf, kw)
    def activate(self, index):
        """Activate item identified by INDEX."""
        self.tk.call(self._w, 'activate', index)
    def bbox(self, index):
        """Return a tuple of X1,Y1,X2,Y2 coordinates for a rectangle
        which encloses the item identified by the given index.

        Returns None if the item is not visible."""
        return self._getints(self.tk.call(self._w, 'bbox', index)) or None
    def curselection(self):
        """Return a tuple of the indices of currently selected items."""
        return self._getints(self.tk.call(self._w, 'curselection')) or ()
    def delete(self, first, last=None):
        """Delete items from FIRST to LAST (included)."""
        self.tk.call(self._w, 'delete', first, last)
    def get(self, first, last=None):
        """Get list of items from FIRST to LAST (included)."""
        if last is not None:
            return self.tk.splitlist(self.tk.call(
                self._w, 'get', first, last))
        else:
            return self.tk.call(self._w, 'get', first)
    def index(self, index):
        """Return index of item identified with INDEX."""
        i = self.tk.call(self._w, 'index', index)
        if i == 'none': return None
        return getint(i)
    def insert(self, index, *elements):
        """Insert ELEMENTS at INDEX."""
        self.tk.call((self._w, 'insert', index) + elements)
    def nearest(self, y):
        """Get index of item which is nearest to y coordinate Y."""
        return getint(self.tk.call(
            self._w, 'nearest', y))
    def scan_mark(self, x, y):
        """Remember the current X, Y coordinates for use by scan_dragto."""
        self.tk.call(self._w, 'scan', 'mark', x, y)
    def scan_dragto(self, x, y):
        """Adjust the view of the listbox to 10 times the
        difference between X and Y and the coordinates given in
        scan_mark."""
        self.tk.call(self._w, 'scan', 'dragto', x, y)
    def see(self, index):
        """Scroll such that INDEX is visible."""
        self.tk.call(self._w, 'see', index)
    def selection_anchor(self, index):
        """Set the fixed end of the selection to INDEX."""
        self.tk.call(self._w, 'selection', 'anchor', index)
    select_anchor = selection_anchor
    def selection_clear(self, first, last=None):
        """Clear the selection from FIRST to LAST (included)."""
        self.tk.call(self._w,
                 'selection', 'clear', first, last)
    select_clear = selection_clear
    def selection_includes(self, index):
        """Return 1 if INDEX is part of the selection."""
        return self.tk.getboolean(self.tk.call(
            self._w, 'selection', 'includes', index))
    select_includes = selection_includes
    def selection_set(self, first, last=None):
        """Set the selection from FIRST to LAST (included) without
        changing the currently selected elements."""
        self.tk.call(self._w, 'selection', 'set', first, last)
    select_set = selection_set
    def size(self):
        """Return the number of elements in the listbox."""
        return getint(self.tk.call(self._w, 'size'))
    def itemcget(self, index, option):
        """Return the resource value for an ITEM and an OPTION."""
        return self.tk.call(
            (self._w, 'itemcget') + (index, '-'+option))
    def itemconfigure(self, index, cnf=None, **kw):
        """Configure resources of an ITEM.

        The values for resources are specified as keyword arguments.
        To get an overview about the allowed keyword arguments
        call the method without arguments.
        Valid resource names: background, bg, foreground, fg,
        selectbackground, selectforeground."""
        return self._configure(('itemconfigure', index), cnf, kw)
    itemconfig = itemconfigure  # backward-compatible alias
class Menu(Widget):
    """Menu widget which allows displaying menu bars, pull-down menus and pop-up menus."""
    def __init__(self, master=None, cnf={}, **kw):
        """Construct menu widget with the parent MASTER.

        Valid resource names: activebackground, activeborderwidth,
        activeforeground, background, bd, bg, borderwidth, cursor,
        disabledforeground, fg, font, foreground, postcommand, relief,
        selectcolor, takefocus, tearoff, tearoffcommand, title, type."""
        Widget.__init__(self, master, 'menu', cnf, kw)
    def tk_bindForTraversal(self):
        # obsolete since Tk 4.0
        import warnings
        warnings.warn('tk_bindForTraversal() does nothing and '
                      'will be removed in 3.6',
                      DeprecationWarning, stacklevel=2)
    def tk_mbPost(self):
        """Call the Tk library procedure tk_mbPost for this menubutton."""
        self.tk.call('tk_mbPost', self._w)
    def tk_mbUnpost(self):
        """Call the Tk library procedure tk_mbUnpost."""
        self.tk.call('tk_mbUnpost')
    def tk_traverseToMenu(self, char):
        """Call the Tk library procedure tk_traverseToMenu with CHAR."""
        self.tk.call('tk_traverseToMenu', self._w, char)
    def tk_traverseWithinMenu(self, char):
        """Call the Tk library procedure tk_traverseWithinMenu with CHAR."""
        self.tk.call('tk_traverseWithinMenu', self._w, char)
    def tk_getMenuButtons(self):
        """Call the Tk library procedure tk_getMenuButtons and return its result."""
        return self.tk.call('tk_getMenuButtons', self._w)
    def tk_nextMenu(self, count):
        """Call the Tk library procedure tk_nextMenu with COUNT."""
        self.tk.call('tk_nextMenu', count)
    def tk_nextMenuEntry(self, count):
        """Call the Tk library procedure tk_nextMenuEntry with COUNT."""
        self.tk.call('tk_nextMenuEntry', count)
    def tk_invokeMenu(self):
        """Call the Tk library procedure tk_invokeMenu for this menu."""
        self.tk.call('tk_invokeMenu', self._w)
    def tk_firstMenu(self):
        """Call the Tk library procedure tk_firstMenu for this menu."""
        self.tk.call('tk_firstMenu', self._w)
    def tk_mbButtonDown(self):
        """Call the Tk library procedure tk_mbButtonDown for this menu."""
        self.tk.call('tk_mbButtonDown', self._w)
    def tk_popup(self, x, y, entry=""):
        """Post the menu at position X,Y with entry ENTRY."""
        self.tk.call('tk_popup', self._w, x, y, entry)
    def activate(self, index):
        """Activate entry at INDEX."""
        self.tk.call(self._w, 'activate', index)
    def add(self, itemType, cnf={}, **kw):
        """Internal function."""
        self.tk.call((self._w, 'add', itemType) +
                 self._options(cnf, kw))
    def add_cascade(self, cnf={}, **kw):
        """Add hierarchical menu item."""
        self.add('cascade', cnf or kw)
    def add_checkbutton(self, cnf={}, **kw):
        """Add checkbutton menu item."""
        self.add('checkbutton', cnf or kw)
    def add_command(self, cnf={}, **kw):
        """Add command menu item."""
        self.add('command', cnf or kw)
    def add_radiobutton(self, cnf={}, **kw):
        """Add radiobutton menu item."""
        self.add('radiobutton', cnf or kw)
    def add_separator(self, cnf={}, **kw):
        """Add separator."""
        self.add('separator', cnf or kw)
    def insert(self, index, itemType, cnf={}, **kw):
        """Internal function."""
        self.tk.call((self._w, 'insert', index, itemType) +
                 self._options(cnf, kw))
    def insert_cascade(self, index, cnf={}, **kw):
        """Add hierarchical menu item at INDEX."""
        self.insert(index, 'cascade', cnf or kw)
    def insert_checkbutton(self, index, cnf={}, **kw):
        """Add checkbutton menu item at INDEX."""
        self.insert(index, 'checkbutton', cnf or kw)
    def insert_command(self, index, cnf={}, **kw):
        """Add command menu item at INDEX."""
        self.insert(index, 'command', cnf or kw)
    def insert_radiobutton(self, index, cnf={}, **kw):
        """Add radiobutton menu item at INDEX."""
        self.insert(index, 'radiobutton', cnf or kw)
    def insert_separator(self, index, cnf={}, **kw):
        """Add separator at INDEX."""
        self.insert(index, 'separator', cnf or kw)
    def delete(self, index1, index2=None):
        """Delete menu items between INDEX1 and INDEX2 (included)."""
        if index2 is None:
            index2 = index1
        num_index1, num_index2 = self.index(index1), self.index(index2)
        if (num_index1 is None) or (num_index2 is None):
            num_index1, num_index2 = 0, -1
        # Remove the Tcl command registered for each entry before the entry
        # itself is deleted, so registered callbacks are cleaned up.
        for i in range(num_index1, num_index2 + 1):
            if 'command' in self.entryconfig(i):
                c = str(self.entrycget(i, 'command'))
                if c:
                    self.deletecommand(c)
        self.tk.call(self._w, 'delete', index1, index2)
    def entrycget(self, index, option):
        """Return the resource value of a menu item for OPTION at INDEX."""
        return self.tk.call(self._w, 'entrycget', index, '-' + option)
    def entryconfigure(self, index, cnf=None, **kw):
        """Configure a menu item at INDEX."""
        return self._configure(('entryconfigure', index), cnf, kw)
    entryconfig = entryconfigure  # backward-compatible alias
    def index(self, index):
        """Return the index of a menu item identified by INDEX, or None."""
        i = self.tk.call(self._w, 'index', index)
        if i == 'none': return None
        return getint(i)
    def invoke(self, index):
        """Invoke a menu item identified by INDEX and execute
        the associated command."""
        return self.tk.call(self._w, 'invoke', index)
    def post(self, x, y):
        """Display a menu at position X,Y."""
        self.tk.call(self._w, 'post', x, y)
    def type(self, index):
        """Return the type of the menu item at INDEX."""
        return self.tk.call(self._w, 'type', index)
    def unpost(self):
        """Unmap a menu."""
        self.tk.call(self._w, 'unpost')
    def xposition(self, index): # new in Tk 8.5
        """Return the x-position of the leftmost pixel of the menu item
        at INDEX."""
        return getint(self.tk.call(self._w, 'xposition', index))
    def yposition(self, index):
        """Return the y-position of the topmost pixel of the menu item at INDEX."""
        return getint(self.tk.call(
            self._w, 'yposition', index))
class Menubutton(Widget):
    """Menubutton widget, obsolete since Tk8.0."""
    def __init__(self, master=None, cnf={}, **kw):
        """Construct a menubutton widget with the parent MASTER."""
        Widget.__init__(self, master, 'menubutton', cnf, kw)
class Message(Widget):
    """Message widget to display multiline text. Obsolete since Label does it too."""
    def __init__(self, master=None, cnf={}, **kw):
        """Construct a message widget with the parent MASTER."""
        Widget.__init__(self, master, 'message', cnf, kw)
class Radiobutton(Widget):
    """Radiobutton widget which shows only one of several buttons in on-state."""
    def __init__(self, master=None, cnf={}, **kw):
        """Construct a radiobutton widget with the parent MASTER.

        Valid resource names: activebackground, activeforeground, anchor,
        background, bd, bg, bitmap, borderwidth, command, cursor,
        disabledforeground, fg, font, foreground, height,
        highlightbackground, highlightcolor, highlightthickness, image,
        indicatoron, justify, padx, pady, relief, selectcolor, selectimage,
        state, takefocus, text, textvariable, underline, value, variable,
        width, wraplength."""
        Widget.__init__(self, master, 'radiobutton', cnf, kw)
    def deselect(self):
        """Put the button in off-state."""
        self.tk.call(self._w, 'deselect')
    def flash(self):
        """Flash the button."""
        self.tk.call(self._w, 'flash')
    def invoke(self):
        """Toggle the button and invoke a command if given as resource.

        Returns the result of the invoked command."""
        return self.tk.call(self._w, 'invoke')
    def select(self):
        """Put the button in on-state."""
        self.tk.call(self._w, 'select')
class Scale(Widget):
    """Scale widget which can display a numerical scale."""
    def __init__(self, master=None, cnf={}, **kw):
        """Construct a scale widget with the parent MASTER.

        Valid resource names: activebackground, background, bigincrement, bd,
        bg, borderwidth, command, cursor, digits, fg, font, foreground, from,
        highlightbackground, highlightcolor, highlightthickness, label,
        length, orient, relief, repeatdelay, repeatinterval, resolution,
        showvalue, sliderlength, sliderrelief, state, takefocus,
        tickinterval, to, troughcolor, variable, width."""
        Widget.__init__(self, master, 'scale', cnf, kw)
    def get(self):
        """Get the current value as integer or float."""
        value = self.tk.call(self._w, 'get')
        # Prefer an int result; fall back to float for non-integral values.
        try:
            return getint(value)
        except ValueError:
            return getdouble(value)
    def set(self, value):
        """Set the value to VALUE."""
        self.tk.call(self._w, 'set', value)
    def coords(self, value=None):
        """Return a tuple (X,Y) of the point along the centerline of the
        trough that corresponds to VALUE or the current value if None is
        given."""
        return self._getints(self.tk.call(self._w, 'coords', value))
    def identify(self, x, y):
        """Return where the point X,Y lies. Valid return values are "slider",
        "trough1" and "trough2"."""
        return self.tk.call(self._w, 'identify', x, y)
class Scrollbar(Widget):
    """Scrollbar widget which displays a slider at a certain position."""
    def __init__(self, master=None, cnf={}, **kw):
        """Construct a scrollbar widget with the parent MASTER.

        Valid resource names: activebackground, activerelief,
        background, bd, bg, borderwidth, command, cursor,
        elementborderwidth, highlightbackground,
        highlightcolor, highlightthickness, jump, orient,
        relief, repeatdelay, repeatinterval, takefocus,
        troughcolor, width."""
        Widget.__init__(self, master, 'scrollbar', cnf, kw)
    def activate(self, index):
        """Display the element at INDEX with activebackground and activerelief.
        INDEX can be "arrow1", "slider" or "arrow2"."""
        self.tk.call(self._w, 'activate', index)
    def delta(self, deltax, deltay):
        """Return the fractional change of the scrollbar setting if it
        would be moved by DELTAX or DELTAY pixels."""
        return getdouble(
            self.tk.call(self._w, 'delta', deltax, deltay))
    def fraction(self, x, y):
        """Return the fractional value which corresponds to a slider
        position of X,Y."""
        return getdouble(self.tk.call(self._w, 'fraction', x, y))
    def identify(self, x, y):
        """Return the element under position X,Y as one of
        "arrow1", "slider", "arrow2" or ""."""
        return self.tk.call(self._w, 'identify', x, y)
    def get(self):
        """Return the current fractional values (upper and lower end)
        of the slider position."""
        return self._getdoubles(self.tk.call(self._w, 'get'))
    def set(self, *args):
        """Set the fractional values of the slider position (upper and
        lower ends as value between 0 and 1)."""
        self.tk.call((self._w, 'set') + args)
class Text(Widget, XView, YView):
"""Text widget which can display text in various forms."""
    def __init__(self, master=None, cnf={}, **kw):
        """Construct a text widget with the parent MASTER.

        STANDARD OPTIONS

            background, borderwidth, cursor,
            exportselection, font, foreground,
            highlightbackground, highlightcolor,
            highlightthickness, insertbackground,
            insertborderwidth, insertofftime,
            insertontime, insertwidth, padx, pady,
            relief, selectbackground,
            selectborderwidth, selectforeground,
            setgrid, takefocus,
            xscrollcommand, yscrollcommand

        WIDGET-SPECIFIC OPTIONS

            autoseparators, height, maxundo,
            spacing1, spacing2, spacing3,
            state, tabs, undo, width, wrap
        """
        Widget.__init__(self, master, 'text', cnf, kw)
    def bbox(self, index):
        """Return a tuple of (x,y,width,height) which gives the bounding
        box of the visible part of the character at the given index.

        Returns None if the character is not visible."""
        return self._getints(
            self.tk.call(self._w, 'bbox', index)) or None
    def tk_textSelectTo(self, index):
        """Internal: call the Tk library procedure tk_textSelectTo with INDEX."""
        self.tk.call('tk_textSelectTo', self._w, index)
    def tk_textBackspace(self):
        """Internal: call the Tk library procedure tk_textBackspace."""
        self.tk.call('tk_textBackspace', self._w)
    def tk_textIndexCloser(self, a, b, c):
        """Internal: call the Tk library procedure tk_textIndexCloser."""
        self.tk.call('tk_textIndexCloser', self._w, a, b, c)
    def tk_textResetAnchor(self, index):
        """Internal: call the Tk library procedure tk_textResetAnchor with INDEX."""
        self.tk.call('tk_textResetAnchor', self._w, index)
    def compare(self, index1, op, index2):
        """Return whether between index INDEX1 and index INDEX2 the
        relation OP is satisfied. OP is one of <, <=, ==, >=, >, or !=."""
        return self.tk.getboolean(self.tk.call(
            self._w, 'compare', index1, op, index2))
    def count(self, index1, index2, *args): # new in Tk 8.5
        """Counts the number of relevant things between the two indices.

        If index1 is after index2, the result will be a negative number
        (and this holds for each of the possible options).
        The actual items which are counted depends on the options given by
        args. The result is a list of integers, one for the result of each
        counting option given. Valid counting options are "chars",
        "displaychars", "displayindices", "displaylines", "indices",
        "lines", "xpixels" and "ypixels". There is an additional possible
        option "update", which if given then all subsequent options ensure
        that any possible out of date information is recalculated."""
        # Options may be passed with or without their leading '-'.
        args = ['-%s' % arg for arg in args if not arg.startswith('-')]
        args += [index1, index2]
        res = self.tk.call(self._w, 'count', *args) or None
        # With at most one counting option the result is wrapped in a 1-tuple.
        if res is not None and len(args) <= 3:
            return (res, )
        else:
            return res
    def debug(self, boolean=None):
        """Turn on the internal consistency checks of the B-Tree inside the text
        widget according to BOOLEAN.

        If BOOLEAN is None, return the current setting instead."""
        if boolean is None:
            return self.tk.getboolean(self.tk.call(self._w, 'debug'))
        self.tk.call(self._w, 'debug', boolean)
    def delete(self, index1, index2=None):
        """Delete the characters between INDEX1 and INDEX2 (not included)."""
        self.tk.call(self._w, 'delete', index1, index2)
    def dlineinfo(self, index):
        """Return tuple (x,y,width,height,baseline) giving the bounding box
        and baseline position of the visible part of the line containing
        the character at INDEX."""
        return self._getints(self.tk.call(self._w, 'dlineinfo', index))
    def dump(self, index1, index2=None, command=None, **kw):
        """Return the contents of the widget between index1 and index2.

        The type of contents returned in filtered based on the keyword
        parameters; if 'all', 'image', 'mark', 'tag', 'text', or 'window' are
        given and true, then the corresponding items are returned. The result
        is a list of triples of the form (key, value, index). If none of the
        keywords are true then 'all' is used by default.

        If the 'command' argument is given, it is called once for each element
        of the list of triples, with the values of each triple serving as the
        arguments to the function. In this case the list is not returned."""
        args = []
        func_name = None
        result = None
        if not command:
            # Never call the dump command without the -command flag, since the
            # output could involve Tcl quoting and would be a pain to parse
            # right. Instead just set the command to build a list of triples
            # as if we had done the parsing.
            result = []
            def append_triple(key, value, index, result=result):
                result.append((key, value, index))
            command = append_triple
        try:
            if not isinstance(command, str):
                func_name = command = self._register(command)
            args += ["-command", command]
            for key in kw:
                if kw[key]: args.append("-" + key)
            args.append(index1)
            if index2:
                args.append(index2)
            self.tk.call(self._w, "dump", *args)
            return result
        finally:
            # Unregister the temporary Tcl command even if the call raised.
            if func_name:
                self.deletecommand(func_name)
    ## new in tk8.4
    def edit(self, *args):
        """Internal method

        This method controls the undo mechanism and
        the modified flag. The exact behavior of the
        command depends on the option argument that
        follows the edit argument. The following forms
        of the command are currently supported:

        edit_modified, edit_redo, edit_reset, edit_separator
        and edit_undo
        """
        return self.tk.call(self._w, 'edit', *args)
    def edit_modified(self, arg=None):
        """Get or Set the modified flag

        If arg is not specified, returns the modified
        flag of the widget. The insert, delete, edit undo and
        edit redo commands or the user can set or clear the
        modified flag. If boolean is specified, sets the
        modified flag of the widget to arg.
        """
        return self.edit("modified", arg)
    def edit_redo(self):
        """Redo the last undone edit

        When the undo option is true, reapplies the last
        undone edits provided no other edits were done since
        then. Generates an error when the redo stack is empty.
        Does nothing when the undo option is false.
        """
        return self.edit("redo")
    def edit_reset(self):
        """Clears the undo and redo stacks
        """
        return self.edit("reset")
    def edit_separator(self):
        """Inserts a separator (boundary) on the undo stack.

        Does nothing when the undo option is false
        """
        return self.edit("separator")
    def edit_undo(self):
        """Undoes the last edit action

        If the undo option is true. An edit action is defined
        as all the insert and delete commands that are recorded
        on the undo stack in between two separators. Generates
        an error when the undo stack is empty. Does nothing
        when the undo option is false
        """
        return self.edit("undo")
    def get(self, index1, index2=None):
        """Return the text from INDEX1 to INDEX2 (not included)."""
        return self.tk.call(self._w, 'get', index1, index2)
# (Image commands are new in 8.0)
def image_cget(self, index, option):
"""Return the value of OPTION of an embedded image at INDEX."""
if option[:1] != "-":
option = "-" + option
if option[-1:] == "_":
option = option[:-1]
return self.tk.call(self._w, "image", "cget", index, option)
    def image_configure(self, index, cnf=None, **kw):
        """Configure an embedded image at INDEX."""
        return self._configure(('image', 'configure', index), cnf, kw)
    def image_create(self, index, cnf={}, **kw):
        """Create an embedded image at INDEX."""
        return self.tk.call(
                 self._w, "image", "create", index,
                 *self._options(cnf, kw))
    def image_names(self):
        """Return all names of embedded images in this widget."""
        return self.tk.call(self._w, "image", "names")
    def index(self, index):
        """Return the index in the form line.char for INDEX."""
        return str(self.tk.call(self._w, 'index', index))
    def insert(self, index, chars, *args):
        """Insert CHARS before the characters at INDEX. An additional
        tag can be given in ARGS. Additional CHARS and tags can follow in ARGS."""
        self.tk.call((self._w, 'insert', index, chars) + args)
    def mark_gravity(self, markName, direction=None):
        """Change the gravity of a mark MARKNAME to DIRECTION (LEFT or RIGHT).
        Return the current value if None is given for DIRECTION."""
        return self.tk.call(
            (self._w, 'mark', 'gravity', markName, direction))
    def mark_names(self):
        """Return all mark names as a tuple."""
        return self.tk.splitlist(self.tk.call(
            self._w, 'mark', 'names'))
    def mark_set(self, markName, index):
        """Set mark MARKNAME before the character at INDEX."""
        self.tk.call(self._w, 'mark', 'set', markName, index)
    def mark_unset(self, *markNames):
        """Delete all marks in MARKNAMES."""
        self.tk.call((self._w, 'mark', 'unset') + markNames)
    def mark_next(self, index):
        """Return the name of the next mark after INDEX, or None."""
        return self.tk.call(self._w, 'mark', 'next', index) or None
    def mark_previous(self, index):
        """Return the name of the previous mark before INDEX, or None."""
        return self.tk.call(self._w, 'mark', 'previous', index) or None
    def peer_create(self, newPathName, cnf={}, **kw): # new in Tk 8.5
        """Creates a peer text widget with the given newPathName, and any
        optional standard configuration options. By default the peer will
        have the same start and end line as the parent widget, but
        these can be overridden with the standard configuration options."""
        self.tk.call(self._w, 'peer', 'create', newPathName,
            *self._options(cnf, kw))
    def peer_names(self): # new in Tk 8.5
        """Returns a list of peers of this widget (this does not include
        the widget itself)."""
        return self.tk.splitlist(self.tk.call(self._w, 'peer', 'names'))
    def replace(self, index1, index2, chars, *args): # new in Tk 8.5
        """Replaces the range of characters between index1 and index2 with
        the given characters and tags specified by args.

        See the method insert for some more information about args, and the
        method delete for information about the indices."""
        self.tk.call(self._w, 'replace', index1, index2, chars, *args)
    def scan_mark(self, x, y):
        """Remember the current X, Y coordinates for use by scan_dragto."""
        self.tk.call(self._w, 'scan', 'mark', x, y)
    def scan_dragto(self, x, y):
        """Adjust the view of the text to 10 times the
        difference between X and Y and the coordinates given in
        scan_mark."""
        self.tk.call(self._w, 'scan', 'dragto', x, y)
    def search(self, pattern, index, stopindex=None,
           forwards=None, backwards=None, exact=None,
           regexp=None, nocase=None, count=None, elide=None):
        """Search PATTERN beginning from INDEX until STOPINDEX.
        Return the index of the first character of a match or an
        empty string."""
        args = [self._w, 'search']
        if forwards: args.append('-forwards')
        if backwards: args.append('-backwards')
        if exact: args.append('-exact')
        if regexp: args.append('-regexp')
        if nocase: args.append('-nocase')
        if elide: args.append('-elide')
        if count: args.append('-count'); args.append(count)
        # '--' stops option parsing so a pattern starting with '-' is not
        # mistaken for a switch.
        if pattern and pattern[0] == '-': args.append('--')
        args.append(pattern)
        args.append(index)
        if stopindex: args.append(stopindex)
        return str(self.tk.call(tuple(args)))
    def see(self, index):
        """Scroll such that the character at INDEX is visible."""
        self.tk.call(self._w, 'see', index)
    def tag_add(self, tagName, index1, *args):
        """Add tag TAGNAME to all characters between INDEX1 and index2 in ARGS.
        Additional pairs of indices may follow in ARGS."""
        self.tk.call(
            (self._w, 'tag', 'add', tagName, index1) + args)
def tag_unbind(self, tagName, sequence, funcid=None):
    """Unbind for all characters with TAGNAME for event SEQUENCE the
    function identified with FUNCID."""
    # Clearing the binding script in Tk, then dropping the registered
    # Python command, keeps Tcl and Python command tables in sync.
    self.tk.call(self._w, 'tag', 'bind', tagName, sequence, '')
    if funcid:
        self.deletecommand(funcid)
def tag_bind(self, tagName, sequence, func, add=None):
    """Bind to all characters with TAGNAME at event SEQUENCE a call to function FUNC.

    An additional boolean parameter ADD specifies whether FUNC will be
    called additionally to the other bound function or whether it will
    replace the previous function. See bind for the return value."""
    return self._bind((self._w, 'tag', 'bind', tagName),
                      sequence, func, add)
def tag_cget(self, tagName, option):
    """Return the value of OPTION for tag TAGNAME."""
    # Normalize Python-style option names ('foo' or 'foo_') to Tk's '-foo'.
    if option[:1] != '-':
        option = '-' + option
    if option[-1:] == '_':
        option = option[:-1]
    return self.tk.call(self._w, 'tag', 'cget', tagName, option)
def tag_configure(self, tagName, cnf=None, **kw):
    """Configure a tag TAGNAME; with no options, query its configuration."""
    return self._configure(('tag', 'configure', tagName), cnf, kw)
# Backward-compatible short alias.
tag_config = tag_configure
def tag_delete(self, *tagNames):
    """Delete all tags in TAGNAMES."""
    self.tk.call((self._w, 'tag', 'delete') + tagNames)
def tag_lower(self, tagName, belowThis=None):
    """Change the priority of tag TAGNAME such that it is lower
    than the priority of BELOWTHIS (or lowest of all if BELOWTHIS
    is None)."""
    self.tk.call(self._w, 'tag', 'lower', tagName, belowThis)
def tag_names(self, index=None):
    """Return a list of all tag names (at INDEX, if given)."""
    return self.tk.splitlist(
        self.tk.call(self._w, 'tag', 'names', index))
def tag_nextrange(self, tagName, index1, index2=None):
    """Return a list of start and end index for the first sequence of
    characters between INDEX1 and INDEX2 which all have tag TAGNAME.
    The text is searched forward from INDEX1."""
    return self.tk.splitlist(self.tk.call(
        self._w, 'tag', 'nextrange', tagName, index1, index2))
def tag_prevrange(self, tagName, index1, index2=None):
    """Return a list of start and end index for the first sequence of
    characters between INDEX1 and INDEX2 which all have tag TAGNAME.
    The text is searched backwards from INDEX1."""
    return self.tk.splitlist(self.tk.call(
        self._w, 'tag', 'prevrange', tagName, index1, index2))
def tag_raise(self, tagName, aboveThis=None):
    """Change the priority of tag TAGNAME such that it is higher
    than the priority of ABOVETHIS (or highest of all if ABOVETHIS
    is None)."""
    self.tk.call(
        self._w, 'tag', 'raise', tagName, aboveThis)
def tag_ranges(self, tagName):
    """Return a list of ranges of text which have tag TAGNAME."""
    return self.tk.splitlist(self.tk.call(
        self._w, 'tag', 'ranges', tagName))
def tag_remove(self, tagName, index1, index2=None):
    """Remove tag TAGNAME from all characters between INDEX1 and INDEX2."""
    self.tk.call(
        self._w, 'tag', 'remove', tagName, index1, index2)
def window_cget(self, index, option):
    """Return the value of OPTION of an embedded window at INDEX."""
    # Normalize Python-style option names ('foo' or 'foo_') to Tk's '-foo'.
    if option[:1] != '-':
        option = '-' + option
    if option[-1:] == '_':
        option = option[:-1]
    return self.tk.call(self._w, 'window', 'cget', index, option)
def window_configure(self, index, cnf=None, **kw):
    """Configure an embedded window at INDEX; with no options, query it."""
    return self._configure(('window', 'configure', index), cnf, kw)
# Backward-compatible short alias.
window_config = window_configure
def window_create(self, index, cnf={}, **kw):
    """Create a window at INDEX."""
    self.tk.call(
          (self._w, 'window', 'create', index)
          + self._options(cnf, kw))
def window_names(self):
    """Return all names of embedded windows in this widget."""
    return self.tk.splitlist(
        self.tk.call(self._w, 'window', 'names'))
def yview_pickplace(self, *what):
    """Obsolete function, use see."""
    self.tk.call((self._w, 'yview', '-pickplace') + what)
class _setit:
"""Internal class. It wraps the command in the widget OptionMenu."""
def __init__(self, var, value, callback=None):
self.__value = value
self.__var = var
self.__callback = callback
def __call__(self, *args):
self.__var.set(self.__value)
if self.__callback:
self.__callback(self.__value, *args)
class OptionMenu(Menubutton):
    """OptionMenu which allows the user to select a value from a menu."""

    def __init__(self, master, variable, value, *values, **kwargs):
        """Construct an optionmenu widget with the parent MASTER, with
        the resource textvariable set to VARIABLE, the initially selected
        value VALUE, the other menu values VALUES and an additional
        keyword argument command."""
        kw = {"borderwidth": 2, "textvariable": variable,
              "indicatoron": 1, "relief": RAISED, "anchor": "c",
              "highlightthickness": 2}
        Widget.__init__(self, master, "menubutton", kw)
        self.widgetName = 'tk_optionMenu'
        menu = self.__menu = Menu(self, name="menu", tearoff=0)
        self.menuname = menu._w
        # 'command' is the only supported keyword
        callback = kwargs.pop('command', None)
        if kwargs:
            # Bug fix: kwargs.keys()[0] raises TypeError on Python 3
            # (dict views are not indexable) and masked the intended
            # TclError; next(iter(...)) reports the stray option name.
            raise TclError('unknown option -' + next(iter(kwargs)))
        menu.add_command(label=value,
                         command=_setit(variable, value, callback))
        for v in values:
            menu.add_command(label=v,
                             command=_setit(variable, v, callback))
        self["menu"] = menu

    def __getitem__(self, name):
        # Expose the private menu through the usual item interface.
        if name == 'menu':
            return self.__menu
        return Widget.__getitem__(self, name)

    def destroy(self):
        """Destroy this widget and the associated menu."""
        Menubutton.destroy(self)
        # Drop the reference so the Menu can be garbage-collected.
        self.__menu = None
class Image:
    """Base class for images."""
    # Monotonic counter used to generate unique default image names.
    _last_id = 0

    def __init__(self, imgtype, name=None, cnf={}, master=None, **kw):
        """Create an image of type IMGTYPE ('photo' or 'bitmap') named NAME
        on MASTER's Tcl interpreter, configured from CNF/KW."""
        self.name = None
        if not master:
            master = _default_root
            if not master:
                raise RuntimeError('Too early to create image')
        # MASTER may be a widget (has .tk) or a raw Tcl interpreter.
        self.tk = getattr(master, 'tk', master)
        if not name:
            Image._last_id += 1
            name = "pyimage%r" % (Image._last_id,) # tk itself would use image<x>
            # The following is needed for systems where id(x)
            # can return a negative number, such as Linux/m68k:
            if name[0] == '-': name = '_' + name[1:]
        if kw and cnf: cnf = _cnfmerge((cnf, kw))
        elif kw: cnf = kw
        options = ()
        for k, v in cnf.items():
            # Callables become registered Tcl commands.
            if callable(v):
                v = self._register(v)
            options = options + ('-'+k, v)
        self.tk.call(('image', 'create', imgtype, name,) + options)
        self.name = name

    def __str__(self): return self.name

    def __del__(self):
        if self.name:
            try:
                self.tk.call('image', 'delete', self.name)
            except TclError:
                # May happen if the root was destroyed
                pass

    def __setitem__(self, key, value):
        self.tk.call(self.name, 'configure', '-'+key, value)

    def __getitem__(self, key):
        return self.tk.call(self.name, 'configure', '-'+key)

    def configure(self, **kw):
        """Configure the image."""
        res = ()
        for k, v in _cnfmerge(kw).items():
            if v is not None:
                # Strip the trailing '_' used to dodge Python keywords.
                if k[-1] == '_': k = k[:-1]
                if callable(v):
                    v = self._register(v)
                res = res + ('-'+k, v)
        self.tk.call((self.name, 'config') + res)
    config = configure

    def height(self):
        """Return the height of the image."""
        return getint(
            self.tk.call('image', 'height', self.name))

    def type(self):
        """Return the type of the image, e.g. "photo" or "bitmap"."""
        return self.tk.call('image', 'type', self.name)

    def width(self):
        """Return the width of the image."""
        return getint(
            self.tk.call('image', 'width', self.name))
class PhotoImage(Image):
    """Widget which can display colored images in GIF, PPM/PGM format."""

    def __init__(self, name=None, cnf={}, master=None, **kw):
        """Create an image with NAME.

        Valid resource names: data, format, file, gamma, height, palette,
        width."""
        Image.__init__(self, 'photo', name, cnf, master, **kw)

    def blank(self):
        """Display a transparent image."""
        self.tk.call(self.name, 'blank')

    def cget(self, option):
        """Return the value of OPTION."""
        return self.tk.call(self.name, 'cget', '-' + option)
    # XXX config

    def __getitem__(self, key):
        return self.tk.call(self.name, 'cget', '-' + key)
    # XXX copy -from, -to, ...?

    def copy(self):
        """Return a new PhotoImage with the same image as this widget."""
        # self.tk is a Tcl interpreter; Image.__init__ accepts it as master.
        destImage = PhotoImage(master=self.tk)
        self.tk.call(destImage, 'copy', self.name)
        return destImage

    def zoom(self,x,y=''):
        """Return a new PhotoImage with the same image as this widget
        but zoom it with a factor of X in the X direction and Y in the Y
        direction.  If Y is not given, the default value is the same as X."""
        destImage = PhotoImage(master=self.tk)
        if y=='': y=x
        self.tk.call(destImage, 'copy', self.name, '-zoom',x,y)
        return destImage

    def subsample(self,x,y=''):
        """Return a new PhotoImage based on the same image as this widget
        but use only every Xth or Yth pixel.  If Y is not given, the
        default value is the same as X."""
        destImage = PhotoImage(master=self.tk)
        if y=='': y=x
        self.tk.call(destImage, 'copy', self.name, '-subsample',x,y)
        return destImage

    def get(self, x, y):
        """Return the color (red, green, blue) of the pixel at X,Y."""
        return self.tk.call(self.name, 'get', x, y)

    def put(self, data, to=None):
        """Put row formatted colors to image starting from
        position TO, e.g. image.put("{red green} {blue yellow}", to=(4,6))"""
        args = (self.name, 'put', data)
        if to:
            # Accept either (x, y) or the legacy ('-to', x, y) form.
            if to[0] == '-to':
                to = to[1:]
            args = args + ('-to',) + tuple(to)
        self.tk.call(args)
    # XXX read

    def write(self, filename, format=None, from_coords=None):
        """Write image to file FILENAME in FORMAT starting from
        position FROM_COORDS."""
        args = (self.name, 'write', filename)
        if format:
            args = args + ('-format', format)
        if from_coords:
            args = args + ('-from',) + tuple(from_coords)
        self.tk.call(args)
class BitmapImage(Image):
    """Widget which can display a bitmap."""

    def __init__(self, name=None, cnf={}, master=None, **kw):
        """Create a bitmap with NAME.

        Valid resource names: background, data, file, foreground, maskdata, maskfile."""
        Image.__init__(self, 'bitmap', name, cnf, master, **kw)
def image_names():
    """Return the names of all existing images.

    NOTE(review): requires a default root window to exist already.
    """
    return _default_root.tk.splitlist(_default_root.tk.call('image', 'names'))
def image_types():
    """Return a list of all available image types (e.g. "bitmap", "photo").

    NOTE(review): requires a default root window to exist already.
    """
    return _default_root.tk.splitlist(_default_root.tk.call('image', 'types'))
class Spinbox(Widget, XView):
    """spinbox widget."""

    def __init__(self, master=None, cnf={}, **kw):
        """Construct a spinbox widget with the parent MASTER.

        STANDARD OPTIONS

            activebackground, background, borderwidth,
            cursor, exportselection, font, foreground,
            highlightbackground, highlightcolor,
            highlightthickness, insertbackground,
            insertborderwidth, insertofftime,
            insertontime, insertwidth, justify, relief,
            repeatdelay, repeatinterval,
            selectbackground, selectborderwidth
            selectforeground, takefocus, textvariable
            xscrollcommand.

        WIDGET-SPECIFIC OPTIONS

            buttonbackground, buttoncursor,
            buttondownrelief, buttonuprelief,
            command, disabledbackground,
            disabledforeground, format, from,
            invalidcommand, increment,
            readonlybackground, state, to,
            validate, validatecommand values,
            width, wrap,
        """
        Widget.__init__(self, master, 'spinbox', cnf, kw)

    def bbox(self, index):
        """Return a tuple of X1,Y1,X2,Y2 coordinates for a
        rectangle which encloses the character given by index.

        The first two elements of the list give the x and y
        coordinates of the upper-left corner of the screen
        area covered by the character (in pixels relative
        to the widget) and the last two elements give the
        width and height of the character, in pixels. The
        bounding box may refer to a region outside the
        visible area of the window.
        """
        return self._getints(self.tk.call(self._w, 'bbox', index)) or None

    def delete(self, first, last=None):
        """Delete one or more elements of the spinbox.

        First is the index of the first character to delete,
        and last is the index of the character just after
        the last one to delete. If last isn't specified it
        defaults to first+1, i.e. a single character is
        deleted. This command returns an empty string.
        """
        return self.tk.call(self._w, 'delete', first, last)

    def get(self):
        """Returns the spinbox's string"""
        return self.tk.call(self._w, 'get')

    def icursor(self, index):
        """Alter the position of the insertion cursor.

        The insertion cursor will be displayed just before
        the character given by index. Returns an empty string
        """
        return self.tk.call(self._w, 'icursor', index)

    def identify(self, x, y):
        """Returns the name of the widget at position x, y

        Return value is one of: none, buttondown, buttonup, entry
        """
        return self.tk.call(self._w, 'identify', x, y)

    def index(self, index):
        """Returns the numerical index corresponding to index
        """
        return self.tk.call(self._w, 'index', index)

    def insert(self, index, s):
        """Insert string s at index

        Returns an empty string.
        """
        return self.tk.call(self._w, 'insert', index, s)

    def invoke(self, element):
        """Causes the specified element to be invoked

        The element could be buttondown or buttonup
        triggering the action associated with it.
        """
        return self.tk.call(self._w, 'invoke', element)

    def scan(self, *args):
        """Internal function."""
        return self._getints(
            self.tk.call((self._w, 'scan') + args)) or ()

    def scan_mark(self, x):
        """Records x and the current view in the spinbox window;

        used in conjunction with later scan dragto commands.
        Typically this command is associated with a mouse button
        press in the widget. It returns an empty string.
        """
        return self.scan("mark", x)

    def scan_dragto(self, x):
        """Compute the difference between the given x argument
        and the x argument to the last scan mark command

        It then adjusts the view left or right by 10 times the
        difference in x-coordinates. This command is typically
        associated with mouse motion events in the widget, to
        produce the effect of dragging the spinbox at high speed
        through the window. The return value is an empty string.
        """
        return self.scan("dragto", x)

    def selection(self, *args):
        """Internal function."""
        return self._getints(
            self.tk.call((self._w, 'selection') + args)) or ()

    def selection_adjust(self, index):
        """Locate the end of the selection nearest to the character
        given by index,

        Then adjust that end of the selection to be at index
        (i.e including but not going beyond index). The other
        end of the selection is made the anchor point for future
        select to commands. If the selection isn't currently in
        the spinbox, then a new selection is created to include
        the characters between index and the most recent selection
        anchor point, inclusive. Returns an empty string.
        """
        return self.selection("adjust", index)

    def selection_clear(self):
        """Clear the selection

        If the selection isn't in this widget then the
        command has no effect. Returns an empty string.
        """
        return self.selection("clear")

    def selection_element(self, element=None):
        """Sets or gets the currently selected element.

        If a spinbutton element is specified, it will be
        displayed depressed.
        """
        # Bug fix: the previous implementation routed this through
        # self.selection(), whose _getints() post-processing fails on the
        # string result ('none', 'buttondown', 'buttonup' or 'entry')
        # that this Tk subcommand returns.  Call Tk directly and return
        # the element name string (matches the fix CPython applied in
        # bpo-30734).
        return self.tk.call(self._w, 'selection', 'element', element)
###########################################################################
class LabelFrame(Widget):
    """labelframe widget."""

    def __init__(self, master=None, cnf={}, **kw):
        """Construct a labelframe widget with the parent MASTER.

        STANDARD OPTIONS

            borderwidth, cursor, font, foreground,
            highlightbackground, highlightcolor,
            highlightthickness, padx, pady, relief,
            takefocus, text

        WIDGET-SPECIFIC OPTIONS

            background, class, colormap, container,
            height, labelanchor, labelwidget,
            visual, width
        """
        Widget.__init__(self, master, 'labelframe', cnf, kw)
########################################################################
class PanedWindow(Widget):
    """panedwindow widget."""

    def __init__(self, master=None, cnf={}, **kw):
        """Construct a panedwindow widget with the parent MASTER.

        STANDARD OPTIONS

            background, borderwidth, cursor, height,
            orient, relief, width

        WIDGET-SPECIFIC OPTIONS

            handlepad, handlesize, opaqueresize,
            sashcursor, sashpad, sashrelief,
            sashwidth, showhandle,
        """
        Widget.__init__(self, master, 'panedwindow', cnf, kw)

    def add(self, child, **kw):
        """Add a child widget to the panedwindow in a new pane.

        The child argument is the name of the child widget
        followed by pairs of arguments that specify how to
        manage the windows. The possible options and values
        are the ones accepted by the paneconfigure method.
        """
        self.tk.call((self._w, 'add', child) + self._options(kw))

    def remove(self, child):
        """Remove the pane containing child from the panedwindow

        All geometry management options for child will be forgotten.
        """
        self.tk.call(self._w, 'forget', child)
    # Tk's subcommand is 'forget'; keep both names callable.
    forget=remove

    def identify(self, x, y):
        """Identify the panedwindow component at point x, y

        If the point is over a sash or a sash handle, the result
        is a two element list containing the index of the sash or
        handle, and a word indicating whether it is over a sash
        or a handle, such as {0 sash} or {2 handle}. If the point
        is over any other part of the panedwindow, the result is
        an empty list.
        """
        return self.tk.call(self._w, 'identify', x, y)

    def proxy(self, *args):
        """Internal function."""
        return self._getints(
            self.tk.call((self._w, 'proxy') + args)) or ()

    def proxy_coord(self):
        """Return the x and y pair of the most recent proxy location
        """
        return self.proxy("coord")

    def proxy_forget(self):
        """Remove the proxy from the display.
        """
        return self.proxy("forget")

    def proxy_place(self, x, y):
        """Place the proxy at the given x and y coordinates.
        """
        return self.proxy("place", x, y)

    def sash(self, *args):
        """Internal function."""
        return self._getints(
            self.tk.call((self._w, 'sash') + args)) or ()

    def sash_coord(self, index):
        """Return the current x and y pair for the sash given by index.

        Index must be an integer between 0 and 1 less than the
        number of panes in the panedwindow. The coordinates given are
        those of the top left corner of the region containing the sash.
        pathName sash dragto index x y This command computes the
        difference between the given coordinates and the coordinates
        given to the last sash coord command for the given sash. It then
        moves that sash the computed difference. The return value is the
        empty string.
        """
        return self.sash("coord", index)

    def sash_mark(self, index):
        """Records x and y for the sash given by index;

        Used in conjunction with later dragto commands to move the sash.
        """
        return self.sash("mark", index)

    def sash_place(self, index, x, y):
        """Place the sash given by index at the given coordinates
        """
        return self.sash("place", index, x, y)

    def panecget(self, child, option):
        """Query a management option for window.

        Option may be any value allowed by the paneconfigure subcommand
        """
        return self.tk.call(
            (self._w, 'panecget') + (child, '-'+option))

    def paneconfigure(self, tagOrId, cnf=None, **kw):
        """Query or modify the management options for window.

        If no option is specified, returns a list describing all
        of the available options for pathName.  If option is
        specified with no value, then the command returns a list
        describing the one named option (this list will be identical
        to the corresponding sublist of the value returned if no
        option is specified). If one or more option-value pairs are
        specified, then the command modifies the given widget
        option(s) to have the given value(s); in this case the
        command returns an empty string. The following options
        are supported:

        after window
            Insert the window after the window specified. window
            should be the name of a window already managed by pathName.
        before window
            Insert the window before the window specified. window
            should be the name of a window already managed by pathName.
        height size
            Specify a height for the window. The height will be the
            outer dimension of the window including its border, if
            any. If size is an empty string, or if -height is not
            specified, then the height requested internally by the
            window will be used initially; the height may later be
            adjusted by the movement of sashes in the panedwindow.
            Size may be any value accepted by Tk_GetPixels.
        minsize n
            Specifies that the size of the window cannot be made
            less than n. This constraint only affects the size of
            the widget in the paned dimension -- the x dimension
            for horizontal panedwindows, the y dimension for
            vertical panedwindows. May be any value accepted by
            Tk_GetPixels.
        padx n
            Specifies a non-negative value indicating how much
            extra space to leave on each side of the window in
            the X-direction. The value may have any of the forms
            accepted by Tk_GetPixels.
        pady n
            Specifies a non-negative value indicating how much
            extra space to leave on each side of the window in
            the Y-direction. The value may have any of the forms
            accepted by Tk_GetPixels.
        sticky style
            If a window's pane is larger than the requested
            dimensions of the window, this option may be used
            to position (or stretch) the window within its pane.
            Style is a string that contains zero or more of the
            characters n, s, e or w. The string can optionally
            contains spaces or commas, but they are ignored. Each
            letter refers to a side (north, south, east, or west)
            that the window will "stick" to. If both n and s
            (or e and w) are specified, the window will be
            stretched to fill the entire height (or width) of
            its cavity.
        width size
            Specify a width for the window. The width will be
            the outer dimension of the window including its
            border, if any. If size is an empty string, or
            if -width is not specified, then the width requested
            internally by the window will be used initially; the
            width may later be adjusted by the movement of sashes
            in the panedwindow. Size may be any value accepted by
            Tk_GetPixels.
        """
        # No options at all -> full configuration listing.
        if cnf is None and not kw:
            return self._getconfigure(self._w, 'paneconfigure', tagOrId)
        # A bare option name -> describe that one option.
        if isinstance(cnf, str) and not kw:
            return self._getconfigure1(
                self._w, 'paneconfigure', tagOrId, '-'+cnf)
        # Otherwise apply the given option-value pairs.
        self.tk.call((self._w, 'paneconfigure', tagOrId) +
                     self._options(cnf, kw))
    paneconfig = paneconfigure

    def panes(self):
        """Returns an ordered list of the child panes."""
        return self.tk.splitlist(self.tk.call(self._w, 'panes'))
######################################################################
# Extensions:
class Studbutton(Button):
    # Extension widget wrapping the (non-standard) 'studbutton' Tk class;
    # wires up the standard Tk button event handlers by hand.
    def __init__(self, master=None, cnf={}, **kw):
        Widget.__init__(self, master, 'studbutton', cnf, kw)
        self.bind('<Any-Enter>',       self.tkButtonEnter)
        self.bind('<Any-Leave>',       self.tkButtonLeave)
        self.bind('<1>',               self.tkButtonDown)
        self.bind('<ButtonRelease-1>', self.tkButtonUp)
class Tributton(Button):
    # Extension widget wrapping the (non-standard) 'tributton' Tk class;
    # wires up the standard Tk button event handlers by hand.
    def __init__(self, master=None, cnf={}, **kw):
        Widget.__init__(self, master, 'tributton', cnf, kw)
        self.bind('<Any-Enter>',       self.tkButtonEnter)
        self.bind('<Any-Leave>',       self.tkButtonLeave)
        self.bind('<1>',               self.tkButtonDown)
        self.bind('<ButtonRelease-1>', self.tkButtonUp)
        # Hide the text/border by matching foreground and active
        # background to the background color.
        self['fg']               = self['bg']
        self['activebackground'] = self['bg']
######################################################################
# Test:
def _test():
    # Minimal smoke-test window: a label, a self-modifying button and a
    # QUIT button.  Run via `python -m tkinter` / executing this module.
    root = Tk()
    text = "This is Tcl/Tk version %s" % TclVersion
    if TclVersion >= 8.1:
        text += "\nThis should be a cedilla: \xe7"
    label = Label(root, text=text)
    label.pack()
    # The lambda runs at click time, after root.test is assigned below.
    test = Button(root, text="Click me!",
              command=lambda root=root: root.test.configure(
                  text="[%s]" % root.test['text']))
    test.pack()
    root.test = test
    quit = Button(root, text="QUIT", command=root.destroy)
    quit.pack()
    # The following three commands are needed so the window pops
    # up on top on Windows...
    root.iconify()
    root.update()
    root.deiconify()
    root.mainloop()
# Show the demo window when this module is executed as a script.
if __name__ == '__main__':
    _test()
| lgpl-3.0 |
gboudreau/CouchPotato | cherrypy/lib/encoding.py | 88 | 15598 | import struct
import time
import cherrypy
from cherrypy._cpcompat import basestring, BytesIO, ntob, set, unicodestr
from cherrypy.lib import file_generator
from cherrypy.lib import set_vary_header
def decode(encoding=None, default_encoding='utf-8'):
    """Replace or extend the list of charsets used to decode a request entity.

    Either argument may be a single string or a list of strings.

    encoding
        If not None, restricts the set of charsets attempted while decoding
        a request entity to the given set (even if a different charset is
        given in the Content-Type request header).

    default_encoding
        Only in effect if the 'encoding' argument is not given.
        If given, the set of charsets attempted while decoding a request
        entity is *extended* with the given value(s).
    """
    def as_charset_list(value):
        # Accept a bare charset string as shorthand for a one-element list.
        return value if isinstance(value, list) else [value]

    body = cherrypy.request.body
    if encoding is not None:
        # Hard override: attempt only the caller-supplied charsets.
        body.attempt_charsets = as_charset_list(encoding)
    elif default_encoding:
        # Soft extension: keep existing candidates, append the defaults.
        body.attempt_charsets = body.attempt_charsets + as_charset_list(default_encoding)
class ResponseEncoder:
    """Wraps the current request handler and encodes the response body.

    When called in place of the original handler, it invokes the original,
    normalizes the returned body to an iterable of chunks, and encodes any
    unicode chunks using a charset negotiated from the Accept-Charset
    request header.
    """

    # Charset tried when the client expresses no usable preference.
    default_encoding = 'utf-8'
    failmsg = "Response body could not be encoded with %r."
    # If set, force this charset (or fail with 406/500).
    encoding = None
    # Codec error handler passed to str.encode().
    errors = 'strict'
    # Only negotiate charsets for text/* Content-Types.
    text_only = True
    # Whether to write 'charset=...' back into the Content-Type header.
    add_charset = True
    debug = False

    def __init__(self, **kwargs):
        for k, v in kwargs.items():
            setattr(self, k, v)

        self.attempted_charsets = set()
        request = cherrypy.serving.request
        if request.handler is not None:
            # Replace request.handler with self
            if self.debug:
                cherrypy.log('Replacing request.handler', 'TOOLS.ENCODE')
            self.oldhandler = request.handler
            request.handler = self

    def encode_stream(self, encoding):
        """Encode a streaming response body.

        Use a generator wrapper, and just pray it works as the stream is
        being written out.
        """
        if encoding in self.attempted_charsets:
            return False
        self.attempted_charsets.add(encoding)

        def encoder(body):
            for chunk in body:
                if isinstance(chunk, unicodestr):
                    chunk = chunk.encode(encoding, self.errors)
                yield chunk
        self.body = encoder(self.body)
        return True

    def encode_string(self, encoding):
        """Encode a buffered response body.

        Returns True on success, False if this encoding was already tried
        or a chunk could not be encoded.
        """
        if encoding in self.attempted_charsets:
            return False
        self.attempted_charsets.add(encoding)

        try:
            body = []
            for chunk in self.body:
                if isinstance(chunk, unicodestr):
                    chunk = chunk.encode(encoding, self.errors)
                body.append(chunk)
            self.body = body
        except (LookupError, UnicodeError):
            return False
        else:
            return True

    def find_acceptable_charset(self):
        """Negotiate a charset with the client and encode the body with it.

        Returns the chosen charset name; raises HTTPError 406 (or 500 for
        a forced-but-unencodable self.encoding) when none works.
        """
        request = cherrypy.serving.request
        response = cherrypy.serving.response

        if self.debug:
            cherrypy.log('response.stream %r' % response.stream, 'TOOLS.ENCODE')
        if response.stream:
            encoder = self.encode_stream
        else:
            encoder = self.encode_string
            if "Content-Length" in response.headers:
                # Delete Content-Length header so finalize() recalcs it.
                # Encoded strings may be of different lengths from their
                # unicode equivalents, and even from each other. For example:
                # >>> t = u"\u7007\u3040"
                # >>> len(t)
                # 2
                # >>> len(t.encode("UTF-8"))
                # 6
                # >>> len(t.encode("utf7"))
                # 8
                del response.headers["Content-Length"]

        # Parse the Accept-Charset request header, and try to provide one
        # of the requested charsets (in order of user preference).
        encs = request.headers.elements('Accept-Charset')
        charsets = [enc.value.lower() for enc in encs]
        if self.debug:
            cherrypy.log('charsets %s' % repr(charsets), 'TOOLS.ENCODE')

        if self.encoding is not None:
            # If specified, force this encoding to be used, or fail.
            encoding = self.encoding.lower()
            if self.debug:
                cherrypy.log('Specified encoding %r' % encoding, 'TOOLS.ENCODE')
            if (not charsets) or "*" in charsets or encoding in charsets:
                if self.debug:
                    cherrypy.log('Attempting encoding %r' % encoding, 'TOOLS.ENCODE')
                if encoder(encoding):
                    return encoding
        else:
            if not encs:
                if self.debug:
                    cherrypy.log('Attempting default encoding %r' %
                                 self.default_encoding, 'TOOLS.ENCODE')
                # Any character-set is acceptable.
                if encoder(self.default_encoding):
                    return self.default_encoding
                else:
                    raise cherrypy.HTTPError(500, self.failmsg % self.default_encoding)
            else:
                for element in encs:
                    if element.qvalue > 0:
                        if element.value == "*":
                            # Matches any charset. Try our default.
                            if self.debug:
                                cherrypy.log('Attempting default encoding due '
                                             'to %r' % element, 'TOOLS.ENCODE')
                            if encoder(self.default_encoding):
                                return self.default_encoding
                        else:
                            encoding = element.value
                            if self.debug:
                                cherrypy.log('Attempting encoding %s (qvalue >'
                                             '0)' % element, 'TOOLS.ENCODE')
                            if encoder(encoding):
                                return encoding

                if "*" not in charsets:
                    # If no "*" is present in an Accept-Charset field, then all
                    # character sets not explicitly mentioned get a quality
                    # value of 0, except for ISO-8859-1, which gets a quality
                    # value of 1 if not explicitly mentioned.
                    iso = 'iso-8859-1'
                    if iso not in charsets:
                        if self.debug:
                            cherrypy.log('Attempting ISO-8859-1 encoding',
                                         'TOOLS.ENCODE')
                        if encoder(iso):
                            return iso

        # No suitable encoding found.
        ac = request.headers.get('Accept-Charset')
        if ac is None:
            msg = "Your client did not send an Accept-Charset header."
        else:
            msg = "Your client sent this Accept-Charset header: %s." % ac
        msg += " We tried these charsets: %s." % ", ".join(self.attempted_charsets)
        raise cherrypy.HTTPError(406, msg)

    def __call__(self, *args, **kwargs):
        response = cherrypy.serving.response
        self.body = self.oldhandler(*args, **kwargs)

        if isinstance(self.body, basestring):
            # strings get wrapped in a list because iterating over a single
            # item list is much faster than iterating over every character
            # in a long string.
            if self.body:
                self.body = [self.body]
            else:
                # [''] doesn't evaluate to False, so replace it with [].
                self.body = []
        elif hasattr(self.body, 'read'):
            self.body = file_generator(self.body)
        elif self.body is None:
            self.body = []

        ct = response.headers.elements("Content-Type")
        if self.debug:
            cherrypy.log('Content-Type: %r' % [str(h) for h in ct], 'TOOLS.ENCODE')
        if ct:
            ct = ct[0]
            if self.text_only:
                if ct.value.lower().startswith("text/"):
                    if self.debug:
                        cherrypy.log('Content-Type %s starts with "text/"' % ct,
                                     'TOOLS.ENCODE')
                    do_find = True
                else:
                    if self.debug:
                        cherrypy.log('Not finding because Content-Type %s does '
                                     'not start with "text/"' % ct,
                                     'TOOLS.ENCODE')
                    do_find = False
            else:
                if self.debug:
                    cherrypy.log('Finding because not text_only', 'TOOLS.ENCODE')
                do_find = True

            if do_find:
                # Set "charset=..." param on response Content-Type header
                ct.params['charset'] = self.find_acceptable_charset()
                if self.add_charset:
                    if self.debug:
                        cherrypy.log('Setting Content-Type %s' % ct,
                                     'TOOLS.ENCODE')
                    response.headers["Content-Type"] = str(ct)

        return self.body
# GZIP
def compress(body, compress_level):
    """Compress 'body' at the given compress_level.

    body: an iterable of byte chunks.
    compress_level: zlib compression level (1-9).

    Generator yielding the pieces of a complete gzip stream: the fixed
    header, the raw-deflate payload, then the CRC32/ISIZE trailer.
    """
    import zlib

    # See http://www.gzip.org/zlib/rfc-gzip.html
    yield ntob('\x1f\x8b')       # ID1 and ID2: gzip marker
    yield ntob('\x08')           # CM: compression method
    yield ntob('\x00')           # FLG: none set
    # MTIME: 4 bytes
    yield struct.pack("<L", int(time.time()) & int('FFFFFFFF', 16))
    # NOTE(review): XFL=2 advertises "max compression" even when
    # compress_level < 9 — harmless metadata, but not strictly accurate.
    yield ntob('\x02')           # XFL: max compression, slowest algo
    yield ntob('\xff')           # OS: unknown

    crc = zlib.crc32(ntob(""))
    size = 0
    # Negative wbits -> raw deflate (the gzip wrapper is emitted by hand).
    zobj = zlib.compressobj(compress_level,
                            zlib.DEFLATED, -zlib.MAX_WBITS,
                            zlib.DEF_MEM_LEVEL, 0)
    for line in body:
        size += len(line)
        crc = zlib.crc32(line, crc)
        yield zobj.compress(line)
    yield zobj.flush()

    # CRC32: 4 bytes
    yield struct.pack("<L", crc & int('FFFFFFFF', 16))
    # ISIZE: 4 bytes
    yield struct.pack("<L", size & int('FFFFFFFF', 16))
def decompress(body):
    """Return the gzip-decompressed bytes of 'body'.

    body: a bytes object containing a complete gzip stream.
    Raises IOError (OSError on Python 3) if the data is not valid gzip.
    """
    import gzip

    # BytesIO(body) replaces the old write()/seek(0) dance; the
    # try/finally guarantees the GzipFile is closed even if read()
    # raises on corrupt input (the old code leaked it on error).
    zfile = gzip.GzipFile(mode='rb', fileobj=BytesIO(body))
    try:
        return zfile.read()
    finally:
        zfile.close()
def gzip(compress_level=5, mime_types=['text/html', 'text/plain'], debug=False):
    """Try to gzip the response body if Content-Type in mime_types.

    cherrypy.response.headers['Content-Type'] must be set to one of the
    values in the mime_types arg before calling this function.

    The provided list of mime-types must be of one of the following form:
        * type/subtype
        * type/*
        * type/*+subtype

    No compression is performed if any of the following hold:
        * The client sends no Accept-Encoding request header
        * No 'gzip' or 'x-gzip' is present in the Accept-Encoding header
        * No 'gzip' or 'x-gzip' with a qvalue > 0 is present
        * The 'identity' value is given with a qvalue > 0.
    """
    # NOTE: the mutable default for mime_types is safe only because this
    # function never mutates it; do not add in-place modifications.
    request = cherrypy.serving.request
    response = cherrypy.serving.response

    set_vary_header(response, "Accept-Encoding")

    if not response.body:
        # Response body is empty (might be a 304 for instance)
        if debug:
            cherrypy.log('No response body', context='TOOLS.GZIP')
        return

    # If returning cached content (which should already have been gzipped),
    # don't re-zip.
    if getattr(request, "cached", False):
        if debug:
            cherrypy.log('Not gzipping cached response', context='TOOLS.GZIP')
        return

    acceptable = request.headers.elements('Accept-Encoding')
    if not acceptable:
        # If no Accept-Encoding field is present in a request,
        # the server MAY assume that the client will accept any
        # content coding. In this case, if "identity" is one of
        # the available content-codings, then the server SHOULD use
        # the "identity" content-coding, unless it has additional
        # information that a different content-coding is meaningful
        # to the client.
        if debug:
            cherrypy.log('No Accept-Encoding', context='TOOLS.GZIP')
        return

    # Strip any ";charset=..." parameter before matching.
    ct = response.headers.get('Content-Type', '').split(';')[0]
    for coding in acceptable:
        if coding.value == 'identity' and coding.qvalue != 0:
            if debug:
                cherrypy.log('Non-zero identity qvalue: %s' % coding,
                             context='TOOLS.GZIP')
            return
        if coding.value in ('gzip', 'x-gzip'):
            if coding.qvalue == 0:
                if debug:
                    cherrypy.log('Zero gzip qvalue: %s' % coding,
                                 context='TOOLS.GZIP')
                return

            if ct not in mime_types:
                # If the list of provided mime-types contains tokens
                # such as 'text/*' or 'application/*+xml',
                # we go through them and find the most appropriate one
                # based on the given content-type.
                # The pattern matching is only caring about the most
                # common cases, as stated above, and doesn't support
                # for extra parameters.
                found = False
                if '/' in ct:
                    # BUG FIX: use maxsplit so a value containing more
                    # than one '/' or '+' cannot raise ValueError from
                    # tuple unpacking; the media-type suffix (RFC 6839)
                    # is the part after the LAST '+'.
                    ct_media_type, ct_sub_type = ct.split('/', 1)
                    for mime_type in mime_types:
                        if '/' in mime_type:
                            media_type, sub_type = mime_type.split('/', 1)
                            if ct_media_type == media_type:
                                if sub_type == '*':
                                    found = True
                                    break
                                elif '+' in sub_type and '+' in ct_sub_type:
                                    ct_left, ct_right = ct_sub_type.rsplit('+', 1)
                                    left, right = sub_type.rsplit('+', 1)
                                    if left == '*' and ct_right == right:
                                        found = True
                                        break

                if not found:
                    if debug:
                        cherrypy.log('Content-Type %s not in mime_types %r' %
                                     (ct, mime_types), context='TOOLS.GZIP')
                    return

            if debug:
                cherrypy.log('Gzipping', context='TOOLS.GZIP')
            # Return a generator that compresses the page
            response.headers['Content-Encoding'] = 'gzip'
            response.body = compress(response.body, compress_level)
            if "Content-Length" in response.headers:
                # Delete Content-Length header so finalize() recalcs it.
                del response.headers["Content-Length"]

            return

    if debug:
        cherrypy.log('No acceptable encoding found.', context='GZIP')
    # 406: none of the encodings the client accepts can be produced.
    cherrypy.HTTPError(406, "identity, gzip").set_response()
| gpl-3.0 |
Bryukh-Checkio-Tasks/checkio-task-cipher-map | verification/tests.py | 2 | 5103 | """
TESTS is a dict with all your tests.
Keys of this dict are the category names.
Each test is a dict with
    "input" -- input data for the user function
    "answer" -- your correct answer
    "explanation" -- an optional key, used for additional info in the animation.
"""
# Each case is {"input": [mask_grid, letter_grid], "answer": expected}.
# Both grids are 4x4; the mask uses 'X' for an opening and '.' otherwise.
TESTS = {
    "Basics": [
        {"input": [["X...", "..X.", "X..X", "...."],
                   ["itdf", "gdce", "aton", "qrdi"]],
         "answer": "icantforgetiddqd"},
        {"input": [["....", "X..X", ".X..", "...X"],
                   ["xhwc", "rsqx", "xqzz", "fyzr"]],
         "answer": "rxqrwsfzxqxzhczy"},
    ],
    "Edge": [
        {"input": [["X...", ".X..", "..X.", "...X"],
                   ["aaaa", "aaaa", "aaaa", "aaaa"]],
         "answer": "aaaaaaaaaaaaaaaa"},
        {"input": [["X..X", "....", "....", "X..X"],
                   ["abcd", "efgh", "ijkl", "mnop"]],
         "answer": "admpadmpadmpadmp"},
        {"input": [["....", ".XX.", ".XX.", "...."],
                   ["abcd", "efgh", "ijkl", "mnop"]],
         "answer": "fgjkfgjkfgjkfgjk"},
    ],
    "Extra": [
        {"input": [["X...", ".X..", "..X.", "...X"],
                   ["azbx", "azbx", "azbx", "azbx"]],
         "answer": "azbxxbzaazbxxbza"},
        {"input": [["XXXX", "....", "....", "...."],
                   ["call", "rsqi", "epzn", "yeee"]],
         "answer": "calllineyeeecrey"},
        {"input": [["X...", "X...", "X...", "X..."],
                   ["call", "rsqi", "epzn", "yeee"]],
         "answer": "creycalllineyeee"},
        {"input": [["X...", "..X.", ".X..", "...X"],
                   ["name", "goto", "line", "nope"]],
         "answer": "ntieeonnntieeonn"},
        {"input": [["XX..", "....", "....", "..XX"],
                   ["cree", "band", "test", "yepp"]],
         "answer": "crppedtycrppedty"},
        {"input": [["....", "X..X", "X..X", "...."],
                   ["cree", "band", "test", "yepp"]],
         "answer": "bdttreepbdttreep"},
        {"input": [["...X", "..X.", "..X.", "...X"],
                   ["aazz", "aazz", "aazz", "aazz"]],
         "answer": "zzzzazazaaaaazaz"},
    ],
}
| gpl-2.0 |
chirilo/mozillians | vendor-local/lib/python/importlib/__init__.py | 456 | 1327 | """Backport of importlib.import_module from 3.x."""
# While not critical (and in no way guaranteed!), it would be nice to keep this
# code compatible with Python 2.3.
import sys
def _resolve_name(name, package, level):
"""Return the absolute name of the module to be imported."""
if not hasattr(package, 'rindex'):
raise ValueError("'package' not set to a string")
dot = len(package)
for x in xrange(level, 1, -1):
try:
dot = package.rindex('.', 0, dot)
except ValueError:
raise ValueError("attempted relative import beyond top-level "
"package")
return "%s.%s" % (package[:dot], name)
def import_module(name, package=None):
    """Import a module.

    The 'package' argument is required when performing a relative import. It
    specifies the package to use as the anchor point from which to resolve the
    relative import to an absolute import.
    """
    if name.startswith('.'):
        if not package:
            raise TypeError("relative imports require the 'package' argument")
        # The number of leading dots is the relative-import level; the
        # remainder is the module name to resolve against 'package'.
        remainder = name.lstrip('.')
        level = len(name) - len(remainder)
        name = _resolve_name(remainder, package, level)
    __import__(name)
    return sys.modules[name]
| bsd-3-clause |
raybesiga/congress | congress/conference/models.py | 1 | 6243 | from django.db import models
# Create your models here.
from django.utils.safestring import mark_safe
from django.core.urlresolvers import reverse
# from django.contrib.auth.models import User
# from django.db.models.signals import post_save
from datetime import datetime
from users.models import User
import string
import random
# Create your models here.
class Tag(models.Model):
    """A label attached to sessions; "category" tags drive front-end filtering."""
    name = models.CharField(max_length=100)
    # Slug embedded into HTML ids and CSS class names by html().
    short = models.CharField(max_length=100, blank=True, null=True)
    image = models.ImageField(upload_to='tag', blank=True, null=True)
    is_category = models.BooleanField(default=False)

    def html(self):
        # Render the tag as a styled <div>.
        # NOTE(review): mark_safe emits short/name unescaped -- assumes
        # admin-entered, trusted values; confirm before exposing to users.
        return mark_safe('<div id=\'%s\' class=\'tag tag-%s tag-icon-%s\'>%s</div>' % (self.short, self.short, self.short,
                                                                                      self.name))

    def __unicode__(self):
        return self.name
class Location(models.Model):
    """A room/venue where sessions take place."""
    name = models.CharField(max_length=100)

    def __unicode__(self):
        return self.name
class SessionDate(models.Model):
    """One day of the conference schedule."""
    day = models.DateField()

    def sessions(self):
        # All sessions scheduled on this day.
        return Session.objects.filter(date=self)

    def __unicode__(self):
        # Rendered as e.g. "06/15 (Sat)".
        return self.day.strftime("%m/%d (%a)")
class SessionTime(models.Model):
    """A begin/end time slot (times only; the day comes from SessionDate)."""
    begin = models.TimeField()
    end = models.TimeField()

    def _todatetime(self, time):
        # Anchor the bare time on today's date so the two endpoints can be
        # subtracted as datetimes in duration_in_minutes().
        return datetime.today().replace(hour=time.hour, minute=time.minute, second=time.second, microsecond=time.microsecond,
                                        tzinfo=time.tzinfo)

    def duration_in_minutes(self):
        # NOTE(review): assumes end >= begin (no slot crosses midnight);
        # a negative span would wrap via timedelta.seconds -- confirm.
        return (self._todatetime(self.end) - self._todatetime(self.begin)).seconds / 60

    def __unicode__(self):
        return '%s - %s' % (self.begin, self.end)
class Organization(models.Model):
    """A company/organization that speakers belong to."""
    name = models.CharField(max_length=100, db_index=True)
    description = models.CharField(max_length=2000, null=True, blank=True)
    web_url = models.CharField(max_length=255, null=True, blank=True)

    class Meta:
        ordering = ['name']

    def get_absolute_url(self):
        return reverse('organization', args=[self.id])

    def get_tags(self):
        # Tags of every session given by this organization's speakers.
        return Tag.objects.filter(id__in=self.get_sessions().values_list('tags', flat=True))

    def get_classes(self):
        # Unique CSS classes across all of this organization's sessions.
        # BUG FIX: the original wrapped the flattened list in ``self(...)``;
        # a model instance is not callable, so every call raised TypeError.
        # Speaker.get_classes() shows the intended form: wrap it in set().
        return set([_ for sublist in [_.get_classes() for _ in self.get_sessions()] for _ in sublist])

    def get_sessions(self):
        # Sessions presented by any speaker belonging to this organization.
        return Session.objects.filter(speakers__in=self.speaker_set.all())

    def __unicode__(self):
        return self.name
class Speaker(models.Model):
    """A person giving one or more sessions, optionally tied to an Organization."""
    name = models.CharField(max_length=100, db_index=True)
    email = models.CharField(max_length=255, null=True, blank=True)
    # Twitter handle stored without the leading '@'.
    twitter = models.CharField(max_length=255, null=True, blank=True)
    organization = models.ForeignKey(Organization, null=True)

    class Meta:
        ordering = ['name']

    def get_twitter_name(self):
        # Linkified @handle.  NOTE(review): mark_safe emits the handle
        # unescaped -- assumes trusted input; confirm.
        return mark_safe('<a href="https://twitter.com/%s" target="_blank">@%s</a>' % (self.twitter, self.twitter))

    def get_twitter_profile_image(self):
        # URL of the (legacy v1 API) Twitter avatar for this handle.
        return mark_safe('http://api.twitter.com/1/users/profile_image/?screen_name=%s&size=bigger' % self.twitter)

    def get_absolute_url(self):
        return reverse('speaker', args=[self.id])

    def get_classes(self):
        # Unique CSS classes across all of this speaker's sessions.
        return set([_ for sublist in [_.get_classes() for _ in Session.objects.filter(speakers=self)] for _ in sublist])

    def __unicode__(self):
        # BUG FIX: the model defines no ``company`` field, so the original
        # ``self.company`` raised AttributeError; use the ``organization``
        # relation instead (may be None).
        return '%s(%s)' % (self.name, self.organization)
class Session(models.Model):
    """A conference talk: speakers, tags, a location and one or more time slots."""
    name = models.CharField(max_length=100, db_index=True)
    description = models.CharField(max_length=2000, null=True, blank=True, db_index=True)
    slide_url = models.CharField(max_length=255, null=True, blank=True)
    speakers = models.ManyToManyField(Speaker, blank=True)
    tags = models.ManyToManyField(Tag)
    location = models.ForeignKey(Location)
    date = models.ForeignKey(SessionDate)
    times = models.ManyToManyField(SessionTime)
    # Users who bookmarked/attended this session.
    attendee = models.ManyToManyField(User, blank=True)

    def get_absolute_url(self):
        return reverse('session', args=[self.id])

    def duration_in_minutes(self):
        # Total scheduled minutes across all assigned time slots.
        minutes = sum([_.duration_in_minutes() for _ in self.times.all()])
        return minutes

    def is_keynote(self):
        # A session is a keynote iff it carries the 'keynote' tag.
        if self.tags.filter(short='keynote'):
            return True
        return False

    def begin_time(self):
        # Start time of the first slot, formatted "HH:MM".
        return self.times.all()[0].begin.strftime("%H:%M")

    def get_times(self):
        # "first-slot-begin - last-slot-end", e.g. "09:00 - 10:30".
        times = self.times.all()
        return '%s - %s' % (times[0].begin.strftime("%H:%M"), times[len(times) - 1].end.strftime("%H:%M"))

    def get_organizations(self):
        # note that set() does not preserve original order
        unique = []
        for s in self.speakers.all():
            if s.organization not in unique:
                unique.append(s.organization)
        return unique

    def get_category_tags(self):
        return self.tags.filter(is_category=True)

    def get_classes(self):
        # CSS filter classes, one per category tag.
        # BUG FIX: Tag declares no ``uid`` attribute, so the original
        # ``_.uid`` raised AttributeError; ``short`` is the slug Tag.html()
        # already uses for CSS class names.
        # TODO(review): confirm ``short`` is the intended attribute.
        return ['ft-' + _.short for _ in self.get_category_tags()]

    def html_get_category_tags(self):
        return mark_safe(''.join([_.html() for _ in self.get_category_tags()]))

    def html_get_speakers(self):
        return ', '.join([_.name for _ in self.speakers.all()])

    def html_get_organizations(self):
        return ', '.join([_.name for _ in self.get_organizations()])

    def __unicode__(self):
        return self.name
class EmailToken(models.Model):
    """A random token mailed out, looked up later by its 'token' value."""
    email = models.EmailField(max_length=255)
    token = models.CharField(max_length=64, db_index=True)
    created = models.DateTimeField(auto_now_add=True)

    def id_generator(self, size=64, chars=string.ascii_uppercase + string.digits):
        # SECURITY FIX: these tokens gate access by email, so draw them from
        # the OS CSPRNG (random.SystemRandom) instead of the predictable
        # default Mersenne Twister generator.  Interface is unchanged.
        rng = random.SystemRandom()
        return ''.join(rng.choice(chars) for x in range(size))

    def save(self, *args, **kwargs):
        # A fresh token is generated on every save (including updates).
        self.token = self.id_generator()
        super(EmailToken, self).save(*args, **kwargs)
class Comment(models.Model):
    """A user comment attached to either a session or a speaker."""
    user = models.ForeignKey(User)
    message = models.CharField(max_length=255)
    # Soft-delete flag; removed comments stay in the database.
    removed = models.BooleanField(default=False)
    created = models.DateTimeField(auto_now_add=True)
    # Exactly one of session/speaker is expected to be set by callers.
    # NOTE(review): not enforced at the model level -- confirm.
    session = models.ForeignKey(Session, null=True, blank=True)
    speaker = models.ForeignKey(Speaker, null=True, blank=True)

    class Meta:
        # Newest comments first.
        ordering = ['-id']
robclark/xbmc | tools/EventClients/lib/python/ps3/sixaxis.py | 1 | 10756 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (C) 2008-2009 Team XBMC http://www.xbmc.org
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import time
import sys
import struct
import math
import binascii
from bluetooth import set_l2cap_mtu
# Button bit masks for the sixaxis HID report's button field (17 bits).
SX_SELECT = 1 << 0
SX_L3 = 1 << 1
SX_R3 = 1 << 2
SX_START = 1 << 3
SX_DUP = 1 << 4
SX_DRIGHT = 1 << 5
SX_DDOWN = 1 << 6
SX_DLEFT = 1 << 7
SX_L2 = 1 << 8
SX_R2 = 1 << 9
SX_L1 = 1 << 10
SX_R1 = 1 << 11
SX_TRIANGLE = 1 << 12
SX_CIRCLE = 1 << 13
SX_X = 1 << 14
SX_SQUARE = 1 << 15
SX_POWER = 1 << 16

# Indices of the analog stick axes in the report's 4-byte axis array.
SX_LSTICK_X = 0
SX_LSTICK_Y = 1
SX_RSTICK_X = 2
SX_RSTICK_Y = 3

# (map, key, amount index, axis)
# Maps each sixaxis button to an XBMC gamepad ("XG") key.  When non-zero,
# "amount index" is the 1-based index into the report's pressure array
# used as the analog amount (see process_input); "axis" marks the key as
# an analog trigger.
keymap_sixaxis = {
    SX_X        : ('XG', 'A', 0, 0),
    SX_CIRCLE   : ('XG', 'B', 0, 0),
    SX_SQUARE   : ('XG', 'X', 0, 0),
    SX_TRIANGLE : ('XG', 'Y', 0, 0),

    SX_DUP      : ('XG', 'dpadup', 0, 0),
    SX_DDOWN    : ('XG', 'dpaddown', 0, 0),
    SX_DLEFT    : ('XG', 'dpadleft', 0, 0),
    SX_DRIGHT   : ('XG', 'dpadright', 0, 0),

    SX_START    : ('XG', 'start', 0, 0),
    SX_SELECT   : ('XG', 'back', 0, 0),

    SX_R1       : ('XG', 'white', 0, 0),
    SX_R2       : ('XG', 'rightanalogtrigger', 6, 1),
    SX_L2       : ('XG', 'leftanalogtrigger', 5, 1),
    SX_L1       : ('XG', 'black', 0, 0),

    SX_L3       : ('XG', 'leftthumbbutton', 0, 0),
    SX_R3       : ('XG', 'rightthumbbutton', 0, 0),
}

# (map, action for negative direction, action for positive direction)
# Each stick axis maps to a pair of XBMC actions, one per direction
# (consumed positionally by send_singleaxis).
axismap_sixaxis = {
    SX_LSTICK_X : ('XG', 'leftthumbstickleft' , 'leftthumbstickright'),
    SX_LSTICK_Y : ('XG', 'leftthumbstickup'   , 'leftthumbstickdown'),
    SX_RSTICK_X : ('XG', 'rightthumbstickleft', 'rightthumbstickright'),
    SX_RSTICK_Y : ('XG', 'rightthumbstickup'  , 'rightthumbstickdown'),
}

# to make sure all combination keys are checked first
# we sort the keymap's button codes in reverse order
# this guarantees that any bit combined button code
# will be processed first
# (Python 2 only: dict.keys() returns a plain list, sortable in place.)
keymap_sixaxis_keys = keymap_sixaxis.keys()
keymap_sixaxis_keys.sort()
keymap_sixaxis_keys.reverse()
def getkeys(bflags):
    """Decode the button bit field into a list of known button codes.

    keymap_sixaxis_keys is sorted in descending order, so multi-bit
    combination codes are matched first; matched bits are cleared so a
    combination swallows its constituent buttons.
    """
    matched = []
    for code in keymap_sixaxis_keys:
        if (code & bflags) == code:
            matched.append(code)
            bflags = bflags & ~code
    return matched
def normalize(val):
    """Map a raw sensor value onto the (0, 65535] range used by XBMC.

    The raw value is re-centred within the sensor span, a dead margin of
    'offset' is trimmed from both ends, and the remainder is rescaled;
    non-positive results are clamped to 1 (0 would read as "no value").
    """
    upper = 65281
    lower = 2
    span = upper - lower
    offset = 10000

    # Re-centre the raw reading inside the sensor's span.
    val = (val + span / 2) % span

    # Trim 'offset' from both ends of the window and clamp into it.
    upper -= offset
    lower += offset
    if val < lower:
        val = lower
    elif val > upper:
        val = upper

    scaled = ((float(val) - offset) / (float(upper) - lower)) * 65535.0
    return scaled if scaled > 0 else 1
def normalize_axis(val, deadzone):
    """Convert a 0..255 stick byte into a signed amount in [-65536, 65536].

    Values within 'deadzone' (as a fraction of half-range) of centre map
    to 0.0; the remainder is rescaled so the output still spans the full
    range outside the dead zone.
    """
    centered = (float(val) - 127.5) / 127.5
    if abs(centered) < deadzone:
        return 0.0

    live_range = 1.0 - deadzone
    if centered > 0.0:
        rescaled = (centered - deadzone) / live_range
    else:
        rescaled = (centered + deadzone) / live_range
    return 65536.0 * rescaled
def normalize_angle(val, valrange):
    """Scale an angle into the 16-bit-style range XBMC expects.

    'val' is expressed as a fraction of twice 'valrange', clamped to
    [-1, 1], shifted by +0.5 and scaled by 65535.
    """
    ratio = val / (valrange * 2)
    ratio = min(max(ratio, -1.0), 1.0)
    return (ratio + 0.5) * 65535.0
def average(array):
    """Return the arithmetic mean of a non-empty sequence."""
    return sum(array) / len(array)
def smooth(arr, val):
    """Push 'val' into the fixed-length window 'arr' and return its mean.

    The newest sample goes to the front and the oldest is dropped, so
    'arr' keeps its length (the helper the original delegated to is
    inlined here: mean = sum / len).
    """
    arr.insert(0, val)
    arr.pop()
    return sum(arr) / len(arr)
class sixaxis():
    """Driver for one connected PS3 sixaxis controller.

    Enables the pad over the HID control socket, then decodes interrupt
    reports (buttons, sticks, accelerometer) and forwards them to XBMC
    through the supplied ``xbmc`` event-client object.
    """

    def __init__(self, xbmc, control_sock, interrupt_sock):
        """Initialise state and send the HID enable/LED setup commands.

        xbmc           -- XBMC event client used to deliver input events
        control_sock   -- L2CAP HID control channel
        interrupt_sock -- L2CAP HID interrupt channel (input reports)
        """
        self.xbmc = xbmc

        # Sliding windows used to smooth the tilt-derived mouse position.
        self.num_samples = 16
        self.sumx = [0] * self.num_samples
        self.sumy = [0] * self.num_samples
        self.sumr = [0] * self.num_samples
        # Last sent amount per stick axis (see send_singleaxis).
        self.axis_amount = [0, 0, 0, 0]

        # Button-state sets driving the press/hold/release event model.
        self.released = set()
        self.pressed = set()
        self.pending = set()
        self.held = set()
        self.psflags = 0
        # Timestamp of when the PS button went down (0 = not down).
        self.psdown = 0
        # Toggled by a short PS-button press; 1 = tilt controls the mouse.
        self.mouse_enabled = 0

        set_l2cap_mtu(control_sock, 64)
        set_l2cap_mtu(interrupt_sock, 64)
        time.sleep(0.25)  # If we ask to quickly here, it sometimes doesn't start

        # sixaxis needs this to enable it
        # 0x53 => HIDP_TRANS_SET_REPORT | HIDP_DATA_RTYPE_FEATURE
        control_sock.send("\x53\xf4\x42\x03\x00\x00")
        data = control_sock.recv(1)

        # This command will turn on the gyro and set the leds
        # I wonder if turning on the gyro makes it draw more current??
        # it's probably a flag somewhere in the following command

        # HID Command: HIDP_TRANS_SET_REPORT | HIDP_DATA_RTYPE_OUTPUT
        # HID Report:1
        # NOTE: 'bytes' shadows the builtin of the same name (Python 2 code).
        bytes = [0x52, 0x1]
        bytes.extend([0x00, 0x00, 0x00])
        bytes.extend([0xFF, 0x72])
        bytes.extend([0x00, 0x00, 0x00, 0x00])
        bytes.extend([0x02])  # 0x02 LED1, 0x04 LED2 ... 0x10 LED4

        # The following sections should set the blink frequncy of
        # the leds on the controller, but i've not figured out how.
        # These values where suggusted in a mailing list, but no explination
        # for how they should be combined to the 5 bytes per led
        #0xFF = 0.5Hz
        #0x80 = 1Hz
        #0x40 = 2Hz
        bytes.extend([0xFF, 0x00, 0x01, 0x00, 0x01])  #LED4 [0xff, 0xff, 0x10, 0x10, 0x10]
        bytes.extend([0xFF, 0x00, 0x01, 0x00, 0x01])  #LED3 [0xff, 0x40, 0x08, 0x10, 0x10]
        bytes.extend([0xFF, 0x00, 0x01, 0x00, 0x01])  #LED2 [0xff, 0x00, 0x10, 0x30, 0x30]
        bytes.extend([0xFF, 0x00, 0x01, 0x00, 0x01])  #LED1 [0xff, 0x00, 0x10, 0x40, 0x10]
        bytes.extend([0x00, 0x00, 0x00, 0x00, 0x00])
        bytes.extend([0x00, 0x00, 0x00, 0x00, 0x00])

        control_sock.send(struct.pack("42B", *bytes))
        data = control_sock.recv(1)

    def __del__(self):
        # Best-effort: release any buttons still reported as down.
        self.close()

    def close(self):
        """Send a release (amount=0) for every button currently down."""
        for key in (self.held | self.pressed):
            (mapname, action, amount, axis) = keymap_sixaxis[key]
            self.xbmc.send_button_state(map=mapname, button=action, amount=0, down=0, axis=axis)
        self.held = set()
        self.pressed = set()

    def process_socket(self, isock):
        """Read one interrupt report from 'isock' and process it.

        Returns False when nothing was received, otherwise whatever
        process_data() returns.
        """
        data = isock.recv(50)
        if data == None:
            return False
        return self.process_data(data)

    def process_data(self, data):
        """Decode one raw HID input report and hand it to process_input.

        Returns False for short or non-input reports, otherwise the
        result of process_input().
        """
        if len(data) < 3:
            return False

        # make sure this is the correct report
        if struct.unpack("BBB", data[0:3]) != (0xa1, 0x01, 0x00):
            return False

        # Accelerometer (3 x int16) lives at offsets 42..47 when present.
        if len(data) >= 48:
            v1 = struct.unpack("h", data[42:44])
            v2 = struct.unpack("h", data[44:46])
            v3 = struct.unpack("h", data[46:48])
        else:
            v1 = [0,0]
            v2 = [0,0]
            v3 = [0,0]

        # Fourth int16 at 48..49 (fed into rz below).
        if len(data) >= 50:
            v4 = struct.unpack("h", data[48:50])
        else:
            v4 = [0,0]

        ax = float(v1[0])
        ay = float(v2[0])
        az = float(v3[0])
        rz = float(v4[0])
        # Acceleration magnitude; computed but currently unused (as is rz).
        at = math.sqrt(ax*ax + ay*ay + az*az)

        # 32-bit button bit field at offsets 3..6.
        bflags = struct.unpack("<I", data[3:7])[0]
        # 12 analog pressure bytes at offsets 15..26.
        # NOTE(review): the fallback list has 13 zeros, one more than the
        # unpacked tuple -- harmless, but looks unintentional.
        if len(data) > 27:
            pressure = struct.unpack("BBBBBBBBBBBB", data[15:27])
        else:
            pressure = [0,0,0,0,0,0,0,0,0,0,0,0,0]

        # Tilt angles derived from the accelerometer vector.
        roll = -math.atan2(ax, math.sqrt(ay*ay + az*az))
        pitch = math.atan2(ay, math.sqrt(ax*ax + az*az))

        # Bias pitch so a comfortable holding angle reads as neutral.
        pitch -= math.radians(20);
        xpos = normalize_angle(roll, math.radians(30))
        ypos = normalize_angle(pitch, math.radians(30))

        # Stick axes: one unsigned byte each at offsets 7..10.
        axis = struct.unpack("BBBB", data[7:11])
        return self.process_input(bflags, pressure, axis, xpos, ypos)

    def process_input(self, bflags, pressure, axis, xpos, ypos):
        """Turn a decoded report into XBMC events.

        Handles stick axes, tilt-mouse, the PS button (short press toggles
        mouse mode, >5s hold powers the driver off by raising Exception)
        and the press/hold/release life cycle of every mapped button.
        Returns True when any input is active.
        """
        # Smooth the tilt position over the sample windows.
        xval = smooth(self.sumx, xpos)
        yval = smooth(self.sumy, ypos)

        analog = False
        for i in range(4):
            config = axismap_sixaxis[i]
            self.axis_amount[i] = self.send_singleaxis(axis[i], self.axis_amount[i], config[0], config[1], config[2])
            if self.axis_amount[i] != 0:
                analog = True

        # send the mouse position to xbmc
        if self.mouse_enabled == 1:
            self.xbmc.send_mouse_position(xval, yval)

        if (bflags & SX_POWER) == SX_POWER:
            if self.psdown:
                # PS button held: after 5 seconds release everything and
                # shut down (caller is expected to catch this Exception).
                if (time.time() - self.psdown) > 5:

                    for key in (self.held | self.pressed):
                        (mapname, action, amount, axis) = keymap_sixaxis[key]
                        self.xbmc.send_button_state(map=mapname, button=action, amount=0, down=0, axis=axis)

                    raise Exception("PS3 Sixaxis powering off, user request")
            else:
                self.psdown = time.time()
        else:
            if self.psdown:
                # Short PS press released: toggle tilt-mouse mode.
                self.mouse_enabled = 1 - self.mouse_enabled
            self.psdown = 0

        # Set algebra implementing the press -> held -> released life
        # cycle; 'pending' makes a key register as pressed only on its
        # second consecutive report (a debounce).
        keys = set(getkeys(bflags))
        self.released = (self.pressed | self.held) - keys
        self.held = (self.pressed | self.held) - self.released
        self.pressed = (keys - self.held) & self.pending
        self.pending = (keys - self.held)

        for key in self.released:
            (mapname, action, amount, axis) = keymap_sixaxis[key]
            self.xbmc.send_button_state(map=mapname, button=action, amount=0, down=0, axis=axis)

        for key in self.held:
            (mapname, action, amount, axis) = keymap_sixaxis[key]
            if amount > 0:
                # 'amount' is a 1-based index into the pressure array.
                amount = pressure[amount-1] * 256
            self.xbmc.send_button_state(map=mapname, button=action, amount=amount, down=1, axis=axis)

        for key in self.pressed:
            (mapname, action, amount, axis) = keymap_sixaxis[key]
            if amount > 0:
                amount = pressure[amount-1] * 256
            self.xbmc.send_button_state(map=mapname, button=action, amount=amount, down=1, axis=axis)

        if analog or keys or self.mouse_enabled:
            return True
        else:
            return False

    def send_singleaxis(self, axis, last_amount, mapname, action_min, action_pos):
        """Send events for one stick axis and return its new amount.

        Each axis maps to two XBMC actions (negative/positive direction).
        When the direction changes, the previously active action is
        released before the new one is sent with the current magnitude.
        """
        amount = normalize_axis(axis, 0.30)
        if last_amount < 0:
            last_action = action_min
        elif last_amount > 0:
            last_action = action_pos
        else:
            last_action = None

        if amount < 0:
            new_action = action_min
        elif amount > 0:
            new_action = action_pos
        else:
            new_action = None

        if last_action and new_action != last_action:
            self.xbmc.send_button_state(map=mapname, button=last_action, amount=0, axis=1)

        if new_action and amount != last_amount:
            self.xbmc.send_button_state(map=mapname, button=new_action, amount=abs(amount), axis=1)

        return amount
| gpl-2.0 |
ragupta-git/ImcSdk | tests/server/test_ldap.py | 1 | 4694 | # Copyright 2016 Cisco Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from nose.tools import assert_equal, raises
from ..connection.info import custom_setup, custom_teardown
from imcsdk.apis.admin.ldap import ldap_enable, ldap_exists,\
ldap_role_group_create, ldap_role_group_exists, ldap_role_group_delete,\
ldap_certificate_management_enable, ldap_certificate_management_disable,\
is_ldap_certificate_management_enabled
# Shared IMC connection handle, created once per module by setup_module().
handle = None
def setup_module():
    # nose module-level fixture: open the connection used by every test.
    global handle
    handle = custom_setup()
def teardown_module():
    # nose module-level fixture: close the shared connection.
    global handle
    custom_teardown(handle)
# LDAP server fixtures ({"id", "ip", "port"} dicts) used across the tests.
ldap_servers = [{"id": 1, "ip": "192.168.1.1", "port": 400},
                {"id": 2, "ip": "192.168.1.2", "port": 500},
                {"id": 3, "ip": "192.168.1.3", "port": 600},
                {"id": 4, "ip": "1.2.3.4", "port": 700}]

# Seven entries -- used by test_ldap_enable_invalid_servers, whose
# @raises(Exception) decorator expects ldap_enable to reject this list
# (presumably more servers than the IMC accepts -- TODO confirm limit).
ldap_servers_2 = [{"id": 1, "ip": "192.168.1.1", "port": 400},
                  {"id": 2, "ip": "192.168.1.2", "port": 500},
                  {"id": 3, "ip": "192.168.1.3", "port": 600},
                  {"id": 4, "ip": "1.2.3.4", "port": 700},
                  {"id": 5, "ip": "192.168.1.5", "port": 600},
                  {"id": 6, "ip": "192.168.1.63", "port": 600},
                  {"id": 7, "ip": "192.168.1.13", "port": 600}]

# Same shape as ldap_servers but entries 3 and 4 differ, so it must NOT
# match a configuration applied from ldap_servers.
ldap_servers_3 = [{"id": 1, "ip": "192.168.1.1", "port": 400},
                  {"id": 2, "ip": "192.168.1.2", "port": 500},
                  {"id": 3, "ip": "192.168.1.100", "port": 600},
                  {"id": 4, "ip": "1.2.3.4", "port": 800}]
def test_ldap_enable():
    # Apply an LDAP configuration, then verify the exact same settings
    # round-trip through ldap_exists().
    ldap_enable(
        handle,
        basedn='DC=QATCSLABTPI02,DC=cisco,DC=com',
        domain='QATCSLABTPI02.cisco.com',
        timeout=20, group_auth="enabled",
        bind_dn='CN=administrator,CN=Users,DC=QATCSLABTPI02,DC=cisco,DC=com',
        password='abcdefg', ldap_servers=ldap_servers)
    match, mo = ldap_exists(
        handle,
        basedn='DC=QATCSLABTPI02,DC=cisco,DC=com',
        domain='QATCSLABTPI02.cisco.com',
        timeout=20, group_auth="enabled",
        bind_dn='CN=administrator,CN=Users,DC=QATCSLABTPI02,DC=cisco,DC=com',
        password='abcdefg', ldap_servers=ldap_servers)
    assert_equal(match, True)
def test_ldap_mismatch_config():
    # Different timeout/group_auth than what test_ldap_enable applied,
    # so ldap_exists() must report no match.
    match, mo = ldap_exists(
        handle,
        basedn='DC=QATCSLABTPI02,DC=cisco,DC=com',
        domain='QATCSLABTPI02.cisco.com',
        timeout=100, group_auth="disabled",
        bind_dn='CN=administrator,CN=Users,DC=QATCSLABTPI02,DC=cisco,DC=com',
        password='abcdefg', ldap_servers=ldap_servers)
    assert_equal(match, False)
def test_ldap_mismatch_config_servers():
    # Same scalar settings but a different server list (ldap_servers_3),
    # so ldap_exists() must report no match.
    match, mo = ldap_exists(
        handle,
        basedn='DC=QATCSLABTPI02,DC=cisco,DC=com',
        domain='QATCSLABTPI02.cisco.com',
        timeout=20, group_auth="enabled",
        bind_dn='CN=administrator,CN=Users,DC=QATCSLABTPI02,DC=cisco,DC=com',
        password='abcdefg', ldap_servers=ldap_servers_3)
    assert_equal(match, False)
@raises(Exception)
def test_ldap_enable_invalid_servers():
    # ldap_servers_2 carries seven entries; ldap_enable is expected to
    # raise (hence the @raises decorator above).
    ldap_enable(
        handle,
        basedn='DC=QATCSLABTPI02,DC=cisco,DC=com',
        domain='QATCSLABTPI02.cisco.com',
        timeout=20, group_auth="enabled",
        bind_dn='CN=administrator,CN=Users,DC=QATCSLABTPI02,DC=cisco,DC=com',
        password='abcdefg', ldap_servers=ldap_servers_2)
def test_ldap_role_group_create():
    # Create a role group, then verify it is reported as existing.
    ldap_role_group_create(handle, domain='abcd.pqrs.com', name='abcd', role='user')
    match, mo = ldap_role_group_exists(handle, domain='abcd.pqrs.com', name='abcd', role='user')
    assert_equal(match, True)
def test_ldap_role_group_delete():
    # Delete the role group created above, then verify it is gone.
    ldap_role_group_delete(handle, domain='abcd.pqrs.com', name='abcd')
    match, mo = ldap_role_group_exists(handle, domain='abcd.pqrs.com', name='abcd', role='user')
    assert_equal(match, False)
def test_ldap_cert_mgmt_enable():
    # Enabling certificate management must be reflected by the query API.
    ldap_certificate_management_enable(handle)
    assert_equal(is_ldap_certificate_management_enabled(handle), True)
def test_ldap_cert_mgmt_disable():
    # Disabling certificate management must be reflected by the query API.
    ldap_certificate_management_disable(handle)
    assert_equal(is_ldap_certificate_management_enabled(handle), False)
def test_ldap_disable():
    # Calling ldap_enable() with no settings resets LDAP to defaults.
    ldap_enable(handle)
| apache-2.0 |
kirillzhuravlev/numpy | numpy/doc/internals.py | 163 | 9673 | """
===============
Array Internals
===============
Internal organization of numpy arrays
=====================================
It helps to understand a bit about how numpy arrays are handled under the covers to help understand numpy better. This section will not go into great detail. Those wishing to understand the full details are referred to Travis Oliphant's book "Guide to Numpy".
Numpy arrays consist of two major components, the raw array data (from now on,
referred to as the data buffer), and the information about the raw array data.
The data buffer is typically what people think of as arrays in C or Fortran,
a contiguous (and fixed) block of memory containing fixed sized data items.
Numpy also contains a significant set of data that describes how to interpret
the data in the data buffer. This extra information contains (among other things):
1) The basic data element's size in bytes
2) The start of the data within the data buffer (an offset relative to the
beginning of the data buffer).
3) The number of dimensions and the size of each dimension
4) The separation between elements for each dimension (the 'stride'). This
does not have to be a multiple of the element size
5) The byte order of the data (which may not be the native byte order)
6) Whether the buffer is read-only
7) Information (via the dtype object) about the interpretation of the basic
data element. The basic data element may be as simple as a int or a float,
or it may be a compound object (e.g., struct-like), a fixed character field,
or Python object pointers.
8) Whether the array is to interpreted as C-order or Fortran-order.
This arrangement allow for very flexible use of arrays. One thing that it allows
is simple changes of the metadata to change the interpretation of the array buffer.
Changing the byteorder of the array is a simple change involving no rearrangement
of the data. The shape of the array can be changed very easily without changing
anything in the data buffer or any data copying at all
Among other things that are made possible is that one can create a new array
metadata object that uses the same data buffer, giving a new view of that
data buffer that has a different interpretation
of the buffer (e.g., different shape, offset, byte order, strides, etc) but
shares the same data bytes. Many operations in numpy do just this such as
slices. Other operations, such as transpose, don't move data elements
around in the array, but rather change the information about the shape and strides so that the indexing of the array changes, but the data in the buffer doesn't move.
Typically these new versions of the array metadata but the same data buffer are
new 'views' into the data buffer. There is a different ndarray object, but it
uses the same data buffer. This is why it is necessary to force copies through
use of the .copy() method if one really wants to make a new and independent
copy of the data buffer.
New views into arrays mean that the object reference counts for the data buffer
increase. Simply doing away with the original array object will not remove the
data buffer if other views of it still exist.
Multidimensional Array Indexing Order Issues
============================================
What is the right way to index
multi-dimensional arrays? Before you jump to conclusions about the one and
true way to index multi-dimensional arrays, it pays to understand why this is
a confusing issue. This section will try to explain in detail how numpy
indexing works and why we adopt the convention we do for images, and when it
may be appropriate to adopt other conventions.
The first thing to understand is
that there are two conflicting conventions for indexing 2-dimensional arrays.
Matrix notation uses the first index to indicate which row is being selected and
the second index to indicate which column is selected. This is opposite the
geometrically oriented-convention for images where people generally think the
first index represents x position (i.e., column) and the second represents y
position (i.e., row). This alone is the source of much confusion;
matrix-oriented users and image-oriented users expect two different things with
regard to indexing.
The second issue to understand is how indices correspond
to the order the array is stored in memory. In Fortran the first index is the
most rapidly varying index when moving through the elements of a two
dimensional array as it is stored in memory. If you adopt the matrix
convention for indexing, then this means the matrix is stored one column at a
time (since the first index moves to the next row as it changes). Thus Fortran
is considered a Column-major language. C has just the opposite convention. In
C, the last index changes most rapidly as one moves through the array as
stored in memory. Thus C is a Row-major language. The matrix is stored by
rows. Note that in both cases it presumes that the matrix convention for
indexing is being used, i.e., for both Fortran and C, the first index is the
row. Note this convention implies that the indexing convention is invariant
and that the data order changes to keep that so.
But that's not the only way
to look at it. Suppose one has large two-dimensional arrays (images or
matrices) stored in data files. Suppose the data are stored by rows rather than
by columns. If we are to preserve our index convention (whether matrix or
image) that means that depending on the language we use, we may be forced to
reorder the data if it is read into memory to preserve our indexing
convention. For example if we read row-ordered data into memory without
reordering, it will match the matrix indexing convention for C, but not for
Fortran. Conversely, it will match the image indexing convention for Fortran,
but not for C. For C, if one is using data stored in row order, and one wants
to preserve the image index convention, the data must be reordered when
reading into memory.
In the end, which you do for Fortran or C depends on
which is more important, not reordering data or preserving the indexing
convention. For large images, reordering data is potentially expensive, and
often the indexing convention is inverted to avoid that.
The situation with
numpy makes this issue yet more complicated. The internal machinery of numpy
arrays is flexible enough to accept any ordering of indices. One can simply
reorder indices by manipulating the internal stride information for arrays
without reordering the data at all. Numpy will know how to map the new index
order to the data without moving the data.
So if this is true, why not choose
the index order that matches what you most expect? In particular, why not define
row-ordered images to use the image convention? (This is sometimes referred
to as the Fortran convention vs the C convention, thus the 'C' and 'FORTRAN'
order options for array ordering in numpy.) The drawback of doing this is
potential performance penalties. It's common to access the data sequentially,
either implicitly in array operations or explicitly by looping over rows of an
image. When that is done, then the data will be accessed in non-optimal order.
As the first index is incremented, what is actually happening is that elements
spaced far apart in memory are being sequentially accessed, with usually poor
memory access speeds. For example, for a two dimensional image 'im' defined so
that im[0, 10] represents the value at x=0, y=10. To be consistent with usual
Python behavior then im[0] would represent a column at x=0. Yet that data
would be spread over the whole array since the data are stored in row order.
Despite the flexibility of numpy's indexing, it can't really paper over the fact
basic operations are rendered inefficient because of data order or that getting
contiguous subarrays is still awkward (e.g., im[:,0] for the first row, vs
im[0]), thus one can't use an idiom such as for row in im; for col in im does
work, but doesn't yield contiguous column data.
As it turns out, numpy is
smart enough when dealing with ufuncs to determine which index is the most
rapidly varying one in memory and uses that for the innermost loop. Thus for
ufuncs there is no large intrinsic advantage to either approach in most cases.
On the other hand, use of .flat with an FORTRAN ordered array will lead to
non-optimal memory access as adjacent elements in the flattened array (iterator,
actually) are not contiguous in memory.
Indeed, the fact is that Python
indexing on lists and other sequences naturally leads to an outside-to inside
ordering (the first index gets the largest grouping, the next the next largest,
and the last gets the smallest element). Since image data are normally stored
by rows, this corresponds to position within rows being the last item indexed.
If you do want to use Fortran ordering realize that
there are two approaches to consider: 1) accept that the first index is just not
the most rapidly changing in memory and have all your I/O routines reorder
your data when going from memory to disk or visa versa, or use numpy's
mechanism for mapping the first index to the most rapidly varying data. We
recommend the former if possible. The disadvantage of the latter is that many
of numpy's functions will yield arrays without Fortran ordering unless you are
careful to use the 'order' keyword. Doing this would be highly inconvenient.
Otherwise we recommend simply learning to reverse the usual order of indices
when accessing elements of an array. Granted, it goes against the grain, but
it is more in line with Python semantics and the natural order of the data.
"""
from __future__ import division, absolute_import, print_function
| bsd-3-clause |
georgemarshall/django | django/core/files/uploadedfile.py | 70 | 3890 | """
Classes representing uploaded files.
"""
import os
from io import BytesIO
from django.conf import settings
from django.core.files import temp as tempfile
from django.core.files.base import File
__all__ = ('UploadedFile', 'TemporaryUploadedFile', 'InMemoryUploadedFile',
'SimpleUploadedFile')
class UploadedFile(File):
    """
    An abstract uploaded file (``TemporaryUploadedFile`` and
    ``InMemoryUploadedFile`` are the built-in concrete subclasses).

    An ``UploadedFile`` object behaves somewhat like a file object and
    represents some file data that the user submitted with a form.
    """

    def __init__(self, file=None, name=None, content_type=None, size=None, charset=None, content_type_extra=None):
        super().__init__(file, name)
        self.content_type = content_type
        self.size = size
        self.charset = charset
        self.content_type_extra = content_type_extra

    def __repr__(self):
        return "<%s: %s (%s)>" % (self.__class__.__name__, self.name, self.content_type)

    def _get_name(self):
        return self._name

    def _set_name(self, name):
        # Sanitize the user-supplied name so it cannot be dangerous.
        if name is not None:
            # Strip any directory component -- only the basename is trusted.
            name = os.path.basename(name)
            # Names longer than 255 characters can cause problems on older
            # OSes; trim while preserving the extension where possible.
            if len(name) > 255:
                stem, ext = os.path.splitext(name)
                ext = ext[:255]
                name = stem[:255 - len(ext)] + ext
        self._name = name

    name = property(_get_name, _set_name)
class TemporaryUploadedFile(UploadedFile):
    """
    A file uploaded to a temporary location (i.e. stream-to-disk).
    """

    def __init__(self, name, content_type, size, charset, content_type_extra=None):
        # Keep the original extension on the temp file so tools that sniff
        # by suffix still work; the '.upload' marker identifies the source.
        ext = os.path.splitext(name)[1]
        tmp = tempfile.NamedTemporaryFile(suffix='.upload' + ext, dir=settings.FILE_UPLOAD_TEMP_DIR)
        super().__init__(tmp, name, content_type, size, charset, content_type_extra)

    def temporary_file_path(self):
        """Return the full path of this file."""
        return self.file.name

    def close(self):
        # The temp file may have been moved or deleted before the tempfile
        # could unlink it. NamedTemporaryFile still sets close_called and
        # closes the underlying file object before raising, so the error
        # can safely be ignored.
        try:
            return self.file.close()
        except FileNotFoundError:
            pass
class InMemoryUploadedFile(UploadedFile):
    """
    A file uploaded into memory (i.e. stream-to-memory).
    """

    def __init__(self, file, field_name, name, content_type, size, charset, content_type_extra=None):
        super().__init__(file, name, content_type, size, charset, content_type_extra)
        self.field_name = field_name

    def open(self, mode=None):
        # In-memory data only needs rewinding; `mode` is accepted for
        # interface compatibility and ignored.
        self.file.seek(0)
        return self

    def chunks(self, chunk_size=None):
        # The whole payload is already in memory; yield it as one chunk.
        self.file.seek(0)
        yield self.read()

    def multiple_chunks(self, chunk_size=None):
        """An in-memory file is always a single chunk."""
        return False
class SimpleUploadedFile(InMemoryUploadedFile):
    """
    A simple representation of a file, which just has content, size, and a name.
    """

    def __init__(self, name, content, content_type='text/plain'):
        data = content or b''
        super().__init__(BytesIO(data), None, name, content_type, len(data), None, None)

    @classmethod
    def from_dict(cls, file_dict):
        """
        Create a SimpleUploadedFile object from a dictionary with keys:
           - filename
           - content-type
           - content
        """
        return cls(
            file_dict['filename'],
            file_dict['content'],
            file_dict.get('content-type', 'text/plain'),
        )
| bsd-3-clause |
cloudbase/neutron | neutron/tests/tempest/api/test_security_groups.py | 5 | 2451 | # Copyright 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.lib.common.utils import data_utils
from tempest import test
from neutron.tests.tempest.api import base_security_groups as base
class SecGroupTest(base.BaseSecGroupTest):
    # Tempest API test exercising the CRUD lifecycle of a Neutron security
    # group: create, list, update and show (deletion is handled by the base
    # class's resource cleanup).

    @classmethod
    @test.requires_ext(extension="security-group", service="network")
    def resource_setup(cls):
        # Skips the whole class when the 'security-group' extension is not
        # enabled on the network service.
        super(SecGroupTest, cls).resource_setup()

    @test.idempotent_id('bfd128e5-3c92-44b6-9d66-7fe29d22c802')
    def test_create_list_update_show_delete_security_group(self):
        group_create_body, name = self._create_security_group()
        # List security groups and verify if created group is there in response
        list_body = self.client.list_security_groups()
        secgroup_list = list()
        for secgroup in list_body['security_groups']:
            secgroup_list.append(secgroup['id'])
        self.assertIn(group_create_body['security_group']['id'], secgroup_list)
        # Update the security group
        new_name = data_utils.rand_name('security-')
        new_description = data_utils.rand_name('security-description')
        update_body = self.client.update_security_group(
            group_create_body['security_group']['id'],
            name=new_name,
            description=new_description)
        # Verify if security group is updated
        self.assertEqual(update_body['security_group']['name'], new_name)
        self.assertEqual(update_body['security_group']['description'],
                         new_description)
        # Show details of the updated security group
        show_body = self.client.show_security_group(
            group_create_body['security_group']['id'])
        self.assertEqual(show_body['security_group']['name'], new_name)
        self.assertEqual(show_body['security_group']['description'],
                         new_description)
| apache-2.0 |
pombreda/pyfilesystem-4 | fs/tests/test_path.py | 6 | 6557 | """
fs.tests.test_path: testcases for the fs path functions
"""
import unittest
import fs.tests
from fs.path import *
class TestPathFunctions(unittest.TestCase):
    """Testcases for FS path functions."""

    def test_normpath(self):
        # (input, expected) pairs: backslashes are normalised to forward
        # slashes, '.' components collapse, '..' is resolved.
        tests = [("\\a\\b\\c", "/a/b/c"),
                 (".", ""),
                 ("./", ""),
                 ("", ""),
                 ("/a/b/c", "/a/b/c"),
                 ("a/b/c", "a/b/c"),
                 ("a/b/../c/", "a/c"),
                 ("/", "/"),
                 (u"a/\N{GREEK SMALL LETTER BETA}\\c", u"a/\N{GREEK SMALL LETTER BETA}/c"),
                 ]
        for path, result in tests:
            self.assertEqual(normpath(path), result)

    def test_pathjoin(self):
        # Each tuple is (*paths_to_join, expected_result); an absolute
        # component resets the join, mirroring os.path.join semantics.
        tests = [("", "a", "a"),
                 ("a", "a", "a/a"),
                 ("a/b", "../c", "a/c"),
                 ("a/b/../c", "d", "a/c/d"),
                 ("/a/b/c", "d", "/a/b/c/d"),
                 ("/a/b/c", "../../../d", "/d"),
                 ("a", "b", "c", "a/b/c"),
                 ("a/b/c", "../d", "c", "a/b/d/c"),
                 ("a/b/c", "../d", "/a", "/a"),
                 ("aaa", "bbb/ccc", "aaa/bbb/ccc"),
                 ("aaa", "bbb\ccc", "aaa/bbb/ccc"),
                 ("aaa", "bbb", "ccc", "/aaa", "eee", "/aaa/eee"),
                 ("a/b", "./d", "e", "a/b/d/e"),
                 ("/", "/", "/"),
                 ("/", "", "/"),
                 (u"a/\N{GREEK SMALL LETTER BETA}", "c", u"a/\N{GREEK SMALL LETTER BETA}/c"),
                 ]
        for testpaths in tests:
            paths = testpaths[:-1]
            result = testpaths[-1]
            self.assertEqual(pathjoin(*paths), result)
        # Joining past the root of a relative path must be rejected.
        self.assertRaises(ValueError, pathjoin, "../")
        self.assertRaises(ValueError, pathjoin, "./../")
        self.assertRaises(ValueError, pathjoin, "a/b", "../../..")
        self.assertRaises(ValueError, pathjoin, "a/b/../../../d")

    def test_relpath(self):
        # relpath strips the leading slash, turning absolute into relative.
        tests = [("/a/b", "a/b"),
                 ("a/b", "a/b"),
                 ("/", "")]
        for path, result in tests:
            self.assertEqual(relpath(path), result)

    def test_abspath(self):
        # abspath prepends a slash, turning relative into absolute.
        tests = [("/a/b", "/a/b"),
                 ("a/b", "/a/b"),
                 ("/", "/")]
        for path, result in tests:
            self.assertEqual(abspath(path), result)

    def test_iteratepath(self):
        # iteratepath yields each normalised component in order; the
        # optional second argument caps how many leading components are
        # split off before the remainder is yielded whole.
        tests = [("a/b", ["a", "b"]),
                 ("", []),
                 ("aaa/bbb/ccc", ["aaa", "bbb", "ccc"]),
                 ("a/b/c/../d", ["a", "b", "d"])]
        for path, results in tests:
            for path_component, expected in zip(iteratepath(path), results):
                self.assertEqual(path_component, expected)
        self.assertEqual(list(iteratepath("a/b/c/d", 1)), ["a", "b/c/d"])
        self.assertEqual(list(iteratepath("a/b/c/d", 2)), ["a", "b", "c/d"])

    def test_pathsplit(self):
        # pathsplit returns (head, tail), analogous to os.path.split.
        tests = [("a/b", ("a", "b")),
                 ("a/b/c", ("a/b", "c")),
                 ("a", ("", "a")),
                 ("", ("", "")),
                 ("/", ("/", "")),
                 ("/foo", ("/", "foo")),
                 ("foo/bar", ("foo", "bar")),
                 ("foo/bar/baz", ("foo/bar", "baz")),
                 ]
        for path, result in tests:
            self.assertEqual(pathsplit(path), result)

    def test_recursepath(self):
        # recursepath expands a path into every ancestor from the root down
        # (or from the leaf up when reverse=True).
        self.assertEquals(recursepath("/"), ["/"])
        self.assertEquals(recursepath("hello"), ["/", "/hello"])
        self.assertEquals(recursepath("/hello/world/"), ["/", "/hello", "/hello/world"])
        self.assertEquals(recursepath("/hello/world/", reverse=True), ["/hello/world", "/hello", "/"])
        self.assertEquals(recursepath("hello", reverse=True), ["/hello", "/"])
        self.assertEquals(recursepath("", reverse=True), ["/"])

    def test_isdotfile(self):
        # A dotfile is any path whose basename starts with '.'.
        for path in ['.foo',
                     '.svn',
                     'foo/.svn',
                     'foo/bar/.svn',
                     '/foo/.bar']:
            self.assert_(isdotfile(path))
        for path in ['asfoo',
                     'df.svn',
                     'foo/er.svn',
                     'foo/bar/test.txt',
                     '/foo/bar']:
            self.assertFalse(isdotfile(path))

    def test_dirname(self):
        tests = [('foo', ''),
                 ('foo/bar', 'foo'),
                 ('foo/bar/baz', 'foo/bar'),
                 ('/', '')]
        for path, test_dirname in tests:
            self.assertEqual(dirname(path), test_dirname)

    def test_basename(self):
        tests = [('foo', 'foo'),
                 ('foo/bar', 'bar'),
                 ('foo/bar/baz', 'baz'),
                 ('/', '')]
        for path, test_basename in tests:
            self.assertEqual(basename(path), test_basename)
class Test_PathMap(unittest.TestCase):
    # Tests for PathMap, a mapping keyed by normalised FS paths.
    # NOTE: this is Python 2 era code -- assertEquals, list-returning
    # range()/zip() comparisons and iterkeys() are all relied upon.

    def test_basics(self):
        # Keys are normalised, so 'hello', '/hello' and '/hello/' all
        # address the same entry.
        map = PathMap()
        map["hello"] = "world"
        self.assertEquals(map["/hello"], "world")
        self.assertEquals(map["/hello/"], "world")
        self.assertEquals(map.get("hello"), "world")

    def test_iteration(self):
        map = PathMap()
        map["hello/world"] = 1
        map["hello/world/howareya"] = 2
        map["hello/world/iamfine"] = 3
        map["hello/kitty"] = 4
        map["hello/kitty/islame"] = 5
        map["batman/isawesome"] = 6
        # Iteration yields normalised (absolute) keys.
        self.assertEquals(set(map.iterkeys()), set(("/hello/world", "/hello/world/howareya", "/hello/world/iamfine", "/hello/kitty", "/hello/kitty/islame", "/batman/isawesome")))
        self.assertEquals(sorted(map.values()), range(1, 7))
        # items()/keys()/values() accept a prefix restricting the view.
        self.assertEquals(sorted(map.items("/hello/world/")), [("/hello/world", 1), ("/hello/world/howareya", 2), ("/hello/world/iamfine", 3)])
        self.assertEquals(zip(map.keys(), map.values()), map.items())
        self.assertEquals(zip(map.keys("batman"), map.values("batman")), map.items("batman"))
        # iternames() yields the immediate child names under a prefix.
        self.assertEquals(set(map.iternames("hello")), set(("world", "kitty")))
        self.assertEquals(set(map.iternames("/hello/kitty")), set(("islame",)))
        # Deleting an entry removes it from names, keys and values alike.
        del map["hello/kitty/islame"]
        self.assertEquals(set(map.iternames("/hello/kitty")), set())
        self.assertEquals(set(map.iterkeys()), set(("/hello/world", "/hello/world/howareya", "/hello/world/iamfine", "/hello/kitty", "/batman/isawesome")))
        self.assertEquals(set(map.values()), set(range(1, 7)) - set((5,)))
| bsd-3-clause |
WSDC-NITWarangal/django | tests/auth_tests/test_tokens.py | 297 | 2551 | import unittest
from datetime import date, timedelta
from django.conf import settings
from django.contrib.auth.models import User
from django.contrib.auth.tokens import PasswordResetTokenGenerator
from django.test import TestCase
from django.utils.six import PY3
class TokenGeneratorTest(TestCase):
    # Tests for django.contrib.auth.tokens.PasswordResetTokenGenerator.

    def test_make_token(self):
        """
        Ensure that we can make a token and that it is valid
        """
        user = User.objects.create_user('tokentestuser', 'test2@example.com', 'testpw')
        p0 = PasswordResetTokenGenerator()
        tk1 = p0.make_token(user)
        self.assertTrue(p0.check_token(user, tk1))

    def test_10265(self):
        """
        Ensure that the token generated for a user created in the same request
        will work correctly.
        """
        # See ticket #10265
        user = User.objects.create_user('comebackkid', 'test3@example.com', 'testpw')
        p0 = PasswordResetTokenGenerator()
        tk1 = p0.make_token(user)
        # Reloading from the DB can change field representations (e.g.
        # datetime microseconds); the token must remain identical.
        reload = User.objects.get(username='comebackkid')
        tk2 = p0.make_token(reload)
        self.assertEqual(tk1, tk2)

    def test_timeout(self):
        """
        Ensure we can use the token after n days, but no greater.
        """
        # Uses a mocked version of PasswordResetTokenGenerator so we can change
        # the value of 'today'
        class Mocked(PasswordResetTokenGenerator):
            def __init__(self, today):
                self._today_val = today
            def _today(self):
                return self._today_val
        user = User.objects.create_user('tokentestuser', 'test2@example.com', 'testpw')
        p0 = PasswordResetTokenGenerator()
        tk1 = p0.make_token(user)
        # Valid exactly at the timeout boundary...
        p1 = Mocked(date.today() + timedelta(settings.PASSWORD_RESET_TIMEOUT_DAYS))
        self.assertTrue(p1.check_token(user, tk1))
        # ...and invalid one day past it.
        p2 = Mocked(date.today() + timedelta(settings.PASSWORD_RESET_TIMEOUT_DAYS + 1))
        self.assertFalse(p2.check_token(user, tk1))

    @unittest.skipIf(PY3, "Unnecessary test with Python 3")
    def test_date_length(self):
        """
        Make sure we don't allow overly long dates, causing a potential DoS.
        """
        user = User.objects.create_user('ima1337h4x0r', 'test4@example.com', 'p4ssw0rd')
        p0 = PasswordResetTokenGenerator()
        # This will put a 14-digit base36 timestamp into the token, which is too large.
        self.assertRaises(ValueError,
                          p0._make_token_with_timestamp,
                          user, 175455491841851871349)
| bsd-3-clause |
moacirnq/camdroid_server_usp | app/models.py | 1 | 5815 | import cv2
from . import login_manager, db
from flask import current_app, url_for
from flask_login import UserMixin
from werkzeug.security import generate_password_hash, check_password_hash
from itsdangerous import TimedJSONWebSignatureSerializer as Serializer
@login_manager.user_loader
def load_user(user_id):
    """Flask-Login callback: load a User by its id (the e-mail address)."""
    return User.query.get(user_id)
class Group_Members(db.Model):
    # Association table linking users to groups. The composite primary key
    # is (group owner, group name, member e-mail); a composite FK ties the
    # first two columns back to the groups table.
    __tablename__ = 'group_members'
    __table_args__ = (db.ForeignKeyConstraint(['group_owner', 'group_name'], ['groups.owner_id','groups.name'], onupdate='CASCADE', ondelete='CASCADE'),
    db.ForeignKeyConstraint(['member_id'], ['users.email'], onupdate='CASCADE', ondelete='CASCADE'))
    group_owner = db.Column(db.String(64), primary_key=True)  # groups.owner_id
    group_name = db.Column( db.String(64), primary_key=True)  # groups.name
    member_id = db.Column(db.String(64), primary_key=True)    # users.email
    # Convenience accessor to the Group this membership row belongs to.
    group = db.relationship('Group')
class Group(db.Model):
    # A camera-sharing group, identified by (owner e-mail, group name).
    __tablename__='groups'
    owner_id = db.Column(db.String(64), primary_key=True)
    name = db.Column(db.String(64), primary_key=True)
    # Cameras assigned to this group (Camera.group_owner/group_name FK).
    cameras = db.relationship('Camera')
    # NOTE(review): the direct members relationship was left disabled;
    # memberships are reached through Group_Members instead.
    # members = db.relationship('members',
    # secondary='join(Group, Group_Members, Group.owner_id == Group_Members.group_owner, Group.name == Group_Members.group_name)',
    # lazy='dynamic')
class User(UserMixin, db.Model):
    """A user account, keyed by e-mail address.

    Owns cameras directly (``cameras``) and can reach further cameras
    through group memberships (``shared_cameras``).
    """
    __tablename__ = 'users'
    email = db.Column(db.String(64), primary_key=True)
    username = db.Column(db.String(64), unique=True, index=True)
    password_hash = db.Column(db.String(128))   # salted hash; plain text is never stored
    confirmed = db.Column(db.Boolean, default=False)  # e-mail confirmation flag
    cameras = db.relationship('Camera', backref='camera')
    groups = db.relationship('Group_Members')
    last_log = db.Column(db.DateTime)

    @property
    def id(self):
        # Flask-Login expects a unique `id`; the e-mail is the primary key.
        return self.email

    @property
    def password(self):
        raise AttributeError('password is not a readable attribute')

    @password.setter
    def password(self, password):
        self.password_hash = generate_password_hash(password)

    def verify_password(self, password):
        """Return True if *password* matches the stored hash."""
        return check_password_hash(self.password_hash, password)

    def generate_confirmation_token(self, expiration=3600):
        """Return a signed confirmation token valid for *expiration* seconds."""
        s = Serializer(current_app.config['SECRET_KEY'], expiration)
        return s.dumps({'confirm': self.id})

    def confirm(self, token):
        """Validate *token* and mark this account confirmed.

        Returns True on success, False for an invalid/expired token or a
        token issued for a different user.
        """
        s = Serializer(current_app.config['SECRET_KEY'])
        try:
            data = s.loads(token)
        except Exception:
            # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
            # are not swallowed; itsdangerous raises BadSignature subclasses.
            return False
        if data.get('confirm') != self.id:
            return False
        self.confirmed = True
        db.session.add(self)
        return True

    @property
    def shared_cameras(self):
        """Cameras reachable through group memberships (not owned directly)."""
        ret = []
        groups = Group_Members.query.filter_by(member_id=self.email).all()
        for membership in groups:
            for cam in membership.group.cameras:
                ret.append(cam)
        return ret

    def __repr__(self):
        return '<User %r>' % self.username

    def to_json(self):
        """Return a JSON-serializable dict including all reachable cameras."""
        all_cameras = []
        for cam in self.cameras:
            all_cameras.append(cam.to_json())
        for cam in self.shared_cameras:
            # BUG FIX: previously appended the bound method (`cam.to_json`)
            # instead of calling it, putting a non-serializable method
            # object into the JSON payload for shared cameras.
            all_cameras.append(cam.to_json())
        json_user = {
            'url': url_for('api.get_user'),
            'username': self.username,
            'email': self.email,
            'cameras': all_cameras
        }
        return json_user
class Camera(db.Model):
    # A network camera owned by a user, optionally assigned to a group.
    __tablename__ = 'cameras'
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(64))
    description = db.Column(db.String(200))
    src = db.Column(db.String(100))       # stream URL, may include http:// prefix
    username = db.Column(db.String(64))   # HTTP auth user for the stream, may be empty
    password = db.Column(db.String(64))   # HTTP auth password, may be empty
    owner_id = db.Column(db.String(64), db.ForeignKey('users.email', onupdate='CASCADE', ondelete='CASCADE'))
    height = db.Column(db.Integer)
    width = db.Column(db.Integer)
    group_name = db.Column(db.String(64))
    group_owner = db.Column(db.String(64))
    __table_args__ = (db.ForeignKeyConstraint(['group_owner', 'group_name'], ['groups.owner_id','groups.name'], onupdate='CASCADE', ondelete='CASCADE'),)

    @property
    def link(self):
        # Builds the stream URL, embedding credentials when present:
        # http://<user>:<password>@<host...>
        # NOTE(review): when username is set but password is empty this
        # produces 'http://user:host' with no '@' separator, which looks
        # malformed -- confirm whether that combination is expected.
        ret = ''
        if self.username and self.username != '':
            ret += 'http://' + self.username + ':'
        if self.password and self.password != '':
            ret += self.password + '@'
        if ret != '':
            # Credentials were added; strip any scheme from src to avoid
            # duplicating 'http://'.
            ret += self.src.replace('http://', '')
        else:
            ret += self.src
        return ret

    def to_json(self):
        """Return a JSON-serializable dict describing this camera."""
        json_camera = {
            'id': self.id,
            'name' : self.name,
            'owner': self.owner_id,
            'group': self.group_name,
            'link': self.link,
            'height':self.height,
            'width':self.width
        }
        return json_camera
class VideoFile(db.Model):
    # A recorded video segment produced by a camera, keyed by file path.
    __tablename__ = 'video_files'
    src = db.Column(db.Integer, db.ForeignKey('cameras.id', onupdate='CASCADE', ondelete='CASCADE'))  # originating camera id
    camera = db.relationship('Camera')
    path = db.Column(db.String(100), primary_key=True)  # on-disk location
    start_time = db.Column(db.DateTime)
    end_time = db.Column(db.DateTime)

    def to_json(self):
        """Return a JSON-serializable dict describing this recording."""
        return {'src':self.src, 'path': self.path, 'start_time': self.start_time, 'end_time': self.end_time}
class Alert(db.Model):
    # A camera event (e.g. motion alert) with an optional associated video.
    # Composite PK (camera, time) allows one alert per camera per instant.
    __tablename__ = 'alerts'
    camera = db.Column(db.Integer, db.ForeignKey('cameras.id', onupdate='CASCADE', ondelete='CASCADE'), primary_key=True)
    time = db.Column(db.DateTime, primary_key=True)
    video = db.Column(db.String(100))  # path of the captured clip, if any

    def to_json(self):
        """Return a JSON-serializable dict describing this alert."""
        return {'camera':self.camera,
                'time':self.time,
                'video':self.video}
class Camdroid(db.Model):
__tablename__ = 'camdroids'
id = db.Column(db.Integer, primary_key = True)
port = db.Column(db.Integer) | gpl-2.0 |
KaranToor/MA450 | google-cloud-sdk/lib/googlecloudsdk/api_lib/deployment_manager/runtime_configs/transforms.py | 6 | 2661 | # Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Runtime-config resource transforms and symbols dict.
NOTICE: Each TransformFoo() method is the implementation of a foo() transform
function. Even though the implementation here is in Python the usage in resource
projection and filter expressions is language agnostic. This affects the
Pythonicness of the Transform*() methods:
(1) The docstrings are used to generate external user documentation.
(2) The method prototypes are included in the documentation. In particular the
prototype formal parameter names are stylized for the documentation.
(3) The types of some args, like r, are not fixed until runtime. Other args
may have either a base type value or string representation of that type.
It is up to the transform implementation to silently do the string=>type
conversions. That's why you may see e.g. int(arg) in some of the methods.
(4) Unless it is documented to do so, a transform function must not raise any
exceptions. The `undefined' arg is used to handle all unusual conditions,
including ones that would raise exceptions.
"""
# The gRPC canonical status code for an exceeded deadline.
DEADLINE_EXCEEDED = 4


def TransformWaiterStatus(r, undefined=''):
  """Returns a short description of the status of a waiter or waiter operation.

  Status will be one of WAITING, SUCCESS, FAILURE, or TIMEOUT.

  Args:
    r: a JSON-serializable object
    undefined: Returns this value if the resource status cannot be determined.

  Returns:
    One of WAITING, SUCCESS, FAILURE, or TIMEOUT

  Example:
    `--format="table(name, status())"`:::
    Displays the status in table column two.
  """
  if not isinstance(r, dict):
    return undefined
  # A waiter that has not completed yet is still waiting.
  if not r.get('done'):
    return 'WAITING'
  error = r.get('error')
  # Completed with no error is plain success.
  if not error:
    return 'SUCCESS'
  # A DEADLINE_EXCEEDED error means the waiter timed out; anything else
  # is a genuine failure.
  return 'TIMEOUT' if error.get('code') == DEADLINE_EXCEEDED else 'FAILURE'
# Symbol table mapping transform names (as used in --format/--filter
# expressions) to their implementations.
_TRANSFORMS = {
    'waiter_status': TransformWaiterStatus,
}


def GetTransforms():
  """Returns the runtimeconfig-specific resource transform symbol table."""
  return _TRANSFORMS
| apache-2.0 |
ejona86/grpc | src/python/grpcio/grpc/beta/interfaces.py | 47 | 5956 | # Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Constants and interfaces of the Beta API of gRPC Python."""
import abc
import six
import grpc
ChannelConnectivity = grpc.ChannelConnectivity
# FATAL_FAILURE was a Beta-API name for SHUTDOWN
ChannelConnectivity.FATAL_FAILURE = ChannelConnectivity.SHUTDOWN
StatusCode = grpc.StatusCode
class GRPCCallOptions(object):
  """A value encapsulating gRPC-specific options passed on RPC invocation.

  This class and its instances have no supported interface - it exists to
  define the type of its instances and its instances exist to be passed to
  other functions.
  """

  def __init__(self, disable_compression, subcall_of, credentials):
    # Plain value holder: attributes are read directly by the invocation
    # machinery, so no accessors are provided.
    self.credentials = credentials
    self.subcall_of = subcall_of
    self.disable_compression = disable_compression
def grpc_call_options(disable_compression=False, credentials=None):
  """Creates a GRPCCallOptions value to be passed at RPC invocation.

  All parameters are optional and should always be passed by keyword.

  Args:
    disable_compression: A boolean indicating whether or not compression should
      be disabled for the request object of the RPC. Only valid for
      request-unary RPCs.
    credentials: A CallCredentials object to use for the invoked RPC.
  """
  # subcall_of is never user-supplied through this factory.
  return GRPCCallOptions(
      disable_compression=disable_compression,
      subcall_of=None,
      credentials=credentials)
GRPCAuthMetadataContext = grpc.AuthMetadataContext
GRPCAuthMetadataPluginCallback = grpc.AuthMetadataPluginCallback
GRPCAuthMetadataPlugin = grpc.AuthMetadataPlugin
# six.with_metaclass is used (rather than `metaclass=abc.ABCMeta`) for
# Python 2/3 compatibility of this Beta API.
class GRPCServicerContext(six.with_metaclass(abc.ABCMeta)):
    """Exposes gRPC-specific options and behaviors to code servicing RPCs."""

    @abc.abstractmethod
    def peer(self):
        """Identifies the peer that invoked the RPC being serviced.

        Returns:
          A string identifying the peer that invoked the RPC being serviced.
        """
        raise NotImplementedError()

    @abc.abstractmethod
    def disable_next_response_compression(self):
        """Disables compression of the next response passed by the application."""
        raise NotImplementedError()
# Abstract interface; concrete contexts are supplied by the gRPC runtime.
class GRPCInvocationContext(six.with_metaclass(abc.ABCMeta)):
    """Exposes gRPC-specific options and behaviors to code invoking RPCs."""

    @abc.abstractmethod
    def disable_next_request_compression(self):
        """Disables compression of the next request passed by the application."""
        raise NotImplementedError()
# Abstract server interface of the Beta API. Concrete servers are created
# by the gRPC runtime; this class only defines the required surface.
class Server(six.with_metaclass(abc.ABCMeta)):
    """Services RPCs."""

    @abc.abstractmethod
    def add_insecure_port(self, address):
        """Reserves a port for insecure RPC service once this Server becomes active.

        This method may only be called before calling this Server's start method is
        called.

        Args:
          address: The address for which to open a port.

        Returns:
          An integer port on which RPCs will be serviced after this link has been
          started. This is typically the same number as the port number contained
          in the passed address, but will likely be different if the port number
          contained in the passed address was zero.
        """
        raise NotImplementedError()

    @abc.abstractmethod
    def add_secure_port(self, address, server_credentials):
        """Reserves a port for secure RPC service after this Server becomes active.

        This method may only be called before calling this Server's start method is
        called.

        Args:
          address: The address for which to open a port.
          server_credentials: A ServerCredentials.

        Returns:
          An integer port on which RPCs will be serviced after this link has been
          started. This is typically the same number as the port number contained
          in the passed address, but will likely be different if the port number
          contained in the passed address was zero.
        """
        raise NotImplementedError()

    @abc.abstractmethod
    def start(self):
        """Starts this Server's service of RPCs.

        This method may only be called while the server is not serving RPCs (i.e. it
        is not idempotent).
        """
        raise NotImplementedError()

    @abc.abstractmethod
    def stop(self, grace):
        """Stops this Server's service of RPCs.

        All calls to this method immediately stop service of new RPCs. When existing
        RPCs are aborted is controlled by the grace period parameter passed to this
        method.

        This method may be called at any time and is idempotent. Passing a smaller
        grace value than has been passed in a previous call will have the effect of
        stopping the Server sooner. Passing a larger grace value than has been
        passed in a previous call will not have the effect of stopping the server
        later.

        Args:
          grace: A duration of time in seconds to allow existing RPCs to complete
            before being aborted by this Server's stopping. May be zero for
            immediate abortion of all in-progress RPCs.

        Returns:
          A threading.Event that will be set when this Server has completely
          stopped. The returned event may not be set until after the full grace
          period (if some ongoing RPC continues for the full length of the period)
          of it may be set much sooner (such as if this Server had no RPCs underway
          at the time it was stopped or if all RPCs that it had underway completed
          very early in the grace period).
        """
        raise NotImplementedError()
| apache-2.0 |
dfang/odoo | addons/website_crm/controllers/main.py | 10 | 3636 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import babel
from datetime import datetime, timedelta
from odoo import http
from odoo.http import request
from odoo.addons.website.controllers.backend import WebsiteBackend
from odoo.tools import DEFAULT_SERVER_DATETIME_FORMAT, DEFAULT_SERVER_DATE_FORMAT
class WebsiteCrmBackend(WebsiteBackend):
    # Extends the website dashboard backend with CRM lead statistics for
    # salesmen (leads generated through the website UTM medium).

    @http.route()
    def fetch_dashboard_data(self, date_from, date_to):
        """Augment the base dashboard payload with website-lead data.

        date_from/date_to arrive as DEFAULT_SERVER_DATE_FORMAT strings.
        """
        results = super(WebsiteCrmBackend, self).fetch_dashboard_data(date_from, date_to)
        results['groups']['sale_salesman'] = request.env['res.users'].has_group('sales_team.group_sale_salesman')
        # Non-salesmen get the base payload only.
        if not results['groups']['sale_salesman']:
            return results
        date_from = datetime.strptime(date_from, DEFAULT_SERVER_DATE_FORMAT)
        date_to = datetime.strptime(date_to, DEFAULT_SERVER_DATE_FORMAT)
        lead_domain = []
        # Restrict to leads whose UTM medium is 'website', when that
        # reference record exists.
        website_utm = request.env['ir.model.data'].xmlid_to_res_id('utm.utm_medium_website')
        if website_utm:
            lead_domain += [('medium_id', '=', website_utm)]
        lead_ids = request.env['crm.lead'].search(
            lead_domain + [
                ('create_date', '>=', date_from.strftime(DEFAULT_SERVER_DATE_FORMAT)),
                ('create_date', '<=', date_to.strftime(DEFAULT_SERVER_DATE_FORMAT))],
        )
        leads = [{
            'create_date': datetime.strptime(lead.create_date, DEFAULT_SERVER_DATETIME_FORMAT).strftime(DEFAULT_SERVER_DATE_FORMAT),
            'campaign_id': lead.campaign_id.name,
            'medium_id': lead.medium_id.name,
            'source_id': lead.source_id.name,
        } for lead in lead_ids]
        # Current period plus the immediately preceding period of the same
        # length, for the comparison graph.
        leads_graph = self._compute_lead_graph(date_from, date_to, lead_domain)
        previous_leads_graph = self._compute_lead_graph(date_from - timedelta(days=(date_to - date_from).days), date_from, lead_domain, previous=True)
        results['dashboards']['leads'] = {
            'graph': [
                {
                    'values': leads_graph,
                    'key': 'Leads',
                },
                {
                    'values': previous_leads_graph,
                    'key': 'Previous Leads',
                },
            ],
            'leads': leads,
            'lead_fields': {
                'create_date': 'Create Date',
                'campaign_id': 'Campaign',
                'medium_id': 'Medium',
                'source_id': 'Source',
            },
        }
        return results

    def _compute_lead_graph(self, date_from, date_to, lead_domain, previous=False):
        """Return one [{'0': date, '1': count}] point per day in the range.

        When previous=True the dates are shifted forward by the range length
        so the previous period overlays the current one on the same axis.
        """
        days_between = (date_to - date_from).days
        date_list = [(date_from + timedelta(days=x)) for x in range(0, days_between + 1)]
        daily_leads = request.env['crm.lead'].read_group(
            domain=lead_domain + [
                ('create_date', '>=', date_from.strftime(DEFAULT_SERVER_DATE_FORMAT)),
                ('create_date', '<=', date_to.strftime(DEFAULT_SERVER_DATE_FORMAT))],
            fields=['create_date'],
            groupby='create_date:day')
        daily_leads_dict = {l['create_date:day']: l['create_date_count'] for l in daily_leads}
        leads_graph = [{
            '0': d.strftime(DEFAULT_SERVER_DATE_FORMAT) if not previous else (d + timedelta(days=days_between)).strftime(DEFAULT_SERVER_DATE_FORMAT),
            # Respect read_group format in models.py
            '1': daily_leads_dict.get(babel.dates.format_date(d, format='dd MMM yyyy', locale=request.env.context.get('lang', 'en_US')), 0)
        } for d in date_list]
        return leads_graph
| agpl-3.0 |
Telefonica/vaultier-cli | vaultcli/client.py | 1 | 17295 | #! /usr/bin/env python
# -*- coding: utf-8 -*-
# vim:fenc=utf-8
#
# Copyright © 2017 Adrián López Tejedor <adrianlzt@gmail.com>
# Óscar García Amor <ogarcia@connectical.com>
#
# Distributed under terms of the GNU GPLv3 license.
from vaultcli.workspace import Workspace
from vaultcli.vault import Vault
from vaultcli.card import Card
from vaultcli.secret import Secret
from vaultcli.cypher import Cypher
from vaultcli.exceptions import ResourceUnavailable, Unauthorized, Forbidden
from urllib.parse import urljoin
from os.path import basename
from mimetypes import MimeTypes
from functools import lru_cache
import json
import requests
class Client(object):
"""Base class for Vaultier API access"""
def __init__(self, server, token, key=None, verify=True):
    """
    :param server: base URL of the Vaultier server
    :param token: API authentication token
    :param key: user private key, used to decrypt workspace keys
        (required only for methods that decrypt secrets)
    :param verify: TLS certificate verification flag (presumably passed
        through to requests -- confirm in fetch_json)
    """
    self.server = server
    self.token = token
    self.key = key
    self.verify = verify
def list_workspaces(self):
    """
    Returns all Workspaces for your user

    :return: a list of Python objects representing the workspaces
    :rtype: list of Workspace

    Each workspace has the following attributes:
        - id: workspace unique id
        - slug: workspace name slugged
        - name: workspace name
        - description: workspace description
        - workspaceKey: workspace key
    """
    workspaces = self.fetch_json('/api/workspaces')
    return [Workspace.from_json(workspace) for workspace in workspaces]
def list_vaults(self, workspace_id):
    """
    Returns all Vaults from a Workspace

    :param workspace_id: Workspace unique ID given by list_workspaces
    :return: a list of Python objects representing the vaults
    :rtype: list of Vault

    Each vault has the following attributes:
        - id: vault unique id
        - slug: vault name slugged
        - name: vault name
        - description: vault description
        - color: vault color
        - workspace: workspace that contains this vault
    """
    vaults = self.fetch_json('/api/vaults/?workspace={}'.format(workspace_id))
    return [Vault.from_json(vault) for vault in vaults]
def list_cards(self, vault_id):
    """
    Returns all Cards from a Vault

    :param vault_id: Vault unique ID given by list_vaults
    :return: a list of Python objects representing the cards
    :rtype: list of Card

    Each card has the following attributes:
        - id: card unique id
        - slug: card name slugged
        - name: card name
        - description: card description
        - vault: vault that contains this card
    """
    json_obj = self.fetch_json('/api/cards/?vault={}'.format(vault_id))
    return [Card.from_json(obj) for obj in json_obj]
def list_secrets(self, card_id):
    """
    Returns all Secrets from a Card

    :param card_id: Card unique ID given by list_cards
    :return: a list of Python objects representing the secrets
    :rtype: list of Secret

    Each secret has the following attributes:
        - id: secret unique id
        - type: secret type (100: Note, 200: Password, 300: File)
        - name: secret name
        - data: secret data (still encrypted; see get_secret)
        - blobMeta: secret meta (only in type 300/file)
        - card: card that contains this secret
    """
    json_obj = self.fetch_json('/api/secrets/?card={}'.format(card_id))
    return [Secret.from_json(obj) for obj in json_obj]
def get_workspace(self, workspace_id):
    """Fetch a single Workspace by its ID.

    :param workspace_id: Workspace unique ID given by list_workspaces
    :return: Workspace object with id, slug, name, description and
        workspaceKey attributes
    :rtype: Workspace
    """
    endpoint = '/api/workspaces/{}/'.format(workspace_id)
    return Workspace.from_json(self.fetch_json(endpoint))
def get_secret(self, secret_id):
    """Return a Secret, decrypted, for the given secret ID.

    :param secret_id: Secret unique ID given by list_secrets
    :return: a secret object with ``data`` and ``blobMeta`` decrypted
    :rtype: Secret
    """
    secret = Secret.from_json(self.fetch_json('/api/secrets/{}'.format(secret_id)))
    # Walk secret -> card -> vault -> workspace to obtain the workspace
    # key needed for decryption.
    vault_id = self.fetch_json('/api/cards/{}'.format(secret.card))['vault']
    workspace_id = self.fetch_json('/api/vaults/{}'.format(vault_id))['workspace']
    workspace_key = self.fetch_json('/api/workspaces/{}'.format(workspace_id))['membership']['workspace_key']
    # Delegate to decrypt_secret instead of duplicating its decryption
    # logic inline (the original body was a copy of that method).
    return self.decrypt_secret(secret, workspace_key)
def get_file(self, secret_id):
    """Return the decrypted file stored as a secret's blob.

    :param secret_id: Secret unique ID given by list_secrets
    :return: [file_name, file_data] with file_data as bytes, or
        [None, None] when the secret has no blob metadata
    :rtype: list
    """
    secret = Secret.from_json(self.fetch_json('/api/secrets/{}'.format(secret_id)))
    if not secret.blobMeta:
        # No file attached to this secret.
        return [None, None]
    # Walk card -> vault -> workspace to obtain the decryption key.
    vault_id = self.fetch_json('/api/cards/{}'.format(secret.card))['vault']
    workspace_id = self.fetch_json('/api/vaults/{}'.format(vault_id))['workspace']
    workspace_key = self.fetch_json('/api/workspaces/{}'.format(workspace_id))['membership']['workspace_key']
    blob = self.fetch_json('/api/secret_blobs/{}'.format(secret_id))['blob_data']
    name = json.loads(Cypher(self.key).decrypt(workspace_key, secret.blobMeta))['filename']
    payload = bytes(json.loads(Cypher(self.key).decrypt(workspace_key, blob))['filedata'], "iso-8859-1")
    return [name, payload]
def decrypt_secret(self, secret, workspace_key):
    """Decrypt a Secret's payload in place and return it.

    :param secret: secret object with encrypted ``data``/``blobMeta``
    :param workspace_key: key string used to decrypt the data
    :return: the same secret object, decrypted
    :rtype: Secret
    """
    # Decrypt each populated encrypted attribute with the workspace key.
    for attr in ('data', 'blobMeta'):
        encrypted = getattr(secret, attr)
        if encrypted:
            setattr(secret, attr, json.loads(Cypher(self.key).decrypt(workspace_key, encrypted)))
    return secret
def set_workspace(self, workspace_id, workspace_data):
    """Update an existing workspace's contents (PUT replaces the resource).

    Fields missing from ``workspace_data`` ('name', 'description') are
    backfilled from the workspace's current server-side state so the PUT
    does not blank them out.

    :param workspace_id: workspace unique ID given by list_workspaces
    :param workspace_data: dict with the new contents (mutated in place)
    """
    current = self.fetch_json('/api/workspaces/{}'.format(workspace_id))
    for field in ('name', 'description'):
        # `is None` (not `== None`, PEP 8 E711) is the missing-value test.
        if workspace_data.get(field) is None:
            workspace_data[field] = current[field]
    self.fetch_json('/api/workspaces/{}/'.format(workspace_id), http_method='PUT', data=json.dumps(workspace_data))
def set_vault(self, vault_id, vault_data):
    """Update an existing vault's contents (PUT replaces the resource).

    Fields missing from ``vault_data`` ('name', 'description', 'color')
    are backfilled from the vault's current state; the vault's workspace
    is always preserved from the server side.

    :param vault_id: Vault unique ID given by list_vaults
    :param vault_data: dict with the new contents (mutated in place)
    """
    current = self.fetch_json('/api/vaults/{}'.format(vault_id))
    for field in ('name', 'description', 'color'):
        # `is None` (not `== None`, PEP 8 E711) is the missing-value test.
        if vault_data.get(field) is None:
            vault_data[field] = current[field]
    vault_data['workspace'] = current['workspace']
    self.fetch_json('/api/vaults/{}/'.format(vault_id), http_method='PUT', data=json.dumps(vault_data))
def set_card(self, card_id, card_data):
    """Update an existing card's contents (PUT replaces the resource).

    Fields missing from ``card_data`` ('name', 'description') are
    backfilled from the card's current state; the card's vault is always
    preserved from the server side.

    :param card_id: Card unique ID given by list_cards
    :param card_data: dict with the new contents (mutated in place)
    """
    current = self.fetch_json('/api/cards/{}'.format(card_id))
    for field in ('name', 'description'):
        # `is None` (not `== None`, PEP 8 E711) is the missing-value test.
        if card_data.get(field) is None:
            card_data[field] = current[field]
    card_data['vault'] = current['vault']
    self.fetch_json('/api/cards/{}/'.format(card_id), http_method='PUT', data=json.dumps(card_data))
def set_secret(self, secret, file=None):
    """Update an existing secret (PUT), optionally re-uploading its file.

    :param secret: secret object holding the plaintext data to store
    :param file: optional open file object uploaded as the secret's blob
    """
    # Walk card -> vault -> workspace to obtain the encryption key.
    vault_id = self.fetch_json('/api/cards/{}'.format(secret.card))['vault']
    workspace_id = self.fetch_json('/api/vaults/{}'.format(vault_id))['workspace']
    workspace_key = self.fetch_json('/api/workspaces/{}'.format(workspace_id))['membership']['workspace_key']
    ciphertext = Cypher(self.key).encrypt(workspace_key, json.dumps(secret.data))
    payload = {
        'name': secret.name,
        'type': secret.type,
        'card': secret.card,
        'id': secret.id,
        'data': ciphertext
    }
    self.fetch_json('/api/secrets/{}/'.format(secret.id), http_method='PUT', data=json.dumps(payload))
    if file:
        self.upload_file(secret.id, workspace_key, file)
def add_workspace(self, ws_name, ws_description=None):
    """Create a workspace, then install a freshly generated workspace key.

    :param ws_name: workspace name
    :param ws_description: workspace description (optional)
    :return: the server response from installing the workspace key
    """
    payload = {'name': ws_name}
    if ws_description:
        payload['description'] = ws_description
    # Step 1: create the workspace itself.
    created = self.fetch_json('/api/workspaces/', http_method='POST', data=json.dumps(payload))
    workspace_id = created['membership']['id']
    # Step 2: generate and attach a new key for the workspace.
    key_payload = {
        'id': workspace_id,
        'workspace_key': Cypher(self.key).gen_workspace_key()
    }
    return self.fetch_json('/api/workspace_keys/{}/'.format(workspace_id), http_method='PUT', data=json.dumps(key_payload))
def add_vault(self, ws_id, v_name, v_description=None, v_color=None):
    """Create a new vault inside a workspace.

    :param ws_id: workspace id
    :param v_name: vault name
    :param v_description: vault description (optional)
    :param v_color: vault color (optional)
    :return: the server response for the created vault
    """
    payload = {'workspace': ws_id, 'name': v_name}
    # Only send optional fields the caller actually provided.
    for key, value in (('description', v_description), ('color', v_color)):
        if value:
            payload[key] = value
    return self.fetch_json('/api/vaults/', http_method='POST', data=json.dumps(payload))
def add_card(self, v_id, c_name, c_description=None):
    """Create a new card inside a vault.

    :param v_id: vault id
    :param c_name: card name
    :param c_description: card description (optional)
    :return: the server response for the created card
    """
    payload = {'vault': v_id, 'name': c_name}
    if c_description:
        payload['description'] = c_description
    return self.fetch_json('/api/cards/', http_method='POST', data=json.dumps(payload))
def add_secret(self, card_id, secret_name, json_obj, type='password', file=None):
    """Create a new secret on a card, optionally uploading a file blob.

    :param card_id: card id
    :param secret_name: secret name
    :param json_obj: json-serializable object with the secret contents
    :param type: kind of secret ('note', 'password' or 'file')
    :param file: open file object to upload when type == 'file'
    :return: the created secret, or a dict with 'secret' and 'upload'
        results when a file was uploaded
    """
    type_codes = {'note': 100, 'password': 200, 'file': 300}
    # Walk card -> vault -> workspace to obtain the encryption key.
    vault_id = self.fetch_json('/api/cards/{}'.format(card_id))['vault']
    workspace_id = self.fetch_json('/api/vaults/{}'.format(vault_id))['workspace']
    workspace_key = self.fetch_json('/api/workspaces/{}'.format(workspace_id))['membership']['workspace_key']
    payload = {
        'card': card_id,
        'type': type_codes[type],
        'name': secret_name,
        'data': Cypher(self.key).encrypt(workspace_key, json.dumps(json_obj))
    }
    new_secret = self.fetch_json('/api/secrets/', http_method='POST', data=json.dumps(payload))
    if type == 'file' and file:
        upload_result = self.upload_file(new_secret['id'], workspace_key, file)
        return {"secret": new_secret, "upload": upload_result}
    return new_secret
def delete_secret(self, secret_id):
    """Delete a secret via the API.

    :param secret_id: Secret unique ID given by list_secrets
    """
    url = '/api/secrets/{}/'.format(secret_id)
    return self.fetch_json(url, http_method='DELETE')
def delete_card(self, card_id):
    """Delete a card via the API.

    :param card_id: card unique ID given by list_cards
    """
    url = '/api/cards/{}/'.format(card_id)
    return self.fetch_json(url, http_method='DELETE')
def delete_vault(self, vault_id):
    """Delete a vault via the API.

    :param vault_id: vault unique ID given by list_vaults
    """
    url = '/api/vaults/{}/'.format(vault_id)
    return self.fetch_json(url, http_method='DELETE')
def delete_workspace(self, workspace_id):
    """Delete a workspace via the API.

    :param workspace_id: workspace unique ID given by list_workspaces
    """
    url = '/api/workspaces/{}/'.format(workspace_id)
    return self.fetch_json(url, http_method='DELETE')
def upload_file(self, secret_id, workspace_key, file):
    """Encrypt and upload a file as the blob of an existing secret.

    :param secret_id: Secret unique ID given by list_secrets
    :param workspace_key: workspace key used to encrypt data and metadata
    :param file: open file object; consumed and closed by this method
    """
    with file as f:
        # Decoding with iso-8859-1 maps every byte to exactly one code
        # point, so arbitrary binary data survives the JSON round trip
        # (get_file re-encodes with the same codec on download).
        filedata = {'filedata': str(f.read(), "iso-8859-1")}
        # f.tell() immediately after the full read() equals the file size.
        filemeta = {'filename': basename(f.name), 'filesize': f.tell()}
        filemeta['filetype'] = MimeTypes().guess_type(f.name)[0] if MimeTypes().guess_type(f.name)[0] else ''
        encrypted_filedata = Cypher(self.key).encrypt(workspace_key, json.dumps(filedata))
        encrypted_filemeta = Cypher(self.key).encrypt(workspace_key, json.dumps(filemeta))
        # blob_data is sent as a file part, blob_meta as a plain form field.
        files = {'blob_data': ('blob', encrypted_filedata, 'application/octet-stream'), 'blob_meta': (None, encrypted_filemeta)}
        return self.fetch_json('/api/secret_blobs/{}/'.format(secret_id), http_method='PUT', headers={}, files=files)
def fetch_json(self, uri_path, http_method='GET', headers=None, params=None, data=None, files=None):
    """Dispatch an API request to the cached or uncached fetcher.

    Only a plain GET (no headers/params/data/files) is routed through the
    lru_cache-backed fetcher: cache keys must be hashable, and non-GET
    verbs have side effects that must never be skipped (e.g. DELETE).

    The mutable default dicts were replaced with None: the old shared
    ``headers={}`` default was mutated downstream (the auth token was
    written into it), which permanently disabled the ``headers == {}``
    cache check after the first uncached call.
    """
    if http_method == 'GET' and not headers and not params and data is None and files is None:
        return self.fetch_json_cached(uri_path)
    return self.fetch_json_uncached(uri_path, http_method, headers or {}, params or {}, data, files)
@lru_cache(maxsize=150)
def fetch_json_cached(self, uri_path):#, http_method='GET', headers={}, params={}, data=None, files=None):
    """
    Memoized GET: the last 150 distinct uri_paths return the stored
    response instead of hitting the API. This speeds up the vaultier
    FUSE frontend considerably.
    NOTE(review): lru_cache on an instance method keys on `self` and
    keeps every client instance alive for the cache's lifetime
    (flake8-bugbear B019); stale responses are also served until
    eviction -- confirm both are acceptable here.
    """
    return self.fetch_json_uncached(uri_path)#, http_method, headers, params, data, files)
def fetch_json_uncached(self, uri_path, http_method='GET', headers=None, params=None, data=None, files=None):
    """Perform an API request and return the parsed JSON response.

    :param uri_path: path joined onto ``self.server``
    :param http_method: HTTP verb ('GET', 'POST', 'PUT', 'DELETE')
    :param headers: extra request headers (never mutated)
    :param params: query-string parameters
    :param data: request body (already serialized)
    :param files: multipart file payload
    :raises Unauthorized: on HTTP 401
    :raises Forbidden: on HTTP 403
    :raises ResourceUnavailable: on any other non-2xx status
    :return: decoded JSON body, or {} for a 204 No Content response
    """
    # Copy before mutating: the original wrote the auth token straight
    # into the caller's dict -- and into the shared mutable-default
    # ``headers={}`` (flake8-bugbear B006), leaking state between calls.
    headers = dict(headers) if headers else {}
    params = params or {}
    if self.verify is False:
        # Self-signed certs: silence urllib3's per-request warning.
        requests.packages.urllib3.disable_warnings(requests.packages.urllib3.exceptions.InsecureRequestWarning)
    headers['X-Vaultier-Token'] = self.token
    if http_method in ('POST', 'PUT', 'DELETE') and not files:
        # Bodies are JSON unless requests builds a multipart payload.
        headers['Content-Type'] = 'application/json; charset=utf-8'
    # Construct the full URL.
    url = urljoin(self.server, uri_path)
    # Perform the HTTP request.
    try:
        response = requests.request(http_method, url, params=params, headers=headers, data=data, files=files, verify=self.verify)
    except requests.exceptions.SSLError as e:
        raise SystemExit(e)
    if response.status_code == 401:
        raise Unauthorized('{0} at {1}'.format(response.text, url), response)
    if response.status_code == 403:
        raise Forbidden('{0} at {1}'.format(response.text, url), response)
    if response.status_code not in {200, 201, 204, 206}:
        raise ResourceUnavailable('{0} at {1}'.format(response.text, url), response)
    if response.status_code == 204:
        # No Content: normalize to an empty dict for callers.
        return {}
    return response.json()
| gpl-3.0 |
MalloyPower/parsing-python | front-end/testsuite-python-lib/Python-2.4/Lib/idlelib/IdleHistory.py | 11 | 3041 | class History:
def __init__(self, text, output_sep="\n"):
    """Attach interactive-history navigation to a Text widget.

    Binds the <<history-previous>> / <<history-next>> virtual events so
    the shell can cycle through previously entered source.
    """
    self.text = text
    self.output_sep = output_sep
    # No entries yet; pointer/prefix track the in-progress navigation.
    self.history = []
    self.history_prefix = None
    self.history_pointer = None
    text.bind("<<history-previous>>", self.history_prev)
    text.bind("<<history-next>>", self.history_next)
def history_next(self, event):
    """Event handler: step forward (toward newer entries)."""
    self.history_do(reverse=0)
    return "break"
def history_prev(self, event):
    """Event handler: step backward (toward older entries)."""
    self.history_do(reverse=1)
    return "break"
def _get_source(self, start, end):
    """Return the source between two indices, rejoining display lines
    (separated by sys.ps2 / output_sep) with plain newlines."""
    raw = self.text.get(start, end)
    return "\n".join(raw.split(self.output_sep))
def _put_source(self, where, source):
    """Insert source at an index, re-separating its lines with output_sep."""
    self.text.insert(where, self.output_sep.join(source.split("\n")))
def history_do(self, reverse):
    """Step through stored history entries.

    reverse=1 walks backward (older entries), reverse=0 forward (newer).
    Only entries that start with the text already typed after "iomark"
    (the prefix) are shown; history_pointer / history_prefix persist the
    position so repeated presses continue the same walk.
    """
    nhist = len(self.history)
    pointer = self.history_pointer
    prefix = self.history_prefix
    if pointer is not None and prefix is not None:
        # Invalidate the saved walk if the buffer was edited since the
        # last step (cursor moved, or text no longer matches the item).
        if self.text.compare("insert", "!=", "end-1c") or \
           self._get_source("iomark", "end-1c") != self.history[pointer]:
            pointer = prefix = None
    if pointer is None or prefix is None:
        # Start a fresh walk seeded with whatever is typed so far.
        prefix = self._get_source("iomark", "end-1c")
        if reverse:
            pointer = nhist  # will be decremented to the newest entry
        else:
            pointer = -1  # will be incremented to the oldest entry
    nprefix = len(prefix)
    while 1:
        if reverse:
            pointer = pointer - 1
        else:
            pointer = pointer + 1
        if pointer < 0 or pointer >= nhist:
            # Ran off either end: beep, restore the typed prefix, and
            # forget the saved position.
            self.text.bell()
            if self._get_source("iomark", "end-1c") != prefix:
                self.text.delete("iomark", "end-1c")
                self._put_source("iomark", prefix)
            pointer = prefix = None
            break
        item = self.history[pointer]
        # Show the entry only if it extends the prefix (strictly longer).
        if item[:nprefix] == prefix and len(item) > nprefix:
            self.text.delete("iomark", "end-1c")
            self._put_source("iomark", item)
            break
    # Move the cursor to the end and persist the walk state.
    self.text.mark_set("insert", "end-1c")
    self.text.see("insert")
    self.text.tag_remove("sel", "1.0", "end")
    self.history_pointer = pointer
    self.history_prefix = prefix
def history_store(self, source):
    """Append a source entry, dropping duplicates and trivial inputs."""
    entry = source.strip()
    if len(entry) > 2:
        # Keep only the newest copy of a repeated entry.
        if entry in self.history:
            self.history.remove(entry)
        self.history.append(entry)
    # Any stored navigation state is now stale.
    self.history_pointer = None
    self.history_prefix = None
def recall(self, s):
    """Replace the current input region with s and move the cursor there."""
    widget = self.text
    widget.tag_remove("sel", "1.0", "end")
    widget.delete("iomark", "end-1c")
    widget.mark_set("insert", "end-1c")
    widget.insert("insert", s.strip())
    widget.see("insert")
| mit |
ryandougherty/mwa-capstone | MWA_Tools/build/matplotlib/lib/mpl_examples/api/colorbar_only.py | 3 | 1941 | '''
Make a colorbar as a separate figure.
'''
from matplotlib import pyplot, mpl  # NOTE(review): `matplotlib.mpl` is a legacy alias removed in modern matplotlib; confirm the targeted matplotlib version
# Make a figure and axes with dimensions as desired.
fig = pyplot.figure(figsize=(8,3))
ax1 = fig.add_axes([0.05, 0.65, 0.9, 0.15])
ax2 = fig.add_axes([0.05, 0.25, 0.9, 0.15])
# Set the colormap and norm to correspond to the data for which
# the colorbar will be used.
cmap = mpl.cm.cool
norm = mpl.colors.Normalize(vmin=5, vmax=10)
# ColorbarBase derives from ScalarMappable and puts a colorbar
# in a specified axes, so it has everything needed for a
# standalone colorbar. There are many more kwargs, but the
# following gives a basic continuous colorbar with ticks
# and labels.
cb1 = mpl.colorbar.ColorbarBase(ax1, cmap=cmap,
norm=norm,
orientation='horizontal')
cb1.set_label('Some Units')
# The second example illustrates the use of a ListedColormap, a
# BoundaryNorm, and extended ends to show the "over" and "under"
# value colors.
cmap = mpl.colors.ListedColormap(['r', 'g', 'b', 'c'])
# Values above/below the norm range are drawn in these gray shades.
cmap.set_over('0.25')
cmap.set_under('0.75')
# If a ListedColormap is used, the length of the bounds array must be
# one greater than the length of the color list. The bounds must be
# monotonically increasing.
bounds = [1, 2, 4, 7, 8]
norm = mpl.colors.BoundaryNorm(bounds, cmap.N)
cb2 = mpl.colorbar.ColorbarBase(ax2, cmap=cmap,
norm=norm,
# to use 'extend', you must
# specify two extra boundaries:
boundaries=[0]+bounds+[13],
extend='both',
ticks=bounds, # optional
spacing='proportional',
orientation='horizontal')
cb2.set_label('Discrete intervals, some other units')
pyplot.show()
| gpl-2.0 |
cwisecarver/osf.io | osf/models/user.py | 1 | 54135 | import datetime as dt
import logging
import re
import urllib
import urlparse
import uuid
from copy import deepcopy
from flask import Request as FlaskRequest
from framework import analytics
# OSF imports
import framework.mongo
import itsdangerous
import pytz
from dirtyfields import DirtyFieldsMixin
from django.conf import settings
from django.contrib.auth.base_user import AbstractBaseUser, BaseUserManager
from django.contrib.auth.hashers import check_password
from django.contrib.auth.models import PermissionsMixin
from django.dispatch import receiver
from django.db.models.signals import post_save
from django.db import models
from django.utils import timezone
from django_extensions.db.models import TimeStampedModel
from framework.auth import Auth, signals
from framework.auth.core import generate_verification_key
from framework.auth.exceptions import (ChangePasswordError, ExpiredTokenError,
InvalidTokenError,
MergeConfirmedRequiredError,
MergeConflictError)
from framework.exceptions import PermissionsError
from framework.sessions.utils import remove_sessions_for_user
from framework.mongo import get_cache_key
from modularodm.exceptions import NoResultsFound
from osf.exceptions import reraise_django_validation_errors
from osf.models.base import BaseModel, GuidMixin, GuidMixinQuerySet
from osf.models.contributor import RecentlyAddedContributor
from osf.models.institution import Institution
from osf.models.mixins import AddonModelMixin
from osf.models.session import Session
from osf.models.tag import Tag
from osf.models.validators import validate_email, validate_social, validate_history_item
from osf.modm_compat import Q
from osf.utils.datetime_aware_jsonfield import DateTimeAwareJSONField
from osf.utils.fields import NonNaiveDateTimeField, LowercaseEmailField
from osf.utils.names import impute_names
from website import settings as website_settings
from website import filters, mails
from website.project import new_bookmark_collection
logger = logging.getLogger(__name__)
def get_default_mailing_lists():
    """Return the default mailing-list subscriptions for a new user."""
    defaults = {'Open Science Framework Help': True}
    return defaults
# Strategies for rendering a user's name. Each value is a callable taking
# an OSFUser-like object with fullname / family_name / given_name_initial.
name_formatters = {
    'long': lambda user: user.fullname,
    # Fall back to the full name when no family name is recorded.
    'surname': lambda user: user.family_name if user.family_name else user.fullname,
    'initials': lambda user: u'{surname}, {initial}.'.format(
        surname=user.family_name,
        initial=user.given_name_initial,
    ),
}
class OSFUserManager(BaseUserManager):
    """Manager for OSFUser: guid-aware querysets plus user creation."""

    _queryset_class = GuidMixinQuerySet

    def create_user(self, username, password=None):
        """Create and persist a regular, active user."""
        if not username:
            raise ValueError('Users must have a username')
        new_user = self.model(
            username=self.normalize_email(username),
            is_active=True,
            date_registered=timezone.now()
        )
        new_user.set_password(password)
        new_user.save(using=self._db)
        return new_user

    def all(self):
        """Return all users with guid annotations applied to the queryset."""
        queryset = super(OSFUserManager, self).all()
        queryset.annotate_query_with_guids()
        return queryset

    def eager(self, *fields):
        """select_related the FK fields and prefetch_related the M2M fields
        named in *fields*."""
        fk = set(self.model.get_fk_field_names()) & set(fields)
        m2m = set(self.model.get_m2m_field_names()) & set(fields)
        return self.select_related(*fk).prefetch_related(*m2m)

    def create_superuser(self, username, password):
        """Create an active staff superuser."""
        admin = self.create_user(username, password=password)
        admin.is_superuser = True
        admin.is_staff = True
        admin.is_active = True
        admin.save(using=self._db)
        return admin
class Email(BaseModel, TimeStampedModel):
    """An email address owned by an OSF user."""
    # Globally unique: an address can belong to at most one OSFUser.
    address = LowercaseEmailField(unique=True, db_index=True, validators=[validate_email])
    # Rows are deleted when the owning user is deleted (CASCADE).
    user = models.ForeignKey('OSFUser', related_name='emails', on_delete=models.CASCADE)

    def __unicode__(self):
        return self.address
class OSFUser(DirtyFieldsMixin, GuidMixin, BaseModel, AbstractBaseUser, PermissionsMixin, AddonModelMixin):
FIELD_ALIASES = {
'_id': 'guids___id',
'system_tags': 'tags',
}
settings_type = 'user' # Needed for addons
USERNAME_FIELD = 'username'
# Node fields that trigger an update to the search engine on save
SEARCH_UPDATE_FIELDS = {
'fullname',
'given_name',
'middle_names',
'family_name',
'suffix',
'merged_by',
'date_disabled',
'date_confirmed',
'jobs',
'schools',
'social',
}
TRACK_FIELDS = SEARCH_UPDATE_FIELDS.copy()
TRACK_FIELDS.update({'password', 'last_login'})
# TODO: Add SEARCH_UPDATE_NODE_FIELDS, for fields that should trigger a
# search update for all nodes to which the user is a contributor.
SOCIAL_FIELDS = {
'orcid': u'http://orcid.org/{}',
'github': u'http://github.com/{}',
'scholar': u'http://scholar.google.com/citations?user={}',
'twitter': u'http://twitter.com/{}',
'profileWebsites': [],
'linkedIn': u'https://www.linkedin.com/{}',
'impactStory': u'https://impactstory.org/u/{}',
'researcherId': u'http://researcherid.com/rid/{}',
'researchGate': u'https://researchgate.net/profile/{}',
'academiaInstitution': u'https://{}',
'academiaProfileID': u'.academia.edu/{}',
'baiduScholar': u'http://xueshu.baidu.com/scholarID/{}'
}
# The primary email address for the account.
# This value is unique, but multiple "None" records exist for:
# * unregistered contributors where an email address was not provided.
# TODO: Update mailchimp subscription on username change in user.save()
# TODO: Consider making this a FK to Email with to_field='address'
# Django supports this (https://docs.djangoproject.com/en/1.11/topics/auth/customizing/#django.contrib.auth.models.CustomUser.USERNAME_FIELD)
# but some third-party apps may not.
username = models.CharField(max_length=255, db_index=True, unique=True)
# Hashed. Use `User.set_password` and `User.check_password`
# password = models.CharField(max_length=255)
fullname = models.CharField(max_length=255)
# user has taken action to register the account
is_registered = models.BooleanField(db_index=True, default=False)
# user has claimed the account
# TODO: This should be retired - it always reflects is_registered.
# While a few entries exist where this is not the case, they appear to be
# the result of a bug, as they were all created over a small time span.
is_claimed = models.BooleanField(default=False, db_index=True)
# a list of strings - for internal use
tags = models.ManyToManyField('Tag', blank=True)
# security emails that have been sent
# TODO: This should be removed and/or merged with system_tags
security_messages = DateTimeAwareJSONField(default=dict, blank=True)
# Format: {
# <message label>: <datetime>
# ...
# }
# user was invited (as opposed to registered unprompted)
is_invited = models.BooleanField(default=False, db_index=True)
# Per-project unclaimed user data:
# TODO: add validation
unclaimed_records = DateTimeAwareJSONField(default=dict, blank=True)
# Format: {
# <project_id>: {
# 'name': <name that referrer provided>,
# 'referrer_id': <user ID of referrer>,
# 'token': <token used for verification urls>,
# 'email': <email the referrer provided or None>,
# 'claimer_email': <email the claimer entered or None>,
# 'last_sent': <timestamp of last email sent to referrer or None>
# }
# ...
# }
# Time of last sent notification email to newly added contributors
# Format : {
# <project_id>: {
# 'last_sent': time.time()
# }
# ...
# }
contributor_added_email_records = DateTimeAwareJSONField(default=dict, blank=True)
# The user into which this account was merged
merged_by = models.ForeignKey('self', null=True, blank=True, related_name='merger')
# verification key v1: only the token string, no expiration time
# used for cas login with username and verification key
verification_key = models.CharField(max_length=255, null=True, blank=True)
# verification key v2: token, and expiration time
# used for password reset, confirm account/email, claim account/contributor-ship
verification_key_v2 = DateTimeAwareJSONField(default=dict, blank=True, null=True)
# Format: {
# 'token': <verification token>
# 'expires': <verification expiration time>
# }
email_last_sent = NonNaiveDateTimeField(null=True, blank=True)
# email verification tokens
# see also ``unconfirmed_emails``
email_verifications = DateTimeAwareJSONField(default=dict, blank=True)
# Format: {
# <token> : {'email': <email address>,
# 'expiration': <datetime>}
# }
# email lists to which the user has chosen a subscription setting
mailchimp_mailing_lists = DateTimeAwareJSONField(default=dict, blank=True)
# Format: {
# 'list1': True,
# 'list2: False,
# ...
# }
# email lists to which the user has chosen a subscription setting,
# being sent from osf, rather than mailchimp
osf_mailing_lists = DateTimeAwareJSONField(default=get_default_mailing_lists, blank=True)
# Format: {
# 'list1': True,
# 'list2: False,
# ...
# }
# the date this user was registered
date_registered = NonNaiveDateTimeField(db_index=True, auto_now_add=True)
# list of collaborators that this user recently added to nodes as a contributor
# recently_added = fields.ForeignField("user", list=True)
recently_added = models.ManyToManyField('self',
through=RecentlyAddedContributor,
through_fields=('user', 'contributor'),
symmetrical=False)
# Attached external accounts (OAuth)
# external_accounts = fields.ForeignField("externalaccount", list=True)
external_accounts = models.ManyToManyField('ExternalAccount', blank=True)
# CSL names
given_name = models.CharField(max_length=255, blank=True)
middle_names = models.CharField(max_length=255, blank=True)
family_name = models.CharField(max_length=255, blank=True)
suffix = models.CharField(max_length=255, blank=True)
# identity for user logged in through external idp
external_identity = DateTimeAwareJSONField(default=dict, blank=True)
# Format: {
# <external_id_provider>: {
# <external_id>: <status from ('VERIFIED, 'CREATE', 'LINK')>,
# ...
# },
# ...
# }
# Employment history
jobs = DateTimeAwareJSONField(default=list, blank=True, validators=[validate_history_item])
# Format: list of {
# 'title': <position or job title>,
# 'institution': <institution or organization>,
# 'department': <department>,
# 'location': <location>,
# 'startMonth': <start month>,
# 'startYear': <start year>,
# 'endMonth': <end month>,
# 'endYear': <end year>,
# 'ongoing: <boolean>
# }
# Educational history
schools = DateTimeAwareJSONField(default=list, blank=True, validators=[validate_history_item])
# Format: list of {
# 'degree': <position or job title>,
# 'institution': <institution or organization>,
# 'department': <department>,
# 'location': <location>,
# 'startMonth': <start month>,
# 'startYear': <start year>,
# 'endMonth': <end month>,
# 'endYear': <end year>,
# 'ongoing: <boolean>
# }
# Social links
social = DateTimeAwareJSONField(default=dict, blank=True, validators=[validate_social])
# Format: {
# 'profileWebsites': <list of profile websites>
# 'twitter': <twitter id>,
# }
# date the user last sent a request
date_last_login = NonNaiveDateTimeField(null=True, blank=True)
# date the user first successfully confirmed an email address
date_confirmed = NonNaiveDateTimeField(db_index=True, null=True, blank=True)
# When the user was disabled.
date_disabled = NonNaiveDateTimeField(db_index=True, null=True, blank=True)
# when comments were last viewed
comments_viewed_timestamp = DateTimeAwareJSONField(default=dict, blank=True)
# Format: {
# 'Comment.root_target._id': 'timestamp',
# ...
# }
# timezone for user's locale (e.g. 'America/New_York')
timezone = models.CharField(blank=True, default='Etc/UTC', max_length=255)
# user language and locale data (e.g. 'en_US')
locale = models.CharField(blank=True, max_length=255, default='en_US')
# whether the user has requested to deactivate their account
requested_deactivation = models.BooleanField(default=False)
affiliated_institutions = models.ManyToManyField('Institution', blank=True)
notifications_configured = DateTimeAwareJSONField(default=dict, blank=True)
objects = OSFUserManager()
is_active = models.BooleanField(default=False)
is_staff = models.BooleanField(default=False)
def __repr__(self):
return '<OSFUser({0!r}) with guid {1!r}>'.format(self.username, self._id)
@property
def deep_url(self):
"""Used for GUID resolution."""
return '/profile/{}/'.format(self._primary_key)
@property
def url(self):
return '/{}/'.format(self._id)
@property
def absolute_url(self):
return urlparse.urljoin(website_settings.DOMAIN, self.url)
@property
def absolute_api_v2_url(self):
from website import util
return util.api_v2_url('users/{}/'.format(self._id))
@property
def api_url(self):
return '/api/v1/profile/{}/'.format(self._id)
@property
def profile_url(self):
return '/{}/'.format(self._id)
@property
def is_disabled(self):
return self.date_disabled is not None
@is_disabled.setter
def is_disabled(self, val):
"""Set whether or not this account has been disabled."""
if val and not self.date_disabled:
self.date_disabled = timezone.now()
elif val is False:
self.date_disabled = None
@property
def is_confirmed(self):
return bool(self.date_confirmed)
@property
def is_merged(self):
"""Whether or not this account has been merged into another account.
"""
return self.merged_by is not None
@property
def unconfirmed_emails(self):
# Handle when email_verifications field is None
email_verifications = self.email_verifications or {}
return [
each['email']
for each
in email_verifications.values()
]
@property
def social_links(self):
social_user_fields = {}
for key, val in self.social.items():
if val and key in self.SOCIAL_FIELDS:
if not isinstance(val, basestring):
social_user_fields[key] = val
else:
social_user_fields[key] = self.SOCIAL_FIELDS[key].format(val)
return social_user_fields
@property
def given_name_initial(self):
"""
The user's preferred initialization of their given name.
Some users with common names may choose to distinguish themselves from
their colleagues in this way. For instance, there could be two
well-known researchers in a single field named "Robert Walker".
"Walker, R" could then refer to either of them. "Walker, R.H." could
provide easy disambiguation.
NOTE: The internal representation for this should never end with a
period. "R" and "R.H" would be correct in the prior case, but
"R.H." would not.
"""
return self.given_name[0]
@property
def email(self):
if self.has_usable_username():
return self.username
else:
return None
@property
def all_tags(self):
"""Return a queryset containing all of this user's tags (incl. system tags)."""
# Tag's default manager only returns non-system tags, so we can't use self.tags
return Tag.all_tags.filter(osfuser=self)
@property
def system_tags(self):
"""The system tags associated with this node. This currently returns a list of string
names for the tags, for compatibility with v1. Eventually, we can just return the
QuerySet.
"""
return self.all_tags.filter(system=True).values_list('name', flat=True)
@property
def csl_given_name(self):
parts = [self.given_name]
if self.middle_names:
parts.extend(each[0] for each in re.split(r'\s+', self.middle_names))
return ' '.join(parts)
@property
def csl_name(self):
return {
'family': self.family_name,
'given': self.csl_given_name,
}
@property
def contributor_to(self):
return self.nodes.filter(is_deleted=False).exclude(type='osf.collection')
@property
def visible_contributor_to(self):
return self.nodes.filter(is_deleted=False, contributor__visible=True).exclude(type='osf.collection')
def set_unusable_username(self):
"""Sets username to an unusable value. Used for, e.g. for invited contributors
and merged users.
NOTE: This is necessary because Django does not allow the username column to be nullable.
"""
if self._id:
self.username = self._id
else:
self.username = str(uuid.uuid4())
return self.username
def has_usable_username(self):
return '@' in self.username
@property
def is_authenticated(self): # Needed for django compat
return True
@property
def is_anonymous(self):
return False
def get_addon_names(self):
return []
# django methods
def get_full_name(self):
return self.fullname
def get_short_name(self):
return self.username
def __unicode__(self):
return self.get_short_name()
def __str__(self):
return self.get_short_name()
@property
def contributed(self):
return self.nodes.all()
@property
def can_be_merged(self):
"""The ability of the `merge_user` method to fully merge the user"""
return all((addon.can_be_merged for addon in self.get_addons()))
def merge_user(self, user):
    """Merge the account ``user`` into this account.

    Tags, profile fields, unclaimed records, security messages, mailing-list
    subscriptions, emails, external identities/accounts, addon settings,
    institutions, and node contributorships are moved onto ``self``.  Where
    both accounts are contributors on the same node, ``self`` keeps the
    higher permission and is made visible if exactly one of the two was
    visible.  ``user`` is left disabled for login: unusable username and
    password, cleared verification key, and ``merged_by`` set to ``self``.

    :param user: A User object to be merged.
    :raises: MergeConflictError if ``user.can_be_merged`` is False.
    """
    # Fail if the other user has conflicts.
    if not user.can_be_merged:
        raise MergeConflictError('Users cannot be merged')
    # Move over the other user's attributes
    # TODO: confirm
    # system_tags yields tag *names* (values_list in the system_tags
    # property); add_system_tag accepts either a name or a Tag instance.
    for system_tag in user.system_tags.all():
        self.add_system_tag(system_tag)
    self.is_claimed = self.is_claimed or user.is_claimed
    self.is_invited = self.is_invited or user.is_invited
    # copy over profile only if this user has no profile info
    if user.jobs and not self.jobs:
        self.jobs = user.jobs
    if user.schools and not self.schools:
        self.schools = user.schools
    if user.social and not self.social:
        self.social = user.social
    # Merge dicts with self's entries winning on key collisions:
    # update() with self's data is applied on top of a copy of user's.
    unclaimed = user.unclaimed_records.copy()
    unclaimed.update(self.unclaimed_records)
    self.unclaimed_records = unclaimed
    # - unclaimed records should be connected to only one user
    user.unclaimed_records = {}
    security_messages = user.security_messages.copy()
    security_messages.update(self.security_messages)
    self.security_messages = security_messages
    notifications_configured = user.notifications_configured.copy()
    notifications_configured.update(self.notifications_configured)
    self.notifications_configured = notifications_configured
    if not website_settings.RUNNING_MIGRATION:
        for key, value in user.mailchimp_mailing_lists.iteritems():
            # subscribe to each list if either user was subscribed
            subscription = value or self.mailchimp_mailing_lists.get(key)
            signals.user_merged.send(self, list_name=key, subscription=subscription)
            # clear subscriptions for merged user
            signals.user_merged.send(user, list_name=key, subscription=False, send_goodbye=False)
    # Keep the most recent comments-viewed timestamp per target.
    for target_id, timestamp in user.comments_viewed_timestamp.iteritems():
        if not self.comments_viewed_timestamp.get(target_id):
            self.comments_viewed_timestamp[target_id] = timestamp
        elif timestamp > self.comments_viewed_timestamp[target_id]:
            self.comments_viewed_timestamp[target_id] = timestamp
    # Give old user's emails to self
    user.emails.update(user=self)
    # Carry over pending email verifications, except ones self already has
    # and the merged user's own primary address.
    for k, v in user.email_verifications.iteritems():
        email_to_confirm = v['email']
        if k not in self.email_verifications and email_to_confirm != user.username:
            self.email_verifications[k] = v
    user.email_verifications = {}
    self.affiliated_institutions.add(*user.affiliated_institutions.values_list('pk', flat=True))
    # Carry over external identities unless self has already VERIFIED that id.
    for service in user.external_identity:
        for service_id in user.external_identity[service].iterkeys():
            if not (
                service_id in self.external_identity.get(service, '') and
                self.external_identity[service][service_id] == 'VERIFIED'
            ):
                # Prevent 'CREATE', merging user has already been created.
                external = user.external_identity[service][service_id]
                status = 'VERIFIED' if external == 'VERIFIED' else 'LINK'
                if self.external_identity.get(service):
                    self.external_identity[service].update(
                        {service_id: status}
                    )
                else:
                    self.external_identity[service] = {
                        service_id: status
                    }
    user.external_identity = {}
    # FOREIGN FIELDS
    self.external_accounts.add(*user.external_accounts.values_list('pk', flat=True))
    # - addons
    # Note: This must occur before the merged user is removed as a
    # contributor on the nodes, as an event hook is otherwise fired
    # which removes the credentials.
    for addon in user.get_addons():
        user_settings = self.get_or_add_addon(addon.config.short_name)
        user_settings.merge(addon)
        user_settings.save()
    # - projects where the user was a contributor
    for node in user.contributed:
        # Skip bookmark collection node
        if node.is_bookmark_collection:
            continue
        # if both accounts are contributor of the same project
        if node.is_contributor(self) and node.is_contributor(user):
            user_permissions = node.get_permissions(user)
            self_permissions = node.get_permissions(self)
            # NOTE(review): max() on the two permission values relies on their
            # ordering (they come from node.get_permissions) — confirm.
            permissions = max([user_permissions, self_permissions])
            node.set_permissions(user=self, permissions=permissions)
            # If exactly one of the two was visible, make self visible.
            visible1 = self._id in node.visible_contributor_ids
            visible2 = user._id in node.visible_contributor_ids
            if visible1 != visible2:
                node.set_visible(user=self, visible=True, log=True, auth=Auth(user=self))
            node.contributor_set.filter(user=user).delete()
        else:
            # Only the merged user was a contributor: re-point the row at self.
            node.contributor_set.filter(user=user).update(user=self)
        node.save()
    # - projects where the user was the creator
    user.created.filter(is_bookmark_collection=False).update(creator=self)
    # - file that the user has checked_out, import done here to prevent import error
    from osf.models import BaseFileNode
    for file_node in BaseFileNode.files_checked_out(user=user):
        file_node.checkout = self
        file_node.save()
    # finalize the merge
    remove_sessions_for_user(user)
    # - username is set to the GUID so the merging user can set it primary
    # in the future (note: it cannot be set to None due to non-null constraint)
    user.set_unusable_username()
    user.set_unusable_password()
    user.verification_key = None
    user.osf_mailing_lists = {}
    user.merged_by = self
    user.save()
def disable_account(self):
    """
    Disables user account, making is_disabled true, while also unsubscribing user
    from mailchimp emails, remove any existing sessions.
    Ported from framework/auth/core.py
    """
    from website import mailchimp_utils
    from framework.auth import logout
    try:
        mailchimp_utils.unsubscribe_mailchimp(
            list_name=website_settings.MAILCHIMP_GENERAL_LIST,
            user_id=self._id,
            username=self.username
        )
    except mailchimp_utils.mailchimp.ListNotSubscribedError:
        # Never subscribed: nothing to undo.
        pass
    except mailchimp_utils.mailchimp.InvalidApiKeyError:
        # A bad API key is tolerated only when subscriptions are disabled
        # (e.g. local development); otherwise surface the error.
        if not website_settings.ENABLE_EMAIL_SUBSCRIPTIONS:
            pass
        else:
            raise
    except mailchimp_utils.mailchimp.EmailNotExistsError:
        pass
    # Call to `unsubscribe` above saves, and can lead to stale data
    self.reload()
    self.is_disabled = True
    # we must call both methods to ensure the current session is cleared and all existing
    # sessions are revoked.
    req = get_cache_key()
    if isinstance(req, FlaskRequest):
        logout()
    remove_sessions_for_user(self)
def update_is_active(self):
    """Recompute ``is_active`` from the fields it depends on."""
    # The user can log in if they have set a password OR have at least one
    # verified external ID, e.g. an ORCID.  The password check comes first
    # so the external-identity scan is skipped when it isn't needed.
    if self.has_usable_password():
        can_login = True
    else:
        can_login = 'VERIFIED' in sum(
            [each.values() for each in self.external_identity.values()], []
        )
    self.is_active = (
        self.is_registered and
        self.is_confirmed and
        can_login and
        not self.is_merged and
        not self.is_disabled
    )
# Overrides BaseModel
def save(self, *args, **kwargs):
    """Persist the user, refreshing derived state and the search index.

    Recomputes ``is_active``, normalizes the username to lowercase, and —
    when a search-relevant field changed on a confirmed user — pushes the
    update to the search index and to contributor listings.
    """
    self.update_is_active()
    self.username = self.username.lower().strip() if self.username else None
    # Capture dirty fields *before* the super().save() clears them.
    dirty_fields = set(self.get_dirty_fields(check_relationship=True))
    ret = super(OSFUser, self).save(*args, **kwargs)
    if self.SEARCH_UPDATE_FIELDS.intersection(dirty_fields) and self.is_confirmed:
        self.update_search()
        self.update_search_nodes_contributors()
    return ret
# Legacy methods
@classmethod
def create(cls, username, password, fullname):
    """Build (but do not save) a new user with a hashed password and
    guessed name parts.

    :raises: ValidationError if ``username`` is not an acceptable email.
    """
    validate_email(username)  # Raises ValidationError if spam address
    user = cls(
        username=username,
        fullname=fullname,
    )
    user.update_guessed_names()
    user.set_password(password)
    return user
def set_password(self, raw_password, notify=True):
    """Set the password for this user to the hash of ``raw_password``.
    If this is a new user, we're done. If this is a password change,
    then email the user about the change and clear all the old sessions
    so that users will have to log in again with the new password.

    :param raw_password: the plaintext value of the new password
    :param notify: Only meant for unit tests to keep extra notifications from being sent
    :raises: ChangePasswordError if the password equals the username.
    """
    # An "existing password" means a usable one on a confirmed account —
    # only then is this a change worth notifying about.
    had_existing_password = bool(self.has_usable_password() and self.is_confirmed)
    if self.username == raw_password:
        raise ChangePasswordError(['Password cannot be the same as your email address'])
    super(OSFUser, self).set_password(raw_password)
    if had_existing_password and notify:
        mails.send_mail(
            to_addr=self.username,
            mail=mails.PASSWORD_RESET,
            mimetype='plain',
            user=self
        )
        # Force re-login everywhere with the new password.
        remove_sessions_for_user(self)
@classmethod
def create_unconfirmed(cls, username, password, fullname, external_identity=None,
                       do_confirm=True, campaign=None):
    """Create a new user who has begun registration but needs to verify
    their primary email address (username).
    """
    # NOTE(review): do_confirm is accepted but never used in this body —
    # confirm whether callers rely on it.
    user = cls.create(username, password, fullname)
    user.add_unconfirmed_email(username, external_identity=external_identity)
    user.is_registered = False
    if external_identity:
        user.external_identity.update(external_identity)
    if campaign:
        # needed to prevent circular import
        from framework.auth.campaigns import system_tag_for_campaign  # skipci
        # User needs to be saved before adding system tags (due to m2m relationship)
        user.save()
        user.add_system_tag(system_tag_for_campaign(campaign))
    return user
@classmethod
def create_confirmed(cls, username, password, fullname):
    """Create, save and fully confirm a new user in one step."""
    user = cls.create(username, password, fullname)
    user.is_registered = True
    user.is_claimed = True
    user.save()  # Must save before using auto_now_add field
    # date_registered is populated by the save above (auto_now_add).
    user.date_confirmed = user.date_registered
    user.emails.create(address=username.lower().strip())
    return user
def get_unconfirmed_email_for_token(self, token):
    """Return the unconfirmed email address associated with ``token``.

    :rtype: str
    :raises: ExpiredTokenError if trying to access a token that is expired.
    :raises: InvalidTokenError if trying to access a token that is invalid.
    """
    if token not in self.email_verifications:
        raise InvalidTokenError
    verification = self.email_verifications[token]
    # Not all tokens are guaranteed to have expiration dates
    if (
        'expiration' in verification and
        verification['expiration'].replace(tzinfo=pytz.utc) < timezone.now()
    ):
        raise ExpiredTokenError
    return verification['email']
def get_unconfirmed_emails_exclude_external_identity(self):
    """Return a list of unconfirmed email addresses that are not tied to an
    external identity."""
    if not self.email_verifications:
        return []
    return [
        record.get('email')
        for record in self.email_verifications.itervalues()
        if not record.get('external_identity')
    ]
@property
def unconfirmed_email_info(self):
    """Return a list of dictionaries containing information about each of this
    user's unconfirmed emails.

    Only entries whose 'confirmed' flag is set are reported; each entry also
    notes whether confirming it would merge with an existing account.
    """
    unconfirmed_emails = []
    email_verifications = self.email_verifications or []
    for token in email_verifications:
        if self.email_verifications[token].get('confirmed', False):
            # Does another account already own this (confirmed) address?
            try:
                user_merge = OSFUser.find_one(
                    Q('emails__address', 'eq', self.email_verifications[token]['email'].lower())
                )
            except NoResultsFound:
                user_merge = False
            # NOTE(review): user_merge.email — this file only shows `username`
            # and an `emails` relation on OSFUser; confirm `.email` exists.
            unconfirmed_emails.append({'address': self.email_verifications[token]['email'],
                                       'token': token,
                                       'confirmed': self.email_verifications[token]['confirmed'],
                                       'user_merge': user_merge.email if user_merge else False})
    return unconfirmed_emails
def clean_email_verifications(self, given_token=None):
    """Drop invalid or expired verification tokens; also drop
    ``given_token`` itself if supplied (e.g. after it has been used)."""
    remaining = deepcopy(self.email_verifications or {})
    for token in self.email_verifications or {}:
        try:
            self.get_unconfirmed_email_for_token(token)
        except (KeyError, ExpiredTokenError):
            remaining.pop(token)
        else:
            if token == given_token:
                remaining.pop(token)
    self.email_verifications = remaining
def verify_password_token(self, token):
    """
    Verify that the password reset token for this user is valid.

    :param token: the token in verification key
    :return: ``True`` if valid, otherwise ``False``
    """
    if token and self.verification_key_v2:
        try:
            return (self.verification_key_v2['token'] == token and
                    self.verification_key_v2['expires'] > timezone.now())
        except (AttributeError, KeyError, TypeError):
            # A malformed verification_key_v2 (wrong type, missing
            # 'token'/'expires' keys, or a non-comparable 'expires')
            # counts as an invalid token rather than crashing; the
            # original code only caught AttributeError, so a dict
            # missing 'expires' raised an uncaught KeyError.
            return False
    return False
def verify_claim_token(self, token, project_id):
    """Return whether ``token`` matches the unclaimed record this user holds
    for ``project_id`` (the node they were added to as an unregistered
    contributor)."""
    try:
        record = self.get_unclaimed_record(project_id)
    except ValueError:
        # No unclaimed record exists for the given project id.
        return False
    else:
        return token == record['token']
@classmethod
def create_unregistered(cls, fullname, email=None):
    """Create a new unregistered user.

    The instance is not saved.  Without an email, the username and password
    are set to unusable placeholders (username cannot be NULL).
    """
    user = cls(
        username=email,
        fullname=fullname,
        is_invited=True,
        is_registered=False,
    )
    if not email:
        user.set_unusable_username()
    user.set_unusable_password()
    user.update_guessed_names()
    return user
def update_guessed_names(self):
    """Updates the CSL name fields inferred from the full name."""
    parsed = impute_names(self.fullname)
    self.given_name = parsed['given']
    self.middle_names = parsed['middle']
    self.family_name = parsed['family']
    self.suffix = parsed['suffix']
def add_unconfirmed_email(self, email, expiration=None, external_identity=None):
    """
    Add an email verification token for a given email.

    :param email: the email to confirm
    :param expiration: overwrite default expiration time
    :param external_identity: the user's external identity
    :return: a token
    :raises: ValueError if email already confirmed, except for login through external idp.
    """
    # Note: This is technically not compliant with RFC 822, which requires
    #       that case be preserved in the "local-part" of an address. From
    #       a practical standpoint, the vast majority of email servers do
    #       not preserve case.
    #       ref: https://tools.ietf.org/html/rfc822#section-6
    email = email.lower().strip()
    if not external_identity and self.emails.filter(address=email).exists():
        raise ValueError('Email already confirmed to this user.')
    with reraise_django_validation_errors():
        validate_email(email)
    # If the unconfirmed email is already present, refresh the token
    if email in self.unconfirmed_emails:
        self.remove_unconfirmed_email(email)
    verification_key = generate_verification_key(verification_type='confirm')
    # handle when email_verifications is None
    if not self.email_verifications:
        self.email_verifications = {}
    self.email_verifications[verification_key['token']] = {
        'email': email,
        'confirmed': False,
        'expiration': expiration if expiration else verification_key['expires'],
        'external_identity': external_identity,
    }
    return verification_key['token']
def remove_unconfirmed_email(self, email):
    """Remove the first verification token for an unconfirmed email address.

    :return: True if a matching token was found and removed, else False.
    """
    # Iterate over a snapshot: we delete from the dict inside the loop,
    # and deleting from a dict while iterating it directly is unsafe
    # (the original only avoided a RuntimeError because it returned
    # immediately after the first deletion).
    for token, value in list(self.email_verifications.items()):
        if value.get('email') == email:
            del self.email_verifications[token]
            return True
    return False
def remove_email(self, email):
    """Remove a confirmed email

    :raises: PermissionsError if ``email`` is the primary email (username).
    """
    if email == self.username:
        raise PermissionsError("Can't remove primary email")
    if self.emails.filter(address=email):
        self.emails.filter(address=email).delete()
        # Notify listeners (e.g. to email the user about the removal).
        signals.user_email_removed.send(self, email=email)
def get_confirmation_token(self, email, force=False, renew=False):
    """Return the confirmation token for a given email.

    :param str email: The email to get the token for.
    :param bool force: If an expired token exists for the given email, generate a new one and return it.
    :param bool renew: Generate a new token and return it.
    :return: The confirmation token.
    :raises: ExpiredTokenError if the token is expired and force=False.
    :raises: KeyError if there is no token for the email.
    """
    # TODO: Refactor "force" flag into User.get_or_add_confirmation_token
    for token, record in self.email_verifications.items():
        if record['email'].lower() != email.lower():
            continue
        if renew:
            renewed = self.add_unconfirmed_email(email)
            self.save()
            return renewed
        # Old records will not have an expiration key; if it's missing,
        # assume the token is expired.
        expiration = record.get('expiration')
        if not expiration or expiration < timezone.now():
            if not force:
                raise ExpiredTokenError('Token for email "{0}" is expired'.format(email))
            renewed = self.add_unconfirmed_email(email)
            self.save()
            return renewed
        return token
    raise KeyError('No confirmation token for email "{0}"'.format(email))
def get_confirmation_url(self, email,
                         external=True,
                         force=False,
                         renew=False,
                         external_id_provider=None,
                         destination=None):
    """Return the confirmation url for a given email.

    :param email: The email to confirm.
    :param external: Use absolute or relative url.
    :param force: If an expired token exists for the given email, generate a new one and return it.
    :param renew: Generate a new token and return it.
    :param external_id_provider: The external identity provider that authenticates the user.
    :param destination: The destination page to redirect after confirmation
    :return: Return the confirmation url.
    :raises: ExpiredTokenError if trying to access a token that is expired.
    :raises: KeyError if there is no token for the email.
    """
    base = website_settings.DOMAIN if external else '/'
    token = self.get_confirmation_token(email, force=force, renew=renew)
    # The 'external/' path segment routes external-identity confirmations.
    external = 'external/' if external_id_provider else ''
    # urllib.urlencode: Python 2 location (urllib.parse.urlencode in Py3).
    destination = '?{}'.format(urllib.urlencode({'destination': destination})) if destination else ''
    return '{0}confirm/{1}{2}/{3}/{4}'.format(base, external, self._primary_key, token, destination)
def register(self, username, password=None):
    """Registers the user.

    Sets ``username`` as primary email (creating the email record if
    needed), marks the account registered/claimed/confirmed, updates the
    search index, and emits the user_confirmed signal.

    :returns: self (for chaining); does not save.
    """
    self.username = username
    if password:
        self.set_password(password)
    if not self.emails.filter(address=username):
        self.emails.create(address=username)
    self.is_registered = True
    self.is_claimed = True
    self.date_confirmed = timezone.now()
    self.update_search()
    self.update_search_nodes()
    # Emit signal that a user has confirmed
    signals.user_confirmed.send(self)
    return self
def confirm_email(self, token, merge=False):
    """Confirm the email address associated with the token.

    :param token: a verification token from ``email_verifications``.
    :param merge: if True, silently merge any existing account that already
        confirmed this address; if False, such a conflict raises.
    :raises: MergeConfirmedRequiredError if the address belongs to another
        account and ``merge`` is False.
    :returns: True on success.
    """
    email = self.get_unconfirmed_email_for_token(token)
    # If this email is confirmed on another account, abort
    try:
        user_to_merge = OSFUser.find_one(Q('emails__address', 'eq', email))
    except NoResultsFound:
        user_to_merge = None
    if user_to_merge and merge:
        self.merge_user(user_to_merge)
    elif user_to_merge:
        raise MergeConfirmedRequiredError(
            'Merge requires confirmation',
            user=self,
            user_to_merge=user_to_merge,
        )
    # If another user has this email as its username, get it
    try:
        unregistered_user = OSFUser.find_one(Q('username', 'eq', email) &
                                             Q('_id', 'ne', self._id))
    except NoResultsFound:
        unregistered_user = None
    if unregistered_user:
        self.merge_user(unregistered_user)
        self.save()
        unregistered_user.username = None
    if not self.emails.filter(address=email).exists():
        self.emails.create(address=email)
    # Complete registration if primary email
    if email.lower() == self.username.lower():
        self.register(self.username)
        self.date_confirmed = timezone.now()
    # Revoke token
    del self.email_verifications[token]
    # TODO: We can't assume that all unclaimed records are now claimed.
    # Clear unclaimed records, so user's name shows up correctly on
    # all projects
    self.unclaimed_records = {}
    self.save()
    self.update_search_nodes()
    return True
def update_search(self):
    """Push this user's current data to the search index."""
    # Imported locally to avoid a circular import at module load time.
    from website.search.search import update_user
    update_user(self)
def update_search_nodes_contributors(self):
    """
    Bulk update contributor name on all nodes on which the user is
    a contributor.

    Runs asynchronously via the search module.
    """
    from website.search import search
    search.update_contributors_async(self.id)
def update_search_nodes(self):
    """Call `update_search` on all nodes on which the user is a
    contributor. Needed to add self to contributor lists in search upon
    registration or claiming.
    """
    for node in self.contributed:
        node.update_search()
def update_date_last_login(self):
    """Stamp the last-login time with 'now' (does not save)."""
    self.date_last_login = timezone.now()
def get_summary(self, formatter='long'):
    """Return a display summary dict for this user.

    :param formatter: key into ``name_formatters`` choosing how the
        display name is rendered.
    """
    return {
        'user_fullname': self.fullname,
        'user_profile_url': self.profile_url,
        'user_display_name': name_formatters[formatter](self),
        'user_is_claimed': self.is_claimed
    }
def check_password(self, raw_password):
    """
    Return a boolean of whether the raw_password was correct. Handles
    hashing formats behind the scenes.
    Source: https://github.com/django/django/blob/master/django/contrib/auth/base_user.py#L104
    """
    def setter(raw_password):
        # Called by Django when the stored hash needs upgrading: re-hash
        # with the current algorithm, without notifying the user.
        self.set_password(raw_password, notify=False)
        # Password hash upgrades shouldn't be considered password changes.
        self._password = None
        self.save(update_fields=['password'])
    return check_password(raw_password, self.password, setter)
def change_password(self, raw_old_password, raw_new_password, raw_confirm_password):
    """Validate and change this user's password to the hash of
    ``raw_new_password``.

    :raises: ChangePasswordError listing every validation problem found.
    """
    # TODO: Move validation to set_password
    old_pw = (raw_old_password or '').strip()
    new_pw = (raw_new_password or '').strip()
    confirm_pw = (raw_confirm_password or '').strip()
    problems = []
    if not self.check_password(old_pw):
        problems.append('Old password is invalid')
    elif old_pw == new_pw:
        problems.append('Password cannot be the same')
    elif new_pw == self.username:
        problems.append('Password cannot be the same as your email address')
    if not (old_pw and new_pw and confirm_pw):
        problems.append('Passwords cannot be blank')
    elif len(new_pw) < 8:
        problems.append('Password should be at least eight characters')
    elif len(new_pw) > 256:
        problems.append('Password should not be longer than 256 characters')
    if new_pw != confirm_pw:
        problems.append('Password does not match the confirmation')
    if problems:
        raise ChangePasswordError(problems)
    self.set_password(new_pw)
def profile_image_url(self, size=None):
    """A generalized method for getting a user's profile picture urls.
    We may choose to use some service other than gravatar in the future,
    and should not commit ourselves to using a specific service (mostly
    an API concern).
    As long as we use gravatar, this is just a proxy to User.gravatar_url
    """
    return self._gravatar_url(size)
def _gravatar_url(self, size):
    """Build a gravatar URL for this user via the shared filters helper."""
    return filters.gravatar(
        self,
        use_ssl=True,
        size=size
    )
@property
def display_absolute_url(self):
    """Scheme-less display form of the user's absolute URL, with leading
    and trailing slashes stripped (None if there is no absolute URL)."""
    url = self.absolute_url
    if url is None:
        return None
    return re.sub(r'https?:', '', url).strip('/')
def display_full_name(self, node=None):
    """Return the full name as it would display in a contributor list for
    the given node.

    NOTE: Unclaimed users may have been given a different name on each
    node, so a node-specific unclaimed record wins over ``fullname``.
    """
    if node:
        record = self.unclaimed_records.get(str(node._id), None)
        if record:
            return record['name']
    return self.fullname
def add_system_tag(self, tag):
    """Attach a system tag to this user.

    :param tag: a Tag instance or a tag name (string; lowercased on create).
    :raises: ValueError if a non-system Tag instance is passed.
    :returns: the Tag instance that was attached (or already present).
    """
    if not isinstance(tag, Tag):
        tag_instance, created = Tag.all_tags.get_or_create(name=tag.lower(), system=True)
    else:
        tag_instance = tag
    if not tag_instance.system:
        raise ValueError('Non-system tag passed to add_system_tag')
    # all_tags includes system tags, so this also prevents duplicates.
    if not self.all_tags.filter(id=tag_instance.id).exists():
        self.tags.add(tag_instance)
    return tag_instance
def get_recently_added(self):
    """Yield contributors this user recently added, most recent first."""
    return (
        each.contributor
        for each in self.recentlyaddedcontributor_set.order_by('-date_added')
    )
def _projects_in_common_query(self, other_user):
    """Queryset of live, non-collection nodes shared with ``other_user``."""
    return (self.nodes
            .filter(is_deleted=False)
            .exclude(type='osf.collection')
            .filter(_contributors=other_user)
            .distinct())
def get_projects_in_common(self, other_user):
    """Returns either a collection of "shared projects" (projects that both users are contributors for)
    or just their primary keys
    """
    query = self._projects_in_common_query(other_user)
    # Materialized as a set of node objects.
    return set(query.all())
def n_projects_in_common(self, other_user):
    """Returns number of "shared projects" (projects that both users are contributors for)"""
    return self._projects_in_common_query(other_user).count()
def add_unclaimed_record(self, node, referrer, given_name, email=None):
    """Add a new project entry in the unclaimed records dictionary.

    :param Node node: Node this unclaimed user was added to.
    :param User referrer: User who referred this user.
    :param str given_name: The full name that the referrer gave for this user.
    :param str email: The given email address.
    :returns: The added record
    :raises: PermissionsError if ``referrer`` cannot edit ``node``.

    Note: the record is only stored on the in-memory instance; the caller
    is responsible for saving.
    """
    if not node.can_edit(user=referrer):
        raise PermissionsError(
            'Referrer does not have permission to add a contributor to project {0}'.format(node._primary_key)
        )
    project_id = str(node._id)
    referrer_id = str(referrer._id)
    if email:
        clean_email = email.lower().strip()
    else:
        clean_email = None
    # Claim tokens come with their own expiration.
    verification_key = generate_verification_key(verification_type='claim')
    record = {
        'name': given_name,
        'referrer_id': referrer_id,
        'token': verification_key['token'],
        'expires': verification_key['expires'],
        'email': clean_email,
    }
    self.unclaimed_records[project_id] = record
    return record
def get_unclaimed_record(self, project_id):
    """Get an unclaimed record for a given project_id.

    :raises: ValueError if there is no record for the given project.
    """
    if project_id in self.unclaimed_records:
        return self.unclaimed_records[project_id]
    raise ValueError(
        'No unclaimed record for user {0} on node {1}'.format(self._id, project_id)
    )
def get_claim_url(self, project_id, external=False):
    """Return the URL that an unclaimed user should use to claim their
    account on the given project.

    :param project_id: The project ID for the unclaimed record
    :param external: If True, prefix with the absolute site domain.
    :raises: ValueError if a record doesn't exist for the given project ID
    :rtype: str
    :returns: The claim URL
    """
    base_url = website_settings.DOMAIN if external else '/'
    token = self.get_unclaimed_record(project_id)['token']
    return '{0}user/{1}/{2}/claim/?token={3}'.format(
        base_url, self._primary_key, project_id, token)
def is_affiliated_with_institution(self, institution):
    """Return if this user is affiliated with ``institution``."""
    return self.affiliated_institutions.filter(id=institution.id).exists()
def update_affiliated_institutions_by_email_domain(self):
    """
    Append affiliated_institutions by email domain.

    Matches the domains of this user's confirmed email addresses against
    institutions' registered email domains.
    """
    try:
        email_domains = [email.split('@')[1].lower() for email in self.emails.values_list('address', flat=True)]
        insts = Institution.objects.filter(email_domains__overlap=email_domains)
        if insts.exists():
            self.affiliated_institutions.add(*insts)
    except IndexError:
        # An address without '@' makes split()[1] fail; treat as no-op.
        # NOTE(review): this try also wraps the DB queries — confirm that
        # swallowing IndexError from them is intended.
        pass
def remove_institution(self, inst_id):
    """Detach the institution with GUID ``inst_id`` from this user.

    :return: True if the affiliation existed and was removed, else False.
    """
    try:
        institution = self.affiliated_institutions.get(_id=inst_id)
    except Institution.DoesNotExist:
        return False
    self.affiliated_institutions.remove(institution)
    return True
def get_activity_points(self, db=None):
    """Total activity count for this user from the analytics store.

    :param db: optional Mongo database handle; defaults to the framework's.
    """
    db = db or framework.mongo.database
    return analytics.get_total_activity_count(self._primary_key, db=db)
def get_or_create_cookie(self, secret=None):
    """Find the cookie for the given user
    Create a new session if no cookie is found

    :param str secret: The key to sign the cookie with
    :returns: The signed cookie
    """
    secret = secret or settings.SECRET_KEY
    # Reuse the most recently modified session for this user, if any.
    sessions = Session.find(
        Q('data.auth_user_id', 'eq', self._id)
    ).sort(
        '-date_modified'
    ).limit(1)
    if sessions.exists():
        user_session = sessions[0]
    else:
        user_session = Session(data={
            'auth_user_id': self._id,
            'auth_user_username': self.username,
            'auth_user_fullname': self.fullname,
        })
        user_session.save()
    # The cookie value is the session id, signed with the secret.
    signer = itsdangerous.Signer(secret)
    return signer.sign(user_session._id)
@classmethod
def from_cookie(cls, cookie, secret=None):
    """Attempt to load a user from their signed cookie

    :returns: None if a user cannot be loaded else User
    """
    if not cookie:
        return None
    secret = secret or settings.SECRET_KEY
    try:
        # Unsigning recovers the session id and rejects tampered cookies.
        token = itsdangerous.Signer(secret).unsign(cookie)
    except itsdangerous.BadSignature:
        return None
    user_session = Session.load(token)
    if user_session is None:
        return None
    return cls.load(user_session.data.get('auth_user_id'))
def is_watching(self, node):
    """Return whether this user watches ``node``."""
    return self.watched.filter(id=node.id).exists()
def get_node_comment_timestamps(self, target_id):
    """ Returns the timestamp for when comments were last viewed on a node, file or wiki.

    Falls back to a fixed epoch-era default when the target was never
    viewed, so comparisons always succeed.
    """
    # NOTE(review): default is 1970-01-01 *12:00* UTC, not midnight —
    # confirm the noon offset is intentional.
    default_timestamp = dt.datetime(1970, 1, 1, 12, 0, 0, tzinfo=pytz.utc)
    return self.comments_viewed_timestamp.get(target_id, default_timestamp)
class Meta:
    # custom permissions for use in the OSF Admin App
    permissions = (
        ('view_osfuser', 'Can view user details'),
    )
@receiver(post_save, sender=OSFUser)
def create_bookmark_collection(sender, instance, created, **kwargs):
    """post_save hook: give every newly created OSFUser a bookmark collection."""
    if created:
        new_bookmark_collection(instance)
#!/usr/bin/env python
#
# Copyright (C) 2011 Apple Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import StringIO
import __builtin__
import buildbot.status.web.auth
import contextlib
import os
import unittest
from committer_auth import CommitterAuth
# This subclass of StringIO supports the context manager protocol so it works
# with "with" statements, just like real files.
class CMStringIO(StringIO.StringIO):
    # Python 2's StringIO.StringIO lacks __enter__/__exit__; adding them lets
    # instances stand in for files opened via "with" (closing on exit).
    def __enter__(self):
        return self

    def __exit__(self, exception, value, traceback):
        self.close()
@contextlib.contextmanager
def open_override(func):
    """Temporarily replace the builtin ``open`` with ``func``.

    The restore is wrapped in try/finally so the real ``open`` comes back
    even if the with-body raises; the previous version left ``open``
    patched for the rest of the test run in that case.
    """
    original_open = __builtin__.open
    __builtin__.open = func
    try:
        yield
    finally:
        __builtin__.open = original_open
class CommitterAuthTest(unittest.TestCase):
def setUp(self):
self.auth = CommitterAuth('path/to/auth.json')
self.auth.open_auth_json_file = self.fake_auth_json_file
self.auth.open_webkit_committers_file = self.fake_committers_file
self.auth.open_trac_credentials_file = self.fake_htdigest_file
def fake_open_function(self, expected_filename):
def fake_open(name, mode='r'):
self.fake_open_was_called = True
self.assertEqual(expected_filename, name)
return fake_open
def test_authentication_success(self):
self.assertTrue(self.auth.authenticate('committer@webkit.org', 'committerpassword'))
self.assertEqual('', self.auth.errmsg())
self.assertTrue(self.auth.authenticate('committer2@example.com', 'committer2password'))
self.assertEqual('', self.auth.errmsg())
def test_committer_without_trac_credentials_fails(self):
self.assertFalse(self.auth.authenticate('committer3@webkit.org', 'committer3password'))
self.assertEqual('Invalid username/password', self.auth.errmsg())
def test_fail_to_open_auth_json_file(self):
def raise_IOError():
raise IOError(2, 'No such file or directory', 'path/to/auth.json')
auth = CommitterAuth('path/to/auth.json')
auth.open_auth_json_file = raise_IOError
self.assertFalse(auth.authenticate('committer@webkit.org', 'committerpassword'))
self.assertEqual('Error opening auth.json file: No such file or directory', auth.errmsg())
def test_fail_to_open_trac_credentials_file(self):
def raise_IOError():
raise IOError(2, 'No such file or directory', 'path/to/trac/credentials')
self.auth.open_trac_credentials_file = raise_IOError
self.assertFalse(self.auth.authenticate('committer@webkit.org', 'committerpassword'))
self.assertEqual('Error opening Trac credentials file: No such file or directory', self.auth.errmsg())
def test_fail_to_open_webkit_committers_file(self):
def raise_IOError():
raise IOError(2, 'No such file or directory', 'path/to/webkit/committers')
self.auth.open_webkit_committers_file = raise_IOError
self.assertFalse(self.auth.authenticate('committer@webkit.org', 'committerpassword'))
self.assertEqual('Error opening WebKit committers file: No such file or directory', self.auth.errmsg())
def test_implements_IAuth(self):
self.assertTrue(buildbot.status.web.auth.IAuth.implementedBy(CommitterAuth))
def test_invalid_auth_json_file(self):
auth = CommitterAuth('path/to/auth.json')
auth.open_auth_json_file = self.invalid_auth_json_file
self.assertFalse(auth.authenticate('committer@webkit.org', 'committerpassword'))
self.assertEqual('Error parsing auth.json file: No JSON object could be decoded', auth.errmsg())
def test_invalid_committers_file(self):
self.auth.open_webkit_committers_file = self.invalid_committers_file
self.assertFalse(self.auth.authenticate('committer@webkit.org', 'committerpassword'))
self.assertEqual('Error parsing WebKit committers file', self.auth.errmsg())
def test_invalid_trac_credentials_file(self):
self.auth.open_trac_credentials_file = self.invalid_htdigest_file
self.assertFalse(self.auth.authenticate('committer@webkit.org', 'committerpassword'))
self.assertEqual('Error parsing Trac credentials file', self.auth.errmsg())
def test_missing_auth_json_keys(self):
auth = CommitterAuth('path/to/auth.json')
auth.open_auth_json_file = lambda: CMStringIO('{ "trac_credentials": "path/to/trac/credentials" }')
self.assertFalse(auth.authenticate('committer@webkit.org', 'committerpassword'))
self.assertEqual('auth.json file is missing "webkit_committers" key', auth.errmsg())
auth.open_auth_json_file = lambda: CMStringIO('{ "webkit_committers": "path/to/webkit/committers" }')
auth.open_webkit_committers_file = self.fake_committers_file
self.assertFalse(auth.authenticate('committer@webkit.org', 'committerpassword'))
self.assertEqual('auth.json file is missing "trac_credentials" key', auth.errmsg())
def test_open_auth_json_file(self):
auth = CommitterAuth('path/to/auth.json')
self.fake_open_was_called = False
with open_override(self.fake_open_function(auth.auth_json_filename())):
auth.open_auth_json_file()
self.assertTrue(self.fake_open_was_called)
def test_open_trac_credentials_file(self):
auth = CommitterAuth('path/to/auth.json')
auth.trac_credentials_filename = lambda: 'trac credentials filename'
self.fake_open_was_called = False
with open_override(self.fake_open_function(auth.trac_credentials_filename())):
auth.open_trac_credentials_file()
self.assertTrue(self.fake_open_was_called)
def test_open_webkit_committers_file(self):
auth = CommitterAuth('path/to/auth.json')
auth.webkit_committers_filename = lambda: 'webkit committers filename'
self.fake_open_was_called = False
with open_override(self.fake_open_function(auth.webkit_committers_filename())):
auth.open_webkit_committers_file()
self.assertTrue(self.fake_open_was_called)
def test_non_committer_fails(self):
self.assertFalse(self.auth.authenticate('noncommitter@example.com', 'noncommitterpassword'))
self.assertEqual('Invalid username/password', self.auth.errmsg())
def test_trac_credentials_filename(self):
self.assertEqual('path/to/trac/credentials', self.auth.trac_credentials_filename())
def test_unknown_user_fails(self):
self.assertFalse(self.auth.authenticate('nobody@example.com', 'nobodypassword'))
self.assertEqual('Invalid username/password', self.auth.errmsg())
def test_username_is_prefix_of_valid_user(self):
self.assertFalse(self.auth.authenticate('committer@webkit.orgg', 'committerpassword'))
self.assertEqual('Invalid username/password', self.auth.errmsg())
def test_webkit_committers(self):
self.assertEqual(['committer@webkit.org', 'committer2@example.com', 'committer3@webkit.org'], self.auth.webkit_committers())
def test_webkit_committers_filename(self):
self.assertEqual('path/to/webkit/committers', self.auth.webkit_committers_filename())
def test_wrong_password_fails(self):
self.assertFalse(self.auth.authenticate('committer@webkit.org', 'wrongpassword'))
self.assertEqual('Invalid username/password', self.auth.errmsg())
def fake_auth_json_file(self):
return CMStringIO("""{
"trac_credentials": "path/to/trac/credentials",
"webkit_committers": "path/to/webkit/committers"
}""")
def invalid_auth_json_file(self):
return CMStringIO('~!@#$%^&*()_+')
def fake_committers_file(self):
return CMStringIO("""[groups]
group1 = user@example.com,user2@example.com
group2 = user3@example.com
group3 =
group4 =
webkit = committer@webkit.org,committer2@example.com,committer3@webkit.org
[service:/]
* = r
""")
def invalid_committers_file(self):
return CMStringIO("""[groups]
[[groups2]
""")
def fake_htdigest_file(self):
return CMStringIO("""committer@webkit.org:Mac OS Forge:761c8dcb7d9b5908007ed142f62fe73a
committer2@example.com:Mac OS Forge:faeee69acc2e49af3a0dbb15bd593ef4
noncommitter@example.com:Mac OS Forge:b99aa7ad32306a654ca4d57839fde9c1
""")
def invalid_htdigest_file(self):
return CMStringIO("""committer@webkit.org:Mac OS Forge:761c8dcb7d9b5908007ed142f62fe73a
committer2@example.com:Mac OS Forge:faeee69acc2e49af3a0dbb15bd593ef4
noncommitter@example.com:Mac OS Forge:b99aa7ad32306a654ca4d57839fde9c1
committer4@example.com:Mac OS Forge:::
""")
if __name__ == '__main__':
unittest.main()
| bsd-3-clause |
Russell-IO/ansible | lib/ansible/modules/files/tempfile.py | 66 | 3141 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2016, Krzysztof Magosa <krzysztof@magosa.pl>
# Copyright: (c) 2017, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: tempfile
version_added: "2.3"
author:
- Krzysztof Magosa
short_description: Creates temporary files and directories.
description:
- The C(tempfile) module creates temporary files and directories. C(mktemp) command takes different parameters on various systems, this module helps
to avoid troubles related to that. Files/directories created by module are accessible only by creator. In case you need to make them world-accessible
you need to use M(file) module.
- For Windows targets, use the M(win_tempfile) module instead.
options:
state:
description:
- Whether to create file or directory.
choices: [ directory, file ]
default: file
path:
description:
- Location where temporary file or directory should be created. If path is not specified default system temporary directory will be used.
prefix:
description:
- Prefix of file/directory name created by module.
default: ansible.
suffix:
description:
- Suffix of file/directory name created by module.
default: ""
notes:
- For Windows targets, use the M(win_tempfile) module instead.
'''
EXAMPLES = """
- name: create temporary build directory
tempfile:
state: directory
suffix: build
- name: create temporary file
tempfile:
state: file
suffix: temp
"""
RETURN = '''
path:
description: Path to created file or directory
returned: success
type: string
sample: "/tmp/ansible.bMlvdk"
'''
from os import close
from tempfile import mkstemp, mkdtemp
from traceback import format_exc
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_native
def main():
    """Create a temporary file or directory and report its path.

    Module parameters (from the Ansible argument spec):
      state  -- 'file' (default) or 'directory'
      path   -- parent directory for the temp entry; system default if None
      prefix -- name prefix (default 'ansible.')
      suffix -- name suffix (default '')

    Exits via module.exit_json() with the created path on success, or
    module.fail_json() with the exception text and traceback on failure.
    """
    module = AnsibleModule(
        argument_spec=dict(
            state=dict(type='str', default='file', choices=['file', 'directory']),
            path=dict(type='path'),
            prefix=dict(type='str', default='ansible.'),
            suffix=dict(type='str', default=''),
        ),
    )

    params = module.params
    make_kwargs = dict(
        prefix=params['prefix'],
        suffix=params['suffix'],
        dir=params['path'],
    )

    try:
        if params['state'] == 'file':
            # mkstemp returns an open OS-level handle; close it so the
            # descriptor is not leaked.
            handle, path = mkstemp(**make_kwargs)
            close(handle)
        elif params['state'] == 'directory':
            path = mkdtemp(**make_kwargs)

        module.exit_json(changed=True, path=path)
    except Exception as e:
        module.fail_json(msg=to_native(e), exception=format_exc())
if __name__ == '__main__':
main()
| gpl-3.0 |
837468220/python-for-android | python3-alpha/python3-src/Lib/encodings/latin_1.py | 853 | 1264 | """ Python 'latin-1' Codec
Written by Marc-Andre Lemburg (mal@lemburg.com).
(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
"""
import codecs
### Codec APIs
class Codec(codecs.Codec):
    """Stateless Latin-1 codec backed directly by the C implementations."""

    # Note: Binding these as C functions will result in the class not
    # converting them to methods. This is intended.
    encode = codecs.latin_1_encode
    decode = codecs.latin_1_decode
class IncrementalEncoder(codecs.IncrementalEncoder):
    """Incremental Latin-1 encoder.

    Latin-1 is stateless, so *final* is ignored and every chunk is
    encoded independently with the configured error handler.
    """

    def encode(self, input, final=False):
        encoded, _consumed = codecs.latin_1_encode(input, self.errors)
        return encoded
class IncrementalDecoder(codecs.IncrementalDecoder):
    """Incremental Latin-1 decoder.

    Latin-1 maps each byte to the code point of the same value, so the
    decoder is stateless and *final* is ignored.
    """

    def decode(self, input, final=False):
        decoded, _consumed = codecs.latin_1_decode(input, self.errors)
        return decoded
class StreamWriter(Codec, codecs.StreamWriter):
    """Latin-1 stream writer; encoding behavior comes from Codec.encode."""
    pass
class StreamReader(Codec, codecs.StreamReader):
    """Latin-1 stream reader; decoding behavior comes from Codec.decode."""
    pass
class StreamConverter(StreamWriter, StreamReader):
    """Stream converter with deliberately inverted mappings:
    writing *decodes* (bytes -> str) and reading *encodes* (str -> bytes),
    the opposite of StreamWriter/StreamReader.
    """
    # NOTE: the encode/decode swap below is intentional, not a typo.
    encode = codecs.latin_1_decode
    decode = codecs.latin_1_encode
### encodings module API
def getregentry():
    """Return the codecs.CodecInfo registry entry for this codec.

    Registered under the canonical name 'iso8859-1' (Latin-1), wiring up
    the stateless Codec functions plus the incremental and stream classes
    defined in this module.
    """
    return codecs.CodecInfo(
        name='iso8859-1',
        encode=Codec.encode,
        decode=Codec.decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
| apache-2.0 |
kevinmel2000/sl4a | python/src/Lib/UserList.py | 327 | 3644 | """A more or less complete user-defined wrapper around list objects."""
import collections
class UserList(collections.MutableSequence):
    """List-like wrapper that stores its contents in the ``data`` attribute.

    Python 2 era implementation: it relies on ``__cmp__`` and the
    ``__getslice__``/``__setslice__``/``__delslice__`` protocol, which were
    removed in Python 3 (``collections.MutableSequence`` also moved to
    ``collections.abc`` and was dropped from ``collections`` in 3.10).
    """

    def __init__(self, initlist=None):
        self.data = []
        if initlist is not None:
            # XXX should this accept an arbitrary sequence?
            if type(initlist) == type(self.data):
                self.data[:] = initlist
            elif isinstance(initlist, UserList):
                self.data[:] = initlist.data[:]
            else:
                self.data = list(initlist)

    def __repr__(self): return repr(self.data)

    # Rich comparisons delegate to the underlying list; __cast unwraps
    # another UserList so UserList-vs-UserList compares by contents.
    def __lt__(self, other): return self.data <  self.__cast(other)
    def __le__(self, other): return self.data <= self.__cast(other)
    def __eq__(self, other): return self.data == self.__cast(other)
    def __ne__(self, other): return self.data != self.__cast(other)
    def __gt__(self, other): return self.data >  self.__cast(other)
    def __ge__(self, other): return self.data >= self.__cast(other)

    def __cast(self, other):
        if isinstance(other, UserList): return other.data
        else: return other

    def __cmp__(self, other):
        # Python 2 only: ``cmp`` does not exist in Python 3.
        return cmp(self.data, self.__cast(other))

    __hash__ = None # Mutable sequence, so not hashable

    def __contains__(self, item): return item in self.data
    def __len__(self): return len(self.data)
    def __getitem__(self, i): return self.data[i]
    def __setitem__(self, i, item): self.data[i] = item
    def __delitem__(self, i): del self.data[i]

    # Python 2 slice protocol; negative bounds are clamped to 0 because the
    # interpreter has already added len(self) to negative indices here.
    def __getslice__(self, i, j):
        i = max(i, 0); j = max(j, 0)
        return self.__class__(self.data[i:j])
    def __setslice__(self, i, j, other):
        i = max(i, 0); j = max(j, 0)
        if isinstance(other, UserList):
            self.data[i:j] = other.data
        elif isinstance(other, type(self.data)):
            self.data[i:j] = other
        else:
            self.data[i:j] = list(other)
    def __delslice__(self, i, j):
        i = max(i, 0); j = max(j, 0)
        del self.data[i:j]

    # Concatenation returns a new instance of the (possibly subclassed) type.
    def __add__(self, other):
        if isinstance(other, UserList):
            return self.__class__(self.data + other.data)
        elif isinstance(other, type(self.data)):
            return self.__class__(self.data + other)
        else:
            return self.__class__(self.data + list(other))
    def __radd__(self, other):
        if isinstance(other, UserList):
            return self.__class__(other.data + self.data)
        elif isinstance(other, type(self.data)):
            return self.__class__(other + self.data)
        else:
            return self.__class__(list(other) + self.data)
    def __iadd__(self, other):
        # In-place add mutates self.data and returns self (augmented-assignment
        # protocol).
        if isinstance(other, UserList):
            self.data += other.data
        elif isinstance(other, type(self.data)):
            self.data += other
        else:
            self.data += list(other)
        return self
    def __mul__(self, n):
        return self.__class__(self.data*n)
    __rmul__ = __mul__
    def __imul__(self, n):
        self.data *= n
        return self

    # Mutating list API: thin delegations to the underlying list.
    def append(self, item): self.data.append(item)
    def insert(self, i, item): self.data.insert(i, item)
    def pop(self, i=-1): return self.data.pop(i)
    def remove(self, item): self.data.remove(item)
    def count(self, item): return self.data.count(item)
    def index(self, item, *args): return self.data.index(item, *args)
    def reverse(self): self.data.reverse()
    def sort(self, *args, **kwds): self.data.sort(*args, **kwds)
    def extend(self, other):
        if isinstance(other, UserList):
            self.data.extend(other.data)
        else:
            self.data.extend(other)
| apache-2.0 |
madslonnberg/blog | node_modules/pygmentize-bundled/vendor/pygments/build-2.7/pygments/lexers/shell.py | 287 | 15340 | # -*- coding: utf-8 -*-
"""
pygments.lexers.shell
~~~~~~~~~~~~~~~~~~~~~
Lexers for various shells.
:copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import Lexer, RegexLexer, do_insertions, bygroups, include
from pygments.token import Punctuation, \
Text, Comment, Operator, Keyword, Name, String, Number, Generic
from pygments.util import shebang_matches
__all__ = ['BashLexer', 'BashSessionLexer', 'TcshLexer', 'BatchLexer',
'PowerShellLexer', 'ShellSessionLexer']
line_re = re.compile('.*?\n')
class BashLexer(RegexLexer):
"""
Lexer for (ba|k|)sh shell scripts.
*New in Pygments 0.6.*
"""
name = 'Bash'
aliases = ['bash', 'sh', 'ksh']
filenames = ['*.sh', '*.ksh', '*.bash', '*.ebuild', '*.eclass',
'.bashrc', 'bashrc', '.bash_*', 'bash_*']
mimetypes = ['application/x-sh', 'application/x-shellscript']
tokens = {
'root': [
include('basic'),
(r'\$\(\(', Keyword, 'math'),
(r'\$\(', Keyword, 'paren'),
(r'\${#?', Keyword, 'curly'),
(r'`', String.Backtick, 'backticks'),
include('data'),
],
'basic': [
(r'\b(if|fi|else|while|do|done|for|then|return|function|case|'
r'select|continue|until|esac|elif)\s*\b',
Keyword),
(r'\b(alias|bg|bind|break|builtin|caller|cd|command|compgen|'
r'complete|declare|dirs|disown|echo|enable|eval|exec|exit|'
r'export|false|fc|fg|getopts|hash|help|history|jobs|kill|let|'
r'local|logout|popd|printf|pushd|pwd|read|readonly|set|shift|'
r'shopt|source|suspend|test|time|times|trap|true|type|typeset|'
r'ulimit|umask|unalias|unset|wait)\s*\b(?!\.)',
Name.Builtin),
(r'#.*\n', Comment),
(r'\\[\w\W]', String.Escape),
(r'(\b\w+)(\s*)(=)', bygroups(Name.Variable, Text, Operator)),
(r'[\[\]{}()=]', Operator),
(r'<<<', Operator), # here-string
(r'<<-?\s*(\'?)\\?(\w+)[\w\W]+?\2', String),
(r'&&|\|\|', Operator),
],
'data': [
(r'(?s)\$?"(\\\\|\\[0-7]+|\\.|[^"\\])*"', String.Double),
(r"(?s)\$?'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
(r';', Punctuation),
(r'&', Punctuation),
(r'\|', Punctuation),
(r'\s+', Text),
(r'[^=\s\[\]{}()$"\'`\\<&|;]+', Text),
(r'\d+(?= |\Z)', Number),
(r'\$#?(\w+|.)', Name.Variable),
(r'<', Text),
],
'curly': [
(r'}', Keyword, '#pop'),
(r':-', Keyword),
(r'[a-zA-Z0-9_]+', Name.Variable),
(r'[^}:"\'`$]+', Punctuation),
(r':', Punctuation),
include('root'),
],
'paren': [
(r'\)', Keyword, '#pop'),
include('root'),
],
'math': [
(r'\)\)', Keyword, '#pop'),
(r'[-+*/%^|&]|\*\*|\|\|', Operator),
(r'\d+', Number),
include('root'),
],
'backticks': [
(r'`', String.Backtick, '#pop'),
include('root'),
],
}
def analyse_text(text):
if shebang_matches(text, r'(ba|z|)sh'):
return 1
if text.startswith('$ '):
return 0.2
class BashSessionLexer(Lexer):
    """
    Lexer for simplistic shell sessions.

    *New in Pygments 1.1.*
    """

    name = 'Bash Session'
    aliases = ['console']
    filenames = ['*.sh-session']
    mimetypes = ['application/x-shell-session']

    def get_tokens_unprocessed(self, text):
        # Command text (prompts stripped) accumulates in ``curcode`` and is
        # lexed with BashLexer; lines without a prompt are emitted verbatim
        # as Generic.Output.
        bashlexer = BashLexer(**self.options)

        pos = 0
        curcode = ''
        insertions = []

        for match in line_re.finditer(text):
            line = match.group()
            # Match a shell prompt: optional "(venv)" prefix, then either a
            # bare sh-style prompt, "user@host"-style, or a bracketed prompt,
            # ending in $, # or %.  Group 1 is the prompt, group 2 the command.
            m = re.match(r'^((?:\(\S+\))?(?:|sh\S*?|\w+\S+[@:]\S+(?:\s+\S+)'
                         r'?|\[\S+[@:][^\n]+\].+)[$#%])(.*\n?)' , line)
            if m:
                # To support output lexers (say diff output), the output
                # needs to be broken by prompts whenever the output lexer
                # changes.
                if not insertions:
                    pos = match.start()
                insertions.append((len(curcode),
                                   [(0, Generic.Prompt, m.group(1))]))
                curcode += m.group(2)
            elif line.startswith('>'):
                # '>' continuation lines belong to the current command.
                insertions.append((len(curcode),
                                   [(0, Generic.Prompt, line[:1])]))
                curcode += line[1:]
            else:
                # Plain output line: flush any pending command tokens first.
                if insertions:
                    toks = bashlexer.get_tokens_unprocessed(curcode)
                    for i, t, v in do_insertions(insertions, toks):
                        yield pos+i, t, v
                yield match.start(), Generic.Output, line
                insertions = []
                curcode = ''
        if insertions:
            # Flush a trailing command that has no following output line.
            for i, t, v in do_insertions(insertions,
                                         bashlexer.get_tokens_unprocessed(curcode)):
                yield pos+i, t, v
class ShellSessionLexer(Lexer):
    """
    Lexer for shell sessions that works with different command prompts

    *New in Pygments 1.6.*
    """

    name = 'Shell Session'
    aliases = ['shell-session']
    filenames = ['*.shell-session']
    mimetypes = ['application/x-sh-session']

    def get_tokens_unprocessed(self, text):
        # Same structure as BashSessionLexer, but with a looser prompt regex:
        # an optional '[' then "user@host..." ending in $, # or %.
        bashlexer = BashLexer(**self.options)

        pos = 0
        curcode = ''
        insertions = []

        for match in line_re.finditer(text):
            line = match.group()
            m = re.match(r'^((?:\[?\S+@[^$#%]+)[$#%])(.*\n?)', line)
            if m:
                # To support output lexers (say diff output), the output
                # needs to be broken by prompts whenever the output lexer
                # changes.
                if not insertions:
                    pos = match.start()
                insertions.append((len(curcode),
                                   [(0, Generic.Prompt, m.group(1))]))
                curcode += m.group(2)
            else:
                # Non-prompt line: flush pending command tokens, then emit
                # the line as program output.
                if insertions:
                    toks = bashlexer.get_tokens_unprocessed(curcode)
                    for i, t, v in do_insertions(insertions, toks):
                        yield pos+i, t, v
                yield match.start(), Generic.Output, line
                insertions = []
                curcode = ''
        if insertions:
            # Flush a trailing command that has no following output line.
            for i, t, v in do_insertions(insertions,
                                         bashlexer.get_tokens_unprocessed(curcode)):
                yield pos+i, t, v
class BatchLexer(RegexLexer):
"""
Lexer for the DOS/Windows Batch file format.
*New in Pygments 0.7.*
"""
name = 'Batchfile'
aliases = ['bat', 'dosbatch', 'winbatch']
filenames = ['*.bat', '*.cmd']
mimetypes = ['application/x-dos-batch']
flags = re.MULTILINE | re.IGNORECASE
tokens = {
'root': [
# Lines can start with @ to prevent echo
(r'^\s*@', Punctuation),
(r'^(\s*)(rem\s.*)$', bygroups(Text, Comment)),
(r'".*?"', String.Double),
(r"'.*?'", String.Single),
# If made more specific, make sure you still allow expansions
# like %~$VAR:zlt
(r'%%?[~$:\w]+%?', Name.Variable),
(r'::.*', Comment), # Technically :: only works at BOL
(r'(set)(\s+)(\w+)', bygroups(Keyword, Text, Name.Variable)),
(r'(call)(\s+)(:\w+)', bygroups(Keyword, Text, Name.Label)),
(r'(goto)(\s+)(\w+)', bygroups(Keyword, Text, Name.Label)),
(r'\b(set|call|echo|on|off|endlocal|for|do|goto|if|pause|'
r'setlocal|shift|errorlevel|exist|defined|cmdextversion|'
r'errorlevel|else|cd|md|del|deltree|cls|choice)\b', Keyword),
(r'\b(equ|neq|lss|leq|gtr|geq)\b', Operator),
include('basic'),
(r'.', Text),
],
'echo': [
# Escapes only valid within echo args?
(r'\^\^|\^<|\^>|\^\|', String.Escape),
(r'\n', Text, '#pop'),
include('basic'),
(r'[^\'"^]+', Text),
],
'basic': [
(r'".*?"', String.Double),
(r"'.*?'", String.Single),
(r'`.*?`', String.Backtick),
(r'-?\d+', Number),
(r',', Punctuation),
(r'=', Operator),
(r'/\S+', Name),
(r':\w+', Name.Label),
(r'\w:\w+', Text),
(r'([<>|])(\s*)(\w+)', bygroups(Punctuation, Text, Name)),
],
}
class TcshLexer(RegexLexer):
"""
Lexer for tcsh scripts.
*New in Pygments 0.10.*
"""
name = 'Tcsh'
aliases = ['tcsh', 'csh']
filenames = ['*.tcsh', '*.csh']
mimetypes = ['application/x-csh']
tokens = {
'root': [
include('basic'),
(r'\$\(', Keyword, 'paren'),
(r'\${#?', Keyword, 'curly'),
(r'`', String.Backtick, 'backticks'),
include('data'),
],
'basic': [
(r'\b(if|endif|else|while|then|foreach|case|default|'
r'continue|goto|breaksw|end|switch|endsw)\s*\b',
Keyword),
(r'\b(alias|alloc|bg|bindkey|break|builtins|bye|caller|cd|chdir|'
r'complete|dirs|echo|echotc|eval|exec|exit|fg|filetest|getxvers|'
r'glob|getspath|hashstat|history|hup|inlib|jobs|kill|'
r'limit|log|login|logout|ls-F|migrate|newgrp|nice|nohup|notify|'
r'onintr|popd|printenv|pushd|rehash|repeat|rootnode|popd|pushd|'
r'set|shift|sched|setenv|setpath|settc|setty|setxvers|shift|'
r'source|stop|suspend|source|suspend|telltc|time|'
r'umask|unalias|uncomplete|unhash|universe|unlimit|unset|unsetenv|'
r'ver|wait|warp|watchlog|where|which)\s*\b',
Name.Builtin),
(r'#.*\n', Comment),
(r'\\[\w\W]', String.Escape),
(r'(\b\w+)(\s*)(=)', bygroups(Name.Variable, Text, Operator)),
(r'[\[\]{}()=]+', Operator),
(r'<<\s*(\'?)\\?(\w+)[\w\W]+?\2', String),
],
'data': [
(r'(?s)"(\\\\|\\[0-7]+|\\.|[^"\\])*"', String.Double),
(r"(?s)'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
(r'\s+', Text),
(r'[^=\s\[\]{}()$"\'`\\]+', Text),
(r'\d+(?= |\Z)', Number),
(r'\$#?(\w+|.)', Name.Variable),
],
'curly': [
(r'}', Keyword, '#pop'),
(r':-', Keyword),
(r'[a-zA-Z0-9_]+', Name.Variable),
(r'[^}:"\'`$]+', Punctuation),
(r':', Punctuation),
include('root'),
],
'paren': [
(r'\)', Keyword, '#pop'),
include('root'),
],
'backticks': [
(r'`', String.Backtick, '#pop'),
include('root'),
],
}
class PowerShellLexer(RegexLexer):
"""
For Windows PowerShell code.
*New in Pygments 1.5.*
"""
name = 'PowerShell'
aliases = ['powershell', 'posh', 'ps1', 'psm1']
filenames = ['*.ps1','*.psm1']
mimetypes = ['text/x-powershell']
flags = re.DOTALL | re.IGNORECASE | re.MULTILINE
keywords = (
'while validateset validaterange validatepattern validatelength '
'validatecount until trap switch return ref process param parameter in '
'if global: function foreach for finally filter end elseif else '
'dynamicparam do default continue cmdletbinding break begin alias \\? '
'% #script #private #local #global mandatory parametersetname position '
'valuefrompipeline valuefrompipelinebypropertyname '
'valuefromremainingarguments helpmessage try catch throw').split()
operators = (
'and as band bnot bor bxor casesensitive ccontains ceq cge cgt cle '
'clike clt cmatch cne cnotcontains cnotlike cnotmatch contains '
'creplace eq exact f file ge gt icontains ieq ige igt ile ilike ilt '
'imatch ine inotcontains inotlike inotmatch ireplace is isnot le like '
'lt match ne not notcontains notlike notmatch or regex replace '
'wildcard').split()
verbs = (
'write where wait use update unregister undo trace test tee take '
'suspend stop start split sort skip show set send select scroll resume '
'restore restart resolve resize reset rename remove register receive '
'read push pop ping out new move measure limit join invoke import '
'group get format foreach export expand exit enter enable disconnect '
'disable debug cxnew copy convertto convertfrom convert connect '
'complete compare clear checkpoint aggregate add').split()
commenthelp = (
'component description example externalhelp forwardhelpcategory '
'forwardhelptargetname functionality inputs link '
'notes outputs parameter remotehelprunspace role synopsis').split()
tokens = {
'root': [
# we need to count pairs of parentheses for correct highlight
# of '$(...)' blocks in strings
(r'\(', Punctuation, 'child'),
(r'\s+', Text),
(r'^(\s*#[#\s]*)(\.(?:%s))([^\n]*$)' % '|'.join(commenthelp),
bygroups(Comment, String.Doc, Comment)),
(r'#[^\n]*?$', Comment),
(r'(<|<)#', Comment.Multiline, 'multline'),
(r'@"\n', String.Heredoc, 'heredoc-double'),
(r"@'\n.*?\n'@", String.Heredoc),
# escaped syntax
(r'`[\'"$@-]', Punctuation),
(r'"', String.Double, 'string'),
(r"'([^']|'')*'", String.Single),
(r'(\$|@@|@)((global|script|private|env):)?[a-z0-9_]+',
Name.Variable),
(r'(%s)\b' % '|'.join(keywords), Keyword),
(r'-(%s)\b' % '|'.join(operators), Operator),
(r'(%s)-[a-z_][a-z0-9_]*\b' % '|'.join(verbs), Name.Builtin),
(r'\[[a-z_\[][a-z0-9_. `,\[\]]*\]', Name.Constant), # .net [type]s
(r'-[a-z_][a-z0-9_]*', Name),
(r'\w+', Name),
(r'[.,;@{}\[\]$()=+*/\\&%!~?^`|<>-]|::', Punctuation),
],
'child': [
(r'\)', Punctuation, '#pop'),
include('root'),
],
'multline': [
(r'[^#&.]+', Comment.Multiline),
(r'#(>|>)', Comment.Multiline, '#pop'),
(r'\.(%s)' % '|'.join(commenthelp), String.Doc),
(r'[#&.]', Comment.Multiline),
],
'string': [
(r"`[0abfnrtv'\"\$]", String.Escape),
(r'[^$`"]+', String.Double),
(r'\$\(', Punctuation, 'child'),
(r'""', String.Double),
(r'[`$]', String.Double),
(r'"', String.Double, '#pop'),
],
'heredoc-double': [
(r'\n"@', String.Heredoc, '#pop'),
(r'\$\(', Punctuation, 'child'),
(r'[^@\n]+"]', String.Heredoc),
(r".", String.Heredoc),
]
}
| mit |
cyyber/QRL | src/qrl/services/PublicAPIService.py | 1 | 31720 | # coding=utf-8
# Distributed under the MIT software license, see the accompanying
# file LICENSE or http://www.opensource.org/licenses/mit-license.php.
import traceback
import os
from statistics import variance, mean
from pyqrllib.pyqrllib import hstr2bin, QRLHelper, QRLDescriptor
from qrl.core import config
from qrl.core.AddressState import AddressState
from qrl.core.misc import logger
from qrl.core.qrlnode import QRLNode
from qrl.core.txs.Transaction import Transaction, CODEMAP
from qrl.generated import qrl_pb2
from qrl.generated.qrl_pb2_grpc import PublicAPIServicer
from qrl.services.grpcHelper import GrpcExceptionWrapper
class PublicAPIService(PublicAPIServicer):
MAX_REQUEST_QUANTITY = 100
# TODO: Separate the Service from the node model
def __init__(self, qrlnode: QRLNode):
self.qrlnode = qrlnode
@GrpcExceptionWrapper(qrl_pb2.GetAddressFromPKResp)
def GetAddressFromPK(self, request: qrl_pb2.GetAddressFromPKReq, context) -> qrl_pb2.GetAddressFromPKResp:
return qrl_pb2.GetAddressFromPKResp(address=bytes(QRLHelper.getAddress(request.pk)))
@GrpcExceptionWrapper(qrl_pb2.GetPeersStatResp)
def GetPeersStat(self, request: qrl_pb2.GetPeersStatReq, context) -> qrl_pb2.GetPeersStatResp:
peers_stat_resp = qrl_pb2.GetPeersStatResp()
peers_stat = self.qrlnode.get_peers_stat()
for stat in peers_stat:
peers_stat_resp.peers_stat.extend([stat])
return peers_stat_resp
@GrpcExceptionWrapper(qrl_pb2.IsSlaveResp)
def IsSlave(self, request: qrl_pb2.IsSlaveReq, context) -> qrl_pb2.IsSlaveResp:
return qrl_pb2.IsSlaveResp(result=self.qrlnode.is_slave(request.master_address, request.slave_pk))
@GrpcExceptionWrapper(qrl_pb2.GetNodeStateResp)
def GetNodeState(self, request: qrl_pb2.GetNodeStateReq, context) -> qrl_pb2.GetNodeStateResp:
return qrl_pb2.GetNodeStateResp(info=self.qrlnode.get_node_info())
@GrpcExceptionWrapper(qrl_pb2.GetKnownPeersResp)
def GetKnownPeers(self, request: qrl_pb2.GetKnownPeersReq, context) -> qrl_pb2.GetKnownPeersResp:
response = qrl_pb2.GetKnownPeersResp()
response.node_info.CopyFrom(self.qrlnode.get_node_info())
response.known_peers.extend([qrl_pb2.Peer(ip=p) for p in self.qrlnode.peer_manager.known_peer_addresses])
return response
@GrpcExceptionWrapper(qrl_pb2.GetStatsResp)
def GetStats(self, request: qrl_pb2.GetStatsReq, context) -> qrl_pb2.GetStatsResp:
response = qrl_pb2.GetStatsResp()
response.node_info.CopyFrom(self.qrlnode.get_node_info())
response.epoch = self.qrlnode.epoch
response.uptime_network = self.qrlnode.uptime_network
response.block_last_reward = self.qrlnode.block_last_reward
response.coins_total_supply = int(self.qrlnode.coin_supply_max)
response.coins_emitted = int(self.qrlnode.coin_supply)
response.block_time_mean = 0
response.block_time_sd = 0
if request.include_timeseries:
tmp = list(self.qrlnode.get_block_timeseries(config.dev.block_timeseries_size))
response.block_timeseries.extend(tmp)
if len(tmp) > 2:
vals = [v.time_last for v in tmp[1:]]
response.block_time_mean = int(mean(vals))
response.block_time_sd = int(variance(vals) ** 0.5)
return response
@GrpcExceptionWrapper(qrl_pb2.GetChainStatsResp)
def GetChainStats(self, request: qrl_pb2.GetChainStatsReq, context) -> qrl_pb2.GetChainStatsResp:
response = qrl_pb2.GetChainStatsResp()
for (path, dirs, files) in os.walk(config.user.data_dir + "/state"):
for f in files:
filename = os.path.join(path, f)
response.state_size += os.path.getsize(filename)
response.state_size_mb = str(response.state_size / (1024 * 1024))
response.state_size_gb = str(response.state_size / (1024 * 1024 * 1024))
return response
@GrpcExceptionWrapper(qrl_pb2.ParseAddressResp)
def ParseAddress(self, request: qrl_pb2.ParseAddressReq, context) -> qrl_pb2.ParseAddressResp:
response = qrl_pb2.ParseAddressResp()
response.is_valid = QRLHelper.addressIsValid(request.address)
descriptor = QRLDescriptor.fromBytes(request.address[:3])
hf_dict = {0: 'SHA2-256', 1: 'SHAKE-128', 2: 'SHAKE-256', 3: 'RESERVED'}
ss_dict = {0: 'XMSS', 1: 'XMSS-MT'}
af_dict = {0: 'SHA2-256', 1: 'RESERVED', 3: 'RESERVED'}
response.desc.hash_function = hf_dict[descriptor.getHashFunction()]
response.desc.tree_height = descriptor.getHeight()
response.desc.signatures = 2**response.desc.tree_height
response.desc.signature_scheme = ss_dict[descriptor.getSignatureType()]
response.desc.address_format = af_dict[descriptor.getAddrFormatType()]
return response
@GrpcExceptionWrapper(qrl_pb2.GetAddressStateResp)
def GetAddressState(self, request: qrl_pb2.GetAddressStateReq, context) -> qrl_pb2.GetAddressStateResp:
address_state = self.qrlnode.get_address_state(request.address)
return qrl_pb2.GetAddressStateResp(state=address_state.pbdata)
@GrpcExceptionWrapper(qrl_pb2.GetOptimizedAddressStateResp)
def GetOptimizedAddressState(self,
request: qrl_pb2.GetAddressStateReq,
context) -> qrl_pb2.GetOptimizedAddressStateResp:
address_state = self.qrlnode.get_optimized_address_state(request.address)
return qrl_pb2.GetOptimizedAddressStateResp(state=address_state.pbdata)
@GrpcExceptionWrapper(qrl_pb2.GetMultiSigAddressStateResp)
def GetMultiSigAddressState(self,
request: qrl_pb2.GetMultiSigAddressStateReq,
context) -> qrl_pb2.GetMultiSigAddressStateResp:
multi_sig_address_state = self.qrlnode.get_multi_sig_address_state(request.address)
if multi_sig_address_state is None:
return qrl_pb2.GetMultiSigAddressStateResp()
return qrl_pb2.GetMultiSigAddressStateResp(state=multi_sig_address_state.pbdata)
@GrpcExceptionWrapper(qrl_pb2.TransferCoinsResp)
def TransferCoins(self, request: qrl_pb2.TransferCoinsReq, context) -> qrl_pb2.TransferCoinsResp:
logger.debug("[PublicAPI] TransferCoins")
tx = self.qrlnode.create_send_tx(addrs_to=request.addresses_to,
amounts=request.amounts,
message_data=request.message_data,
fee=request.fee,
xmss_pk=request.xmss_pk,
master_addr=request.master_addr)
extended_transaction_unsigned = qrl_pb2.TransactionExtended(tx=tx.pbdata,
addr_from=tx.addr_from,
size=tx.size)
return qrl_pb2.TransferCoinsResp(extended_transaction_unsigned=extended_transaction_unsigned)
    @GrpcExceptionWrapper(qrl_pb2.PushTransactionResp)
    def PushTransaction(self, request: qrl_pb2.PushTransactionReq, context) -> qrl_pb2.PushTransactionResp:
        """Submit a signed transaction to the node.

        The response's error_code is SUBMITTED on success (tx_hash set),
        VALIDATION_FAILED when the signature is obviously too short, or
        ERROR with the full traceback text on any exception.  Full
        transaction validation is deliberately deferred to the tx pool.
        """
        logger.debug("[PublicAPI] PushTransaction")
        answer = qrl_pb2.PushTransactionResp()

        try:
            tx = Transaction.from_pbdata(request.transaction_signed)
            tx.update_txhash()

            # FIXME: Full validation takes too much time. At least verify there is a signature
            # the validation happens later in the tx pool
            if len(tx.signature) > 1000:
                self.qrlnode.submit_send_tx(tx)
                answer.error_code = qrl_pb2.PushTransactionResp.SUBMITTED
                answer.tx_hash = tx.txhash
            else:
                answer.error_description = 'Signature too short'
                answer.error_code = qrl_pb2.PushTransactionResp.VALIDATION_FAILED
        except Exception as e:
            # Report the failure (with traceback) in the response instead of
            # letting the exception escape the RPC handler.
            error_str = traceback.format_exception(None, e, e.__traceback__)
            answer.error_description = str(''.join(error_str))
            answer.error_code = qrl_pb2.PushTransactionResp.ERROR

        return answer
@GrpcExceptionWrapper(qrl_pb2.TransferCoinsResp)
def GetMultiSigCreateTxn(self, request: qrl_pb2.MultiSigCreateTxnReq, context) -> qrl_pb2.TransferCoinsResp:
logger.debug("[PublicAPI] GetMultiSigCreateTxnReq")
tx = self.qrlnode.create_multi_sig_txn(signatories=request.signatories,
weights=request.weights,
threshold=request.threshold,
fee=request.fee,
xmss_pk=request.xmss_pk,
master_addr=request.master_addr)
extended_transaction_unsigned = qrl_pb2.TransactionExtended(tx=tx.pbdata,
addr_from=tx.addr_from,
size=tx.size)
return qrl_pb2.TransferCoinsResp(extended_transaction_unsigned=extended_transaction_unsigned)
@GrpcExceptionWrapper(qrl_pb2.TransferCoinsResp)
def GetMultiSigSpendTxn(self, request: qrl_pb2.MultiSigSpendTxnReq, context) -> qrl_pb2.TransferCoinsResp:
logger.debug("[PublicAPI] GetMultiSigSpendTxnReq")
tx = self.qrlnode.create_multi_sig_spend_txn(multi_sig_address=request.multi_sig_address,
addrs_to=request.addrs_to,
amounts=request.amounts,
expiry_block_number=request.expiry_block_number,
fee=request.fee,
xmss_pk=request.xmss_pk,
master_addr=request.master_addr)
extended_transaction_unsigned = qrl_pb2.TransactionExtended(tx=tx.pbdata,
addr_from=tx.addr_from,
size=tx.size)
return qrl_pb2.TransferCoinsResp(extended_transaction_unsigned=extended_transaction_unsigned)
@GrpcExceptionWrapper(qrl_pb2.TransferCoinsResp)
def GetMultiSigVoteTxn(self, request: qrl_pb2.MultiSigVoteTxnReq, context) -> qrl_pb2.TransferCoinsResp:
    """Build an unsigned multi-sig vote (or unvote) transaction.

    Args:
        request: Carries the shared key of the multi-sig spend being voted
            on, the unvote flag, fee and signing key information.
        context: gRPC call context (unused).

    Returns:
        TransferCoinsResp holding the unsigned transaction for the caller
        to sign and submit via PushTransaction.
    """
    # BUG FIX: the debug tag previously read "GetMultiSigSpendTxnReq"
    # (copy-paste from the method above), making traces for the two
    # endpoints indistinguishable.
    logger.debug("[PublicAPI] GetMultiSigVoteTxn")
    unsigned_tx = self.qrlnode.create_multi_sig_vote_txn(
        shared_key=request.shared_key,
        unvote=request.unvote,
        fee=request.fee,
        xmss_pk=request.xmss_pk,
        master_addr=request.master_addr)
    extended = qrl_pb2.TransactionExtended(
        tx=unsigned_tx.pbdata,
        addr_from=unsigned_tx.addr_from,
        size=unsigned_tx.size)
    return qrl_pb2.TransferCoinsResp(extended_transaction_unsigned=extended)
@GrpcExceptionWrapper(qrl_pb2.TransferCoinsResp)
def GetMessageTxn(self, request: qrl_pb2.MessageTxnReq, context) -> qrl_pb2.TransferCoinsResp:
    """Build an unsigned message transaction from the request fields."""
    logger.debug("[PublicAPI] GetMessageTxn")
    unsigned_tx = self.qrlnode.create_message_txn(
        message_hash=request.message,
        addr_to=request.addr_to,
        fee=request.fee,
        xmss_pk=request.xmss_pk,
        master_addr=request.master_addr)
    # Wrap with sender address and serialized size for the client.
    extended = qrl_pb2.TransactionExtended(
        tx=unsigned_tx.pbdata,
        addr_from=unsigned_tx.addr_from,
        size=unsigned_tx.size)
    return qrl_pb2.TransferCoinsResp(extended_transaction_unsigned=extended)
@GrpcExceptionWrapper(qrl_pb2.TransferCoinsResp)
def GetTokenTxn(self, request: qrl_pb2.TokenTxnReq, context) -> qrl_pb2.TransferCoinsResp:
    """Build an unsigned token-creation transaction from the request fields."""
    logger.debug("[PublicAPI] GetTokenTxn")
    unsigned_tx = self.qrlnode.create_token_txn(
        symbol=request.symbol,
        name=request.name,
        owner=request.owner,
        decimals=request.decimals,
        initial_balances=request.initial_balances,
        fee=request.fee,
        xmss_pk=request.xmss_pk,
        master_addr=request.master_addr)
    # Wrap with sender address and serialized size for the client.
    extended = qrl_pb2.TransactionExtended(
        tx=unsigned_tx.pbdata,
        addr_from=unsigned_tx.addr_from,
        size=unsigned_tx.size)
    return qrl_pb2.TransferCoinsResp(extended_transaction_unsigned=extended)
@GrpcExceptionWrapper(qrl_pb2.TransferCoinsResp)
def GetTransferTokenTxn(self, request: qrl_pb2.TransferTokenTxnReq, context) -> qrl_pb2.TransferCoinsResp:
    """Build an unsigned token-transfer transaction from the request fields."""
    logger.debug("[PublicAPI] GetTransferTokenTxn")
    # The token tx hash arrives hex-encoded; convert it to raw bytes.
    bin_token_txhash = bytes(hstr2bin(request.token_txhash.decode()))
    unsigned_tx = self.qrlnode.create_transfer_token_txn(
        addrs_to=request.addresses_to,
        token_txhash=bin_token_txhash,
        amounts=request.amounts,
        fee=request.fee,
        xmss_pk=request.xmss_pk,
        master_addr=request.master_addr)
    extended = qrl_pb2.TransactionExtended(
        tx=unsigned_tx.pbdata,
        addr_from=unsigned_tx.addr_from,
        size=unsigned_tx.size)
    return qrl_pb2.TransferCoinsResp(extended_transaction_unsigned=extended)
@GrpcExceptionWrapper(qrl_pb2.TransferCoinsResp)
def GetSlaveTxn(self, request: qrl_pb2.SlaveTxnReq, context) -> qrl_pb2.TransferCoinsResp:
    """Build an unsigned slave-key registration transaction."""
    logger.debug("[PublicAPI] GetSlaveTxn")
    unsigned_tx = self.qrlnode.create_slave_tx(
        slave_pks=request.slave_pks,
        access_types=request.access_types,
        fee=request.fee,
        xmss_pk=request.xmss_pk,
        master_addr=request.master_addr)
    # Wrap with sender address and serialized size for the client.
    extended = qrl_pb2.TransactionExtended(
        tx=unsigned_tx.pbdata,
        addr_from=unsigned_tx.addr_from,
        size=unsigned_tx.size)
    return qrl_pb2.TransferCoinsResp(extended_transaction_unsigned=extended)
@GrpcExceptionWrapper(qrl_pb2.TransferCoinsResp)
def GetLatticeTxn(self, request: qrl_pb2.LatticeTxnReq, context) -> qrl_pb2.TransferCoinsResp:
    """Build an unsigned lattice public-key transaction."""
    logger.debug("[PublicAPI] GetLatticeTxn")
    unsigned_tx = self.qrlnode.create_lattice_tx(
        pk1=request.pk1,
        pk2=request.pk2,
        pk3=request.pk3,
        fee=request.fee,
        xmss_pk=request.xmss_pk,
        master_addr=request.master_addr)
    # Wrap with sender address and serialized size for the client.
    extended = qrl_pb2.TransactionExtended(
        tx=unsigned_tx.pbdata,
        addr_from=unsigned_tx.addr_from,
        size=unsigned_tx.size)
    return qrl_pb2.TransferCoinsResp(extended_transaction_unsigned=extended)
@GrpcExceptionWrapper(qrl_pb2.GetObjectResp)
def GetObject(self, request: qrl_pb2.GetObjectReq, context) -> qrl_pb2.GetObjectResp:
    """Resolve an opaque query to an address state, a transaction, or a block.

    Lookup order: (1) if the query is a valid, used address, return its
    state; (2) otherwise try confirmed then unconfirmed transactions by
    hash; (3) otherwise try a block by header hash, falling back to
    interpreting the query as a decimal block index.  ``answer.found``
    stays False if nothing matches.
    """
    logger.debug("[PublicAPI] GetObject")
    answer = qrl_pb2.GetObjectResp()
    answer.found = False
    # FIXME: We need a unified way to access and validate data.
    query = bytes(request.query)  # query will be as a string, if Q is detected convert, etc.
    # --- 1) Address lookup ---
    try:
        if AddressState.address_is_valid(query):
            if self.qrlnode.get_address_is_used(query):
                address_state = self.qrlnode.get_optimized_address_state(query)
                if address_state is not None:
                    answer.found = True
                    answer.address_state.CopyFrom(address_state.pbdata)
                    return answer
    except ValueError:
        # Not a decodable address; fall through to tx/block lookup.
        pass
    # --- 2) Transaction lookup (confirmed first, then mempool) ---
    transaction_block_number = self.qrlnode.get_transaction(query)
    transaction = None
    blockheader = None
    if transaction_block_number:
        transaction, block_number = transaction_block_number
        answer.found = True
        block = self.qrlnode.get_block_from_index(block_number)
        blockheader = block.blockheader.pbdata
        timestamp = block.blockheader.timestamp
    else:
        transaction_timestamp = self.qrlnode.get_unconfirmed_transaction(query)
        if transaction_timestamp:
            # Unconfirmed: no containing block, so blockheader stays None.
            transaction, timestamp = transaction_timestamp
            answer.found = True
    if transaction:
        txextended = qrl_pb2.TransactionExtended(header=blockheader,
                                                 tx=transaction.pbdata,
                                                 addr_from=transaction.addr_from,
                                                 size=transaction.size,
                                                 timestamp_seconds=timestamp)
        answer.transaction.CopyFrom(txextended)
        return answer
    # --- 3) Block lookup ---
    # NOTE: This is temporary, indexes are accepted for blocks
    try:
        block = self.qrlnode.get_block_from_hash(query)
        # A hash miss (or a bogus genesis-looking block) triggers the
        # fallback: parse the query as a decimal block index instead.
        if block is None or (block.block_number == 0 and block.prev_headerhash != config.user.genesis_prev_headerhash):
            query_str = query.decode()
            query_index = int(query_str)
            block = self.qrlnode.get_block_from_index(query_index)
        if not block:
            return answer
        answer.found = True
        block_extended = qrl_pb2.BlockExtended()
        block_extended.header.CopyFrom(block.blockheader.pbdata)
        block_extended.size = block.size
        # Attach every transaction in the block in extended form.
        for transaction in block.transactions:
            tx = Transaction.from_pbdata(transaction)
            extended_tx = qrl_pb2.TransactionExtended(tx=transaction,
                                                      addr_from=tx.addr_from,
                                                      size=tx.size,
                                                      timestamp_seconds=block.blockheader.timestamp)
            block_extended.extended_transactions.extend([extended_tx])
        answer.block_extended.CopyFrom(block_extended)
        return answer
    except Exception:
        # Best-effort endpoint: malformed queries simply return found=False.
        pass
    return answer
@GrpcExceptionWrapper(qrl_pb2.GetLatestDataResp)
def GetLatestData(self, request: qrl_pb2.GetLatestDataReq, context) -> qrl_pb2.GetLatestDataResp:
    """Return the most recent block headers and/or (un)confirmed transactions.

    ``request.filter`` selects which sections to fill (ALL fills every
    section); ``offset``/``quantity`` paginate each section independently.
    """
    logger.debug("[PublicAPI] GetLatestData")
    response = qrl_pb2.GetLatestDataResp()
    all_requested = request.filter == qrl_pb2.GetLatestDataReq.ALL
    # Cap page size so a client cannot request an unbounded amount of data.
    quantity = min(request.quantity, self.MAX_REQUEST_QUANTITY)
    if all_requested or request.filter == qrl_pb2.GetLatestDataReq.BLOCKHEADERS:
        result = []
        for blk in self.qrlnode.get_latest_blocks(offset=request.offset, count=quantity):
            # Tally transactions per type for each block header.
            transaction_count = qrl_pb2.TransactionCount()
            for tx in blk.transactions:
                transaction_count.count[CODEMAP[tx.WhichOneof('transactionType')]] += 1
            result.append(qrl_pb2.BlockHeaderExtended(header=blk.blockheader.pbdata,
                                                      transaction_count=transaction_count))
        response.blockheaders.extend(result)
    if all_requested or request.filter == qrl_pb2.GetLatestDataReq.TRANSACTIONS:
        result = []
        for tx in self.qrlnode.get_latest_transactions(offset=request.offset, count=quantity):
            # FIXME: Improve this once we have a proper database schema
            block_index = self.qrlnode.get_blockidx_from_txhash(tx.txhash)
            block = self.qrlnode.get_block_from_index(block_index)
            header = None
            if block:
                header = block.blockheader.pbdata
            txextended = qrl_pb2.TransactionExtended(header=header,
                                                     tx=tx.pbdata,
                                                     addr_from=tx.addr_from,
                                                     size=tx.size)
            result.append(txextended)
        response.transactions.extend(result)
    if all_requested or request.filter == qrl_pb2.GetLatestDataReq.TRANSACTIONS_UNCONFIRMED:
        result = []
        for tx_info in self.qrlnode.get_latest_transactions_unconfirmed(offset=request.offset, count=quantity):
            tx = tx_info.transaction
            # Mempool entries have no containing block, hence header=None.
            txextended = qrl_pb2.TransactionExtended(header=None,
                                                     tx=tx.pbdata,
                                                     addr_from=tx.addr_from,
                                                     size=tx.size,
                                                     timestamp_seconds=tx_info.timestamp)
            result.append(txextended)
        response.transactions_unconfirmed.extend(result)
    return response
# Obsolete
# @GrpcExceptionWrapper(qrl_pb2.GetTransactionsByAddressResp)
# def GetTransactionsByAddress(self,
# request: qrl_pb2.GetTransactionsByAddressReq,
# context) -> qrl_pb2.GetTransactionsByAddressResp:
# logger.debug("[PublicAPI] GetTransactionsByAddress")
# response = qrl_pb2.GetTransactionsByAddressResp()
# mini_transactions, balance = self.qrlnode.get_transactions_by_address(request.address)
# response.mini_transactions.extend(mini_transactions)
# response.balance = balance
# return response
@GrpcExceptionWrapper(qrl_pb2.GetMiniTransactionsByAddressResp)
def GetMiniTransactionsByAddress(self,
                                 request: qrl_pb2.GetMiniTransactionsByAddressReq,
                                 context) -> qrl_pb2.GetMiniTransactionsByAddressResp:
    """Return a paginated list of mini transactions for one address.

    Args:
        request: Carries the address plus item_per_page / page_number
            pagination controls.
        context: gRPC call context (unused).
    """
    # BUG FIX: the debug tag previously read "GetTransactionsByAddress",
    # so traces from this endpoint were indistinguishable from the full
    # GetTransactionsByAddress endpoint below.
    logger.debug("[PublicAPI] GetMiniTransactionsByAddress")
    return self.qrlnode.get_mini_transactions_by_address(request.address,
                                                         request.item_per_page,
                                                         request.page_number)
@GrpcExceptionWrapper(qrl_pb2.GetTransactionsByAddressResp)
def GetTransactionsByAddress(self,
                             request: qrl_pb2.GetTransactionsByAddressReq,
                             context) -> qrl_pb2.GetTransactionsByAddressResp:
    """Return a paginated list of transactions for one address."""
    logger.debug("[PublicAPI] GetTransactionsByAddress")
    addr, per_page, page = request.address, request.item_per_page, request.page_number
    return self.qrlnode.get_transactions_by_address(addr, per_page, page)
@GrpcExceptionWrapper(qrl_pb2.GetTokensByAddressResp)
def GetTokensByAddress(self,
                       request: qrl_pb2.GetTransactionsByAddressReq,
                       context) -> qrl_pb2.GetTokensByAddressResp:
    """Return a paginated list of tokens held by one address."""
    logger.debug("[PublicAPI] GetTokensByAddress")
    addr, per_page, page = request.address, request.item_per_page, request.page_number
    return self.qrlnode.get_tokens_by_address(addr, per_page, page)
@GrpcExceptionWrapper(qrl_pb2.GetSlavesByAddressResp)
def GetSlavesByAddress(self,
                       request: qrl_pb2.GetTransactionsByAddressReq,
                       context) -> qrl_pb2.GetSlavesByAddressResp:
    """Return a paginated list of slave keys registered by one address."""
    logger.debug("[PublicAPI] GetSlavesByAddress")
    addr, per_page, page = request.address, request.item_per_page, request.page_number
    return self.qrlnode.get_slaves_by_address(addr, per_page, page)
@GrpcExceptionWrapper(qrl_pb2.GetLatticePKsByAddressResp)
def GetLatticePKsByAddress(self,
                           request: qrl_pb2.GetTransactionsByAddressReq,
                           context) -> qrl_pb2.GetLatticePKsByAddressResp:
    """Return a paginated list of lattice public keys for one address."""
    logger.debug("[PublicAPI] GetLatticePKsByAddress")
    addr, per_page, page = request.address, request.item_per_page, request.page_number
    return self.qrlnode.get_lattice_pks_by_address(addr, per_page, page)
@GrpcExceptionWrapper(qrl_pb2.GetMultiSigAddressesByAddressResp)
def GetMultiSigAddressesByAddress(self,
                                  request: qrl_pb2.GetTransactionsByAddressReq,
                                  context) -> qrl_pb2.GetMultiSigAddressesByAddressResp:
    """Return a paginated list of multi-sig addresses involving one address."""
    logger.debug("[PublicAPI] GetMultiSigAddressesByAddress")
    addr, per_page, page = request.address, request.item_per_page, request.page_number
    return self.qrlnode.get_multi_sig_addresses_by_address(addr, per_page, page)
@GrpcExceptionWrapper(qrl_pb2.GetMultiSigSpendTxsByAddressResp)
def GetMultiSigSpendTxsByAddress(self,
                                 request: qrl_pb2.GetMultiSigSpendTxsByAddressReq,
                                 context) -> qrl_pb2.GetMultiSigSpendTxsByAddressResp:
    """Return a paginated, filterable list of multi-sig spend txs for one address."""
    logger.debug("[PublicAPI] GetMultiSigSpendTxsByAddress")
    return self.qrlnode.get_multi_sig_spend_txs_by_address(
        request.address,
        request.item_per_page,
        request.page_number,
        request.filter_type)
@GrpcExceptionWrapper(qrl_pb2.GetInboxMessagesByAddressResp)
def GetInboxMessagesByAddress(self,
                              request: qrl_pb2.GetTransactionsByAddressReq,
                              context) -> qrl_pb2.GetInboxMessagesByAddressResp:
    """Return a paginated list of inbox messages addressed to one address."""
    logger.debug("[PublicAPI] GetInboxMessagesByAddress")
    addr, per_page, page = request.address, request.item_per_page, request.page_number
    return self.qrlnode.get_inbox_messages_by_address(addr, per_page, page)
@GrpcExceptionWrapper(qrl_pb2.GetVoteStatsResp)
def GetVoteStats(self,
                 request: qrl_pb2.GetVoteStatsReq,
                 context) -> qrl_pb2.GetVoteStatsResp:
    """Return vote statistics for one multi-sig spend transaction."""
    logger.debug("[PublicAPI] GetVoteStats")
    spend_tx_hash = request.multi_sig_spend_tx_hash
    return self.qrlnode.get_vote_stats(spend_tx_hash)
@GrpcExceptionWrapper(qrl_pb2.GetTransactionResp)
def GetTransaction(self, request: qrl_pb2.GetTransactionReq, context) -> qrl_pb2.GetTransactionResp:
    """Look up a transaction by hash: confirmed storage first, then mempool.

    A confirmed tx carries its confirmation count, block number and block
    header hash; an unconfirmed tx carries confirmations == 0.  An unknown
    hash returns an empty response.
    """
    logger.debug("[PublicAPI] GetTransaction")
    response = qrl_pb2.GetTransactionResp()
    # (transaction, block_number) when confirmed, falsy otherwise.
    tx_blocknumber = self.qrlnode.get_transaction(request.tx_hash)
    if tx_blocknumber:
        response.tx.MergeFrom(tx_blocknumber[0].pbdata)
        # +1: the containing block itself counts as one confirmation.
        response.confirmations = self.qrlnode.block_height - tx_blocknumber[1] + 1
        response.block_number = tx_blocknumber[1]
        response.block_header_hash = self.qrlnode.get_block_header_hash_by_number(tx_blocknumber[1])
    else:
        # Fall back to the unconfirmed pool: (transaction, timestamp).
        tx_timestamp = self.qrlnode.get_unconfirmed_transaction(request.tx_hash)
        if tx_timestamp:
            response.tx.MergeFrom(tx_timestamp[0].pbdata)
            response.confirmations = 0
    return response
@GrpcExceptionWrapper(qrl_pb2.GetBalanceResp)
def GetBalance(self, request: qrl_pb2.GetBalanceReq, context) -> qrl_pb2.GetBalanceResp:
    """Return the current balance of a single address."""
    logger.debug("[PublicAPI] GetBalance")
    state = self.qrlnode.get_optimized_address_state(request.address)
    return qrl_pb2.GetBalanceResp(balance=state.balance)
@GrpcExceptionWrapper(qrl_pb2.GetTotalBalanceResp)
def GetTotalBalance(self, request: qrl_pb2.GetTotalBalanceReq, context) -> qrl_pb2.GetTotalBalanceResp:
    """Return the summed balance of every address in the request.

    Args:
        request: Carries a repeated list of addresses.
        context: gRPC call context (unused).

    Returns:
        GetTotalBalanceResp whose balance field is the sum of the
        balances of all requested addresses.
    """
    logger.debug("[PublicAPI] GetTotalBalance")
    # BUG FIX: previously constructed qrl_pb2.GetBalanceResp, which does
    # not match the declared return type or the GrpcExceptionWrapper
    # response type for this endpoint.
    response = qrl_pb2.GetTotalBalanceResp(balance=0)
    for address in request.addresses:
        address_state = self.qrlnode.get_optimized_address_state(address)
        response.balance += address_state.balance
    return response
@GrpcExceptionWrapper(qrl_pb2.GetOTSResp)
def GetOTS(self, request: qrl_pb2.GetOTSReq, context) -> qrl_pb2.GetOTSResp:
    """Return paged OTS key-usage bitfields and the next unused OTS index."""
    logger.debug("[PublicAPI] GetOTS")
    bitfield_pages, next_index, found = self.qrlnode.get_ots(
        request.address,
        request.page_from,
        request.page_count,
        request.unused_ots_index_from)
    return qrl_pb2.GetOTSResp(ots_bitfield_by_page=bitfield_pages,
                              next_unused_ots_index=next_index,
                              unused_ots_index_found=found)
@GrpcExceptionWrapper(qrl_pb2.GetHeightResp)
def GetHeight(self, request: qrl_pb2.GetHeightReq, context) -> qrl_pb2.GetHeightResp:
    """Return the node's current chain height."""
    logger.debug("[PublicAPI] GetHeight")
    current_height = self.qrlnode.block_height
    return qrl_pb2.GetHeightResp(height=current_height)
@GrpcExceptionWrapper(qrl_pb2.GetBlockResp)
def GetBlock(self, request: qrl_pb2.GetBlockReq, context) -> qrl_pb2.GetBlockResp:
    """Return a block by header hash; empty response when not found."""
    logger.debug("[PublicAPI] GetBlock")
    block = self.qrlnode.get_block_from_hash(request.header_hash)
    if not block:
        return qrl_pb2.GetBlockResp()
    return qrl_pb2.GetBlockResp(block=block.pbdata)
@GrpcExceptionWrapper(qrl_pb2.GetBlockByNumberResp)
def GetBlockByNumber(self, request: qrl_pb2.GetBlockByNumberReq, context) -> qrl_pb2.GetBlockByNumberResp:
    """Return a block by index; empty response when the index is unknown.

    Args:
        request: Carries the block_number to fetch.
        context: gRPC call context (unused).
    """
    # CONSISTENCY FIX: log tag previously said "GetBlockFromNumber",
    # which matches no endpoint name; use the actual RPC name.
    logger.debug("[PublicAPI] GetBlockByNumber")
    block = self.qrlnode.get_block_from_index(request.block_number)
    if block:
        return qrl_pb2.GetBlockByNumberResp(block=block.pbdata)
    return qrl_pb2.GetBlockByNumberResp()
| mit |
nrz/ylikuutio | external/bullet3/examples/pybullet/gym/pybullet_envs/minitaur/envs_v2/env_wrappers/imitation_wrapper_env.py | 2 | 3100 | """A wrapper for motion imitation environment."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import gin
import gym
import numpy as np
@gin.configurable
class ImitationWrapperEnv(object):
    """An env using for training policy with motion imitation."""

    def __init__(self, gym_env):
        """Initialzes the wrapped env.

        Args:
            gym_env: An instance of LocomotionGymEnv.
        """
        self._gym_env = gym_env
        # Extend the inner env's observation space with the imitation
        # task's target-observation bounds.
        self.observation_space = self._build_observation_space()
        self.seed()
        return

    def __getattr__(self, attr):
        # Delegate every attribute not defined here (e.g. action_space,
        # seed, _task) to the wrapped gym environment.
        return getattr(self._gym_env, attr)

    def step(self, action):
        """Steps the wrapped environment.

        Args:
            action: Numpy array. The input action from an NN agent.

        Returns:
            The tuple containing the modified observation, the reward, the epsiode end
            indicator.

        Raises:
            ValueError if input action is None.
        """
        original_observation, reward, done, _ = self._gym_env.step(action)
        observation = self._modify_observation(original_observation)
        return observation, reward, done, _

    @gin.configurable('imitation_wrapper_env.ImitationWrapperEnv.reset')
    def reset(self, initial_motor_angles=None, reset_duration=1.0):
        """Resets the robot's position in the world or rebuild the sim world.

        The simulation world will be rebuilt if self._hard_reset is True.

        Args:
            initial_motor_angles: A list of Floats. The desired joint angles after
                reset. If None, the robot will use its built-in value.
            reset_duration: Float. The time (in seconds) needed to rotate all motors
                to the desired initial values.

        Returns:
            A numpy array contains the initial observation after reset.
        """
        original_observation = self._gym_env.reset(initial_motor_angles, reset_duration)
        observation = self._modify_observation(original_observation)
        return observation

    def _modify_observation(self, original_observation):
        """Appends target observations from the reference motion to the observations.

        Args:
            original_observation: A numpy array containing the original observations.

        Returns:
            A numpy array contains the initial original concatenated with target
            observations from the reference motion.
        """
        # self._task is resolved on the wrapped env via __getattr__.
        target_observation = self._task.build_target_obs()
        observation = np.concatenate([original_observation, target_observation], axis=-1)
        return observation

    def _build_observation_space(self):
        """Constructs the observation space, including target observations from
        the reference motion.

        Returns:
            Observation space representing the concatenations of the original
            observations and target observations.
        """
        obs_space0 = self._gym_env.observation_space
        low0 = obs_space0.low
        high0 = obs_space0.high
        # Append the task's target-observation bounds to the inner bounds.
        task_low, task_high = self._task.get_target_obs_bounds()
        low = np.concatenate([low0, task_low], axis=-1)
        high = np.concatenate([high0, task_high], axis=-1)
        obs_space = gym.spaces.Box(low, high)
        return obs_space
| agpl-3.0 |
ketjow4/NOV | Lib/site-packages/numpy/distutils/fcompiler/hpux.py | 75 | 1395 | from numpy.distutils.fcompiler import FCompiler
compilers = ['HPUXFCompiler']
class HPUXFCompiler(FCompiler):
    """numpy.distutils Fortran compiler plugin for the HP-UX f90 compiler."""

    compiler_type = 'hpux'
    description = 'HP Fortran 90 Compiler'
    # Parses e.g. "HP F90 v2.4" output from `f90 +version`.
    version_pattern = r'HP F90 (?P<version>[^\s*,]*)'

    executables = {
        'version_cmd'  : ["<F90>", "+version"],
        'compiler_f77' : ["f90"],
        'compiler_fix' : ["f90"],
        'compiler_f90' : ["f90"],
        'linker_so'    : None,
        'archiver'     : ["ar", "-cr"],
        'ranlib'       : ["ranlib"]
        }
    module_dir_switch = None  #XXX: fix me
    module_include_switch = None  #XXX: fix me
    # Position-independent code with long-offset addressing.
    pic_flags = ['+pic=long']

    def get_flags(self):
        # +ppu: append underscores to external names; +DD64: 64-bit mode.
        return self.pic_flags + ['+ppu', '+DD64']

    def get_flags_opt(self):
        return ['-O3']

    def get_libraries(self):
        # Link against libm for math routines.
        return ['m']

    def get_library_dirs(self):
        opt = ['/usr/lib/hpux64']
        return opt

    # NOTE(review): mutable default argument (list); it is only read here
    # but verify FCompiler.get_version never mutates ok_status.
    def get_version(self, force=0, ok_status=[256,0,1]):
        # XXX status==256 may indicate 'unrecognized option' or
        # 'no input file'. So, version_cmd needs more work.
        return FCompiler.get_version(self,force,ok_status)
if __name__ == '__main__':
    # Ad-hoc smoke test: build and customize an HP-UX Fortran compiler
    # instance and print the version it reports (requires f90 on PATH).
    from distutils import log
    log.set_verbosity(10)
    from numpy.distutils.fcompiler import new_fcompiler
    compiler = new_fcompiler(compiler='hpux')
    compiler.customize()
    print(compiler.get_version())
| gpl-3.0 |
cloudnull/ansible | lib/ansible/plugins/connection/ssh.py | 11 | 28761 | # (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
# Copyright 2015 Abhijit Menon-Sen <ams@2ndQuadrant.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import fcntl
import os
import pipes
import pty
import pwd
import select
import shlex
import subprocess
import time
from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleConnectionFailure, AnsibleFileNotFound
from ansible.plugins.connection import ConnectionBase
from ansible.utils.path import unfrackpath, makedirs_safe
SSHPASS_AVAILABLE = None
class Connection(ConnectionBase):
''' ssh based connections '''
transport = 'ssh'  # connection plugin name as referenced by Ansible
has_pipelining = True  # modules can be piped over stdin instead of copied
# 'runas' is excluded: it cannot be performed over an ssh connection.
become_methods = frozenset(C.BECOME_METHODS).difference(['runas'])
def __init__(self, *args, **kwargs):
    """Set up per-host state; extra ssh args may be overridden later
    via set_host_overrides()."""
    super(Connection, self).__init__(*args, **kwargs)

    self.host = self._play_context.remote_addr
    self.ssh_extra_args = ''
    self.ssh_args = ''
def set_host_overrides(self, host):
    """Pick up per-host ssh argument overrides from inventory variables.

    Only variables actually present in the host's vars are applied; the
    defaults set in __init__ are kept otherwise.
    """
    hostvars = host.get_vars()
    for var_name, attr_name in (('ansible_ssh_extra_args', 'ssh_extra_args'),
                                ('ansible_ssh_args', 'ssh_args')):
        if var_name in hostvars:
            setattr(self, attr_name, hostvars[var_name])
# The connection is created by running ssh/scp/sftp from the exec_command,
# put_file, and fetch_file methods, so we don't need to do any connection
# management here.
def _connect(self):
    """No-op connect: the real ssh/scp/sftp processes are spawned lazily
    by exec_command/put_file/fetch_file, so just mark us connected."""
    self._connected = True
    return self
@staticmethod
def _sshpass_available():
    """Return True if the sshpass binary can be executed.

    The probe result is cached in the module-level SSHPASS_AVAILABLE
    global so the subprocess is spawned at most once per run.
    """
    global SSHPASS_AVAILABLE

    # We test once if sshpass is available, and remember the result. It
    # would be nice to use distutils.spawn.find_executable for this, but
    # distutils isn't always available; shutils.which() is Python3-only.
    if SSHPASS_AVAILABLE is None:
        try:
            # Running sshpass with no args exits nonzero but proves the
            # binary exists; OSError means it is not on PATH.
            p = subprocess.Popen(["sshpass"], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            p.communicate()
            SSHPASS_AVAILABLE = True
        except OSError:
            SSHPASS_AVAILABLE = False

    return SSHPASS_AVAILABLE
@staticmethod
def _persistence_controls(command):
    """Scan an argv list for ControlPersist and ControlPath settings.

    Returns a (controlpersist_found, controlpath_found) pair of booleans.
    Matching is a case-insensitive substring test and deliberately ignores
    the option's value; e.g. 'ControlPersist=no' still counts as found.
    """
    lowered = [argument.lower() for argument in command]
    controlpersist = any('controlpersist' in argument for argument in lowered)
    controlpath = any('controlpath' in argument for argument in lowered)
    return controlpersist, controlpath
@staticmethod
def _split_args(argstring):
    """Split a shell-style option string into a flat argument list.

    '-o Foo=1 -o Bar="foo bar"' becomes ['-o', 'Foo=1', '-o', 'Bar=foo bar'].
    Empty elements are dropped so the result can be appended directly to
    an ssh argv list.
    """
    pieces = shlex.split(argstring)
    return [piece.strip() for piece in pieces if piece.strip()]
def _add_args(self, explanation, args):
    """
    Adds the given args to self._command and displays a caller-supplied
    explanation of why they were added.
    """
    self._command += args
    # vvvvv so the argv assembly is only traced at the highest verbosity.
    self._display.vvvvv('SSH: ' + explanation + ': (%s)' % ')('.join(args), host=self._play_context.remote_addr)
def _build_command(self, binary, *other_args):
    '''
    Takes a binary (ssh, scp, sftp) and optional extra arguments and returns
    a command line as an array that can be passed to subprocess.Popen.

    Argument precedence (later sources appended after earlier ones):
    sshpass prefix -> binary -> verbosity/batch flags -> inventory or
    config ssh_args -> per-setting options -> ssh_extra_args -> an
    auto-generated ControlPath -> caller-supplied other_args.  Side
    effects: populates self._command, may create self.sshpass_pipe and
    the ControlPath directory, and may set self._persistent.
    '''

    self._command = []

    ## First, the command name.

    # If we want to use password authentication, we have to set up a pipe to
    # write the password to sshpass.

    if self._play_context.password:
        if not self._sshpass_available():
            raise AnsibleError("to use the 'ssh' connection type with passwords, you must install the sshpass program")

        self.sshpass_pipe = os.pipe()
        # sshpass -d reads the password from the given file descriptor.
        self._command += ['sshpass', '-d{0}'.format(self.sshpass_pipe[0])]

    self._command += [binary]

    ## Next, additional arguments based on the configuration.

    # sftp batch mode allows us to correctly catch failed transfers, but can
    # be disabled if the client side doesn't support the option. FIXME: is
    # this still a real concern?

    if binary == 'sftp' and C.DEFAULT_SFTP_BATCH_MODE:
        self._command += ['-b', '-']
    elif binary == 'ssh':
        # -C enables compression for the ssh session.
        self._command += ['-C']

    if self._play_context.verbosity > 3:
        self._command += ['-vvv']
    elif binary == 'ssh':
        # Older versions of ssh (e.g. in RHEL 6) don't accept sftp -q.
        self._command += ['-q']

    # Next, we add ansible_ssh_args from the inventory if it's set, or
    # [ssh_connection]ssh_args from ansible.cfg, or the default Control*
    # settings.

    if self.ssh_args:
        args = self._split_args(self.ssh_args)
        self._add_args("inventory set ansible_ssh_args", args)
    elif C.ANSIBLE_SSH_ARGS:
        args = self._split_args(C.ANSIBLE_SSH_ARGS)
        self._add_args("ansible.cfg set ssh_args", args)
    else:
        args = (
            "-o", "ControlMaster=auto",
            "-o", "ControlPersist=60s"
        )
        self._add_args("default arguments", args)

    # Now we add various arguments controlled by configuration file settings
    # (e.g. host_key_checking) or inventory variables (ansible_ssh_port) or
    # a combination thereof.

    if not C.HOST_KEY_CHECKING:
        self._add_args(
            "ANSIBLE_HOST_KEY_CHECKING/host_key_checking disabled",
            ("-o", "StrictHostKeyChecking=no")
        )

    if self._play_context.port is not None:
        self._add_args(
            "ANSIBLE_REMOTE_PORT/remote_port/ansible_ssh_port set",
            ("-o", "Port={0}".format(self._play_context.port))
        )

    key = self._play_context.private_key_file
    if key:
        self._add_args(
            "ANSIBLE_PRIVATE_KEY_FILE/private_key_file/ansible_ssh_private_key_file set",
            ("-o", "IdentityFile=\"{0}\"".format(os.path.expanduser(key)))
        )

    if not self._play_context.password:
        # No password supplied: forbid password-based auth methods so ssh
        # fails fast instead of prompting interactively.
        self._add_args(
            "ansible_password/ansible_ssh_pass not set", (
                "-o", "KbdInteractiveAuthentication=no",
                "-o", "PreferredAuthentications=gssapi-with-mic,gssapi-keyex,hostbased,publickey",
                "-o", "PasswordAuthentication=no"
            )
        )

    user = self._play_context.remote_user
    # Only pass -o User=... when it differs from the local user.
    if user and user != pwd.getpwuid(os.geteuid())[0]:
        self._add_args(
            "ANSIBLE_REMOTE_USER/remote_user/ansible_ssh_user/user/-u set",
            ("-o", "User={0}".format(self._play_context.remote_user))
        )

    self._add_args(
        "ANSIBLE_TIMEOUT/timeout set",
        ("-o", "ConnectTimeout={0}".format(self._play_context.timeout))
    )

    # If any extra SSH arguments are specified in the inventory for
    # this host, or specified as an override on the command line,
    # add them in.

    if self._play_context.ssh_extra_args:
        args = self._split_args(self._play_context.ssh_extra_args)
        self._add_args("command-line added --ssh-extra-args", args)
    elif self.ssh_extra_args:
        args = self._split_args(self.ssh_extra_args)
        self._add_args("inventory added ansible_ssh_extra_args", args)

    # Check if ControlPersist is enabled (either by default, or using
    # ssh_args or ssh_extra_args) and add a ControlPath if one hasn't
    # already been set.

    controlpersist, controlpath = self._persistence_controls(self._command)

    if controlpersist:
        self._persistent = True

        if not controlpath:
            cpdir = unfrackpath('$HOME/.ansible/cp')

            # The directory must exist and be writable.
            makedirs_safe(cpdir, 0o700)
            if not os.access(cpdir, os.W_OK):
                raise AnsibleError("Cannot write to ControlPath %s" % cpdir)

            args = ("-o", "ControlPath={0}".format(
                C.ANSIBLE_SSH_CONTROL_PATH % dict(directory=cpdir))
            )
            self._add_args("found only ControlPersist; added ControlPath", args)

    ## Finally, we add any caller-supplied extras.

    if other_args:
        self._command += other_args

    return self._command
def _send_initial_data(self, fh, in_data):
    '''
    Writes initial data to the stdin filehandle of the subprocess and closes
    it. (The handle must be closed; otherwise, for example, "sftp -b -" will
    just hang forever waiting for more commands.)
    '''

    self._display.debug('Sending initial data')

    try:
        fh.write(in_data)
        fh.close()
    except (OSError, IOError):
        # A broken pipe here means the remote end went away.
        raise AnsibleConnectionFailure('SSH Error: data could not be sent to the remote host. Make sure this host can be reached over ssh')

    self._display.debug('Sent initial data (%d bytes)' % len(in_data))
# Used by _run() to kill processes on failures
@staticmethod
def _terminate_process(p):
    """ Terminate a process, ignoring errors """
    try:
        p.terminate()
    except (OSError, IOError):
        # The process may already have exited; that's fine.
        pass
# This is separate from _run() because we need to do the same thing for stdout
# and stderr.
def _examine_output(self, source, state, chunk, sudoable):
    '''
    Takes a string, extracts complete lines from it, tests to see if they
    are a prompt, error message, etc., and sets appropriate flags in self.
    Prompt and success lines are removed.

    Returns the processed (i.e. possibly-edited) output and the unprocessed
    remainder (to be processed with the next chunk) as strings.

    Side effects: may set self._flags['become_prompt'],
    ['become_success'], ['become_error'] or ['become_nopasswd_error'].
    '''
    output = []
    for l in chunk.splitlines(True):
        suppress_output = False

        # self._display.debug("Examining line (source=%s, state=%s): '%s'" % (source, state, l.rstrip('\r\n')))
        if self._play_context.prompt and self.check_password_prompt(l):
            # Privilege-escalation password prompt: flag it and strip it
            # from the output seen by the caller.
            self._display.debug("become_prompt: (source=%s, state=%s): '%s'" % (source, state, l.rstrip('\r\n')))
            self._flags['become_prompt'] = True
            suppress_output = True
        elif self._play_context.success_key and self.check_become_success(l):
            # Escalation succeeded marker: flag it and strip it.
            self._display.debug("become_success: (source=%s, state=%s): '%s'" % (source, state, l.rstrip('\r\n')))
            self._flags['become_success'] = True
            suppress_output = True
        elif sudoable and self.check_incorrect_password(l):
            # Wrong become password: flag it but keep the line visible.
            self._display.debug("become_error: (source=%s, state=%s): '%s'" % (source, state, l.rstrip('\r\n')))
            self._flags['become_error'] = True
        elif sudoable and self.check_missing_password(l):
            self._display.debug("become_nopasswd_error: (source=%s, state=%s): '%s'" % (source, state, l.rstrip('\r\n')))
            self._flags['become_nopasswd_error'] = True

        if not suppress_output:
            output.append(l)

    # The chunk we read was most likely a series of complete lines, but just
    # in case the last line was incomplete (and not a prompt, which we would
    # have removed from the output), we retain it to be processed with the
    # next chunk.

    remainder = ''
    if output and not output[-1].endswith('\n'):
        remainder = output[-1]
        output = output[:-1]

    return ''.join(output), remainder
def _run(self, cmd, in_data, sudoable=True):
    '''
    Starts the command and communicates with it until it ends.

    Drives a four-state machine (awaiting_prompt -> awaiting_escalation ->
    ready_to_send -> awaiting_exit) that answers a privilege escalation
    password prompt if needed, waits for escalation success/failure, sends
    any pipelined input, and accumulates output until the child exits.
    Returns a (returncode, stdout, stderr) tuple.
    '''

    # Quote everything except the trailing remote command string, which is
    # displayed as-is.
    display_cmd = map(pipes.quote, cmd[:-1]) + [cmd[-1]]
    self._display.vvv('SSH: EXEC {0}'.format(' '.join(display_cmd)), host=self.host)

    # Start the given command. If we don't need to pipeline data, we can try
    # to use a pseudo-tty (ssh will have been invoked with -tt). If we are
    # pipelining data, or can't create a pty, we fall back to using plain
    # old pipes.

    p = None
    if not in_data:
        try:
            # Make sure stdin is a proper pty to avoid tcgetattr errors
            master, slave = pty.openpty()
            p = subprocess.Popen(cmd, stdin=slave, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            # Unbuffered (0) writer on the pty master; the slave end belongs
            # to the child, so close our copy of it.
            stdin = os.fdopen(master, 'w', 0)
            os.close(slave)
        except (OSError, IOError):
            p = None

    if not p:
        p = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        stdin = p.stdin

    # If we are using SSH password authentication, write the password into
    # the pipe we opened in _build_command.

    if self._play_context.password:
        os.close(self.sshpass_pipe[0])
        os.write(self.sshpass_pipe[1], "{0}\n".format(self._play_context.password))
        os.close(self.sshpass_pipe[1])

    ## SSH state machine
    #
    # Now we read and accumulate output from the running process until it
    # exits. Depending on the circumstances, we may also need to write an
    # escalation password and/or pipelined input to the process.

    states = [
        'awaiting_prompt', 'awaiting_escalation', 'ready_to_send', 'awaiting_exit'
    ]

    # Are we requesting privilege escalation? Right now, we may be invoked
    # to execute sftp/scp with sudoable=True, but we can request escalation
    # only when using ssh. Otherwise we can send initial data straightaway.

    state = states.index('ready_to_send')
    if 'ssh' in cmd:
        if self._play_context.prompt:
            # We're requesting escalation with a password, so we have to
            # wait for a password prompt.
            state = states.index('awaiting_prompt')
            self._display.debug('Initial state: %s: %s' % (states[state], self._play_context.prompt))
        elif self._play_context.become and self._play_context.success_key:
            # We're requesting escalation without a password, so we have to
            # detect success/failure before sending any initial data.
            state = states.index('awaiting_escalation')
            self._display.debug('Initial state: %s: %s' % (states[state], self._play_context.success_key))

    # We store accumulated stdout and stderr output from the process here,
    # but strip any privilege escalation prompt/confirmation lines first.
    # Output is accumulated into tmp_*, complete lines are extracted into
    # an array, then checked and removed or copied to stdout or stderr. We
    # set any flags based on examining the output in self._flags.

    stdout = stderr = ''
    tmp_stdout = tmp_stderr = ''

    self._flags = dict(
        become_prompt=False, become_success=False,
        become_error=False, become_nopasswd_error=False
    )

    timeout = self._play_context.timeout
    rpipes = [p.stdout, p.stderr]
    for fd in rpipes:
        # Non-blocking reads so we can drain whatever is currently available.
        fcntl.fcntl(fd, fcntl.F_SETFL, fcntl.fcntl(fd, fcntl.F_GETFL) | os.O_NONBLOCK)

    # If we can send initial data without waiting for anything, we do so
    # before we call select.

    if states[state] == 'ready_to_send' and in_data:
        self._send_initial_data(stdin, in_data)
        state += 1

    while True:
        rfd, wfd, efd = select.select(rpipes, [], [], timeout)

        # We pay attention to timeouts only while negotiating a prompt.

        if not rfd:
            if state <= states.index('awaiting_escalation'):
                self._terminate_process(p)
                raise AnsibleError('Timeout (%ds) waiting for privilege escalation prompt: %s' % (timeout, stdout))

        # Read whatever output is available on stdout and stderr, and stop
        # listening to the pipe if it's been closed.

        if p.stdout in rfd:
            chunk = p.stdout.read()
            if chunk == '':
                # An empty read on a readable fd means EOF.
                rpipes.remove(p.stdout)
            tmp_stdout += chunk
            self._display.debug("stdout chunk (state=%s):\n>>>%s<<<\n" % (state, chunk))

        if p.stderr in rfd:
            chunk = p.stderr.read()
            if chunk == '':
                rpipes.remove(p.stderr)
            tmp_stderr += chunk
            self._display.debug("stderr chunk (state=%s):\n>>>%s<<<\n" % (state, chunk))

        # We examine the output line-by-line until we have negotiated any
        # privilege escalation prompt and subsequent success/error message.
        # Afterwards, we can accumulate output without looking at it.

        if state < states.index('ready_to_send'):
            if tmp_stdout:
                output, unprocessed = self._examine_output('stdout', states[state], tmp_stdout, sudoable)
                stdout += output
                tmp_stdout = unprocessed

            if tmp_stderr:
                output, unprocessed = self._examine_output('stderr', states[state], tmp_stderr, sudoable)
                stderr += output
                tmp_stderr = unprocessed
        else:
            stdout += tmp_stdout
            stderr += tmp_stderr
            tmp_stdout = tmp_stderr = ''

        # If we see a privilege escalation prompt, we send the password.

        if states[state] == 'awaiting_prompt' and self._flags['become_prompt']:
            self._display.debug('Sending become_pass in response to prompt')
            stdin.write(self._play_context.become_pass + '\n')
            self._flags['become_prompt'] = False
            state += 1

        # We've requested escalation (with or without a password), now we
        # wait for an error message or a successful escalation.

        if states[state] == 'awaiting_escalation':
            if self._flags['become_success']:
                self._display.debug('Escalation succeeded')
                self._flags['become_success'] = False
                state += 1
            elif self._flags['become_error']:
                self._display.debug('Escalation failed')
                self._terminate_process(p)
                self._flags['become_error'] = False
                raise AnsibleError('Incorrect %s password' % self._play_context.become_method)
            elif self._flags['become_nopasswd_error']:
                self._display.debug('Escalation requires password')
                self._terminate_process(p)
                self._flags['become_nopasswd_error'] = False
                raise AnsibleError('Missing %s password' % self._play_context.become_method)
            elif self._flags['become_prompt']:
                # This shouldn't happen, because we should see the "Sorry,
                # try again" message first.
                self._display.debug('Escalation prompt repeated')
                self._terminate_process(p)
                self._flags['become_prompt'] = False
                raise AnsibleError('Incorrect %s password' % self._play_context.become_method)

        # Once we're sure that the privilege escalation prompt, if any, has
        # been dealt with, we can send any initial data and start waiting
        # for output.

        if states[state] == 'ready_to_send':
            if in_data:
                self._send_initial_data(stdin, in_data)
            state += 1

        # Now we're awaiting_exit: has the child process exited? If it has,
        # and we've read all available output from it, we're done.

        if p.poll() is not None:
            if not rpipes or not rfd:
                break

            # When ssh has ControlMaster (+ControlPath/Persist) enabled, the
            # first connection goes into the background and we never see EOF
            # on stderr. If we see EOF on stdout and the process has exited,
            # we're probably done. We call select again with a zero timeout,
            # just to make certain we don't miss anything that may have been
            # written to stderr between the time we called select() and when
            # we learned that the process had finished.

            if not p.stdout in rpipes:
                timeout = 0
                continue

        # If the process has not yet exited, but we've already read EOF from
        # its stdout and stderr (and thus removed both from rpipes), we can
        # just wait for it to exit.

        elif not rpipes:
            p.wait()
            break

        # Otherwise there may still be outstanding data to read.

    # close stdin after process is terminated and stdout/stderr are read
    # completely (see also issue #848)
    stdin.close()

    if C.HOST_KEY_CHECKING:
        if cmd[0] == "sshpass" and p.returncode == 6:
            raise AnsibleError('Using a SSH password instead of a key is not possible because Host Key checking is enabled and sshpass does not support this. Please add this host\'s fingerprint to your known_hosts file to manage this host.')

    controlpersisterror = 'Bad configuration option: ControlPersist' in stderr or 'unknown configuration option: ControlPersist' in stderr
    if p.returncode != 0 and controlpersisterror:
        raise AnsibleError('using -c ssh on certain older ssh versions may not support ControlPersist, set ANSIBLE_SSH_ARGS="" (or ssh_args in [ssh_connection] section of the config file) before running again')

    if p.returncode == 255 and in_data:
        raise AnsibleConnectionFailure('SSH Error: data could not be sent to the remote host. Make sure this host can be reached over ssh')

    return (p.returncode, stdout, stderr)
def _exec_command(self, cmd, in_data=None, sudoable=True):
    ''' run a command on the remote host '''

    super(Connection, self).exec_command(cmd, in_data=in_data, sudoable=sudoable)

    self._display.vvv("ESTABLISH SSH CONNECTION FOR USER: {0}".format(self._play_context.remote_user), host=self._play_context.remote_addr)

    # A tty may only be requested when no module data is being piped in:
    # feeding data to /usr/bin/python through a tty drops Python into
    # interactive mode, which the modules cannot survive ("unexpected
    # indent", mostly caused by blank lines).
    if in_data:
        ssh_cmd = self._build_command('ssh', self.host, cmd)
    else:
        ssh_cmd = self._build_command('ssh', '-tt', self.host, cmd)

    returncode, stdout, stderr = self._run(ssh_cmd, in_data, sudoable=sudoable)
    return (returncode, stdout, stderr)
#
# Main public methods
#
def exec_command(self, *args, **kwargs):
    """
    Wrapper around _exec_command to retry in the case of an ssh failure

    Will retry if:
    * an exception is caught
    * ssh returns 255
    Will not retry if
    * remaining_tries is <2
    * retries limit reached
    """

    remaining_tries = int(C.ANSIBLE_SSH_RETRIES) + 1
    cmd_summary = "%s..." % args[0]
    for attempt in xrange(remaining_tries):
        try:
            return_tuple = self._exec_command(*args, **kwargs)
            # 0 = success
            # 1-254 = remote command return code
            # 255 = failure from the ssh command itself
            if return_tuple[0] != 255 or attempt == (remaining_tries - 1):
                break
            else:
                # rc 255 with retries left: raise so the except block below
                # applies the same backoff as for any other failure.
                raise AnsibleConnectionFailure("Failed to connect to the host via ssh.")
        # AnsibleConnectionFailure subclasses Exception, so a single broad
        # catch here covers both it and anything _exec_command raises.
        except Exception as e:
            if attempt == remaining_tries - 1:
                # Out of retries: re-raise with the original traceback intact
                # (bare 'raise' instead of 'raise e').
                raise
            # Exponential backoff, capped at 30 seconds per pause.
            pause = min(2 ** attempt - 1, 30)
            if isinstance(e, AnsibleConnectionFailure):
                msg = "ssh_retry: attempt: %d, ssh return code is 255. cmd (%s), pausing for %d seconds" % (attempt, cmd_summary, pause)
            else:
                msg = "ssh_retry: attempt: %d, caught exception(%s) from cmd (%s), pausing for %d seconds" % (attempt, e, cmd_summary, pause)
            self._display.vv(msg)
            time.sleep(pause)

    return return_tuple
def put_file(self, in_path, out_path):
    ''' transfer a file from local to remote '''

    super(Connection, self).put_file(in_path, out_path)

    self._display.vvv("PUT {0} TO {1}".format(in_path, out_path), host=self.host)
    if not os.path.exists(in_path):
        raise AnsibleFileNotFound("file or module does not exist: {0}".format(in_path))

    # scp and sftp require square brackets for IPv6 addresses, but
    # accept them for hostnames and IPv4 addresses too.
    host = '[%s]' % self.host

    if C.DEFAULT_SCP_IF_SSH:
        in_data = None
        cmd = self._build_command('scp', in_path, '{0}:{1}'.format(host, pipes.quote(out_path)))
    else:
        in_data = "put {0} {1}\n".format(pipes.quote(in_path), pipes.quote(out_path))
        cmd = self._build_command('sftp', host)

    returncode, stdout, stderr = self._run(cmd, in_data)

    if returncode != 0:
        raise AnsibleError("failed to transfer file to {0}:\n{1}\n{2}".format(out_path, stdout, stderr))
def fetch_file(self, in_path, out_path):
    ''' fetch a file from remote to local '''

    super(Connection, self).fetch_file(in_path, out_path)

    self._display.vvv("FETCH {0} TO {1}".format(in_path, out_path), host=self.host)

    # scp and sftp require square brackets for IPv6 addresses, but
    # accept them for hostnames and IPv4 addresses too.
    host = '[%s]' % self.host

    if C.DEFAULT_SCP_IF_SSH:
        in_data = None
        cmd = self._build_command('scp', '{0}:{1}'.format(host, pipes.quote(in_path)), out_path)
    else:
        in_data = "get {0} {1}\n".format(pipes.quote(in_path), pipes.quote(out_path))
        cmd = self._build_command('sftp', host)

    returncode, stdout, stderr = self._run(cmd, in_data)

    if returncode != 0:
        raise AnsibleError("failed to transfer file from {0}:\n{1}\n{2}".format(in_path, stdout, stderr))
def close(self):
    # With ControlPersist a master connection could be told to stop
    # listening here ('ssh -O stop'), but that is temporarily disabled:
    # winrm issues currently force connections to be closed after every
    # task, which would defeat persistence. Re-enable once fixed.
    #
    # if self._connected and self._persistent:
    #     cmd = self._build_command('ssh', '-O', 'stop', self.host)
    #     p = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    #     stdout, stderr = p.communicate()
    self._connected = False
| gpl-3.0 |
stone5495/NewsBlur | vendor/yaml/nodes.py | 985 | 1440 |
class Node(object):
    """Base class for nodes of the YAML representation graph.

    Each node carries a resolved ``tag``, a ``value``, and the start/end
    marks of the source span it was constructed from.
    """

    def __init__(self, tag, value, start_mark, end_mark):
        self.tag = tag
        self.value = value
        self.start_mark = start_mark
        self.end_mark = end_mark

    def __repr__(self):
        # Show the tag with repr() so unusual characters stay visible; the
        # value is embedded via its own repr().
        return '%s(tag=%r, value=%s)' % (
            self.__class__.__name__, self.tag, repr(self.value))
class ScalarNode(Node):
    """A leaf node holding a single scalar value.

    ``style`` records the scalar style hint used when serializing, or None
    when unspecified.
    """

    id = 'scalar'

    def __init__(self, tag, value, start_mark=None, end_mark=None, style=None):
        Node.__init__(self, tag, value, start_mark, end_mark)
        self.style = style
class CollectionNode(Node):
    """Common base for sequence and mapping nodes.

    ``flow_style`` is True for flow style, False for block style, or None
    when unspecified.
    """

    def __init__(self, tag, value, start_mark=None, end_mark=None, flow_style=None):
        Node.__init__(self, tag, value, start_mark, end_mark)
        self.flow_style = flow_style
class SequenceNode(CollectionNode):
    # Node representing a YAML sequence; ``value`` holds the child nodes.
    id = 'sequence'
class MappingNode(CollectionNode):
    # Node representing a YAML mapping; ``value`` holds (key, value) node pairs.
    id = 'mapping'
| mit |
NullSoldier/django | tests/admin_custom_urls/tests.py | 276 | 6381 | from __future__ import unicode_literals
import datetime
from django.contrib.admin.utils import quote
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
from django.template.response import TemplateResponse
from django.test import TestCase, override_settings
from .models import Action, Car, Person
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
                   ROOT_URLCONF='admin_custom_urls.urls',)
class AdminCustomUrlsTest(TestCase):
    """
    Remember that:
    * The Action model has a CharField PK.
    * The ModelAdmin for Action customizes the add_view URL, it's
      '<app name>/<model name>/!add/'
    """
    @classmethod
    def setUpTestData(cls):
        # Shared fixture: one superuser plus Action rows whose PKs include
        # '/' characters and XSS-looking text, to exercise admin URL quoting.
        # password = "secret"
        User.objects.create(
            pk=100, username='super', first_name='Super', last_name='User', email='super@example.com',
            password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158', is_active=True, is_superuser=True,
            is_staff=True, last_login=datetime.datetime(2007, 5, 30, 13, 20, 10),
            date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        Action.objects.create(name='delete', description='Remove things.')
        Action.objects.create(name='rename', description='Gives things other names.')
        Action.objects.create(name='add', description='Add things.')
        Action.objects.create(name='path/to/file/', description="An action with '/' in its name.")
        Action.objects.create(
            name='path/to/html/document.html',
            description='An action with a name similar to a HTML doc path.'
        )
        Action.objects.create(
            name='javascript:alert(\'Hello world\');">Click here</a>',
            description='An action with a name suspected of being a XSS attempt'
        )

    def setUp(self):
        # Every test runs as the superuser created in setUpTestData.
        self.client.login(username='super', password='secret')

    def test_basic_add_GET(self):
        """
        Ensure GET on the add_view works.
        """
        add_url = reverse('admin_custom_urls:admin_custom_urls_action_add')
        # The ModelAdmin customizes the add URL to end in '/!add/'.
        self.assertTrue(add_url.endswith('/!add/'))
        response = self.client.get(add_url)
        self.assertIsInstance(response, TemplateResponse)
        self.assertEqual(response.status_code, 200)

    def test_add_with_GET_args(self):
        """
        Ensure GET on the add_view plus specifying a field value in the query
        string works.
        """
        response = self.client.get(reverse('admin_custom_urls:admin_custom_urls_action_add'), {'name': 'My Action'})
        self.assertEqual(response.status_code, 200)
        # The query-string value must be pre-filled in the rendered form.
        self.assertContains(response, 'value="My Action"')

    def test_basic_add_POST(self):
        """
        Ensure POST on add_view works.
        """
        post_data = {
            '_popup': '1',
            "name": 'Action added through a popup',
            "description": "Description of added action",
        }
        response = self.client.post(reverse('admin_custom_urls:admin_custom_urls_action_add'), post_data)
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, 'dismissAddRelatedObjectPopup')
        self.assertContains(response, 'Action added through a popup')

    def test_admin_URLs_no_clash(self):
        """
        Test that some admin URLs work correctly.
        """
        # Should get the change_view for model instance with PK 'add', not show
        # the add_view
        url = reverse('admin_custom_urls:%s_action_change' % Action._meta.app_label,
                      args=(quote('add'),))
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, 'Change action')

        # Should correctly get the change_view for the model instance with the
        # funny-looking PK (the one with a 'path/to/html/document.html' value)
        url = reverse('admin_custom_urls:%s_action_change' % Action._meta.app_label,
                      args=(quote("path/to/html/document.html"),))
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, 'Change action')
        self.assertContains(response, 'value="path/to/html/document.html"')

    def test_post_save_add_redirect(self):
        """
        Ensures that ModelAdmin.response_post_save_add() controls the
        redirection after the 'Save' button has been pressed when adding a
        new object.
        Refs 8001, 18310, 19505.
        """
        post_data = {'name': 'John Doe'}
        self.assertEqual(Person.objects.count(), 0)
        response = self.client.post(
            reverse('admin_custom_urls:admin_custom_urls_person_add'), post_data)
        persons = Person.objects.all()
        self.assertEqual(len(persons), 1)
        # The custom ModelAdmin redirects to the history view instead of the
        # changelist.
        self.assertRedirects(
            response, reverse('admin_custom_urls:admin_custom_urls_person_history', args=[persons[0].pk]))

    def test_post_save_change_redirect(self):
        """
        Ensures that ModelAdmin.response_post_save_change() controls the
        redirection after the 'Save' button has been pressed when editing an
        existing object.
        Refs 8001, 18310, 19505.
        """
        Person.objects.create(name='John Doe')
        self.assertEqual(Person.objects.count(), 1)
        person = Person.objects.all()[0]
        post_data = {'name': 'Jack Doe'}
        response = self.client.post(
            reverse('admin_custom_urls:admin_custom_urls_person_change', args=[person.pk]), post_data)
        # The custom ModelAdmin redirects to the delete view after a change.
        self.assertRedirects(
            response, reverse('admin_custom_urls:admin_custom_urls_person_delete', args=[person.pk]))

    def test_post_url_continue(self):
        """
        Ensures that the ModelAdmin.response_add()'s parameter `post_url_continue`
        controls the redirection after an object has been created.
        """
        post_data = {'name': 'SuperFast', '_continue': '1'}
        self.assertEqual(Car.objects.count(), 0)
        response = self.client.post(
            reverse('admin_custom_urls:admin_custom_urls_car_add'), post_data)
        cars = Car.objects.all()
        self.assertEqual(len(cars), 1)
        self.assertRedirects(
            response, reverse('admin_custom_urls:admin_custom_urls_car_history', args=[cars[0].pk]))
| bsd-3-clause |
BertrandBordage/django-filer | filer/admin/forms.py | 33 | 3604 | from django import forms
from django.db import models
from django.contrib.admin import widgets
from filer.utils.files import get_valid_filename
from django.utils.translation import ugettext as _
from django.core.exceptions import ValidationError
from django.conf import settings
if 'cmsplugin_filer_image' in settings.INSTALLED_APPS:
from cmsplugin_filer_image.models import ThumbnailOption
class AsPWithHelpMixin(object):
    """Mixin adding an admin-styled '<p>' rendering to a Django form."""

    def as_p_with_help(self):
        """Render this form as HTML <p> elements, with help text wrapped in
        the admin's 'help' paragraph markup."""
        return self._html_output(
            error_row=u'%s',
            normal_row=u'<p%(html_class_attr)s>%(label)s %(field)s</p>%(help_text)s',
            help_text_html=u'<p class="help">%s</p>',
            row_ender='</p>',
            errors_on_separate_row=True)
class CopyFilesAndFoldersForm(forms.Form, AsPWithHelpMixin):
    """Options form for the admin 'copy files/folders' action."""

    suffix = forms.CharField(required=False, help_text=_("Suffix which will be appended to filenames of copied files."))
    # TODO: We have to find a way to overwrite files with different storage backends first.
    #overwrite_files = forms.BooleanField(required=False, help_text=_("Overwrite a file if there already exists a file with the same filename?"))

    def clean_suffix(self):
        # The suffix must survive filename sanitization unchanged, otherwise
        # it contains characters that are not safe in a filename.
        candidate = self.cleaned_data['suffix']
        sanitized = get_valid_filename(candidate)
        if sanitized != candidate:
            raise forms.ValidationError(_('Suffix should be a valid, simple and lowercase filename part, like "%(valid)s".') % {'valid': sanitized})
        return candidate
class RenameFilesForm(forms.Form, AsPWithHelpMixin):
    """Options form for the admin 'rename files' action.

    ``rename_format`` is an old-style %-format string that may reference the
    substitution keys probed in :meth:`clean_rename_format`
    (e.g. ``%(original_filename)s``).
    """

    rename_format = forms.CharField(required=True)

    def clean_rename_format(self):
        # Apply the format string to a dummy substitution dict so unknown
        # keys or malformed formats are rejected at validation time.
        try:
            self.cleaned_data['rename_format'] % {
                'original_filename': 'filename',
                'original_basename': 'basename',
                'original_extension': 'ext',
                'current_filename': 'filename',
                'current_basename': 'basename',
                'current_extension': 'ext',
                'current_folder': 'folder',
                'counter': 42,
                'global_counter': 42,
            }
        # NOTE: the 'except X as e' spelling (valid since Python 2.6) replaces
        # the Python-2-only 'except X, e' comma syntax, which is a SyntaxError
        # on Python 3.
        except KeyError as e:
            raise forms.ValidationError(_('Unknown rename format value key "%(key)s".') % {'key': e.args[0]})
        except Exception as e:
            raise forms.ValidationError(_('Invalid rename format: %(error)s.') % {'error': e})
        return self.cleaned_data['rename_format']
class ResizeImagesForm(forms.Form, AsPWithHelpMixin):
    """Options form for the admin 'resize images' action."""

    # thumbnail_option is only offered when cmsplugin_filer_image is
    # installed; it supplies predefined resize presets.
    if 'cmsplugin_filer_image' in settings.INSTALLED_APPS:
        thumbnail_option = models.ForeignKey(ThumbnailOption, null=True, blank=True, verbose_name=_("thumbnail option")).formfield()
    # Form fields are derived from model fields via formfield() so the admin
    # widgets match the model field types.
    width = models.PositiveIntegerField(_("width"), null=True, blank=True).formfield(widget=widgets.AdminIntegerFieldWidget)
    height = models.PositiveIntegerField(_("height"), null=True, blank=True).formfield(widget=widgets.AdminIntegerFieldWidget)
    crop = models.BooleanField(_("crop"), default=True).formfield()
    upscale = models.BooleanField(_("upscale"), default=True).formfield()

    def clean(self):
        # Either a thumbnail option or at least one non-zero dimension
        # (width/height) must be supplied.
        if not (self.cleaned_data.get('thumbnail_option') or ((self.cleaned_data.get('width') or 0) + (self.cleaned_data.get('height') or 0))):
            if 'cmsplugin_filer_image' in settings.INSTALLED_APPS:
                raise ValidationError(_('Thumbnail option or resize parameters must be choosen.'))
            else:
                raise ValidationError(_('Resize parameters must be choosen.'))
        return self.cleaned_data
| bsd-3-clause |
nezihyigitbasi/presto | presto-benchto-benchmarks/generate_schemas/generate-tpcds.py | 38 | 1109 | #!/usr/bin/env python
# Target Hive schemas paired with the TPC-DS connector schema each one is
# populated from; the suffix of the new schema name encodes the storage format.
schemas = [
    # (new_schema, source_schema)
    ('tpcds_10gb_orc', 'tpcds.sf10'),
    ('tpcds_100gb_orc', 'tpcds.sf100'),
    ('tpcds_1tb_orc', 'tpcds.sf1000'),
]

# The 24 standard TPC-DS tables copied into every schema.
tables = [
    'call_center',
    'catalog_page',
    'catalog_returns',
    'catalog_sales',
    'customer',
    'customer_address',
    'customer_demographics',
    'date_dim',
    'household_demographics',
    'income_band',
    'inventory',
    'item',
    'promotion',
    'reason',
    'ship_mode',
    'store',
    'store_returns',
    'store_sales',
    'time_dim',
    'warehouse',
    'web_page',
    'web_returns',
    'web_sales',
    'web_site',
]
# Emit DDL for every schema: one CREATE SCHEMA statement followed by one
# CTAS per TPC-DS table. print() is called as a function (a parenthesized
# single argument behaves identically on Python 2) so the generator script
# also runs under Python 3.
for (new_schema, source_schema) in schemas:
    # The storage format is encoded in the schema name suffix.
    if new_schema.endswith('_orc'):
        format = 'ORC'
    elif new_schema.endswith('_text'):
        format = 'TEXTFILE'
    else:
        raise ValueError(new_schema)
    print('CREATE SCHEMA hive.%s;' % (new_schema,))
    for table in tables:
        print('CREATE TABLE "hive"."%s"."%s" WITH (format = \'%s\') AS SELECT * FROM %s."%s";' %
              (new_schema, table, format, source_schema, table))
| apache-2.0 |
TansyArron/pants | src/python/pants/backend/android/tasks/aapt_gen.py | 4 | 8322 | # coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import logging
import os
import subprocess
from pants.backend.android.targets.android_library import AndroidLibrary
from pants.backend.android.targets.android_resources import AndroidResources
from pants.backend.android.tasks.aapt_task import AaptTask
from pants.backend.jvm.targets.jar_dependency import JarDependency
from pants.backend.jvm.targets.jar_library import JarLibrary
from pants.backend.jvm.targets.java_library import JavaLibrary
from pants.base.address import SyntheticAddress
from pants.base.build_environment import get_buildroot
from pants.base.exceptions import TaskError
from pants.base.workunit import WorkUnitLabel
from pants.util.dirutil import safe_mkdir
logger = logging.getLogger(__name__)
class AaptGen(AaptTask):
    """
    Handle the processing of resources for Android targets with the Android Asset Packaging Tool
    (aapt). The aapt tool supports 6 major commands: [dump, list, add, remove, crunch, package]
    For right now, pants supports 'package'.

    Commands and flags for aapt can be seen here:
    https://android.googlesource.com/platform/frameworks/base/+/master/tools/aapt/Command.cpp

    The resources are processed against a set of APIs found in the android.jar that corresponds to
    the target's target_sdk. AndroidBinary files must declare a target_sdk in their manifest.
    AndroidLibrary targets are processed with the target_sdk of the dependee AndroidBinary.
    An AndroidLibrary will need to be processed once for every target_sdk that it supports.

    Each AndroidLibrary is processed individually. AndroidBinary targets are processed along with
    all of the AndroidLibrary targets in its transitive closure. The output of an AaptGen invocation
    is an R.java file that allows programmatic access to resources, one each for all AndroidBinary
    and AndroidLibrary targets.
    """

    @classmethod
    def _relative_genfile(cls, target):
        """Name of the file produced by aapt."""
        return os.path.join(cls.package_path(target.manifest.package_name), 'R.java')

    @classmethod
    def prepare(cls, options, round_manager):
        # Require the product produced by the unpack-libraries task upstream.
        super(AaptGen, cls).prepare(options, round_manager)
        round_manager.require_data('unpacked_libraries')

    def __init__(self, *args, **kwargs):
        super(AaptGen, self).__init__(*args, **kwargs)
        # Maps sdk version -> synthetic JarLibrary wrapping that sdk's android.jar.
        self._jar_library_by_sdk = {}
        # Maps an aapt output file path -> the synthetic JavaLibrary created for it,
        # so each R.java is wrapped in a target at most once.
        self._created_library_targets = {}

    def create_sdk_jar_deps(self, binaries):
        """Create a JarLibrary target for every sdk in play.

        :param list binaries: A list of AndroidBinary targets.
        """
        # Prepare exactly N android jar targets where N is the number of SDKs in-play.
        for binary in binaries:
            sdk = binary.target_sdk
            if sdk not in self._jar_library_by_sdk:
                jar_url = 'file://{0}'.format(self.android_jar(binary))
                jar = JarDependency(org='com.google', name='android', rev=sdk, url=jar_url)
                address = SyntheticAddress(self.workdir, 'android-{0}.jar'.format(sdk))
                self._jar_library_by_sdk[sdk] = self.context.add_new_target(address, JarLibrary, jars=[jar])

    def _render_args(self, binary, manifest, resource_dirs):
        """Compute the args that will be passed to the aapt tool.

        :param AndroidBinary binary: The target that depends on the processed resources.
        :param AndroidManifest manifest: Manifest of the target that owns the resources.
        :param list resource_dirs: List of resource_dirs to include in this invocation of the aapt tool.
        """
        # Glossary of used aapt flags.
        #   : 'package' is the main aapt operation (see class docstring for more info).
        #   : '-m' is to "make" a package directory under location '-J'.
        #   : '-J' Points to the output directory.
        #   : '-M' is the AndroidManifest.xml of the project.
        #   : '--auto-add-overlay' automatically add resources that are only in overlays.
        #   : '-S' points to each dir in resource_dirs, aapt 'scans' them in order while
        #     collecting resources (resource priority is left -> right).
        #   : '-I' packages to add to base 'include' set, here it is the android.jar of the target sdk.
        #   : '--ignore-assets' the aapt tool will disregard any files matching that pattern.
        args = [self.aapt_tool(binary)]
        args.extend(['package', '-m', '-J', self.aapt_out(binary)])
        args.extend(['-M', manifest.path])
        args.append('--auto-add-overlay')
        for resource_dir in resource_dirs:
            args.extend(['-S', resource_dir])
        args.extend(['-I', self.android_jar(binary)])
        args.extend(['--ignore-assets', self.ignored_assets])
        logger.debug('Executing: {0}'.format(' '.join(args)))
        return args

    def execute(self):
        # The number of R.java files produced from each library is == |sdks in play for its dependees|.
        # The number of R.java files produced for each android_binary == |android_library deps| + 1
        binaries = self.context.targets(self.is_android_binary)
        self.create_sdk_jar_deps(binaries)
        for binary in binaries:
            # TODO(mateo) add invalidation framework. Adding it here doesn't work right now because the
            # framework can't differentiate between one library that has to be compiled by multiple sdks.

            # Collect the binary itself plus every AndroidLibrary with a
            # manifest in its transitive closure.
            gentargets = [binary]

            def gather_gentargets(tgt):
                """Gather all AndroidLibrary targets that have a manifest."""
                if isinstance(tgt, AndroidLibrary) and tgt.manifest:
                    gentargets.append(tgt)
            binary.walk(gather_gentargets)

            for gen in gentargets:
                aapt_output = self._relative_genfile(gen)
                aapt_file = os.path.join(self.aapt_out(binary), aapt_output)

                # Resource dirs come from the transitive closure of the gen target.
                resource_deps = self.context.build_graph.transitive_subgraph_of_addresses([gen.address])
                resource_dirs = [t.resource_dir for t in resource_deps if isinstance(t, AndroidResources)]

                if resource_dirs:
                    if aapt_file not in self._created_library_targets:
                        # Priority for resources is left->right, so reverse collection order (DFS preorder).
                        args = self._render_args(binary, gen.manifest, reversed(resource_dirs))
                        with self.context.new_workunit(name='aaptgen', labels=[WorkUnitLabel.MULTITOOL]) as workunit:
                            returncode = subprocess.call(args,
                                                         stdout=workunit.output('stdout'),
                                                         stderr=workunit.output('stderr'))
                            if returncode:
                                raise TaskError('The AaptGen process exited non-zero: {}'.format(returncode))
                        new_target = self.create_target(binary, gen)
                        self._created_library_targets[aapt_file] = new_target
                    # Make the gen target depend on the synthetic JavaLibrary wrapping its R.java.
                    gen.inject_dependency(self._created_library_targets[aapt_file].address)

    def create_target(self, binary, gentarget):
        """Create a JavaLibrary target for the R.java files created by the aapt tool.

        :param AndroidBinary binary: AndroidBinary target whose target_sdk is used.
        :param AndroidTarget gentarget: AndroidBinary or Library that owns the processed resources.
        :returns new_target: Synthetic target for the R.java output of the aapt tool.
        :rtype::class:`pants.backend.jvm.targets.java_library.JavaLibrary`
        """
        spec_path = os.path.join(os.path.relpath(self.aapt_out(binary), get_buildroot()))
        address = SyntheticAddress(spec_path=spec_path, target_name=gentarget.id)
        deps = [self._jar_library_by_sdk[binary.target_sdk]]
        new_target = self.context.add_new_target(address,
                                                 JavaLibrary,
                                                 derived_from=gentarget,
                                                 sources=[self._relative_genfile(gentarget)],
                                                 dependencies=deps)
        return new_target

    def aapt_out(self, binary):
        """Location for the output of an aapt invocation.

        :param AndroidBinary binary: AndroidBinary target that depends upon the aapt output.
        :returns outdir: full path of output directory
        :rtype string
        """
        # One output dir per sdk version, created on demand.
        outdir = os.path.join(self.workdir, binary.target_sdk)
        safe_mkdir(outdir)
        return outdir
| apache-2.0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.