import sqlalchemy.types as types
from nailgun.openstack.common import jsonutils
class JSON(types.TypeDecorator):
impl = types.Text
def process_bind_param(self, value, dialect):
if value is not None:
value = jsonutils.dumps(value)
return value
def process_result_value(self, value, dialect):
if value is not None:
value = jsonutils.loads(value)
return value
class LowercaseString(types.TypeDecorator):
impl = types.String
def process_bind_param(self, value, dialect):
if value is None:
return None
return value.lower()
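# --- illustrative usage sketch (not part of the original module) ---
# A minimal example of binding the two TypeDecorator subclasses above to
# columns of a declarative model; the model name and columns are
# assumptions for illustration, not nailgun code.
from sqlalchemy import Column, Integer
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()

class Example(Base):
    __tablename__ = 'example'
    id = Column(Integer, primary_key=True)
    # dict/list values are dumped to a JSON string on write, parsed on read
    meta = Column(JSON(), default={})
    # values are normalised to lower case before they reach the database
    email = Column(LowercaseString(255))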
|
{
"content_hash": "119142b3e9f38310cba314963d96041e",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 51,
"avg_line_length": 22.571428571428573,
"alnum_prop": 0.6487341772151899,
"repo_name": "Axam/nsx-web",
"id": "38b4d6f653c97defe550599eaa2e921f46495ba0",
"size": "1267",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "nailgun/nailgun/db/sqlalchemy/models/fields.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "99402"
},
{
"name": "JavaScript",
"bytes": "553275"
},
{
"name": "Python",
"bytes": "2623980"
},
{
"name": "Ruby",
"bytes": "33345"
},
{
"name": "Shell",
"bytes": "29681"
}
],
"symlink_target": ""
}
|
import collections
from oslo_log import log as logging
import six
from heat.common import exception
from heat.common.i18n import _
from heat.engine.resources import signal_responder
LOG = logging.getLogger(__name__)
class BaseWaitConditionHandle(signal_responder.SignalResponder):
"""Base WaitConditionHandle resource.
The main point of this class is to:
- have no dependencies (so the instance can reference it)
- create credentials to allow for signalling from the instance.
- handle signals from the instance, validate and store result
"""
properties_schema = {}
WAIT_STATUSES = (
STATUS_FAILURE,
STATUS_SUCCESS,
) = (
'FAILURE',
'SUCCESS',
)
def handle_create(self):
super(BaseWaitConditionHandle, self).handle_create()
self.resource_id_set(self._get_user_id())
def _status_ok(self, status):
return status in self.WAIT_STATUSES
def _metadata_format_ok(self, metadata):
if not isinstance(metadata, collections.Mapping):
return False
if set(metadata) != set(self.METADATA_KEYS):
return False
return self._status_ok(metadata[self.STATUS])
def normalise_signal_data(self, signal_data, latest_metadata):
return signal_data
def handle_signal(self, details=None):
write_attempts = []
def merge_signal_metadata(signal_data, latest_rsrc_metadata):
signal_data = self.normalise_signal_data(signal_data,
latest_rsrc_metadata)
if not self._metadata_format_ok(signal_data):
LOG.info("Metadata failed validation for %s", self.name)
raise ValueError(_("Metadata format invalid"))
new_entry = signal_data.copy()
unique_id = six.text_type(new_entry.pop(self.UNIQUE_ID))
new_rsrc_metadata = latest_rsrc_metadata.copy()
if unique_id in new_rsrc_metadata:
LOG.info("Overwriting Metadata item for id %s!",
unique_id)
new_rsrc_metadata.update({unique_id: new_entry})
write_attempts.append(signal_data)
return new_rsrc_metadata
self.metadata_set(details, merge_metadata=merge_signal_metadata)
data_written = write_attempts[-1]
signal_reason = ('status:%s reason:%s' %
(data_written[self.STATUS],
data_written[self.REASON]))
return signal_reason
def get_status(self):
"""Return a list of the Status values for the handle signals."""
return [v[self.STATUS]
for v in six.itervalues(self.metadata_get(refresh=True))]
def get_status_reason(self, status):
"""Return a list of reasons associated with a particular status."""
return [v[self.REASON]
for v in six.itervalues(self.metadata_get(refresh=True))
if v[self.STATUS] == status]
class WaitConditionFailure(exception.Error):
def __init__(self, wait_condition, handle):
reasons = handle.get_status_reason(handle.STATUS_FAILURE)
super(WaitConditionFailure, self).__init__(';'.join(reasons))
class WaitConditionTimeout(exception.Error):
def __init__(self, wait_condition, handle):
reasons = handle.get_status_reason(handle.STATUS_SUCCESS)
vals = {'len': len(reasons),
'count': wait_condition.properties[wait_condition.COUNT]}
if reasons:
vals['reasons'] = ';'.join(reasons)
message = (_('%(len)d of %(count)d received - %(reasons)s') % vals)
else:
message = (_('%(len)d of %(count)d received') % vals)
super(WaitConditionTimeout, self).__init__(message)
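# --- illustrative sketch (not part of the original module) ---
# The shape of a well-formed signal, assuming the lowercase METADATA_KEYS
# ('data', 'reason', 'status', 'id') defined by Heat's native handle; the
# values shown are hypothetical.
example_signal = {
    'data': 'application setup complete',
    'reason': 'Configuration OK',
    'status': 'SUCCESS',
    'id': '1',
}
# handle_signal() pops the unique id and stores the remaining keys under it,
# so after one such signal the resource metadata would be:
# {'1': {'data': 'application setup complete',
#        'reason': 'Configuration OK', 'status': 'SUCCESS'}}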
|
{
"content_hash": "8b397c582dccdc9f9354174ead8c5f16",
"timestamp": "",
"source": "github",
"line_count": 107,
"max_line_length": 79,
"avg_line_length": 35.64485981308411,
"alnum_prop": 0.6132669113791295,
"repo_name": "noironetworks/heat",
"id": "87d797237bc188453e702a2cbe16f75732dd070e",
"size": "4389",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "heat/engine/resources/wait_condition.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "8804896"
},
{
"name": "Shell",
"bytes": "64533"
}
],
"symlink_target": ""
}
|
import logging
from uuid import uuid4
from flask import render_template
from tempfile import NamedTemporaryFile
from pantomime.types import HTML
from aleph import settings
from aleph.core import archive
from aleph.index.entities import entities_by_ids
log = logging.getLogger(__name__)
FIELDS = ["id", "schema", "properties"]
def publish_diagram(entityset):
embed = render_diagram(entityset)
with NamedTemporaryFile("w") as fh:
fh.write(embed)
fh.flush()
publish_id = uuid4().hex
embed_path = f"embeds/{entityset.id}/{publish_id}.html"
url = archive.publish_file(fh.name, embed_path, mime_type=HTML)
return {"embed": embed, "url": url}
def render_diagram(entityset):
"""Generate an HTML snippet from a diagram object."""
entity_ids = entityset.entities
entities = []
for entity in entities_by_ids(entity_ids, cached=True):
for field in list(entity.keys()):
if field not in FIELDS:
entity.pop(field)
entities.append(entity)
# TODO: add viewport
return render_template(
"diagram.html",
data={
"entities": entities,
"layout": entityset.layout,
"viewport": {"center": {"x": 0, "y": 0}},
},
entityset=entityset,
settings=settings,
)
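# --- illustrative sketch (not part of the original module) ---
# The loop in render_diagram keeps only the keys listed in FIELDS on each
# entity; the same filtering expressed on a hypothetical entity:
sample = {"id": "e1", "schema": "Person", "properties": {}, "score": 0.9}
filtered = {k: v for k, v in sample.items() if k in FIELDS}
# filtered == {"id": "e1", "schema": "Person", "properties": {}}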
|
{
"content_hash": "dc4b5ad81fb2252ab8e3c52cd17e00c4",
"timestamp": "",
"source": "github",
"line_count": 46,
"max_line_length": 71,
"avg_line_length": 29.02173913043478,
"alnum_prop": 0.6322097378277154,
"repo_name": "pudo/aleph",
"id": "e65c38cdf7b9e5982bcfb04fe3eaf9d4ad76cee5",
"size": "1335",
"binary": false,
"copies": "2",
"ref": "refs/heads/dependabot/pip/develop/jsonschema-4.1.2",
"path": "aleph/logic/diagrams.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "15949"
},
{
"name": "HTML",
"bytes": "170476"
},
{
"name": "JavaScript",
"bytes": "111287"
},
{
"name": "Makefile",
"bytes": "1319"
},
{
"name": "Mako",
"bytes": "412"
},
{
"name": "Python",
"bytes": "492593"
}
],
"symlink_target": ""
}
|
"""
Author : Mingbin Xu (mingbin.xu@gmail.com)
Filename : kbp-system.py
Last Update : Jul 26, 2016
Description : N/A
Website : https://wiki.eecs.yorku.ca/lab/MLL/
Copyright (c) 2016 iNCML (author: Mingbin Xu)
License: MIT License (see ../LICENSE)
"""
import argparse, logging, time
from itertools import product, chain
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument( 'word_embedding', type = str,
help = 'word_embedding.{-case-insensitive, -case-sensitive}.word2vec are assumed' )
parser.add_argument( 'data_path', type = str,
help = 'path to ed-eng-{train,eval} of KBP2015' )
# training-related arguments
parser.add_argument( '--n_char_embedding', type = int, default = 32,
help = 'char embedding dimension' )
parser.add_argument( '--n_ner_embedding', type = int, default = 32,
help = 'ner embedding dimension' )
parser.add_argument( '--n_char', type = int, default = 128,
help = 'character set size; since ASCII is used, 128 is assumed' )
parser.add_argument( '--layer_size', type = str, default = '512,512,512',
help = 'size of fully connected layers after projection' )
parser.add_argument( '--n_batch_size', type = int, default = 512,
help = 'mini batch size; the last one may be smaller' )
parser.add_argument( '--learning_rate', type = float, default = 0.1024,
help = 'global initial learning rate' )
parser.add_argument( '--momentum', type = float, default = 0.9,
help = 'momentum value when MomentumOptimizer is used' )
parser.add_argument( '--max_iter', type = int, default = 64,
help = 'maximum number of iterations' )
parser.add_argument( '--feature_choice', type = int, default = 767,
help = 'the features used are picked with a bit mask. They are ' +
'1) case-insensitive bfofe with candidate word(s), ' +
'2) case-insensitive bfofe without candidate word(s), ' +
'3) case-insensitive bag-of-words, ' +
'4) case-sensitive bfofe with candidate word(s), ' +
'5) case-sensitive bfofe without candidate word(s), ' +
'6) case-sensitive bag-of-words, ' +
'7) char-level bfofe of candidate word(s), ' +
'8) char-level bfofe of candidate initial, ' +
'9) gazetteer exact match, ' +
'10) character-convolution; ' +
'e.g. 0b0000111111 picks features 1 to 6' )
# the bit-mask decoding is sketched after the argument definitions below
parser.add_argument( '--overlap_rate', type = float, default = 0.08,
help = 'what percentage of overlap examples is used during training' )
parser.add_argument( '--disjoint_rate', type = float, default = 0.016,
help = 'what percentage of disjoint examples is used during training' )
parser.add_argument( '--dropout', action = 'store_true', default = False,
help = 'whether to use dropout or not' )
parser.add_argument( '--char_alpha', type = float, default = 0.8,
help = 'char-level forgetting factor' )
parser.add_argument( '--word_alpha', type = float, default = 0.5,
help = 'word-level forgetting factor' )
parser.add_argument( '--share_word_embedding', action = 'store_true', default = False,
help = 'whether or not bow and context share a same word embedding' )
parser.add_argument( '--n_window', type = int, default = 7,
help = 'maximum length of NER candidate' )
parser.add_argument( '--strictly_one_hot', action = 'store_true', default = False,
help = 'when gazetteer is used, True for 7-bit match, False for 5-bit match' )
parser.add_argument( '--hope_out', type = int, default = 0,
help = 'dimension of z in the HOPE paper; 0 means not used' )
parser.add_argument( '--n_label_type', type = int, default = 10,
help = 'By default, PER, LOC, ORG and MISC are assumed' )
parser.add_argument( '--kernel_height', type = str, default = '2,3,4,5,6,7,8,9' )
parser.add_argument( '--kernel_depth', type = str, default = ','.join( ['16'] * 8 ) )
parser.add_argument( '--enable_distant_supervision', action = 'store_true', default = False )
parser.add_argument( '--initialize_method', type = str, default = 'uniform',
choices = [ 'uniform', 'gaussian' ] )
parser.add_argument( '--model', type = str, default = 'kbp2016' )
parser.add_argument( '--iflytek', action = 'store_true', default = False )
parser.add_argument( '--language', type = str, choices = ['eng', 'cmn', 'spa'], default = 'eng' )
parser.add_argument( '--average', action = 'store_true', default = False,
help = 'word embedding is averaged over the number of characters ' + \
'when word level feature is used in Chinese' )
# experimental
parser.add_argument( '--n_pattern', type = int, default = 0,
help = 'number of patterns in sparse-fofe' )
parser.add_argument( '--is_2nd_pass', action = 'store_true', default = False,
help = 'run 2nd pass training when true' )
parser.add_argument( '--split', type = str, default = '' )
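# --- illustrative note (not part of the original script) ---
# feature_choice is a bit mask over the 10 features listed above; e.g. the
# default 767 == 0b1011111111 enables features 1-8 and 10:
#     enabled = [i + 1 for i in range(10) if (767 >> i) & 1]
#     # -> [1, 2, 3, 4, 5, 6, 7, 8, 10]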
########################################################################
# set a logging file at DEBUG level, TODO: windows doesn't allow ":" to appear in a file name
logging.basicConfig( format = '%(asctime)s : %(levelname)s : %(message)s',
level= logging.DEBUG,
filename = ('log/kbp ' + time.ctime() + '.log').replace(' ', '-'),
filemode = 'w' )
# direct the INFO-level logging to the screen
console = logging.StreamHandler()
console.setLevel( logging.INFO )
console.setFormatter( logging.Formatter( '%(asctime)s : %(levelname)s : %(message)s' ) )
logging.getLogger().addHandler( console )
logger = logging.getLogger()
########################################################################
args = parser.parse_args()
logger.info( str(args) + '\n' )
################################################################################
if args.is_2nd_pass:
logger.info( 'user-input feature-choice was %d' % args.feature_choice )
args.feature_choice &= 2038
logger.info( 'feature-choice now is %d' % args.feature_choice )
########################################################################
from fofe_mention_net import *
config = mention_config( args )
########################################################################
mention_net = fofe_mention_net( config )
mention_net.tofile( args.model )
########################################################################
# there are 2 sets of vocabulary, case-insensitive and case-sensitive
nt = config.n_label_type if config.is_2nd_pass else 0
if config.language != 'cmn':
numericizer1 = vocabulary(
config.word_embedding + '-case-insensitive.wordlist',
config.char_alpha,
False,
n_label_type = nt
)
numericizer2 = vocabulary(
config.word_embedding + '-case-sensitive.wordlist',
config.char_alpha,
True,
n_label_type = nt
)
else:
numericizer1 = chinese_word_vocab(
config.word_embedding + '-char.wordlist',
n_label_type = nt
)
numericizer2 = chinese_word_vocab(
config.word_embedding + ('-avg.wordlist' if config.average else '-word.wordlist'),
n_label_type = nt
)
# it's assumed that there are exactly 2 files in 'data_path'
# namely 'ed-eng-train' and 'ed-eng-eval'
if args.feature_choice & 256 > 0:
kbp_gazetteer = gazetteer(
os.path.join( config.data_path, '%s-gaz' % config.language ),
mode = 'KBP'
)
logger.info( 'gazetteer loaded' )
else:
kbp_gazetteer = [ set() for _ in xrange( args.n_label_type ) ]
logger.info( 'gazetteer not used' )
# load all KBP training data and 90% KBP test data
source = chain(
imap(
lambda x: x[1],
ifilter(
lambda x : x[0] % 10 < 9,
enumerate(
imap(
lambda x: x[:4],
LoadED( os.path.join(
config.data_path,
'%s-eval-parsed' % config.language
) )
)
)
)
),
imap(
lambda x: x[:4],
LoadED( os.path.join(
config.data_path,
'%s-train-parsed' % config.language
) )
)
)
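# --- note (not part of the original script) ---
# the enumerate/ifilter/imap pipeline above keeps items whose index i
# satisfies i % 10 < 9, i.e. a deterministic 90% slice of the eval set;
# the complementary slice (i % 10 >= 9) becomes the validation set below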
# load 90% iflytek data
if args.iflytek:
source = chain( source,
imap( lambda x: x[1],
ifilter( lambda x : x[0] % 10 < 9,
enumerate( imap( lambda x: x[:4],
LoadED( 'iflytek-clean-%s' % config.language ) ) ) ) ) )
# instantiate a batch constructor
human = batch_constructor( source,
numericizer1, numericizer2, gazetteer = kbp_gazetteer,
alpha = config.word_alpha, window = config.n_window,
n_label_type = config.n_label_type,
language = config.language,
is2ndPass = args.is_2nd_pass )
logger.info( 'human: ' + str(human) )
# load 10% KBP test data
source = imap( lambda x: x[1],
ifilter( lambda x : x[0] % 10 >= 9,
enumerate( imap( lambda x: x[:4],
LoadED( config.data_path + '/%s-eval-parsed' % config.language ) ) ) ) )
# load 10% iflytek data
if args.iflytek:
source = chain( source,
imap( lambda x: x[1],
ifilter( lambda x : 90 <= x[0] % 100 < 95,
enumerate( imap( lambda x: x[:4],
LoadED( 'iflytek-clean-%s' % config.language ) ) ) ) ) )
# instantiate a batch constructor
valid = batch_constructor( source,
numericizer1, numericizer2, gazetteer = kbp_gazetteer,
alpha = config.word_alpha, window = config.n_window,
n_label_type = config.n_label_type,
language = config.language,
is2ndPass = args.is_2nd_pass )
logger.info( 'valid: ' + str(valid) )
# the internal data set is Chinese and English only
if config.language != 'spa':
source = imap( lambda x: x[1],
ifilter( lambda x: x[0] % 100 >= 95,
enumerate( imap( lambda x: x[:4],
LoadED( 'iflytek-clean-%s' % config.language ) ) ) ) )
test = batch_constructor( source,
numericizer1, numericizer2, gazetteer = kbp_gazetteer,
alpha = config.word_alpha, window = config.n_window,
n_label_type = config.n_label_type,
language = config.language,
is2ndPass = args.is_2nd_pass )
else:
test = valid
logger.info( 'test: ' + str(test) )
logger.info( 'data set loaded' )
################### let's compute ####################
prev_cost, decay_started = 2054, False
infinite_human = human.infinite_mini_batch_multi_thread(
config.n_batch_size,
True,
config.overlap_rate,
config.disjoint_rate,
config.feature_choice,
True )
for n_epoch in xrange( config.max_iter ):
if not os.path.exists( 'kbp-result' ):
os.makedirs( 'kbp-result' )
valid_predicted_file = 'kbp-result/kbp-valid-%s.predicted' % args.model
test_predicted_file = 'kbp-result/kbp-test-%s.predicted' % args.model
valid_predicted = open( valid_predicted_file, 'wb' )
test_predicted = open( test_predicted_file, 'wb' )
#############################################
########## go through training set ##########
#############################################
if config.enable_distant_supervision:
X, Y = n_epoch / 16, n_epoch % (16 if not args.iflytek else 4)
dsp = distant_supervision_parser(
'distant-supervision/data-chunk/sentence-%02d' % X,
'distant-supervision/data-chunk/labels-%02d' % X,
Y, None, 64 if not args.iflytek else 16 )
train = batch_constructor( dsp, numericizer1, numericizer2,
gazetteer = kbp_gazetteer,
alpha = config.word_alpha,
window = config.n_window,
n_label_type = config.n_label_type,
language = config.language )
logger.info( 'train: ' + str(train) )
else:
train = human
# pbar is used to observe training progress
logger.info( 'epoch %2d, learning-rate: %f' % \
(n_epoch + 1, mention_net.config.learning_rate) )
pbar = tqdm( total = len(train.positive) +
int(len(train.overlap) * config.overlap_rate) +
int(len(train.disjoint) * config.disjoint_rate) )
cost, cnt = 0, 0
for x in ifilter( lambda x : x[-1].shape[0] == config.n_batch_size,
train.mini_batch_multi_thread( config.n_batch_size,
True,
config.overlap_rate,
config.disjoint_rate,
config.feature_choice ) ):
if config.enable_distant_supervision:
x = [ x, infinite_human.next() ]
if choice( [ True, False ] ):
x.append( infinite_human.next() )
else:
x = [ x ]
for example in x:
c = mention_net.train( example )
cost += c * example[-1].shape[0]
cnt += example[-1].shape[0]
pbar.update( example[-1].shape[0] )
pbar.close()
train_cost = cost / cnt
logger.info( 'training set iterated, %f' % train_cost )
########################################################################
## if config.enable_distant_supervision or \
if n_epoch + 1 == config.max_iter or \
(n_epoch + 1) % max(1, min(16, config.max_iter / 16)) == 0:
###############################################
########## go through validation set ##########
###############################################
cost, cnt = 0, 0
for example in valid.mini_batch_multi_thread(
config.n_batch_size * 2 if config.feature_choice & (1 << 9 ) > 0 else 2048,
shuffle_needed = False,
overlap_rate = 1,
disjoint_rate = 1,
feature_choice = config.feature_choice
):
c, pi, pv = mention_net.eval( example )
cost += c * example[-1].shape[0]
cnt += example[-1].shape[0]
for expected, estimate, probability in zip( example[-1], pi, pv ):
print >> valid_predicted, '%d %d %s' % \
(expected, estimate, ' '.join( [('%f' % x) for x in probability.tolist()] ))
valid_cost = cost / cnt
valid_predicted.close()
#########################################
########## go through test set ##########
#########################################
cost, cnt = 0, 0
for example in test.mini_batch_multi_thread(
config.n_batch_size * 2 if config.feature_choice & (1 << 9 ) > 0 else 2048,
shuffle_needed = False,
overlap_rate = 1,
disjoint_rate = 1,
feature_choice = config.feature_choice
):
c, pi, pv = mention_net.eval( example )
cost += c * example[-1].shape[0]
cnt += example[-1].shape[0]
for expected, estimate, probability in zip( example[-1], pi, pv ):
print >> test_predicted, '%d %d %s' % \
(expected, estimate, ' '.join( [('%f' % x) for x in probability.tolist()] ))
test_cost = cost / cnt
test_predicted.close()
###################################################################################
########## exhaustively iterate 3 decoding algorithms with 0.x cut-off ############
###################################################################################
logger.info( 'cost: %f (train), %f (valid), %f (test)', train_cost, valid_cost, test_cost )
# logger.info( 'cost: %f (train), %f (valid)', train_cost, valid_cost )
# algo_list = ['highest-first', 'longest-first', 'subsumption-removal']
idx2algo = { 1: 'highest-first', 2: 'longest-first', 3:'subsumption-removal' }
algo2idx = { 'highest-first': 1, 'longest-first': 2, 'subsumption-removal': 3 }
best_dev_fb1, best_threshold, best_algorithm = 0, [0.5, 0.5], [1, 1]
if n_epoch >= config.max_iter / 2:
source = imap(
lambda x: x[1],
ifilter(
lambda x : x[0] % 10 >= 9,
enumerate(
imap(
lambda x: x[:4],
LoadED( os.path.join(
config.data_path, '%s-eval-parsed' % config.language
) )
)
)
)
)
if args.iflytek:
source = chain(
source,
imap(
lambda x: x[1],
ifilter(
lambda x : 90 <= x[0] % 100 < 95,
enumerate(
imap(
lambda x: x[:4],
LoadED( 'iflytek-clean-%s' % config.language )
)
)
)
)
)
pp = list( PredictionParser(
source,
valid_predicted_file,
config.n_window,
n_label_type = config.n_label_type
) )
for algorithm in product( [1, 2], repeat = 2 ):
algorithm = list( algorithm )
name = [ idx2algo[i] for i in algorithm ]
for threshold in product( [ 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9 ], repeat = 2 ):
threshold = list( threshold )
precision, recall, f1, _ = evaluation( pp, threshold, algorithm, True,
n_label_type = config.n_label_type )
logger.debug( ('cut-off: %s, algorithm: %-20s' % (str(threshold), name)) +
(', validation -- precision: %f, recall: %f, fb1: %f' % (precision, recall, f1)) )
if f1 > best_dev_fb1:
best_dev_fb1, best_threshold, best_algorithm = f1, threshold, algorithm
best_precision, best_recall = precision, recall
mention_net.config.algorithm = best_algorithm
mention_net.config.threshold = best_threshold
mention_net.tofile( args.model )
logger.info( 'cut-off: %s, algorithm: %-20s' % \
(str(best_threshold), str([ idx2algo[i] for i in best_algorithm ])) )
source = imap( lambda x: x[1],
ifilter( lambda x : x[0] % 10 >= 9,
enumerate( imap( lambda x: x[:4],
LoadED( config.data_path + '/%s-eval-parsed' % config.language ) ) ) ) )
if args.iflytek:
source = chain( source,
imap( lambda x: x[1],
ifilter( lambda x : 90 <= x[0] % 100 < 95,
enumerate( imap( lambda x: x[:4],
LoadED( 'iflytek-clean-%s' % config.language ) ) ) ) ) )
precision, recall, f1, info = evaluation( PredictionParser( source,
valid_predicted_file,
config.n_window,
n_label_type = config.n_label_type ),
best_threshold, best_algorithm, True,
analysis = None,
n_label_type = config.n_label_type )
logger.info( '%s\n%s' % ('validation', info) )
if config.language != 'spa':
source = imap( lambda x: x[1],
ifilter( lambda x: x[0] % 100 >= 95,
enumerate( imap( lambda x: x[:4],
LoadED( 'iflytek-clean-%s' % config.language ) ) ) ) )
precision, recall, f1, info = evaluation( PredictionParser( source,
test_predicted_file,
config.n_window,
n_label_type = config.n_label_type ),
best_threshold, best_algorithm, True,
analysis = None,
n_label_type = config.n_label_type )
logger.info( '%s\n%s' % ('test', info) )
mention_net.config.learning_rate *= 0.5 ** ((4./ config.max_iter) if config.drop_rate > 0 else (1./ 2))
mention_net.config.drop_rate *= 0.5 ** (2./ config.max_iter)
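# --- note (not part of the original script) ---
# with dropout, the learning rate is scaled by 0.5 ** (4 / max_iter) per
# epoch, i.e. halved 4 times over a full run; without it, by 0.5 ** 0.5 per
# epoch, i.e. halved every 2 epochs; drop_rate itself is halved twice per run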
logger.info( 'results are written in kbp-result/%s-{valid,test}.predicted' % args.model )
|
{
"content_hash": "6292948cd5f0d5ce6471e74df26ca3c8",
"timestamp": "",
"source": "github",
"line_count": 501,
"max_line_length": 122,
"avg_line_length": 48.650698602794414,
"alnum_prop": 0.4343562812833347,
"repo_name": "nanalelfe/fofe-ner",
"id": "0b44250bfa0a10cbe8ed51b984776f56d06efd07",
"size": "24441",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "kbp-system.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "51080"
},
{
"name": "JavaScript",
"bytes": "318599"
},
{
"name": "Perl",
"bytes": "12728"
},
{
"name": "Python",
"bytes": "583203"
},
{
"name": "Shell",
"bytes": "12701"
}
],
"symlink_target": ""
}
|
"""
Copyright (c) 2004-Present Pivotal Software, Inc.
This program and the accompanying materials are made available under
the terms of the under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
import sys
import glob
from time import sleep
import tinctest
from tinctest.lib import local_path, Gpdiff
from mpp.lib.PSQL import PSQL
from gppylib.commands.base import Command
from mpp.lib.gpstart import GpStart
from mpp.lib.gpstop import GpStop
from mpp.models import MPPTestCase
from mpp.lib.gpConfig import GpConfig
class UDFExceptionHandling(MPPTestCase):
def __init__(self, methodName='runTest'):
self.gpstart = GpStart()
self.gpstop = GpStop()
self.config = GpConfig()
self.port = os.getenv('PGPORT')
self.gphome = os.getenv('GPHOME')
self.base_dir = os.path.dirname(sys.modules[self.__class__.__module__].__file__)
super(UDFExceptionHandling, self).__init__(methodName)
def get_sql_files(self, sql_file_name):
sql_file = os.path.join(self.base_dir, "sql", sql_file_name + ".sql")
return sql_file
def validate_sql(self, ans_file, out_file):
''' Compare the out and ans files '''
init_file=os.path.join( self.base_dir, "sql",'init_file')
result1 = Gpdiff.are_files_equal(out_file, ans_file, match_sub =[init_file])
self.assertTrue(result1 ,'Gpdiff.are_files_equal')
def run_sql(self, filename, out_file=None):
''' Run the provided sql file, writing its output to out_file '''
out_file = out_file or local_path(filename.replace(".sql", ".out"))
PSQL.run_sql_file(filename, out_file=out_file)
def set_protocol_conf( self, debug_dtm_action_segment, debug_dtm_action_target, debug_dtm_action_protocol,debug_dtm_action,debug_dtm_action_nestinglevel):
tinctest.logger.info('Configuring debug config parameters')
self.set_guc('debug_dtm_action_segment', debug_dtm_action_segment)
tinctest.logger.info('Configuring debug_dtm_action_segment = %s ' % debug_dtm_action_segment)
self.set_guc('debug_dtm_action_target', debug_dtm_action_target)
tinctest.logger.info('Configuring debug_dtm_action_target = %s ' % debug_dtm_action_target)
self.set_guc('debug_dtm_action_protocol', debug_dtm_action_protocol)
tinctest.logger.info('Configuring debug_dtm_action_protocol = %s ' % debug_dtm_action_protocol)
self.set_guc('debug_dtm_action', debug_dtm_action)
tinctest.logger.info('Configuring debug_dtm_action = %s ' % debug_dtm_action)
self.set_guc('debug_dtm_action_nestinglevel', debug_dtm_action_nestinglevel)
tinctest.logger.info('Configuring debug_dtm_action_nestinglevel = %s ' % debug_dtm_action_nestinglevel)
def set_sql_conf( self, debug_dtm_action_segment, debug_dtm_action_target, debug_dtm_action_sql_command_tag,debug_dtm_action,debug_dtm_action_nestinglevel):
tinctest.logger.info('Configuring debug config parameters')
self.set_guc('debug_dtm_action_segment', debug_dtm_action_segment)
tinctest.logger.info('Configuring debug_dtm_action_segment = %s ' % debug_dtm_action_segment)
self.set_guc('debug_dtm_action_target', debug_dtm_action_target)
tinctest.logger.info('Configuring debug_dtm_action_target = %s ' % debug_dtm_action_target)
self.set_guc('debug_dtm_action_sql_command_tag', debug_dtm_action_sql_command_tag)
tinctest.logger.info('Configuring debug_dtm_action_sql_command_tag = %s ' % debug_dtm_action_sql_command_tag)
self.set_guc('debug_dtm_action', debug_dtm_action)
tinctest.logger.info('Configuring debug_dtm_action = %s ' % debug_dtm_action)
self.set_guc('debug_dtm_action_nestinglevel', debug_dtm_action_nestinglevel)
tinctest.logger.info('Configuring debug_dtm_action_nestinglevel = %s ' % debug_dtm_action_nestinglevel)
def set_guc(self, guc_name, guc_value):
# Set the guc value
tinctest.logger.info('Configuring ' + guc_name +' ...')
cmd_str='source ' + self.gphome+ '/greenplum_path.sh;gpconfig -c ' + guc_name + ' -v ' +guc_value +' --skipvalidation'
cmd=Command("gpconfig " ,cmd_str)
cmd.run()
self.assertTrue(int(cmd.get_results().rc) == 0,cmd_str)
# Load the new value to the db
tinctest.logger.info('gpstop -u to reload config files...')
cmd_str2='source '+ self.gphome+ '/greenplum_path.sh;gpstop -u'
cmd = Command("gpstop -u", cmd_str2)
cmd.run()
self.assertTrue(int(cmd.get_results().rc) == 0,cmd_str2)
def reset_protocol_conf( self):
# Unset the guc value
tinctest.logger.info('Reset debug config parameters...Begin')
self.set_guc('debug_dtm_action_segment', '0')
self.set_guc('debug_dtm_action_target', 'none')
self.set_guc('debug_dtm_action_protocol', 'none')
self.set_guc('debug_dtm_action', 'none')
self.set_guc('debug_dtm_action_nestinglevel','0')
self.set_guc('debug_dtm_action_sql_command_tag','none')
tinctest.logger.info('Reset debug config parameters...End')
def get_output_file(self, debug_dtm_action_segment,debug_dtm_action_target, debug_dtm_action_protocol, debug_dtm_action, debug_dtm_action_nestinglevel):
file_name='_seg'+debug_dtm_action_segment
if debug_dtm_action_target == 'protocol':
# debug_dtm_action_protocol
if debug_dtm_action_protocol == 'subtransaction_begin':
file_name=file_name+'_subtxtbeg'
elif debug_dtm_action_protocol == 'subtransaction_rollback':
file_name=file_name+'_subtxtrolbk'
else:
file_name=file_name+'_subtxtrelse'
if debug_dtm_action_target == 'sql':
file_name=file_name+'_mppexec'
# debug_dtm_action
if debug_dtm_action == 'fail_begin_command':
file_name=file_name+'_failbeg'
elif debug_dtm_action == 'fail_end_command':
file_name=file_name+'_failend'
elif debug_dtm_action == 'panic_begin_command':
file_name=file_name+'_panicbeg'
# debug_dtm_action_nestinglevel
file_name = file_name+'_lvl'+debug_dtm_action_nestinglevel
tinctest.logger.info('Test ans and out file name : %s ' % file_name)
return file_name
def run_test(self, debug_dtm_action_segment, debug_dtm_action_target, debug_dtm_action_protocol, debug_dtm_action, debug_dtm_action_nestinglevel):
file_name = 'protocol' + self.get_output_file(debug_dtm_action_segment, debug_dtm_action_target, debug_dtm_action_protocol, debug_dtm_action, debug_dtm_action_nestinglevel)
test_name = 'udf_exception_handling_'+debug_dtm_action_target+'_seg'+ debug_dtm_action_segment
sql_file = self.get_sql_files(test_name)
out_file = self.base_dir+ "/sql/"+test_name+'.out'
out_file2 = self.base_dir+ "/sql/"+file_name+'.out'
ans_file = self.base_dir+ "/expected/"+file_name+'.ans'
tinctest.logger.info( 'sql-file == %s \n' % sql_file)
tinctest.logger.info( 'out-file == %s \n' % out_file)
tinctest.logger.info( 'ans-file == %s \n' % ans_file)
self.run_sql(sql_file, out_file=out_file)
cmd_str='cp ' + out_file + ' ' + out_file2
cmd=Command("bak outfile " ,cmd_str)
cmd.run()
self.assertTrue(int(cmd.get_results().rc) == 0,cmd_str)
self.validate_sql(ans_file,out_file2)
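# --- illustrative note (not part of the original module) ---
# Example of how run_test() composes file names: for
# run_test('1', 'protocol', 'subtransaction_begin', 'fail_begin_command', '2')
# get_output_file() returns '_seg1_subtxtbeg_failbeg_lvl2', so the diff is
# taken between sql/udf_exception_handling_protocol_seg1.out (copied to
# sql/protocol_seg1_subtxtbeg_failbeg_lvl2.out) and
# expected/protocol_seg1_subtxtbeg_failbeg_lvl2.ans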
|
{
"content_hash": "8dce31eb255026835df46c9364da083f",
"timestamp": "",
"source": "github",
"line_count": 154,
"max_line_length": 177,
"avg_line_length": 51.25974025974026,
"alnum_prop": 0.6602482898403851,
"repo_name": "edespino/gpdb",
"id": "ec281351ddcf2b2ebf9fb4362ec65cf8d1a65786",
"size": "7894",
"binary": false,
"copies": "8",
"ref": "refs/heads/master",
"path": "src/test/tinc/tincrepo/mpp/gpdb/tests/catalog/udf_exception_handling/udf.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "3737"
},
{
"name": "Batchfile",
"bytes": "11369"
},
{
"name": "C",
"bytes": "36580146"
},
{
"name": "C++",
"bytes": "3396346"
},
{
"name": "CMake",
"bytes": "17118"
},
{
"name": "CSS",
"bytes": "7407"
},
{
"name": "Csound Score",
"bytes": "164"
},
{
"name": "DTrace",
"bytes": "3746"
},
{
"name": "Fortran",
"bytes": "14777"
},
{
"name": "GDB",
"bytes": "576"
},
{
"name": "Gherkin",
"bytes": "740582"
},
{
"name": "HTML",
"bytes": "354931"
},
{
"name": "Java",
"bytes": "186576"
},
{
"name": "JavaScript",
"bytes": "23969"
},
{
"name": "Lex",
"bytes": "195794"
},
{
"name": "M4",
"bytes": "97709"
},
{
"name": "Makefile",
"bytes": "440584"
},
{
"name": "Objective-C",
"bytes": "42255"
},
{
"name": "PLSQL",
"bytes": "218116"
},
{
"name": "PLpgSQL",
"bytes": "5424886"
},
{
"name": "Perl",
"bytes": "3911633"
},
{
"name": "Perl 6",
"bytes": "8302"
},
{
"name": "Python",
"bytes": "8130606"
},
{
"name": "Roff",
"bytes": "39530"
},
{
"name": "Ruby",
"bytes": "26862"
},
{
"name": "SQLPL",
"bytes": "3939815"
},
{
"name": "Shell",
"bytes": "571615"
},
{
"name": "XS",
"bytes": "8405"
},
{
"name": "XSLT",
"bytes": "5779"
},
{
"name": "Yacc",
"bytes": "519516"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('venta', '0008_auto_20160205_2313'),
]
operations = [
migrations.CreateModel(
name='Image',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=45)),
('url', models.ImageField(upload_to='category_images/')),
],
),
]
|
{
"content_hash": "f0a19c9bbb06e71339aae572407be1fc",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 114,
"avg_line_length": 27.681818181818183,
"alnum_prop": 0.5796387520525451,
"repo_name": "vpadillar/pventa",
"id": "e46c0a546e06f2925d082b39ce4b4e19fd93804c",
"size": "681",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "venta/migrations/0009_auto_20160206_0432.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "491"
},
{
"name": "CSS",
"bytes": "87140"
},
{
"name": "Groff",
"bytes": "76"
},
{
"name": "HTML",
"bytes": "47212"
},
{
"name": "JavaScript",
"bytes": "177804"
},
{
"name": "Python",
"bytes": "201594"
},
{
"name": "SQLPL",
"bytes": "1006"
}
],
"symlink_target": ""
}
|
from collections import defaultdict
from urlparse import urlparse, urlunparse
from socket import gethostbyname
from textwrap import dedent
from time import time
import requests
from progressbar import ProgressBar
from gevent import Timeout, sleep as g_sleep
from gevent.pool import Pool
from surcharge import logger
__all__ = ['Surcharger']
class Surcharger(object):
http_method_supported = ('get', 'post',)
def __init__(self, url=None, method='get', concurrency=1, numbers=1,
duration=0, format='json', cli=False, **options):
"""
Initialize everything needed to run a benchmark
:param url: URL that you want benchmark
:param method: HTTP method
:param concurrency: simulate *client* connection
:param numbers: number of requests
:param duration: duration in seconds; overrides the numbers option.
:param format: format for the benchmark result
:param cli: Surcharge from the CLI (display some information)
:param options: *requests* options
"""
self.url = url
self.method = method
self.concurrency = concurrency
self.numbers = numbers
self.duration = duration
self.format = format
self.cli = cli
self.options = options
def __call__(self):
"""
Launch the benchmark.
"""
logger.info("launch benchmark :: {}".format(self.__dict__()))
self.result = defaultdict(list)
progress = ProgressBar()
pool = Pool(self.concurrency)
start = time()
if self.duration:
try:
with Timeout(self.duration, False):
while True:
pool.spawn(self.surcharge)
g_sleep()
except Timeout:
pool.join()
else:
if self.cli:
self.display_informations()
range_numbers = progress(xrange(self.numbers))
else:
range_numbers = xrange(self.numbers)
for number in range_numbers:
pool.spawn(self.surcharge)
pool.join()
self.exec_time = time() - start
def __dict__(self):
return dict(
url=self.url,
method=self.method,
concurrency=self.concurrency,
numbers=self.numbers,
options=self.options
)
@property
def url(self):
return u'{}'.format(self._url)
@url.setter
def url(self, value):
"""
Resolving the URL is a heavy process; we don't want every
request to perform its own DNS lookup, so it is done once here.
"""
scheme, netloc, path, params, query, fragment = urlparse(value)
if not scheme:
mess = "Invalid URL {}: No schema supplied. Perhaps you meant http://{}?"
raise Exception(mess.format(value, value))
url_netloc = netloc.rsplit(':')
if len(url_netloc) == 1:
url_netloc.append('80')
url_resolved = gethostbyname(url_netloc[0])
url_full = url_resolved + ':' + url_netloc[1]
self._url = urlunparse((scheme, url_full) + (path, params, query, fragment))
@property
def method(self):
return u'{}'.format(self._method)
@method.setter
def method(self, value):
"""
Ensure the HTTP method is supported.
"""
if value.lower() not in self.http_method_supported:
mess = "Invalid method {}: Not supported. Only {}"
raise Exception(mess.format(value, self.http_method_supported))
self._method = value.lower()
def surcharge(self):
"""
Make the request. Keep the status code of the response and
the exec time in a result list.
"""
try:
start = time()
response = getattr(requests, self.method)(self.url, **self.options)
status_code = response.status_code
except Exception as e:
logger.error("error surcharge :: {}".format(e))
status_code = 666
finally:
self.result[status_code].append(time() - start)
def display_informations(self):
""" Displays useful informations.
"""
res = requests.head(self.url)
print '\nServer: {server}\n'.format(**res.headers)
print 'URL: {}\n'.format(self.url)
print 'Concurrency level: {}\n'.format(self.concurrency)
print 'Options: {}\n\n'.format(self.options)
class SurchargerStats(object):
def __init__(self, surcharger):
self.surcharger = surcharger
self.result = self.surcharger.result
def __call__(self):
self.compute()
self.send()
def compute(self):
try:
self.stats = {
'exec_time': self.surcharger.exec_time,
'total': sum([len(self.result[key]) for key in self.result.iterkeys()]),
'total_success': len(self.result[200]),
'requests_process': 0,
'min': 0,
'max': 0,
'moy': 0,
'RPS': 0,
}
if self.stats['total_success']:
request_process = sum(self.result[200])
self.stats.update({
'requests_process': request_process,
'min': min(self.result[200]),
'max': max(self.result[200]),
'moy': request_process / self.stats['total_success'],
'RPS': self.stats['total_success'] / request_process * self.surcharger.concurrency,
})
self.stats['total_failed'] = self.stats['total'] - self.stats['total_success']
except Exception as error:
logger.error("compute stats :: {}".format(error))
def stdout(self):
print dedent('''\n
Number process requests: {total}
Time taken for tests: {exec_time:.2f}
Complete requests: {total_success}
Failed requests: {total_failed}
Faster request: {min:.3f}
Slower request: {max:.3f}
Time per request (only success): {moy:.3f}
Request per second: {RPS:.2f}
'''.format(**self.stats))
def send(self):
if self.surcharger.cli:
self.stdout()
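# --- illustrative usage sketch (not part of the original module) ---
# kept commented out since it would fire real HTTP requests on import;
# the URL and numbers are hypothetical:
# surcharger = Surcharger(url='http://example.com/', method='get',
#                         concurrency=10, numbers=200, cli=True)
# surcharger()                         # run the benchmark
# stats = SurchargerStats(surcharger)
# stats()                              # compute and print the summary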
|
{
"content_hash": "87569a647eeabe9621ef55e02a880d95",
"timestamp": "",
"source": "github",
"line_count": 204,
"max_line_length": 103,
"avg_line_length": 31.41176470588235,
"alnum_prop": 0.5483770287141073,
"repo_name": "Lujeni/Surcharge",
"id": "3a071d828f91fdd421803baf98af7f4e5df94e89",
"size": "6433",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "surcharge/core.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "30090"
}
],
"symlink_target": ""
}
|
import logging
from datetime import timedelta
from pajbot.managers.handler import HandlerManager
from pajbot.modules.base import BaseModule
from pajbot.modules.base import ModuleSetting
log = logging.getLogger(__name__)
class AsciiProtectionModule(BaseModule):
ID = __name__.split(".")[-1]
NAME = "ASCII Protection"
DESCRIPTION = "Times out users who post messages that contain too many ASCII characters."
CATEGORY = "Moderation"
SETTINGS = [
ModuleSetting(
key="enabled_by_stream_status",
label="Enable moderation of ASCII characters when the stream is:",
type="options",
required=True,
default="Offline and Online",
options=["Online Only", "Offline Only", "Offline and Online"],
),
ModuleSetting(
key="min_msg_length",
label="Minimum message length to be considered bad",
type="number",
required=True,
placeholder="",
default=70,
constraints={"min_value": 20, "max_value": 500},
),
ModuleSetting(
key="moderation_action",
label="Moderation action to apply",
type="options",
required=True,
default="Timeout",
options=["Delete", "Timeout"],
),
ModuleSetting(
key="timeout_length",
label="Timeout length",
type="number",
required=True,
placeholder="Timeout length in seconds",
default=120,
constraints={"min_value": 1, "max_value": 1209600},
),
ModuleSetting(
key="bypass_level",
label="Level to bypass module",
type="number",
required=True,
placeholder="",
default=500,
constraints={"min_value": 100, "max_value": 1000},
),
ModuleSetting(
key="timeout_reason",
label="Timeout Reason",
type="text",
required=False,
placeholder="",
default="Too many ASCII characters",
constraints={},
),
ModuleSetting(
key="whisper_offenders",
label="Send offenders a whisper explaining the timeout",
type="boolean",
required=True,
default=False,
),
ModuleSetting(
key="whisper_timeout_reason",
label="Whisper Timeout Reason | Available arguments: {punishment}",
type="text",
required=False,
placeholder="",
default="You have been {punishment} because your message contained too many ascii characters.",
constraints={},
),
ModuleSetting(
key="disable_warnings",
label="Disable warning timeouts",
type="boolean",
required=True,
default=False,
),
]
@staticmethod
def check_message(message):
if len(message) <= 0:
return False
non_alnum = sum(not c.isalnum() for c in message)
ratio = non_alnum / len(message)
if (len(message) > 240 and ratio > 0.8) or ratio > 0.93:
return True
return False
def on_pubmsg(self, source, message, tags, **rest):
if self.settings["enabled_by_stream_status"] == "Online Only" and not self.bot.is_online:
return
if self.settings["enabled_by_stream_status"] == "Offline Only" and self.bot.is_online:
return
if source.level >= self.settings["bypass_level"] or source.moderator is True:
return
if len(message) <= self.settings["min_msg_length"]:
return
if AsciiProtectionModule.check_message(message) is False:
return
if self.settings["moderation_action"] == "Delete":
self.bot.delete_message(tags["id"])
elif self.settings["disable_warnings"] is True and self.settings["moderation_action"] == "Timeout":
self.bot.timeout(source, self.settings["timeout_length"], reason=self.settings["timeout_reason"])
else:
duration, punishment = self.bot.timeout_warn(
source, self.settings["timeout_length"], reason=self.settings["timeout_reason"]
)
""" We only send a notification to the user if he has spent more than
one hour watching the stream. """
if self.settings["whisper_offenders"] and duration > 0 and source.time_in_chat_online >= timedelta(hours=1):
self.bot.whisper(source, self.settings["whisper_timeout_reason"].format(punishment=punishment))
return False
def enable(self, bot):
HandlerManager.add_handler("on_pubmsg", self.on_pubmsg, priority=150, run_if_propagation_stopped=True)
def disable(self, bot):
HandlerManager.remove_handler("on_pubmsg", self.on_pubmsg)
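# --- illustrative sketch (not part of the original module) ---
# check_message() flags a message when its non-alphanumeric ratio exceeds
# 0.93 at any length, or 0.8 once the message is longer than 240 chars:
assert AsciiProtectionModule.check_message("!?*" * 20) is True
assert AsciiProtectionModule.check_message("hello world") is False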
|
{
"content_hash": "040ba1d15ed22b790a2bae2f107eec17",
"timestamp": "",
"source": "github",
"line_count": 143,
"max_line_length": 120,
"avg_line_length": 34.9020979020979,
"alnum_prop": 0.5682228010418754,
"repo_name": "pajlada/pajbot",
"id": "eac67985be408c0891e5a8e1f03240cba216345a",
"size": "4991",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pajbot/modules/ascii.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "11288"
},
{
"name": "HTML",
"bytes": "129576"
},
{
"name": "JavaScript",
"bytes": "202450"
},
{
"name": "Mako",
"bytes": "494"
},
{
"name": "Python",
"bytes": "987601"
},
{
"name": "Shell",
"bytes": "589"
}
],
"symlink_target": ""
}
|
from django.core.urlresolvers import resolve
from django.test import TestCase
from django.http import HttpRequest
from django.template.loader import render_to_string
from django.utils import timezone
from weight.views import editorWeight
from weight.models import Weight
class EditorWeightTest(TestCase):
def test_url(self):
editorWeightPage = resolve('/')
self.assertEqual(editorWeightPage.func, editorWeight)
def test_response_html(self):
request = HttpRequest()
response = editorWeight(request)
expected_html = render_to_string('editorWeight.html')
self.assertEqual(response.content.decode(), expected_html)
def test_save_request(self):
request = HttpRequest()
request.method = 'POST'
request.POST['weightInput'] = '50'
response = editorWeight(request)
self.assertIn('50', response.content.decode())
expected_html = render_to_string(
'editorWeight.html',
{'new_weight': '50'}
)
print(expected_html)
self.assertEqual(response.content.decode(), expected_html)
class WeightModelTest(TestCase):
def test_save_weight(self):
weight1 = Weight()
weight1.weight = 100
weight1.record_date=timezone.now()
weight1.save()
weight2 = Weight()
weight2.weight = 50
weight2.record_date=timezone.now()
weight2.save()
saved_weights = Weight.objects.all()
self.assertEqual(saved_weights.count(), 2)
weight1 = saved_weights[0]
weight2 = saved_weights[1]
self.assertEqual(weight1.weight, 100)
self.assertEqual(weight2.weight, 50)
|
{
"content_hash": "dde34f15b1cdc25276af7d3ec2ab9c61",
"timestamp": "",
"source": "github",
"line_count": 59,
"max_line_length": 66,
"avg_line_length": 28.76271186440678,
"alnum_prop": 0.6540954625810254,
"repo_name": "williamHuang5468/WeightController",
"id": "8baa85b9e03ac5c53ebc26c6ca72de35977dbed3",
"size": "1697",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "WeightController/weight/tests.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "384"
},
{
"name": "Python",
"bytes": "8263"
}
],
"symlink_target": ""
}
|
from __future__ import absolute_import, division, print_function
import binascii
import itertools
import math
import os
import pytest
from cryptography.exceptions import (
AlreadyFinalized, InvalidSignature, _Reasons
)
from cryptography.hazmat.backends.interfaces import (
PEMSerializationBackend, RSABackend
)
from cryptography.hazmat.primitives import hashes, serialization
from cryptography.hazmat.primitives.asymmetric import (
padding, rsa, utils as asym_utils
)
from cryptography.hazmat.primitives.asymmetric.rsa import (
RSAPrivateNumbers, RSAPublicNumbers
)
from .fixtures_rsa import (
RSA_KEY_1024, RSA_KEY_1025, RSA_KEY_1026, RSA_KEY_1027, RSA_KEY_1028,
RSA_KEY_1029, RSA_KEY_1030, RSA_KEY_1031, RSA_KEY_1536, RSA_KEY_2048,
RSA_KEY_2048_ALT, RSA_KEY_512, RSA_KEY_512_ALT, RSA_KEY_522, RSA_KEY_599,
RSA_KEY_745, RSA_KEY_768,
)
from .utils import (
_check_rsa_private_numbers, generate_rsa_verification_test
)
from ...doubles import (
DummyAsymmetricPadding, DummyHashAlgorithm, DummyKeySerializationEncryption
)
from ...utils import (
load_pkcs1_vectors, load_rsa_nist_vectors, load_vectors_from_file,
raises_unsupported_algorithm
)
class DummyMGF(object):
_salt_length = 0
def _check_rsa_private_numbers_if_serializable(key):
if isinstance(key, rsa.RSAPrivateKeyWithSerialization):
_check_rsa_private_numbers(key.private_numbers())
def test_check_rsa_private_numbers_if_serializable():
_check_rsa_private_numbers_if_serializable("notserializable")
def _flatten_pkcs1_examples(vectors):
flattened_vectors = []
for vector in vectors:
examples = vector[0].pop("examples")
for example in examples:
merged_vector = (vector[0], vector[1], example)
flattened_vectors.append(merged_vector)
return flattened_vectors
def _build_oaep_sha2_vectors():
base_path = os.path.join("asymmetric", "RSA", "oaep-custom")
vectors = []
hashalgs = [
hashes.SHA1(),
hashes.SHA224(),
hashes.SHA256(),
hashes.SHA384(),
hashes.SHA512(),
]
for mgf1alg, oaepalg in itertools.product(hashalgs, hashalgs):
if mgf1alg.name == "sha1" and oaepalg.name == "sha1":
# We need to generate the cartesian product of the permutations
# of all the SHAs above, but SHA1/SHA1 is something we already
# tested previously and thus did not generate custom vectors for.
continue
examples = _flatten_pkcs1_examples(
load_vectors_from_file(
os.path.join(
base_path,
"oaep-{0}-{1}.txt".format(
mgf1alg.name, oaepalg.name
)
),
load_pkcs1_vectors
)
)
# We've loaded the files, but the loaders don't give us any information
# about the mgf1 or oaep hash algorithms. We know this info so we'll
# just add that to the end of the tuple
for private, public, vector in examples:
vectors.append((private, public, vector, mgf1alg, oaepalg))
return vectors
def _skip_pss_hash_algorithm_unsupported(backend, hash_alg):
if not backend.rsa_padding_supported(
padding.PSS(
mgf=padding.MGF1(hash_alg),
salt_length=padding.PSS.MAX_LENGTH
)
):
pytest.skip(
"Does not support {0} in MGF1 using PSS.".format(hash_alg.name)
)
@pytest.mark.requires_backend_interface(interface=RSABackend)
def test_skip_pss_hash_algorithm_unsupported(backend):
with pytest.raises(pytest.skip.Exception):
_skip_pss_hash_algorithm_unsupported(backend, DummyHashAlgorithm())
def test_modular_inverse():
p = int(
"d1f9f6c09fd3d38987f7970247b85a6da84907753d42ec52bc23b745093f4fff5cff3"
"617ce43d00121a9accc0051f519c76e08cf02fc18acfe4c9e6aea18da470a2b611d2e"
"56a7b35caa2c0239bc041a53cc5875ca0b668ae6377d4b23e932d8c995fd1e58ecfd8"
"c4b73259c0d8a54d691cca3f6fb85c8a5c1baf588e898d481", 16
)
q = int(
"d1519255eb8f678c86cfd06802d1fbef8b664441ac46b73d33d13a8404580a33a8e74"
"cb2ea2e2963125b3d454d7a922cef24dd13e55f989cbabf64255a736671f4629a47b5"
"b2347cfcd669133088d1c159518531025297c2d67c9da856a12e80222cd03b4c6ec0f"
"86c957cb7bb8de7a127b645ec9e820aa94581e4762e209f01", 16
)
assert rsa._modinv(q, p) == int(
"0275e06afa722999315f8f322275483e15e2fb46d827b17800f99110b269a6732748f"
"624a382fa2ed1ec68c99f7fc56fb60e76eea51614881f497ba7034c17dde955f92f15"
"772f8b2b41f3e56d88b1e096cdd293eba4eae1e82db815e0fadea0c4ec971bc6fd875"
"c20e67e48c31a611e98d32c6213ae4c4d7b53023b2f80c538", 16
)
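# --- illustrative sketch (not part of the original test module) ---
# rsa._modinv(e, m) returns the x with (e * x) % m == 1; a minimal
# extended-Euclid equivalent, for reference only:
def _egcd_modinv(e, m):
    a, b, x0, x1 = m, e, 0, 1
    while b:
        q, a, b = a // b, b, a % b
        x0, x1 = x1, x0 - q * x1
    return x0 % m
# e.g. _egcd_modinv(3, 7) == 5, and (3 * 5) % 7 == 1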
@pytest.mark.requires_backend_interface(interface=RSABackend)
class TestRSA(object):
@pytest.mark.parametrize(
("public_exponent", "key_size"),
itertools.product(
(3, 5, 65537),
(1024, 1025, 1026, 1027, 1028, 1029, 1030, 1031, 1536, 2048)
)
)
def test_generate_rsa_keys(self, backend, public_exponent, key_size):
skey = rsa.generate_private_key(public_exponent, key_size, backend)
assert skey.key_size == key_size
_check_rsa_private_numbers_if_serializable(skey)
pkey = skey.public_key()
assert isinstance(pkey.public_numbers(), rsa.RSAPublicNumbers)
def test_generate_bad_public_exponent(self, backend):
with pytest.raises(ValueError):
rsa.generate_private_key(public_exponent=1,
key_size=2048,
backend=backend)
with pytest.raises(ValueError):
rsa.generate_private_key(public_exponent=4,
key_size=2048,
backend=backend)
def test_cant_generate_insecure_tiny_key(self, backend):
with pytest.raises(ValueError):
rsa.generate_private_key(public_exponent=65537,
key_size=511,
backend=backend)
with pytest.raises(ValueError):
rsa.generate_private_key(public_exponent=65537,
key_size=256,
backend=backend)
@pytest.mark.parametrize(
"pkcs1_example",
load_vectors_from_file(
os.path.join(
"asymmetric", "RSA", "pkcs-1v2-1d2-vec", "pss-vect.txt"),
load_pkcs1_vectors
)
)
def test_load_pss_vect_example_keys(self, pkcs1_example):
secret, public = pkcs1_example
private_num = rsa.RSAPrivateNumbers(
p=secret["p"],
q=secret["q"],
d=secret["private_exponent"],
dmp1=secret["dmp1"],
dmq1=secret["dmq1"],
iqmp=secret["iqmp"],
public_numbers=rsa.RSAPublicNumbers(
e=secret["public_exponent"],
n=secret["modulus"]
)
)
_check_rsa_private_numbers(private_num)
public_num = rsa.RSAPublicNumbers(
e=public["public_exponent"],
n=public["modulus"]
)
assert public_num
public_num2 = private_num.public_numbers
assert public_num2
assert public_num.n == public_num2.n
assert public_num.e == public_num2.e
def test_rsa_generate_invalid_backend():
pretend_backend = object()
with raises_unsupported_algorithm(_Reasons.BACKEND_MISSING_INTERFACE):
rsa.generate_private_key(65537, 2048, pretend_backend)
@pytest.mark.requires_backend_interface(interface=RSABackend)
class TestRSASignature(object):
@pytest.mark.supported(
only_if=lambda backend: backend.rsa_padding_supported(
padding.PKCS1v15()
),
skip_message="Does not support PKCS1v1.5."
)
@pytest.mark.parametrize(
"pkcs1_example",
_flatten_pkcs1_examples(load_vectors_from_file(
os.path.join(
"asymmetric", "RSA", "pkcs1v15sign-vectors.txt"),
load_pkcs1_vectors
))
)
def test_pkcs1v15_signing(self, pkcs1_example, backend):
private, public, example = pkcs1_example
private_key = rsa.RSAPrivateNumbers(
p=private["p"],
q=private["q"],
d=private["private_exponent"],
dmp1=private["dmp1"],
dmq1=private["dmq1"],
iqmp=private["iqmp"],
public_numbers=rsa.RSAPublicNumbers(
e=private["public_exponent"],
n=private["modulus"]
)
).private_key(backend)
signer = private_key.signer(padding.PKCS1v15(), hashes.SHA1())
signer.update(binascii.unhexlify(example["message"]))
signature = signer.finalize()
assert binascii.hexlify(signature) == example["signature"]
@pytest.mark.supported(
only_if=lambda backend: backend.rsa_padding_supported(
padding.PSS(
mgf=padding.MGF1(hashes.SHA1()),
salt_length=padding.PSS.MAX_LENGTH
)
),
skip_message="Does not support PSS."
)
@pytest.mark.parametrize(
"pkcs1_example",
_flatten_pkcs1_examples(load_vectors_from_file(
os.path.join(
"asymmetric", "RSA", "pkcs-1v2-1d2-vec", "pss-vect.txt"),
load_pkcs1_vectors
))
)
def test_pss_signing(self, pkcs1_example, backend):
private, public, example = pkcs1_example
private_key = rsa.RSAPrivateNumbers(
p=private["p"],
q=private["q"],
d=private["private_exponent"],
dmp1=private["dmp1"],
dmq1=private["dmq1"],
iqmp=private["iqmp"],
public_numbers=rsa.RSAPublicNumbers(
e=private["public_exponent"],
n=private["modulus"]
)
).private_key(backend)
public_key = rsa.RSAPublicNumbers(
e=public["public_exponent"],
n=public["modulus"]
).public_key(backend)
signer = private_key.signer(
padding.PSS(
mgf=padding.MGF1(algorithm=hashes.SHA1()),
salt_length=padding.PSS.MAX_LENGTH
),
hashes.SHA1()
)
signer.update(binascii.unhexlify(example["message"]))
signature = signer.finalize()
assert len(signature) == math.ceil(private_key.key_size / 8.0)
# PSS signatures contain randomness so we can't do an exact
# signature check. Instead we'll verify that the signature created
# successfully verifies.
verifier = public_key.verifier(
signature,
padding.PSS(
mgf=padding.MGF1(algorithm=hashes.SHA1()),
salt_length=padding.PSS.MAX_LENGTH
),
hashes.SHA1(),
)
verifier.update(binascii.unhexlify(example["message"]))
verifier.verify()
@pytest.mark.parametrize(
"hash_alg",
[hashes.SHA224(), hashes.SHA256(), hashes.SHA384(), hashes.SHA512()]
)
def test_pss_signing_sha2(self, hash_alg, backend):
_skip_pss_hash_algorithm_unsupported(backend, hash_alg)
private_key = RSA_KEY_768.private_key(backend)
public_key = private_key.public_key()
pss = padding.PSS(
mgf=padding.MGF1(hash_alg),
salt_length=padding.PSS.MAX_LENGTH
)
signer = private_key.signer(pss, hash_alg)
signer.update(b"testing signature")
signature = signer.finalize()
verifier = public_key.verifier(signature, pss, hash_alg)
verifier.update(b"testing signature")
verifier.verify()
@pytest.mark.supported(
only_if=lambda backend: (
backend.hash_supported(hashes.SHA512()) and
backend.rsa_padding_supported(
padding.PSS(
mgf=padding.MGF1(hashes.SHA1()),
salt_length=padding.PSS.MAX_LENGTH
)
)
),
skip_message="Does not support SHA512."
)
def test_pss_minimum_key_size_for_digest(self, backend):
private_key = RSA_KEY_522.private_key(backend)
signer = private_key.signer(
padding.PSS(
mgf=padding.MGF1(hashes.SHA1()),
salt_length=padding.PSS.MAX_LENGTH
),
hashes.SHA512()
)
signer.update(b"no failure")
signer.finalize()
@pytest.mark.supported(
only_if=lambda backend: backend.rsa_padding_supported(
padding.PSS(
mgf=padding.MGF1(hashes.SHA1()),
salt_length=padding.PSS.MAX_LENGTH
)
),
skip_message="Does not support PSS."
)
@pytest.mark.supported(
only_if=lambda backend: backend.hash_supported(hashes.SHA512()),
skip_message="Does not support SHA512."
)
def test_pss_signing_digest_too_large_for_key_size(self, backend):
private_key = RSA_KEY_512.private_key(backend)
with pytest.raises(ValueError):
private_key.signer(
padding.PSS(
mgf=padding.MGF1(hashes.SHA1()),
salt_length=padding.PSS.MAX_LENGTH
),
hashes.SHA512()
)
@pytest.mark.supported(
only_if=lambda backend: backend.rsa_padding_supported(
padding.PSS(
mgf=padding.MGF1(hashes.SHA1()),
salt_length=padding.PSS.MAX_LENGTH
)
),
skip_message="Does not support PSS."
)
def test_pss_signing_salt_length_too_long(self, backend):
private_key = RSA_KEY_512.private_key(backend)
signer = private_key.signer(
padding.PSS(
mgf=padding.MGF1(hashes.SHA1()),
salt_length=1000000
),
hashes.SHA1()
)
signer.update(b"failure coming")
with pytest.raises(ValueError):
signer.finalize()
@pytest.mark.supported(
only_if=lambda backend: backend.rsa_padding_supported(
padding.PKCS1v15()
),
skip_message="Does not support PKCS1v1.5."
)
def test_use_after_finalize(self, backend):
private_key = RSA_KEY_512.private_key(backend)
signer = private_key.signer(padding.PKCS1v15(), hashes.SHA1())
signer.update(b"sign me")
signer.finalize()
with pytest.raises(AlreadyFinalized):
signer.finalize()
with pytest.raises(AlreadyFinalized):
signer.update(b"more data")
def test_unsupported_padding(self, backend):
private_key = RSA_KEY_512.private_key(backend)
with raises_unsupported_algorithm(_Reasons.UNSUPPORTED_PADDING):
private_key.signer(DummyAsymmetricPadding(), hashes.SHA1())
def test_padding_incorrect_type(self, backend):
private_key = RSA_KEY_512.private_key(backend)
with pytest.raises(TypeError):
private_key.signer("notpadding", hashes.SHA1())
@pytest.mark.supported(
only_if=lambda backend: backend.rsa_padding_supported(
padding.PSS(mgf=padding.MGF1(hashes.SHA1()), salt_length=0)
),
skip_message="Does not support PSS."
)
def test_unsupported_pss_mgf(self, backend):
private_key = RSA_KEY_512.private_key(backend)
with raises_unsupported_algorithm(_Reasons.UNSUPPORTED_MGF):
private_key.signer(
padding.PSS(
mgf=DummyMGF(),
salt_length=padding.PSS.MAX_LENGTH
),
hashes.SHA1()
)
@pytest.mark.supported(
only_if=lambda backend: backend.rsa_padding_supported(
padding.PKCS1v15()
),
skip_message="Does not support PKCS1v1.5."
)
def test_pkcs1_digest_too_large_for_key_size(self, backend):
private_key = RSA_KEY_599.private_key(backend)
signer = private_key.signer(
padding.PKCS1v15(),
hashes.SHA512()
)
signer.update(b"failure coming")
with pytest.raises(ValueError):
signer.finalize()
@pytest.mark.supported(
only_if=lambda backend: backend.rsa_padding_supported(
padding.PKCS1v15()
),
skip_message="Does not support PKCS1v1.5."
)
def test_pkcs1_minimum_key_size(self, backend):
private_key = RSA_KEY_745.private_key(backend)
signer = private_key.signer(
padding.PKCS1v15(),
hashes.SHA512()
)
signer.update(b"no failure")
signer.finalize()
def test_sign(self, backend):
private_key = RSA_KEY_512.private_key(backend)
message = b"one little message"
pkcs = padding.PKCS1v15()
algorithm = hashes.SHA1()
signature = private_key.sign(message, pkcs, algorithm)
public_key = private_key.public_key()
verifier = public_key.verifier(signature, pkcs, algorithm)
verifier.update(message)
verifier.verify()
@pytest.mark.supported(
only_if=lambda backend: backend.rsa_padding_supported(
padding.PSS(mgf=padding.MGF1(hashes.SHA1()), salt_length=0)
),
skip_message="Does not support PSS."
)
def test_prehashed_sign(self, backend):
private_key = RSA_KEY_512.private_key(backend)
message = b"one little message"
h = hashes.Hash(hashes.SHA1(), backend)
h.update(message)
digest = h.finalize()
pss = padding.PSS(mgf=padding.MGF1(hashes.SHA1()), salt_length=0)
prehashed_alg = asym_utils.Prehashed(hashes.SHA1())
signature = private_key.sign(digest, pss, prehashed_alg)
public_key = private_key.public_key()
verifier = public_key.verifier(signature, pss, hashes.SHA1())
verifier.update(message)
verifier.verify()
@pytest.mark.supported(
only_if=lambda backend: backend.rsa_padding_supported(
padding.PSS(mgf=padding.MGF1(hashes.SHA1()), salt_length=0)
),
skip_message="Does not support PSS."
)
def test_prehashed_digest_mismatch(self, backend):
private_key = RSA_KEY_512.private_key(backend)
message = b"one little message"
h = hashes.Hash(hashes.SHA512(), backend)
h.update(message)
digest = h.finalize()
pss = padding.PSS(mgf=padding.MGF1(hashes.SHA1()), salt_length=0)
prehashed_alg = asym_utils.Prehashed(hashes.SHA1())
with pytest.raises(ValueError):
private_key.sign(digest, pss, prehashed_alg)
@pytest.mark.requires_backend_interface(interface=RSABackend)
class TestRSAVerification(object):
@pytest.mark.supported(
only_if=lambda backend: backend.rsa_padding_supported(
padding.PKCS1v15()
),
skip_message="Does not support PKCS1v1.5."
)
@pytest.mark.parametrize(
"pkcs1_example",
_flatten_pkcs1_examples(load_vectors_from_file(
os.path.join(
"asymmetric", "RSA", "pkcs1v15sign-vectors.txt"),
load_pkcs1_vectors
))
)
def test_pkcs1v15_verification(self, pkcs1_example, backend):
private, public, example = pkcs1_example
public_key = rsa.RSAPublicNumbers(
e=public["public_exponent"],
n=public["modulus"]
).public_key(backend)
verifier = public_key.verifier(
binascii.unhexlify(example["signature"]),
padding.PKCS1v15(),
hashes.SHA1()
)
verifier.update(binascii.unhexlify(example["message"]))
verifier.verify()
@pytest.mark.supported(
only_if=lambda backend: backend.rsa_padding_supported(
padding.PKCS1v15()
),
skip_message="Does not support PKCS1v1.5."
)
def test_invalid_pkcs1v15_signature_wrong_data(self, backend):
private_key = RSA_KEY_512.private_key(backend)
public_key = private_key.public_key()
signer = private_key.signer(padding.PKCS1v15(), hashes.SHA1())
signer.update(b"sign me")
signature = signer.finalize()
verifier = public_key.verifier(
signature,
padding.PKCS1v15(),
hashes.SHA1()
)
verifier.update(b"incorrect data")
with pytest.raises(InvalidSignature):
verifier.verify()
@pytest.mark.supported(
only_if=lambda backend: backend.rsa_padding_supported(
padding.PKCS1v15()
),
skip_message="Does not support PKCS1v1.5."
)
def test_invalid_pkcs1v15_signature_wrong_key(self, backend):
private_key = RSA_KEY_512.private_key(backend)
private_key2 = RSA_KEY_512_ALT.private_key(backend)
public_key = private_key2.public_key()
signer = private_key.signer(padding.PKCS1v15(), hashes.SHA1())
signer.update(b"sign me")
signature = signer.finalize()
verifier = public_key.verifier(
signature,
padding.PKCS1v15(),
hashes.SHA1()
)
verifier.update(b"sign me")
with pytest.raises(InvalidSignature):
verifier.verify()
@pytest.mark.supported(
only_if=lambda backend: backend.rsa_padding_supported(
padding.PSS(
mgf=padding.MGF1(hashes.SHA1()),
salt_length=20
)
),
skip_message="Does not support PSS."
)
@pytest.mark.parametrize(
"pkcs1_example",
_flatten_pkcs1_examples(load_vectors_from_file(
os.path.join(
"asymmetric", "RSA", "pkcs-1v2-1d2-vec", "pss-vect.txt"),
load_pkcs1_vectors
))
)
def test_pss_verification(self, pkcs1_example, backend):
private, public, example = pkcs1_example
public_key = rsa.RSAPublicNumbers(
e=public["public_exponent"],
n=public["modulus"]
).public_key(backend)
verifier = public_key.verifier(
binascii.unhexlify(example["signature"]),
padding.PSS(
mgf=padding.MGF1(algorithm=hashes.SHA1()),
salt_length=20
),
hashes.SHA1()
)
verifier.update(binascii.unhexlify(example["message"]))
verifier.verify()
@pytest.mark.supported(
only_if=lambda backend: backend.rsa_padding_supported(
padding.PSS(
mgf=padding.MGF1(hashes.SHA1()),
salt_length=padding.PSS.MAX_LENGTH
)
),
skip_message="Does not support PSS."
)
def test_invalid_pss_signature_wrong_data(self, backend):
public_key = rsa.RSAPublicNumbers(
n=int(
b"dffc2137d5e810cde9e4b4612f5796447218bab913b3fa98bdf7982e4fa6"
b"ec4d6653ef2b29fb1642b095befcbea6decc178fb4bed243d3c3592c6854"
b"6af2d3f3", 16
),
e=65537
).public_key(backend)
signature = binascii.unhexlify(
b"0e68c3649df91c5bc3665f96e157efa75b71934aaa514d91e94ca8418d100f45"
b"6f05288e58525f99666bab052adcffdf7186eb40f583bd38d98c97d3d524808b"
)
verifier = public_key.verifier(
signature,
padding.PSS(
mgf=padding.MGF1(algorithm=hashes.SHA1()),
salt_length=padding.PSS.MAX_LENGTH
),
hashes.SHA1()
)
verifier.update(b"incorrect data")
with pytest.raises(InvalidSignature):
verifier.verify()
@pytest.mark.supported(
only_if=lambda backend: backend.rsa_padding_supported(
padding.PSS(
mgf=padding.MGF1(hashes.SHA1()),
salt_length=padding.PSS.MAX_LENGTH
)
),
skip_message="Does not support PSS."
)
def test_invalid_pss_signature_wrong_key(self, backend):
signature = binascii.unhexlify(
b"3a1880165014ba6eb53cc1449d13e5132ebcc0cfd9ade6d7a2494a0503bd0826"
b"f8a46c431e0d7be0ca3e453f8b2b009e2733764da7927cc6dbe7a021437a242e"
)
public_key = rsa.RSAPublicNumbers(
n=int(
b"381201f4905d67dfeb3dec131a0fbea773489227ec7a1448c3109189ac68"
b"5a95441be90866a14c4d2e139cd16db540ec6c7abab13ffff91443fd46a8"
b"960cbb7658ded26a5c95c86f6e40384e1c1239c63e541ba221191c4dd303"
b"231b42e33c6dbddf5ec9a746f09bf0c25d0f8d27f93ee0ae5c0d723348f4"
b"030d3581e13522e1", 16
),
e=65537
).public_key(backend)
verifier = public_key.verifier(
signature,
padding.PSS(
mgf=padding.MGF1(algorithm=hashes.SHA1()),
salt_length=padding.PSS.MAX_LENGTH
),
hashes.SHA1()
)
verifier.update(b"sign me")
with pytest.raises(InvalidSignature):
verifier.verify()
@pytest.mark.supported(
only_if=lambda backend: backend.rsa_padding_supported(
padding.PSS(
mgf=padding.MGF1(hashes.SHA1()),
salt_length=padding.PSS.MAX_LENGTH
)
),
skip_message="Does not support PSS."
)
def test_invalid_pss_signature_data_too_large_for_modulus(self, backend):
signature = binascii.unhexlify(
b"cb43bde4f7ab89eb4a79c6e8dd67e0d1af60715da64429d90c716a490b799c29"
b"194cf8046509c6ed851052367a74e2e92d9b38947ed74332acb115a03fcc0222"
)
public_key = rsa.RSAPublicNumbers(
n=int(
b"381201f4905d67dfeb3dec131a0fbea773489227ec7a1448c3109189ac68"
b"5a95441be90866a14c4d2e139cd16db540ec6c7abab13ffff91443fd46a8"
b"960cbb7658ded26a5c95c86f6e40384e1c1239c63e541ba221191c4dd303"
b"231b42e33c6dbddf5ec9a746f09bf0c25d0f8d27f93ee0ae5c0d723348f4"
b"030d3581e13522", 16
),
e=65537
).public_key(backend)
verifier = public_key.verifier(
signature,
padding.PSS(
mgf=padding.MGF1(algorithm=hashes.SHA1()),
salt_length=padding.PSS.MAX_LENGTH
),
hashes.SHA1()
)
verifier.update(b"sign me")
with pytest.raises(InvalidSignature):
verifier.verify()
@pytest.mark.supported(
only_if=lambda backend: backend.rsa_padding_supported(
padding.PKCS1v15()
),
skip_message="Does not support PKCS1v1.5."
)
def test_use_after_finalize(self, backend):
private_key = RSA_KEY_512.private_key(backend)
public_key = private_key.public_key()
signer = private_key.signer(padding.PKCS1v15(), hashes.SHA1())
signer.update(b"sign me")
signature = signer.finalize()
verifier = public_key.verifier(
signature,
padding.PKCS1v15(),
hashes.SHA1()
)
verifier.update(b"sign me")
verifier.verify()
with pytest.raises(AlreadyFinalized):
verifier.verify()
with pytest.raises(AlreadyFinalized):
verifier.update(b"more data")
def test_unsupported_padding(self, backend):
private_key = RSA_KEY_512.private_key(backend)
public_key = private_key.public_key()
with raises_unsupported_algorithm(_Reasons.UNSUPPORTED_PADDING):
public_key.verifier(
b"sig", DummyAsymmetricPadding(), hashes.SHA1()
)
@pytest.mark.supported(
only_if=lambda backend: backend.rsa_padding_supported(
padding.PKCS1v15()
),
skip_message="Does not support PKCS1v1.5."
)
def test_signature_not_bytes(self, backend):
public_key = RSA_KEY_512.public_numbers.public_key(backend)
signature = 1234
with pytest.raises(TypeError):
public_key.verifier(
signature,
padding.PKCS1v15(),
hashes.SHA1()
)
def test_padding_incorrect_type(self, backend):
private_key = RSA_KEY_512.private_key(backend)
public_key = private_key.public_key()
with pytest.raises(TypeError):
public_key.verifier(b"sig", "notpadding", hashes.SHA1())
@pytest.mark.supported(
only_if=lambda backend: backend.rsa_padding_supported(
padding.PSS(mgf=padding.MGF1(hashes.SHA1()), salt_length=0)
),
skip_message="Does not support PSS."
)
def test_unsupported_pss_mgf(self, backend):
private_key = RSA_KEY_512.private_key(backend)
public_key = private_key.public_key()
with raises_unsupported_algorithm(_Reasons.UNSUPPORTED_MGF):
public_key.verifier(
b"sig",
padding.PSS(
mgf=DummyMGF(),
salt_length=padding.PSS.MAX_LENGTH
),
hashes.SHA1()
)
@pytest.mark.supported(
only_if=lambda backend: backend.rsa_padding_supported(
padding.PSS(
mgf=padding.MGF1(hashes.SHA1()),
salt_length=padding.PSS.MAX_LENGTH
)
),
skip_message="Does not support PSS."
)
@pytest.mark.supported(
only_if=lambda backend: backend.hash_supported(hashes.SHA512()),
skip_message="Does not support SHA512."
)
def test_pss_verify_digest_too_large_for_key_size(self, backend):
private_key = RSA_KEY_512.private_key(backend)
signature = binascii.unhexlify(
b"8b9a3ae9fb3b64158f3476dd8d8a1f1425444e98940e0926378baa9944d219d8"
b"534c050ef6b19b1bdc6eb4da422e89161106a6f5b5cc16135b11eb6439b646bd"
)
public_key = private_key.public_key()
with pytest.raises(ValueError):
public_key.verifier(
signature,
padding.PSS(
mgf=padding.MGF1(algorithm=hashes.SHA1()),
salt_length=padding.PSS.MAX_LENGTH
),
hashes.SHA512()
)
@pytest.mark.supported(
only_if=lambda backend: backend.rsa_padding_supported(
padding.PSS(
mgf=padding.MGF1(hashes.SHA1()),
salt_length=padding.PSS.MAX_LENGTH
)
),
skip_message="Does not support PSS."
)
def test_pss_verify_salt_length_too_long(self, backend):
signature = binascii.unhexlify(
b"8b9a3ae9fb3b64158f3476dd8d8a1f1425444e98940e0926378baa9944d219d8"
b"534c050ef6b19b1bdc6eb4da422e89161106a6f5b5cc16135b11eb6439b646bd"
)
public_key = rsa.RSAPublicNumbers(
n=int(
b"d309e4612809437548b747d7f9eb9cd3340f54fe42bb3f84a36933b0839c"
b"11b0c8b7f67e11f7252370161e31159c49c784d4bc41c42a78ce0f0b40a3"
b"ca8ffb91", 16
),
e=65537
).public_key(backend)
verifier = public_key.verifier(
signature,
padding.PSS(
mgf=padding.MGF1(
algorithm=hashes.SHA1(),
),
salt_length=1000000
),
hashes.SHA1()
)
verifier.update(b"sign me")
with pytest.raises(InvalidSignature):
verifier.verify()
def test_verify(self, backend):
private_key = RSA_KEY_512.private_key(backend)
message = b"one little message"
pkcs = padding.PKCS1v15()
algorithm = hashes.SHA1()
signer = private_key.signer(pkcs, algorithm)
signer.update(message)
signature = signer.finalize()
public_key = private_key.public_key()
public_key.verify(signature, message, pkcs, algorithm)
def test_prehashed_verify(self, backend):
private_key = RSA_KEY_512.private_key(backend)
message = b"one little message"
h = hashes.Hash(hashes.SHA1(), backend)
h.update(message)
digest = h.finalize()
prehashed_alg = asym_utils.Prehashed(hashes.SHA1())
pkcs = padding.PKCS1v15()
signature = private_key.sign(message, pkcs, hashes.SHA1())
public_key = private_key.public_key()
public_key.verify(signature, digest, pkcs, prehashed_alg)
def test_prehashed_digest_mismatch(self, backend):
public_key = RSA_KEY_512.private_key(backend).public_key()
message = b"one little message"
h = hashes.Hash(hashes.SHA1(), backend)
h.update(message)
data = h.finalize()
prehashed_alg = asym_utils.Prehashed(hashes.SHA512())
pkcs = padding.PKCS1v15()
with pytest.raises(ValueError):
public_key.verify(b"\x00" * 64, data, pkcs, prehashed_alg)
@pytest.mark.requires_backend_interface(interface=RSABackend)
class TestRSAPSSMGF1Verification(object):
test_rsa_pss_mgf1_sha1 = pytest.mark.supported(
only_if=lambda backend: backend.rsa_padding_supported(
padding.PSS(
mgf=padding.MGF1(hashes.SHA1()),
salt_length=padding.PSS.MAX_LENGTH
)
),
skip_message="Does not support PSS using MGF1 with SHA1."
)(generate_rsa_verification_test(
load_rsa_nist_vectors,
os.path.join("asymmetric", "RSA", "FIPS_186-2"),
[
"SigGenPSS_186-2.rsp",
"SigGenPSS_186-3.rsp",
"SigVerPSS_186-3.rsp",
],
hashes.SHA1(),
lambda params, hash_alg: padding.PSS(
mgf=padding.MGF1(
algorithm=hash_alg,
),
salt_length=params["salt_length"]
)
))
test_rsa_pss_mgf1_sha224 = pytest.mark.supported(
only_if=lambda backend: backend.rsa_padding_supported(
padding.PSS(
mgf=padding.MGF1(hashes.SHA224()),
salt_length=padding.PSS.MAX_LENGTH
)
),
skip_message="Does not support PSS using MGF1 with SHA224."
)(generate_rsa_verification_test(
load_rsa_nist_vectors,
os.path.join("asymmetric", "RSA", "FIPS_186-2"),
[
"SigGenPSS_186-2.rsp",
"SigGenPSS_186-3.rsp",
"SigVerPSS_186-3.rsp",
],
hashes.SHA224(),
lambda params, hash_alg: padding.PSS(
mgf=padding.MGF1(
algorithm=hash_alg,
),
salt_length=params["salt_length"]
)
))
test_rsa_pss_mgf1_sha256 = pytest.mark.supported(
only_if=lambda backend: backend.rsa_padding_supported(
padding.PSS(
mgf=padding.MGF1(hashes.SHA256()),
salt_length=padding.PSS.MAX_LENGTH
)
),
skip_message="Does not support PSS using MGF1 with SHA256."
)(generate_rsa_verification_test(
load_rsa_nist_vectors,
os.path.join("asymmetric", "RSA", "FIPS_186-2"),
[
"SigGenPSS_186-2.rsp",
"SigGenPSS_186-3.rsp",
"SigVerPSS_186-3.rsp",
],
hashes.SHA256(),
lambda params, hash_alg: padding.PSS(
mgf=padding.MGF1(
algorithm=hash_alg,
),
salt_length=params["salt_length"]
)
))
test_rsa_pss_mgf1_sha384 = pytest.mark.supported(
only_if=lambda backend: backend.rsa_padding_supported(
padding.PSS(
mgf=padding.MGF1(hashes.SHA384()),
salt_length=padding.PSS.MAX_LENGTH
)
),
skip_message="Does not support PSS using MGF1 with SHA384."
)(generate_rsa_verification_test(
load_rsa_nist_vectors,
os.path.join("asymmetric", "RSA", "FIPS_186-2"),
[
"SigGenPSS_186-2.rsp",
"SigGenPSS_186-3.rsp",
"SigVerPSS_186-3.rsp",
],
hashes.SHA384(),
lambda params, hash_alg: padding.PSS(
mgf=padding.MGF1(
algorithm=hash_alg,
),
salt_length=params["salt_length"]
)
))
test_rsa_pss_mgf1_sha512 = pytest.mark.supported(
only_if=lambda backend: backend.rsa_padding_supported(
padding.PSS(
mgf=padding.MGF1(hashes.SHA512()),
salt_length=padding.PSS.MAX_LENGTH
)
),
skip_message="Does not support PSS using MGF1 with SHA512."
)(generate_rsa_verification_test(
load_rsa_nist_vectors,
os.path.join("asymmetric", "RSA", "FIPS_186-2"),
[
"SigGenPSS_186-2.rsp",
"SigGenPSS_186-3.rsp",
"SigVerPSS_186-3.rsp",
],
hashes.SHA512(),
lambda params, hash_alg: padding.PSS(
mgf=padding.MGF1(
algorithm=hash_alg,
),
salt_length=params["salt_length"]
)
))
@pytest.mark.requires_backend_interface(interface=RSABackend)
class TestRSAPKCS1Verification(object):
test_rsa_pkcs1v15_verify_sha1 = pytest.mark.supported(
only_if=lambda backend: (
backend.hash_supported(hashes.SHA1()) and
backend.rsa_padding_supported(padding.PKCS1v15())
),
skip_message="Does not support SHA1 and PKCS1v1.5."
)(generate_rsa_verification_test(
load_rsa_nist_vectors,
os.path.join("asymmetric", "RSA", "FIPS_186-2"),
[
"SigGen15_186-2.rsp",
"SigGen15_186-3.rsp",
"SigVer15_186-3.rsp",
],
hashes.SHA1(),
lambda params, hash_alg: padding.PKCS1v15()
))
test_rsa_pkcs1v15_verify_sha224 = pytest.mark.supported(
only_if=lambda backend: (
backend.hash_supported(hashes.SHA224()) and
backend.rsa_padding_supported(padding.PKCS1v15())
),
skip_message="Does not support SHA224 and PKCS1v1.5."
)(generate_rsa_verification_test(
load_rsa_nist_vectors,
os.path.join("asymmetric", "RSA", "FIPS_186-2"),
[
"SigGen15_186-2.rsp",
"SigGen15_186-3.rsp",
"SigVer15_186-3.rsp",
],
hashes.SHA224(),
lambda params, hash_alg: padding.PKCS1v15()
))
test_rsa_pkcs1v15_verify_sha256 = pytest.mark.supported(
only_if=lambda backend: (
backend.hash_supported(hashes.SHA256()) and
backend.rsa_padding_supported(padding.PKCS1v15())
),
skip_message="Does not support SHA256 and PKCS1v1.5."
)(generate_rsa_verification_test(
load_rsa_nist_vectors,
os.path.join("asymmetric", "RSA", "FIPS_186-2"),
[
"SigGen15_186-2.rsp",
"SigGen15_186-3.rsp",
"SigVer15_186-3.rsp",
],
hashes.SHA256(),
lambda params, hash_alg: padding.PKCS1v15()
))
test_rsa_pkcs1v15_verify_sha384 = pytest.mark.supported(
only_if=lambda backend: (
backend.hash_supported(hashes.SHA384()) and
backend.rsa_padding_supported(padding.PKCS1v15())
),
skip_message="Does not support SHA384 and PKCS1v1.5."
)(generate_rsa_verification_test(
load_rsa_nist_vectors,
os.path.join("asymmetric", "RSA", "FIPS_186-2"),
[
"SigGen15_186-2.rsp",
"SigGen15_186-3.rsp",
"SigVer15_186-3.rsp",
],
hashes.SHA384(),
lambda params, hash_alg: padding.PKCS1v15()
))
test_rsa_pkcs1v15_verify_sha512 = pytest.mark.supported(
only_if=lambda backend: (
backend.hash_supported(hashes.SHA512()) and
backend.rsa_padding_supported(padding.PKCS1v15())
),
skip_message="Does not support SHA512 and PKCS1v1.5."
)(generate_rsa_verification_test(
load_rsa_nist_vectors,
os.path.join("asymmetric", "RSA", "FIPS_186-2"),
[
"SigGen15_186-2.rsp",
"SigGen15_186-3.rsp",
"SigVer15_186-3.rsp",
],
hashes.SHA512(),
lambda params, hash_alg: padding.PKCS1v15()
))
class TestPSS(object):
def test_calculate_max_pss_salt_length(self):
with pytest.raises(TypeError):
padding.calculate_max_pss_salt_length(object(), hashes.SHA256())
def test_invalid_salt_length_not_integer(self):
with pytest.raises(TypeError):
padding.PSS(
mgf=padding.MGF1(
hashes.SHA1()
),
salt_length=b"not_a_length"
)
def test_invalid_salt_length_negative_integer(self):
with pytest.raises(ValueError):
padding.PSS(
mgf=padding.MGF1(
hashes.SHA1()
),
salt_length=-1
)
def test_valid_pss_parameters(self):
algorithm = hashes.SHA1()
salt_length = algorithm.digest_size
mgf = padding.MGF1(algorithm)
pss = padding.PSS(mgf=mgf, salt_length=salt_length)
assert pss._mgf == mgf
assert pss._salt_length == salt_length
def test_valid_pss_parameters_maximum(self):
algorithm = hashes.SHA1()
mgf = padding.MGF1(algorithm)
pss = padding.PSS(mgf=mgf, salt_length=padding.PSS.MAX_LENGTH)
assert pss._mgf == mgf
assert pss._salt_length == padding.PSS.MAX_LENGTH
class TestMGF1(object):
def test_invalid_hash_algorithm(self):
with pytest.raises(TypeError):
padding.MGF1(b"not_a_hash")
def test_valid_mgf1_parameters(self):
algorithm = hashes.SHA1()
mgf = padding.MGF1(algorithm)
assert mgf._algorithm == algorithm
class TestOAEP(object):
def test_invalid_algorithm(self):
mgf = padding.MGF1(hashes.SHA1())
with pytest.raises(TypeError):
padding.OAEP(
mgf=mgf,
algorithm=b"",
label=None
)
@pytest.mark.requires_backend_interface(interface=RSABackend)
class TestRSADecryption(object):
@pytest.mark.supported(
only_if=lambda backend: backend.rsa_padding_supported(
padding.PKCS1v15()
),
skip_message="Does not support PKCS1v1.5."
)
@pytest.mark.parametrize(
"vector",
_flatten_pkcs1_examples(load_vectors_from_file(
os.path.join(
"asymmetric", "RSA", "pkcs1v15crypt-vectors.txt"),
load_pkcs1_vectors
))
)
def test_decrypt_pkcs1v15_vectors(self, vector, backend):
private, public, example = vector
skey = rsa.RSAPrivateNumbers(
p=private["p"],
q=private["q"],
d=private["private_exponent"],
dmp1=private["dmp1"],
dmq1=private["dmq1"],
iqmp=private["iqmp"],
public_numbers=rsa.RSAPublicNumbers(
e=private["public_exponent"],
n=private["modulus"]
)
).private_key(backend)
ciphertext = binascii.unhexlify(example["encryption"])
assert len(ciphertext) == math.ceil(skey.key_size / 8.0)
message = skey.decrypt(ciphertext, padding.PKCS1v15())
assert message == binascii.unhexlify(example["message"])
def test_unsupported_padding(self, backend):
private_key = RSA_KEY_512.private_key(backend)
with raises_unsupported_algorithm(_Reasons.UNSUPPORTED_PADDING):
private_key.decrypt(b"0" * 64, DummyAsymmetricPadding())
@pytest.mark.supported(
only_if=lambda backend: backend.rsa_padding_supported(
padding.PKCS1v15()
),
skip_message="Does not support PKCS1v1.5."
)
def test_decrypt_invalid_decrypt(self, backend):
private_key = RSA_KEY_512.private_key(backend)
with pytest.raises(ValueError):
private_key.decrypt(
b"\x00" * 64,
padding.PKCS1v15()
)
@pytest.mark.supported(
only_if=lambda backend: backend.rsa_padding_supported(
padding.PKCS1v15()
),
skip_message="Does not support PKCS1v1.5."
)
def test_decrypt_ciphertext_too_large(self, backend):
private_key = RSA_KEY_512.private_key(backend)
with pytest.raises(ValueError):
private_key.decrypt(
b"\x00" * 65,
padding.PKCS1v15()
)
@pytest.mark.supported(
only_if=lambda backend: backend.rsa_padding_supported(
padding.PKCS1v15()
),
skip_message="Does not support PKCS1v1.5."
)
def test_decrypt_ciphertext_too_small(self, backend):
private_key = RSA_KEY_512.private_key(backend)
ct = binascii.unhexlify(
b"50b4c14136bd198c2f3c3ed243fce036e168d56517984a263cd66492b80804f1"
b"69d210f2b9bdfb48b12f9ea05009c77da257cc600ccefe3a6283789d8ea0"
)
with pytest.raises(ValueError):
private_key.decrypt(
ct,
padding.PKCS1v15()
)
@pytest.mark.supported(
only_if=lambda backend: backend.rsa_padding_supported(
padding.OAEP(
mgf=padding.MGF1(algorithm=hashes.SHA1()),
algorithm=hashes.SHA1(),
label=None
)
),
skip_message="Does not support OAEP."
)
@pytest.mark.parametrize(
"vector",
_flatten_pkcs1_examples(load_vectors_from_file(
os.path.join(
"asymmetric", "RSA", "pkcs-1v2-1d2-vec", "oaep-vect.txt"),
load_pkcs1_vectors
))
)
def test_decrypt_oaep_vectors(self, vector, backend):
private, public, example = vector
skey = rsa.RSAPrivateNumbers(
p=private["p"],
q=private["q"],
d=private["private_exponent"],
dmp1=private["dmp1"],
dmq1=private["dmq1"],
iqmp=private["iqmp"],
public_numbers=rsa.RSAPublicNumbers(
e=private["public_exponent"],
n=private["modulus"]
)
).private_key(backend)
message = skey.decrypt(
binascii.unhexlify(example["encryption"]),
padding.OAEP(
mgf=padding.MGF1(algorithm=hashes.SHA1()),
algorithm=hashes.SHA1(),
label=None
)
)
assert message == binascii.unhexlify(example["message"])
@pytest.mark.supported(
only_if=lambda backend: backend.rsa_padding_supported(
padding.OAEP(
mgf=padding.MGF1(algorithm=hashes.SHA224()),
algorithm=hashes.SHA224(),
label=None
)
),
skip_message="Does not support OAEP using SHA224 MGF1 and SHA224 hash."
)
@pytest.mark.parametrize(
"vector",
_build_oaep_sha2_vectors()
)
def test_decrypt_oaep_sha2_vectors(self, vector, backend):
private, public, example, mgf1_alg, hash_alg = vector
skey = rsa.RSAPrivateNumbers(
p=private["p"],
q=private["q"],
d=private["private_exponent"],
dmp1=private["dmp1"],
dmq1=private["dmq1"],
iqmp=private["iqmp"],
public_numbers=rsa.RSAPublicNumbers(
e=private["public_exponent"],
n=private["modulus"]
)
).private_key(backend)
message = skey.decrypt(
binascii.unhexlify(example["encryption"]),
padding.OAEP(
mgf=padding.MGF1(algorithm=mgf1_alg),
algorithm=hash_alg,
label=None
)
)
assert message == binascii.unhexlify(example["message"])
@pytest.mark.supported(
only_if=lambda backend: backend.rsa_padding_supported(
padding.OAEP(
mgf=padding.MGF1(algorithm=hashes.SHA1()),
algorithm=hashes.SHA1(),
label=None
)
),
skip_message="Does not support OAEP."
)
def test_invalid_oaep_decryption(self, backend):
# More recent versions of OpenSSL may raise RSA_R_OAEP_DECODING_ERROR
# This test triggers it and confirms that we properly handle it. Other
# backends should also return the proper ValueError.
private_key = RSA_KEY_512.private_key(backend)
ciphertext = private_key.public_key().encrypt(
b'secure data',
padding.OAEP(
mgf=padding.MGF1(algorithm=hashes.SHA1()),
algorithm=hashes.SHA1(),
label=None
)
)
private_key_alt = RSA_KEY_512_ALT.private_key(backend)
with pytest.raises(ValueError):
private_key_alt.decrypt(
ciphertext,
padding.OAEP(
mgf=padding.MGF1(algorithm=hashes.SHA1()),
algorithm=hashes.SHA1(),
label=None
)
)
@pytest.mark.supported(
only_if=lambda backend: backend.rsa_padding_supported(
padding.OAEP(
mgf=padding.MGF1(algorithm=hashes.SHA1()),
algorithm=hashes.SHA1(),
label=None
)
),
skip_message="Does not support OAEP."
)
    def test_invalid_oaep_decryption_data_too_large_for_modulus(self, backend):
key = RSA_KEY_2048_ALT.private_key(backend)
ciphertext = (
b'\xb1ph\xc0\x0b\x1a|\xe6\xda\xea\xb5\xd7%\x94\x07\xf96\xfb\x96'
b'\x11\x9b\xdc4\xea.-\x91\x80\x13S\x94\x04m\xe9\xc5/F\x1b\x9b:\\'
b'\x1d\x04\x16ML\xae\xb32J\x01yuA\xbb\x83\x1c\x8f\xf6\xa5\xdbp\xcd'
b'\nx\xc7\xf6\x15\xb2/\xdcH\xae\xe7\x13\x13by\r4t\x99\x0fc\x1f\xc1'
b'\x1c\xb1\xdd\xc5\x08\xd1\xee\xa1XQ\xb8H@L5v\xc3\xaf\xf2\r\x97'
b'\xed\xaa\xe7\xf1\xd4xai\xd3\x83\xd9\xaa9\xbfx\xe1\x87F \x01\xff'
b'L\xccv}ae\xb3\xfa\xf2B\xb8\xf9\x04H\x94\x85\xcb\x86\xbb\\ghx!W31'
b'\xc7;t\na_E\xc2\x16\xb0;\xa1\x18\t\x1b\xe1\xdb\x80>)\x15\xc6\x12'
b'\xcb\xeeg`\x8b\x9b\x1b\x05y4\xb0\x84M6\xcd\xa1\x827o\xfd\x96\xba'
b'Z#\x8d\xae\x01\xc9\xf2\xb6\xde\x89{8&eQ\x1e8\x03\x01#?\xb66\\'
b'\xad.\xe9\xfa!\x95 c{\xcaz\xe0*\tP\r\x91\x9a)B\xb5\xadN\xf4$\x83'
b'\t\xb5u\xab\x19\x99'
)
with pytest.raises(ValueError):
key.decrypt(
ciphertext,
padding.OAEP(
algorithm=hashes.SHA1(),
mgf=padding.MGF1(hashes.SHA1()),
label=None
)
)
def test_unsupported_oaep_mgf(self, backend):
private_key = RSA_KEY_512.private_key(backend)
with raises_unsupported_algorithm(_Reasons.UNSUPPORTED_MGF):
private_key.decrypt(
b"0" * 64,
padding.OAEP(
mgf=DummyMGF(),
algorithm=hashes.SHA1(),
label=None
)
)
@pytest.mark.requires_backend_interface(interface=RSABackend)
class TestRSAEncryption(object):
@pytest.mark.supported(
only_if=lambda backend: backend.rsa_padding_supported(
padding.OAEP(
mgf=padding.MGF1(algorithm=hashes.SHA1()),
algorithm=hashes.SHA1(),
label=None
)
),
skip_message="Does not support OAEP."
)
@pytest.mark.parametrize(
("key_data", "pad"),
itertools.product(
(RSA_KEY_1024, RSA_KEY_1025, RSA_KEY_1026, RSA_KEY_1027,
RSA_KEY_1028, RSA_KEY_1029, RSA_KEY_1030, RSA_KEY_1031,
RSA_KEY_1536, RSA_KEY_2048),
[
padding.OAEP(
mgf=padding.MGF1(algorithm=hashes.SHA1()),
algorithm=hashes.SHA1(),
label=None
)
]
)
)
def test_rsa_encrypt_oaep(self, key_data, pad, backend):
private_key = key_data.private_key(backend)
pt = b"encrypt me!"
public_key = private_key.public_key()
ct = public_key.encrypt(pt, pad)
assert ct != pt
assert len(ct) == math.ceil(public_key.key_size / 8.0)
recovered_pt = private_key.decrypt(ct, pad)
assert recovered_pt == pt
@pytest.mark.supported(
only_if=lambda backend: backend.rsa_padding_supported(
padding.OAEP(
mgf=padding.MGF1(algorithm=hashes.SHA256()),
algorithm=hashes.SHA512(),
label=None
)
),
skip_message="Does not support OAEP using SHA256 MGF1 and SHA512 hash."
)
@pytest.mark.parametrize(
("mgf1hash", "oaephash"),
itertools.product([
hashes.SHA1(),
hashes.SHA224(),
hashes.SHA256(),
hashes.SHA384(),
hashes.SHA512(),
], [
hashes.SHA1(),
hashes.SHA224(),
hashes.SHA256(),
hashes.SHA384(),
hashes.SHA512(),
])
)
def test_rsa_encrypt_oaep_sha2(self, mgf1hash, oaephash, backend):
pad = padding.OAEP(
mgf=padding.MGF1(algorithm=mgf1hash),
algorithm=oaephash,
label=None
)
private_key = RSA_KEY_2048.private_key(backend)
pt = b"encrypt me using sha2 hashes!"
public_key = private_key.public_key()
ct = public_key.encrypt(pt, pad)
assert ct != pt
assert len(ct) == math.ceil(public_key.key_size / 8.0)
recovered_pt = private_key.decrypt(ct, pad)
assert recovered_pt == pt
@pytest.mark.supported(
only_if=lambda backend: backend.rsa_padding_supported(
padding.PKCS1v15()
),
skip_message="Does not support PKCS1v1.5."
)
@pytest.mark.parametrize(
("key_data", "pad"),
itertools.product(
(RSA_KEY_1024, RSA_KEY_1025, RSA_KEY_1026, RSA_KEY_1027,
RSA_KEY_1028, RSA_KEY_1029, RSA_KEY_1030, RSA_KEY_1031,
RSA_KEY_1536, RSA_KEY_2048),
[padding.PKCS1v15()]
)
)
def test_rsa_encrypt_pkcs1v15(self, key_data, pad, backend):
private_key = key_data.private_key(backend)
pt = b"encrypt me!"
public_key = private_key.public_key()
ct = public_key.encrypt(pt, pad)
assert ct != pt
assert len(ct) == math.ceil(public_key.key_size / 8.0)
recovered_pt = private_key.decrypt(ct, pad)
assert recovered_pt == pt
@pytest.mark.parametrize(
("key_data", "pad"),
itertools.product(
(RSA_KEY_1024, RSA_KEY_1025, RSA_KEY_1026, RSA_KEY_1027,
RSA_KEY_1028, RSA_KEY_1029, RSA_KEY_1030, RSA_KEY_1031,
RSA_KEY_1536, RSA_KEY_2048),
(
padding.OAEP(
mgf=padding.MGF1(algorithm=hashes.SHA1()),
algorithm=hashes.SHA1(),
label=None
),
padding.PKCS1v15()
)
)
)
def test_rsa_encrypt_key_too_small(self, key_data, pad, backend):
private_key = key_data.private_key(backend)
public_key = private_key.public_key()
# Slightly smaller than the key size but not enough for padding.
with pytest.raises(ValueError):
public_key.encrypt(
b"\x00" * (private_key.key_size // 8 - 1),
pad
)
# Larger than the key size.
with pytest.raises(ValueError):
public_key.encrypt(
b"\x00" * (private_key.key_size // 8 + 5),
pad
)
def test_unsupported_padding(self, backend):
private_key = RSA_KEY_512.private_key(backend)
public_key = private_key.public_key()
with raises_unsupported_algorithm(_Reasons.UNSUPPORTED_PADDING):
public_key.encrypt(b"somedata", DummyAsymmetricPadding())
with pytest.raises(TypeError):
public_key.encrypt(b"somedata", padding=object())
def test_unsupported_oaep_mgf(self, backend):
private_key = RSA_KEY_512.private_key(backend)
public_key = private_key.public_key()
with raises_unsupported_algorithm(_Reasons.UNSUPPORTED_MGF):
public_key.encrypt(
b"ciphertext",
padding.OAEP(
mgf=DummyMGF(),
algorithm=hashes.SHA1(),
label=None
)
)
@pytest.mark.requires_backend_interface(interface=RSABackend)
class TestRSANumbers(object):
def test_rsa_public_numbers(self):
public_numbers = rsa.RSAPublicNumbers(e=1, n=15)
assert public_numbers.e == 1
assert public_numbers.n == 15
def test_rsa_private_numbers(self):
public_numbers = rsa.RSAPublicNumbers(e=1, n=15)
private_numbers = rsa.RSAPrivateNumbers(
p=3,
q=5,
d=1,
dmp1=1,
dmq1=1,
iqmp=2,
public_numbers=public_numbers
)
assert private_numbers.p == 3
assert private_numbers.q == 5
assert private_numbers.d == 1
assert private_numbers.dmp1 == 1
assert private_numbers.dmq1 == 1
assert private_numbers.iqmp == 2
assert private_numbers.public_numbers == public_numbers
def test_rsa_private_numbers_create_key(self, backend):
private_key = RSA_KEY_1024.private_key(backend)
assert private_key
def test_rsa_public_numbers_create_key(self, backend):
public_key = RSA_KEY_1024.public_numbers.public_key(backend)
assert public_key
def test_public_numbers_invalid_types(self):
with pytest.raises(TypeError):
rsa.RSAPublicNumbers(e=None, n=15)
with pytest.raises(TypeError):
rsa.RSAPublicNumbers(e=1, n=None)
def test_private_numbers_invalid_types(self):
public_numbers = rsa.RSAPublicNumbers(e=1, n=15)
with pytest.raises(TypeError):
rsa.RSAPrivateNumbers(
p=None,
q=5,
d=1,
dmp1=1,
dmq1=1,
iqmp=2,
public_numbers=public_numbers
)
with pytest.raises(TypeError):
rsa.RSAPrivateNumbers(
p=3,
q=None,
d=1,
dmp1=1,
dmq1=1,
iqmp=2,
public_numbers=public_numbers
)
with pytest.raises(TypeError):
rsa.RSAPrivateNumbers(
p=3,
q=5,
d=None,
dmp1=1,
dmq1=1,
iqmp=2,
public_numbers=public_numbers
)
with pytest.raises(TypeError):
rsa.RSAPrivateNumbers(
p=3,
q=5,
d=1,
dmp1=None,
dmq1=1,
iqmp=2,
public_numbers=public_numbers
)
with pytest.raises(TypeError):
rsa.RSAPrivateNumbers(
p=3,
q=5,
d=1,
dmp1=1,
dmq1=None,
iqmp=2,
public_numbers=public_numbers
)
with pytest.raises(TypeError):
rsa.RSAPrivateNumbers(
p=3,
q=5,
d=1,
dmp1=1,
dmq1=1,
iqmp=None,
public_numbers=public_numbers
)
with pytest.raises(TypeError):
rsa.RSAPrivateNumbers(
p=3,
q=5,
d=1,
dmp1=1,
dmq1=1,
iqmp=2,
public_numbers=None
)
def test_invalid_public_numbers_argument_values(self, backend):
# Start with public_exponent=7, modulus=15. Then change one value at a
# time to test the bounds.
# Test a modulus < 3.
with pytest.raises(ValueError):
rsa.RSAPublicNumbers(e=7, n=2).public_key(backend)
# Test a public_exponent < 3
with pytest.raises(ValueError):
rsa.RSAPublicNumbers(e=1, n=15).public_key(backend)
# Test a public_exponent > modulus
with pytest.raises(ValueError):
rsa.RSAPublicNumbers(e=17, n=15).public_key(backend)
# Test a public_exponent that is not odd.
with pytest.raises(ValueError):
rsa.RSAPublicNumbers(e=14, n=15).public_key(backend)
def test_invalid_private_numbers_argument_values(self, backend):
# Start with p=3, q=11, private_exponent=3, public_exponent=7,
# modulus=33, dmp1=1, dmq1=3, iqmp=2. Then change one value at
# a time to test the bounds.
# Test a modulus < 3.
with pytest.raises(ValueError):
rsa.RSAPrivateNumbers(
p=3,
q=11,
d=3,
dmp1=1,
dmq1=3,
iqmp=2,
public_numbers=rsa.RSAPublicNumbers(
e=7,
n=2
)
).private_key(backend)
# Test a modulus != p * q.
with pytest.raises(ValueError):
rsa.RSAPrivateNumbers(
p=3,
q=11,
d=3,
dmp1=1,
dmq1=3,
iqmp=2,
public_numbers=rsa.RSAPublicNumbers(
e=7,
n=35
)
).private_key(backend)
# Test a p > modulus.
with pytest.raises(ValueError):
rsa.RSAPrivateNumbers(
p=37,
q=11,
d=3,
dmp1=1,
dmq1=3,
iqmp=2,
public_numbers=rsa.RSAPublicNumbers(
e=7,
n=33
)
).private_key(backend)
# Test a q > modulus.
with pytest.raises(ValueError):
rsa.RSAPrivateNumbers(
p=3,
q=37,
d=3,
dmp1=1,
dmq1=3,
iqmp=2,
public_numbers=rsa.RSAPublicNumbers(
e=7,
n=33
)
).private_key(backend)
# Test a dmp1 > modulus.
with pytest.raises(ValueError):
rsa.RSAPrivateNumbers(
p=3,
q=11,
d=3,
dmp1=35,
dmq1=3,
iqmp=2,
public_numbers=rsa.RSAPublicNumbers(
e=7,
n=33
)
).private_key(backend)
# Test a dmq1 > modulus.
with pytest.raises(ValueError):
rsa.RSAPrivateNumbers(
p=3,
q=11,
d=3,
dmp1=1,
dmq1=35,
iqmp=2,
public_numbers=rsa.RSAPublicNumbers(
e=7,
n=33
)
).private_key(backend)
# Test an iqmp > modulus.
with pytest.raises(ValueError):
rsa.RSAPrivateNumbers(
p=3,
q=11,
d=3,
dmp1=1,
dmq1=3,
iqmp=35,
public_numbers=rsa.RSAPublicNumbers(
e=7,
n=33
)
).private_key(backend)
# Test a private_exponent > modulus
with pytest.raises(ValueError):
rsa.RSAPrivateNumbers(
p=3,
q=11,
d=37,
dmp1=1,
dmq1=3,
iqmp=2,
public_numbers=rsa.RSAPublicNumbers(
e=7,
n=33
)
).private_key(backend)
# Test a public_exponent < 3
with pytest.raises(ValueError):
rsa.RSAPrivateNumbers(
p=3,
q=11,
d=3,
dmp1=1,
dmq1=3,
iqmp=2,
public_numbers=rsa.RSAPublicNumbers(
e=1,
n=33
)
).private_key(backend)
# Test a public_exponent > modulus
with pytest.raises(ValueError):
rsa.RSAPrivateNumbers(
p=3,
q=11,
d=3,
dmp1=1,
dmq1=3,
                iqmp=2,
public_numbers=rsa.RSAPublicNumbers(
e=65537,
n=33
)
).private_key(backend)
# Test a public_exponent that is not odd.
with pytest.raises(ValueError):
rsa.RSAPrivateNumbers(
p=3,
q=11,
d=3,
dmp1=1,
dmq1=3,
iqmp=2,
public_numbers=rsa.RSAPublicNumbers(
e=6,
n=33
)
).private_key(backend)
# Test a dmp1 that is not odd.
with pytest.raises(ValueError):
rsa.RSAPrivateNumbers(
p=3,
q=11,
d=3,
dmp1=2,
dmq1=3,
iqmp=2,
public_numbers=rsa.RSAPublicNumbers(
e=7,
n=33
)
).private_key(backend)
# Test a dmq1 that is not odd.
with pytest.raises(ValueError):
rsa.RSAPrivateNumbers(
p=3,
q=11,
d=3,
dmp1=1,
dmq1=4,
iqmp=2,
public_numbers=rsa.RSAPublicNumbers(
e=7,
n=33
)
).private_key(backend)
def test_public_number_repr(self):
num = RSAPublicNumbers(1, 1)
assert repr(num) == "<RSAPublicNumbers(e=1, n=1)>"
class TestRSANumbersEquality(object):
def test_public_numbers_eq(self):
num = RSAPublicNumbers(1, 2)
num2 = RSAPublicNumbers(1, 2)
assert num == num2
def test_public_numbers_ne(self):
num = RSAPublicNumbers(1, 2)
assert num != RSAPublicNumbers(2, 2)
assert num != RSAPublicNumbers(1, 3)
assert num != object()
def test_private_numbers_eq(self):
pub = RSAPublicNumbers(1, 2)
num = RSAPrivateNumbers(1, 2, 3, 4, 5, 6, pub)
pub2 = RSAPublicNumbers(1, 2)
num2 = RSAPrivateNumbers(1, 2, 3, 4, 5, 6, pub2)
assert num == num2
def test_private_numbers_ne(self):
pub = RSAPublicNumbers(1, 2)
num = RSAPrivateNumbers(1, 2, 3, 4, 5, 6, pub)
assert num != RSAPrivateNumbers(
1, 2, 3, 4, 5, 7, RSAPublicNumbers(1, 2)
)
assert num != RSAPrivateNumbers(
1, 2, 3, 4, 4, 6, RSAPublicNumbers(1, 2)
)
assert num != RSAPrivateNumbers(
1, 2, 3, 5, 5, 6, RSAPublicNumbers(1, 2)
)
assert num != RSAPrivateNumbers(
1, 2, 4, 4, 5, 6, RSAPublicNumbers(1, 2)
)
assert num != RSAPrivateNumbers(
1, 3, 3, 4, 5, 6, RSAPublicNumbers(1, 2)
)
assert num != RSAPrivateNumbers(
2, 2, 3, 4, 5, 6, RSAPublicNumbers(1, 2)
)
assert num != RSAPrivateNumbers(
1, 2, 3, 4, 5, 6, RSAPublicNumbers(2, 2)
)
assert num != RSAPrivateNumbers(
1, 2, 3, 4, 5, 6, RSAPublicNumbers(1, 3)
)
assert num != object()
def test_public_numbers_hash(self):
pub1 = RSAPublicNumbers(3, 17)
pub2 = RSAPublicNumbers(3, 17)
pub3 = RSAPublicNumbers(7, 21)
assert hash(pub1) == hash(pub2)
assert hash(pub1) != hash(pub3)
def test_private_numbers_hash(self):
priv1 = RSAPrivateNumbers(1, 2, 3, 4, 5, 6, RSAPublicNumbers(1, 2))
priv2 = RSAPrivateNumbers(1, 2, 3, 4, 5, 6, RSAPublicNumbers(1, 2))
priv3 = RSAPrivateNumbers(1, 2, 3, 4, 5, 6, RSAPublicNumbers(1, 3))
assert hash(priv1) == hash(priv2)
assert hash(priv1) != hash(priv3)
class TestRSAPrimeFactorRecovery(object):
@pytest.mark.parametrize(
"vector",
_flatten_pkcs1_examples(load_vectors_from_file(
os.path.join(
"asymmetric", "RSA", "pkcs1v15crypt-vectors.txt"),
load_pkcs1_vectors
))
)
def test_recover_prime_factors(self, vector):
private, public, example = vector
p, q = rsa.rsa_recover_prime_factors(
private["modulus"],
private["public_exponent"],
private["private_exponent"]
)
# Unfortunately there is no convention on which prime should be p
# and which one q. The function we use always makes p > q, but the
# NIST vectors are not so consistent. Accordingly, we verify we've
# recovered the proper (p, q) by sorting them and asserting on that.
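        # Worked example: for n = 3233 (= 61 * 53), e = 17, d = 2753, recovery
        # yields p = 61 and q = 53, returned with p > q.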
assert sorted([p, q]) == sorted([private["p"], private["q"]])
assert p > q
def test_invalid_recover_prime_factors(self):
with pytest.raises(ValueError):
rsa.rsa_recover_prime_factors(34, 3, 7)
@pytest.mark.requires_backend_interface(interface=RSABackend)
@pytest.mark.requires_backend_interface(interface=PEMSerializationBackend)
class TestRSAPrivateKeySerialization(object):
@pytest.mark.parametrize(
("fmt", "password"),
itertools.product(
[
serialization.PrivateFormat.TraditionalOpenSSL,
serialization.PrivateFormat.PKCS8
],
[
b"s",
b"longerpassword",
b"!*$&(@#$*&($T@%_somesymbols",
b"\x01" * 1000,
]
)
)
def test_private_bytes_encrypted_pem(self, backend, fmt, password):
key = RSA_KEY_2048.private_key(backend)
serialized = key.private_bytes(
serialization.Encoding.PEM,
fmt,
serialization.BestAvailableEncryption(password)
)
loaded_key = serialization.load_pem_private_key(
serialized, password, backend
)
loaded_priv_num = loaded_key.private_numbers()
priv_num = key.private_numbers()
assert loaded_priv_num == priv_num
@pytest.mark.parametrize(
("fmt", "password"),
[
[serialization.PrivateFormat.PKCS8, b"s"],
[serialization.PrivateFormat.PKCS8, b"longerpassword"],
[serialization.PrivateFormat.PKCS8, b"!*$&(@#$*&($T@%_somesymbol"],
[serialization.PrivateFormat.PKCS8, b"\x01" * 1000]
]
)
def test_private_bytes_encrypted_der(self, backend, fmt, password):
key = RSA_KEY_2048.private_key(backend)
serialized = key.private_bytes(
serialization.Encoding.DER,
fmt,
serialization.BestAvailableEncryption(password)
)
loaded_key = serialization.load_der_private_key(
serialized, password, backend
)
loaded_priv_num = loaded_key.private_numbers()
priv_num = key.private_numbers()
assert loaded_priv_num == priv_num
@pytest.mark.parametrize(
("encoding", "fmt", "loader_func"),
[
[
serialization.Encoding.PEM,
serialization.PrivateFormat.TraditionalOpenSSL,
serialization.load_pem_private_key
],
[
serialization.Encoding.DER,
serialization.PrivateFormat.TraditionalOpenSSL,
serialization.load_der_private_key
],
[
serialization.Encoding.PEM,
serialization.PrivateFormat.PKCS8,
serialization.load_pem_private_key
],
[
serialization.Encoding.DER,
serialization.PrivateFormat.PKCS8,
serialization.load_der_private_key
],
]
)
def test_private_bytes_unencrypted(self, backend, encoding, fmt,
loader_func):
key = RSA_KEY_2048.private_key(backend)
serialized = key.private_bytes(
encoding, fmt, serialization.NoEncryption()
)
loaded_key = loader_func(serialized, None, backend)
loaded_priv_num = loaded_key.private_numbers()
priv_num = key.private_numbers()
assert loaded_priv_num == priv_num
@pytest.mark.parametrize(
("key_path", "encoding", "loader_func"),
[
[
os.path.join(
"asymmetric",
"Traditional_OpenSSL_Serialization",
"testrsa.pem"
),
serialization.Encoding.PEM,
serialization.load_pem_private_key
],
[
os.path.join("asymmetric", "DER_Serialization", "testrsa.der"),
serialization.Encoding.DER,
serialization.load_der_private_key
],
]
)
def test_private_bytes_traditional_openssl_unencrypted(
self, backend, key_path, encoding, loader_func
):
key_bytes = load_vectors_from_file(
key_path, lambda pemfile: pemfile.read(), mode="rb"
)
key = loader_func(key_bytes, None, backend)
serialized = key.private_bytes(
encoding,
serialization.PrivateFormat.TraditionalOpenSSL,
serialization.NoEncryption()
)
assert serialized == key_bytes
def test_private_bytes_traditional_der_encrypted_invalid(self, backend):
key = RSA_KEY_2048.private_key(backend)
with pytest.raises(ValueError):
key.private_bytes(
serialization.Encoding.DER,
serialization.PrivateFormat.TraditionalOpenSSL,
serialization.BestAvailableEncryption(b"password")
)
def test_private_bytes_invalid_encoding(self, backend):
key = RSA_KEY_2048.private_key(backend)
with pytest.raises(TypeError):
key.private_bytes(
"notencoding",
serialization.PrivateFormat.PKCS8,
serialization.NoEncryption()
)
def test_private_bytes_invalid_format(self, backend):
key = RSA_KEY_2048.private_key(backend)
with pytest.raises(TypeError):
key.private_bytes(
serialization.Encoding.PEM,
"invalidformat",
serialization.NoEncryption()
)
def test_private_bytes_invalid_encryption_algorithm(self, backend):
key = RSA_KEY_2048.private_key(backend)
with pytest.raises(TypeError):
key.private_bytes(
serialization.Encoding.PEM,
serialization.PrivateFormat.TraditionalOpenSSL,
"notanencalg"
)
def test_private_bytes_unsupported_encryption_type(self, backend):
key = RSA_KEY_2048.private_key(backend)
with pytest.raises(ValueError):
key.private_bytes(
serialization.Encoding.PEM,
serialization.PrivateFormat.TraditionalOpenSSL,
DummyKeySerializationEncryption()
)
@pytest.mark.requires_backend_interface(interface=RSABackend)
@pytest.mark.requires_backend_interface(interface=PEMSerializationBackend)
class TestRSAPEMPublicKeySerialization(object):
@pytest.mark.parametrize(
("key_path", "loader_func", "encoding", "format"),
[
(
os.path.join("asymmetric", "public", "PKCS1", "rsa.pub.pem"),
serialization.load_pem_public_key,
serialization.Encoding.PEM,
serialization.PublicFormat.PKCS1,
), (
os.path.join("asymmetric", "public", "PKCS1", "rsa.pub.der"),
serialization.load_der_public_key,
serialization.Encoding.DER,
serialization.PublicFormat.PKCS1,
), (
os.path.join("asymmetric", "PKCS8", "unenc-rsa-pkcs8.pub.pem"),
serialization.load_pem_public_key,
serialization.Encoding.PEM,
serialization.PublicFormat.SubjectPublicKeyInfo,
), (
os.path.join(
"asymmetric",
"DER_Serialization",
"unenc-rsa-pkcs8.pub.der"
),
serialization.load_der_public_key,
serialization.Encoding.DER,
serialization.PublicFormat.SubjectPublicKeyInfo,
)
]
)
def test_public_bytes_match(self, key_path, loader_func, encoding, format,
backend):
key_bytes = load_vectors_from_file(
key_path, lambda pemfile: pemfile.read(), mode="rb"
)
key = loader_func(key_bytes, backend)
serialized = key.public_bytes(encoding, format)
assert serialized == key_bytes
def test_public_bytes_openssh(self, backend):
key_bytes = load_vectors_from_file(
os.path.join("asymmetric", "public", "PKCS1", "rsa.pub.pem"),
lambda pemfile: pemfile.read(), mode="rb"
)
key = serialization.load_pem_public_key(key_bytes, backend)
ssh_bytes = key.public_bytes(
serialization.Encoding.OpenSSH, serialization.PublicFormat.OpenSSH
)
assert ssh_bytes == (
b"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAAAgQC7JHoJfg6yNzLMOWet8Z49a4KD"
b"0dCspMAYvo2YAMB7/wdEycocujbhJ2n/seONi+5XqTqqFkM5VBl8rmkkFPZk/7x0"
b"xmdsTPECSWnHK+HhoaNDFPR3j8jQhVo1laxiqcEhAHegi5cwtFosuJAvSKAFKEvy"
b"D43si00DQnXWrYHAEQ=="
)
with pytest.raises(ValueError):
key.public_bytes(
serialization.Encoding.PEM, serialization.PublicFormat.OpenSSH
)
with pytest.raises(ValueError):
key.public_bytes(
serialization.Encoding.DER, serialization.PublicFormat.OpenSSH
)
with pytest.raises(ValueError):
key.public_bytes(
serialization.Encoding.OpenSSH,
serialization.PublicFormat.PKCS1,
)
with pytest.raises(ValueError):
key.public_bytes(
serialization.Encoding.OpenSSH,
serialization.PublicFormat.SubjectPublicKeyInfo,
)
def test_public_bytes_invalid_encoding(self, backend):
key = RSA_KEY_2048.private_key(backend).public_key()
with pytest.raises(TypeError):
key.public_bytes("notencoding", serialization.PublicFormat.PKCS1)
def test_public_bytes_invalid_format(self, backend):
key = RSA_KEY_2048.private_key(backend).public_key()
with pytest.raises(TypeError):
key.public_bytes(serialization.Encoding.PEM, "invalidformat")
|
{
"content_hash": "4e521aa24e8b8c2c80d56b1039bfd651",
"timestamp": "",
"source": "github",
"line_count": 2314,
"max_line_length": 79,
"avg_line_length": 35.620138288677616,
"alnum_prop": 0.5395329087048832,
"repo_name": "hipnusleo/laserjet",
"id": "ee9353209b99d78c8ea7b9bdd6807e6847e36c76",
"size": "82608",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "resource/pypi/cryptography-1.7.1/tests/hazmat/primitives/test_rsa.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "3096"
},
{
"name": "Batchfile",
"bytes": "13184"
},
{
"name": "C",
"bytes": "672858"
},
{
"name": "C++",
"bytes": "9678"
},
{
"name": "Go",
"bytes": "6671"
},
{
"name": "HTML",
"bytes": "850945"
},
{
"name": "Java",
"bytes": "14456"
},
{
"name": "Makefile",
"bytes": "14373"
},
{
"name": "Python",
"bytes": "5156663"
}
],
"symlink_target": ""
}
|
import json
import os
from shutil import copytree
from subprocess import CalledProcessError, check_call, check_output
import pytest
import pkgpanda.build
import pkgpanda.build.cli
from pkgpanda.util import expect_fs, is_windows
def get_tar_contents(filename):
return set(check_output(["tar", "-tf", filename]).decode().splitlines())
def package(resource_dir, name, tmpdir):
# Build once using command line interface
pkg_dir = tmpdir.join(name)
copytree(resource_dir, str(pkg_dir))
with pkg_dir.as_cwd():
check_call(["mkpanda"])
# Build once using programmatic interface
pkg_dir_2 = str(tmpdir.join("api-build/" + name))
copytree(resource_dir, pkg_dir_2)
package_store = pkgpanda.build.PackageStore(str(tmpdir.join("api-build")), None)
return pkgpanda.build.build_package_variants(package_store, name, True)
# TODO: DCOS_OSS-3470 - muted Windows tests requiring investigation
@pytest.mark.skipif(is_windows, reason="Fails on windows, cause unknown")
def test_build(tmpdir):
package("resources/base", "base", tmpdir)
# TODO(cmaloney): Check the package exists with the right contents.
# TODO: DCOS_OSS-3470 - muted Windows tests requiring investigation
@pytest.mark.skipif(is_windows, reason="Fails on windows, cause unknown")
def test_build_bad_sha1(tmpdir):
package("resources/base", "base", tmpdir)
# TODO: DCOS_OSS-3470 - muted Windows tests requiring investigation
@pytest.mark.skipif(is_windows, reason="Fails on windows, cause unknown")
def test_hash_build_script(tmpdir):
# hashcheck1 is the base package we're comparing against.
pkg_path1 = str(package("resources/buildhash/hashcheck1", "hashcheck", tmpdir.join("hashcheck1")))
    # hashcheck2 is identical to hashcheck1 other than a tweak to the build script.
pkg_path2 = str(package("resources/buildhash/hashcheck2", "hashcheck", tmpdir.join("hashcheck2")))
# hashcheck3 is identical to hashcheck1 in every way other than the directory name.
pkg_path3 = str(package("resources/buildhash/hashcheck3", "hashcheck", tmpdir.join("hashcheck3")))
assert os.path.basename(pkg_path1) == os.path.basename(pkg_path3)
assert os.path.basename(pkg_path1) != os.path.basename(pkg_path2)
# TODO: DCOS_OSS-3470 - muted Windows tests requiring investigation
@pytest.mark.skipif(is_windows, reason="Fails on windows, cause unknown")
def test_url_extract_tar(tmpdir):
package("resources/url_extract-tar", "url_extract-tar", tmpdir)
# TODO: DCOS_OSS-3470 - muted Windows tests requiring investigation
@pytest.mark.skipif(is_windows, reason="Fails on windows, cause unknown")
def test_url_extract_zip(tmpdir):
package("resources/url_extract-zip", "url_extract-zip", tmpdir)
# TODO: DCOS_OSS-3470 - muted Windows tests requiring investigation
@pytest.mark.skipif(is_windows, reason="Fails on windows, cause unknown")
def test_single_source_with_extra(tmpdir):
package("resources/single_source_extra", "single_source_extra", tmpdir)
# remove the built package tarball because that has a variable filename
cache_dir = tmpdir.join("cache/packages/single_source_extra/")
packages = [str(x) for x in cache_dir.visit(fil="single_source_extra*.tar.xz")]
assert len(packages) == 1, "should have built exactly one package: {}".format(packages)
os.remove(packages[0])
expect_fs(str(cache_dir), {
"latest": None,
"single_source_extra": ["foo"]})
# TODO: DCOS_OSS-3470 - muted Windows tests requiring investigation
@pytest.mark.skipif(is_windows, reason="Fails on windows, cause unknown")
def test_bad_buildinfo(tmpdir):
def tmp_pkg(name, buildinfo):
pkg_dir = tmpdir.join(name)
pkg_dir.ensure(dir=True)
pkg_dir.join('buildinfo.json').write(json.dumps(buildinfo).encode())
pkg_dir.join('build').ensure()
with pytest.raises(pkgpanda.build.BuildError):
package_store = pkgpanda.build.PackageStore(str(tmpdir), None)
pkgpanda.build.build_package_variants(package_store, name, True)
package(str(pkg_dir), name, tmpdir.join('build'))
tmp_pkg('unknown_field', {'user': 'dcos_user', 'docker': 'ubuntu:14.04.4'})
tmp_pkg('disallowed_field', {'name': 'disallowed_field', 'docker': 'ubuntu:14.04.4'})
# TODO(cmaloney): Re-enable once we build a dcos-builder docker as part of this test. Currently the
# default docker is dcos-builder, and that isn't built here so these tests fail.
# def test_no_buildinfo(tmpdir):
# package("resources/no_buildinfo", "no_buildinfo", tmpdir)
def test_restricted_services(tmpdir):
with pytest.raises(CalledProcessError):
package("resources-nonbootstrapable/restricted_services", "restricted_services", tmpdir)
# TODO: DCOS_OSS-3470 - muted Windows tests requiring investigation
@pytest.mark.skipif(is_windows, reason="Fails on windows, cause unknown")
def test_single_source_corrupt(tmpdir):
with pytest.raises(CalledProcessError):
package("resources-nonbootstrapable/single_source_corrupt", "single_source", tmpdir)
# Check the corrupt file got moved to the right place
expect_fs(str(tmpdir.join("cache/packages/single_source/single_source")), ["foo.corrupt"])
@pytest.mark.skipif(is_windows, reason="Fails on windows, don't have necessary windows build scripts for this test")
def test_bootstrap(tmpdir):
pkg_dir = tmpdir.join("bootstrap_test")
copytree("resources/", str(pkg_dir))
with pkg_dir.as_cwd():
treeinfo = {
'variants': {
'variant': 'downstream',
'non_bootstrap_variant': 'downstream',
},
# All packages in resources/ except non_bootstrap*
'bootstrap_package_list': [
'base',
'single_source',
'single_source_extra',
'url_extract-tar',
'url_extract-zip',
'variant',
]
}
pkg_dir.join("treeinfo.json").write(json.dumps(treeinfo), ensure=True)
check_call(["mkpanda", "tree", "--mkbootstrap"])
cache_dir = str(pkg_dir.join("cache/bootstrap")) + "/"
bootstrap_id = open(cache_dir + "bootstrap.latest", 'r').read().strip()
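    # 'bootstrap.latest' records the id of the most recently built bootstrap,
    # which names the '<id>.bootstrap.tar.xz' archive inspected below.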
bootstrap_files = get_tar_contents(cache_dir + bootstrap_id + ".bootstrap.tar.xz")
    # Separate files that come from individual packages from those in the root directory
package_files = dict()
merged_files = set()
for path in bootstrap_files:
if not path.startswith("./packages/"):
merged_files.add(path)
continue
# Skip the packages folder itself
if path == './packages/':
continue
# Figure out the package name, file inside the package
path_parts = path.split('/')
package_name = path_parts[2].split('--')[0]
file_path = '/'.join(path_parts[3:])
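        # e.g. './packages/base--<id>/bin/mesos-master' yields
        # package_name 'base' and file_path 'bin/mesos-master'.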
file_set = package_files.get(package_name, set())
# don't add the package directory / empty path.
if len(file_path) == 0:
continue
file_set.add(file_path)
package_files[package_name] = file_set
# Check that the root has exactly the right set of files.
assert merged_files == {
'./',
'./active.buildinfo.full.json',
'./bootstrap',
'./environment',
'./environment.export',
'./active/',
'./active/base',
'./active/url_extract-tar',
'./active/url_extract-zip',
'./active/variant',
'./active/single_source',
'./active/single_source_extra',
'./bin/',
'./bin/mesos-master',
'./etc/',
'./etc/dcos-service-configuration.json',
        './lib/',
'./lib/libmesos.so',
'./include/'}
assert package_files == {
'url_extract-zip': {'pkginfo.json', 'buildinfo.full.json'},
'url_extract-tar': {'pkginfo.json', 'buildinfo.full.json'},
'single_source': {'pkginfo.json', 'buildinfo.full.json'},
'single_source_extra': {'pkginfo.json', 'buildinfo.full.json'},
'variant': {'pkginfo.json', 'buildinfo.full.json'},
'base': {
'base',
'bin/',
'dcos.target.wants/',
'dcos.target.wants/dcos-foo.service',
'version',
'buildinfo.full.json',
'bin/mesos-master',
'pkginfo.json',
'lib/',
'lib/libmesos.so'}}
|
{
"content_hash": "ca5526c3a6db3982f80f54735abc9185",
"timestamp": "",
"source": "github",
"line_count": 212,
"max_line_length": 116,
"avg_line_length": 41.00471698113208,
"alnum_prop": 0.6308524099850454,
"repo_name": "surdy/dcos",
"id": "8f676b4ad43b0399885539d0f03443d7050a064c",
"size": "8693",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "pkgpanda/build/tests/build_integration_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "2529"
},
{
"name": "Dockerfile",
"bytes": "10736"
},
{
"name": "Groovy",
"bytes": "711"
},
{
"name": "HTML",
"bytes": "88421"
},
{
"name": "Lua",
"bytes": "193851"
},
{
"name": "Makefile",
"bytes": "179"
},
{
"name": "PowerShell",
"bytes": "20007"
},
{
"name": "Python",
"bytes": "1354432"
},
{
"name": "Shell",
"bytes": "95258"
}
],
"symlink_target": ""
}
|
from .affine import Affine
from ... import describe
from ...describe import Dimension, Synapses, Biases
from ... import check
from ...check import has_shape
class ELU(Affine):
def predict(self, input__bi):
output__bo = Affine.predict(self, input__bi)
self.ops.elu(output__bo, inplace=True)
return output__bo
def begin_update(self, input__bi, drop=0.):
output__bo, finish_affine = Affine.begin_update(self, input__bi, drop=drop)
output_copy = self.ops.xp.ascontiguousarray(output__bo, dtype='f')
self.ops.elu(output_copy, inplace=True)
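        # output_copy now holds the activated values; backprop_elu below uses
        # them to recover the ELU derivative during the backward pass.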
def finish_update(gradient, sgd=None):
gradient = self.ops.xp.ascontiguousarray(gradient, dtype='f')
self.ops.backprop_elu(gradient, output_copy, inplace=True)
return finish_affine(gradient, sgd)
output__bo[:] = output_copy
output__bo, bp_dropout = self.ops.dropout(output__bo, drop, inplace=True)
return output__bo, bp_dropout(finish_update)
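# Added note (not in the original file): begin_update returns the ELU
# activations together with a callback that backpropagates first through the
# ELU nonlinearity (backprop_elu, using the pre-dropout activations saved in
# output_copy) and then through the affine layer (finish_affine), so the
# gradient flows output -> ELU -> affine weights and bias.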
|
{
"content_hash": "8b71cc1b6e1cbe1c3b2f5bb948b7b44b",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 83,
"avg_line_length": 40.69230769230769,
"alnum_prop": 0.6597353497164461,
"repo_name": "ryfeus/lambda-packs",
"id": "245d829dbe9bde091d0e71d11421beb5f36433e7",
"size": "1058",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Spacy/source2.7/thinc/neural/_classes/elu.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "9768343"
},
{
"name": "C++",
"bytes": "76566960"
},
{
"name": "CMake",
"bytes": "191097"
},
{
"name": "CSS",
"bytes": "153538"
},
{
"name": "Cuda",
"bytes": "61768"
},
{
"name": "Cython",
"bytes": "3110222"
},
{
"name": "Fortran",
"bytes": "110284"
},
{
"name": "HTML",
"bytes": "248658"
},
{
"name": "JavaScript",
"bytes": "62920"
},
{
"name": "MATLAB",
"bytes": "17384"
},
{
"name": "Makefile",
"bytes": "152150"
},
{
"name": "Python",
"bytes": "549307737"
},
{
"name": "Roff",
"bytes": "26398"
},
{
"name": "SWIG",
"bytes": "142"
},
{
"name": "Shell",
"bytes": "7790"
},
{
"name": "Smarty",
"bytes": "4090"
},
{
"name": "TeX",
"bytes": "152062"
},
{
"name": "XSLT",
"bytes": "305540"
}
],
"symlink_target": ""
}
|
import pymongo
import pandas as pd
from math import log
from time import time
import sys
from scipy.stats import linregress
import pickle
client = pymongo.MongoClient()
db = client['bitmicro']
def get_book_df(symbol, limit, convert_timestamps=False):
'''
Returns a DataFrame of book data
'''
books_db = db[symbol+'_books']
cursor = books_db.find().sort('_id', -1).limit(limit)
books = pd.DataFrame(list(cursor))
books = books.set_index('_id')
if convert_timestamps:
books.index = pd.to_datetime(books.index, unit='s')
def to_df(x):
return pd.DataFrame(x[:10])
return books.applymap(to_df).sort_index()
def get_width_and_mid(books):
'''
Returns width of best market and midpoint for each data point in DataFrame
of book data
'''
best_bid = books.bids.apply(lambda x: x.price[0])
best_ask = books.asks.apply(lambda x: x.price[0])
return best_ask-best_bid, (best_bid + best_ask)/2
def get_future_mid(books, offset, sensitivity):
'''
Returns percent change of future midpoints for each data point in DataFrame
of book data
'''
def future(timestamp):
i = books.index.get_loc(timestamp+offset, method='nearest')
if abs(books.index[i] - (timestamp+offset)) < sensitivity:
return books.mid.iloc[i]
return (books.index.map(future)/books.mid).apply(log)
def get_power_imbalance(books, n=10, power=2):
'''
Returns a measure of the imbalance between bids and offers for each data
point in DataFrame of book data
'''
def calc_imbalance(book):
def calc(x):
return x.amount*(.5*book.width/(x.price-book.mid))**power
bid_imbalance = book.bids.iloc[:n].apply(calc, axis=1)
ask_imbalance = book.asks.iloc[:n].apply(calc, axis=1)
return (bid_imbalance-ask_imbalance).sum()
imbalance = books.apply(calc_imbalance, axis=1)
return imbalance
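# Illustrative numbers (added, not from the original): with book.mid = 100,
# book.width = 0.2 and power = 2, a bid of amount 3 at price 99.9 gets weight
# 3 * (0.5*0.2 / (99.9 - 100))**2 = 3.0, and an order twice as far from the
# mid receives a quarter of that weight; the imbalance is the sum of bid
# weights minus the sum of ask weights over the top n levels of the book.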
def get_power_adjusted_price(books, n=10, power=2):
'''
Returns the percent change of an average of order prices weighted by inverse
    distance-weighted volume for each data point in DataFrame of book data
'''
def calc_adjusted_price(book):
def calc(x):
return x.amount*(.5*book.width/(x.price-book.mid))**power
bid_inv = 1/book.bids.iloc[:n].apply(calc, axis=1)
ask_inv = 1/book.asks.iloc[:n].apply(calc, axis=1)
bid_price = book.bids.price.iloc[:n]
ask_price = book.asks.price.iloc[:n]
return (bid_price*bid_inv + ask_price*ask_inv).sum() /\
(bid_inv + ask_inv).sum()
adjusted = books.apply(calc_adjusted_price, axis=1)
return (adjusted/books.mid).apply(log).fillna(0)
def get_trade_df(symbol, min_ts, max_ts, convert_timestamps=False):
'''
Returns a DataFrame of trades for symbol in time range
'''
trades_db = db[symbol+'_trades']
query = {'timestamp': {'$gt': min_ts, '$lt': max_ts}}
cursor = trades_db.find(query).sort('_id', pymongo.ASCENDING)
trades = pd.DataFrame(list(cursor))
if not trades.empty:
trades = trades.set_index('_id')
if convert_timestamps:
trades.index = pd.to_datetime(trades.index, unit='s')
return trades
def get_trades_indexes(books, trades, offset, live=False):
'''
Returns indexes of trades in offset range for each data point in DataFrame
of book data
'''
def indexes(ts):
ts = int(ts)
i_0 = trades.timestamp.searchsorted([ts-offset], side='left')[0]
if live:
i_n = -1
else:
i_n = trades.timestamp.searchsorted([ts-1], side='right')[0]
return (i_0, i_n)
return books.index.map(indexes)
def get_trades_count(books, trades):
'''
Returns a count of trades for each data point in DataFrame of book data
'''
def count(x):
return len(trades.iloc[x.indexes[0]:x.indexes[1]])
return books.apply(count, axis=1)
def get_trades_average(books, trades):
'''
Returns the percent change of a volume-weighted average of trades for each
data point in DataFrame of book data
'''
def mean_trades(x):
trades_n = trades.iloc[x.indexes[0]:x.indexes[1]]
if not trades_n.empty:
return (trades_n.price*trades_n.amount).sum()/trades_n.amount.sum()
return (books.mid/books.apply(mean_trades, axis=1)).apply(log).fillna(0)
def get_aggressor(books, trades):
'''
Returns a measure of whether trade aggressors were buyers or sellers for
each data point in DataFrame of book data
'''
def aggressor(x):
trades_n = trades.iloc[x.indexes[0]:x.indexes[1]]
if trades_n.empty:
return 0
buys = trades_n['type'] == 'buy'
buy_vol = trades_n[buys].amount.sum()
sell_vol = trades_n[~buys].amount.sum()
return buy_vol - sell_vol
return books.apply(aggressor, axis=1)
def get_trend(books, trades):
'''
Returns the linear trend in previous trades for each data point in DataFrame
of book data
'''
def trend(x):
trades_n = trades.iloc[x.indexes[0]:x.indexes[1]]
if len(trades_n) < 3:
return 0
else:
return linregress(trades_n.index.values, trades_n.price.values)[0]
return books.apply(trend, axis=1)
def check_times(books):
'''
Returns list of differences between collection time and max book timestamps
for verification purposes
'''
time_diff = []
for i in range(len(books)):
book = books.iloc[i]
ask_ts = max(book.asks.timestamp)
bid_ts = max(book.bids.timestamp)
ts = max(ask_ts, bid_ts)
time_diff.append(book.name-ts)
return time_diff
def make_features(symbol, sample, mid_offsets,
trades_offsets, powers, live=False):
'''
Returns a DataFrame with targets and features
'''
start = time()
stage = time()
# Book related features:
books = get_book_df(symbol, sample)
if not live:
print 'get book data run time:', (time()-stage)/60, 'minutes'
stage = time()
books['width'], books['mid'] = get_width_and_mid(books)
if not live:
print 'width and mid run time:', (time()-stage)/60, 'minutes'
stage = time()
for n in mid_offsets:
books['mid{}'.format(n)] = get_future_mid(books, n, sensitivity=1)
if not live:
books = books.dropna()
print 'offset mids run time:', (time()-stage)/60, 'minutes'
stage = time()
for p in powers:
books['imbalance{}'.format(p)] = get_power_imbalance(books, 10, p)
books['adj_price{}'.format(p)] = get_power_adjusted_price(books, 10, p)
if not live:
print 'power calcs run time:', (time()-stage)/60, 'minutes'
stage = time()
books = books.drop(['bids', 'asks'], axis=1)
# Trade related features:
min_ts = books.index.min() - trades_offsets[-1]
max_ts = books.index.max()
if live:
max_ts += 10
trades = get_trade_df(symbol, min_ts, max_ts)
for n in trades_offsets:
if trades.empty:
books['indexes'] = 0
books['t{}_count'.format(n)] = 0
books['t{}_av'.format(n)] = 0
books['agg{}'.format(n)] = 0
books['trend{}'.format(n)] = 0
else:
books['indexes'] = get_trades_indexes(books, trades, n, live)
books['t{}_count'.format(n)] = get_trades_count(books, trades)
books['t{}_av'.format(n)] = get_trades_average(books, trades)
books['agg{}'.format(n)] = get_aggressor(books, trades)
books['trend{}'.format(n)] = get_trend(books, trades)
if not live:
print 'trade features run time:', (time()-stage)/60, 'minutes'
stage = time()
print 'make_features run time:', (time()-start)/60, 'minutes'
return books.drop('indexes', axis=1)
def make_data(symbol, sample):
'''
Convenience function for calling make_features
'''
data = make_features(symbol,
sample=sample,
mid_offsets=[30],
trades_offsets=[30, 60, 120, 180],
powers=[2, 4, 8])
return data
if __name__ == '__main__' and len(sys.argv) == 4:
data = make_data(sys.argv[1], int(sys.argv[2]))
with open(sys.argv[3], 'w+') as f:
pickle.dump(data, f)
|
{
"content_hash": "b9ac4313136925cb3531a0f053b9e439",
"timestamp": "",
"source": "github",
"line_count": 259,
"max_line_length": 80,
"avg_line_length": 32.52123552123552,
"alnum_prop": 0.6009735248723732,
"repo_name": "cbyn/bitmicro",
"id": "1e53cb2b5237abc737a3d95fe3d56d9bcbbb4a4f",
"size": "8423",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "model/features.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "431"
},
{
"name": "HTML",
"bytes": "5627"
},
{
"name": "JavaScript",
"bytes": "449"
},
{
"name": "Python",
"bytes": "35887"
}
],
"symlink_target": ""
}
|
class DataClassFile():
def functionName1(self, parameter1):
""" One Function (takes a parameter) with three test functions
(with single line comments) where outdated documentation
exists.
************************************************************
####UnitTest Specifications
- Given: Old specifications Given: line
When : Old specifications When: line
Then : Old specifications Then: line
`test_functionName1_test_case_4()`
************************************************************
@param string $parameter1 a String
@return string a String
"""
say = "say"
fu = "fu"
return say + " " + fu
|
{
"content_hash": "f91af2c9a12c462003c84eb7a79e3cce",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 71,
"avg_line_length": 36.904761904761905,
"alnum_prop": 0.45806451612903226,
"repo_name": "anconaesselmann/ClassesAndTests",
"id": "8f9c9273dabded8548ecc535debf41dc069301bf",
"size": "775",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "classes_and_testsTest/DocumentationFromUnitTestsTestData/DataSet4_py_ClassFile.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "14722"
},
{
"name": "PHP",
"bytes": "28735"
},
{
"name": "Python",
"bytes": "323162"
}
],
"symlink_target": ""
}
|
import webob
from webob import exc
from cinder.api.openstack import extensions
from cinder.api.openstack import wsgi
from cinder import db
from cinder import exception
from cinder import volume
from cinder.openstack.common import log as logging
LOG = logging.getLogger(__name__)
class AdminController(wsgi.Controller):
"""Abstract base class for AdminControllers."""
collection = None # api collection to extend
# FIXME(clayg): this will be hard to keep up-to-date
# Concrete classes can expand or over-ride
valid_status = set([
'creating',
'available',
'deleting',
'error',
'error_deleting',
])
def __init__(self, *args, **kwargs):
super(AdminController, self).__init__(*args, **kwargs)
# singular name of the resource
self.resource_name = self.collection.rstrip('s')
self.volume_api = volume.API()
def _update(self, *args, **kwargs):
raise NotImplementedError()
def _get(self, *args, **kwargs):
raise NotImplementedError()
def _delete(self, *args, **kwargs):
raise NotImplementedError()
def validate_update(self, body):
update = {}
try:
update['status'] = body['status']
except (TypeError, KeyError):
raise exc.HTTPBadRequest("Must specify 'status'")
if update['status'] not in self.valid_status:
raise exc.HTTPBadRequest("Must specify a valid status")
return update
def authorize(self, context, action_name):
# e.g. "snapshot_admin_actions:reset_status"
action = '%s_admin_actions:%s' % (self.resource_name, action_name)
extensions.extension_authorizer('volume', action)(context)
@wsgi.action('os-reset_status')
def _reset_status(self, req, id, body):
"""Reset status on the resource."""
context = req.environ['cinder.context']
self.authorize(context, 'reset_status')
update = self.validate_update(body['os-reset_status'])
msg = _("Updating %(resource)s '%(id)s' with '%(update)r'")
LOG.debug(msg, {'resource': self.resource_name, 'id': id,
'update': update})
try:
self._update(context, id, update)
        except exception.NotFound as e:
raise exc.HTTPNotFound(e)
return webob.Response(status_int=202)
@wsgi.action('os-force_delete')
def _force_delete(self, req, id, body):
"""Delete a resource, bypassing the check that it must be available."""
context = req.environ['cinder.context']
self.authorize(context, 'force_delete')
try:
resource = self._get(context, id)
except exception.NotFound:
raise exc.HTTPNotFound()
self._delete(context, resource, force=True)
return webob.Response(status_int=202)
class VolumeAdminController(AdminController):
"""AdminController for Volumes."""
collection = 'volumes'
valid_status = AdminController.valid_status.union(
set(['attaching', 'in-use', 'detaching']))
def _update(self, *args, **kwargs):
db.volume_update(*args, **kwargs)
def _get(self, *args, **kwargs):
return self.volume_api.get(*args, **kwargs)
def _delete(self, *args, **kwargs):
return self.volume_api.delete(*args, **kwargs)
def validate_update(self, body):
update = super(VolumeAdminController, self).validate_update(body)
if 'attach_status' in body:
if body['attach_status'] not in ('detached', 'attached'):
raise exc.HTTPBadRequest("Must specify a valid attach_status")
update['attach_status'] = body['attach_status']
return update
class SnapshotAdminController(AdminController):
"""AdminController for Snapshots."""
collection = 'snapshots'
def _update(self, *args, **kwargs):
db.snapshot_update(*args, **kwargs)
def _get(self, *args, **kwargs):
return self.volume_api.get_snapshot(*args, **kwargs)
def _delete(self, *args, **kwargs):
return self.volume_api.delete_snapshot(*args, **kwargs)
class Admin_actions(extensions.ExtensionDescriptor):
"""Enable admin actions."""
name = "AdminActions"
alias = "os-admin-actions"
namespace = "http://docs.openstack.org/volume/ext/admin-actions/api/v1.1"
updated = "2012-08-25T00:00:00+00:00"
def get_controller_extensions(self):
exts = []
for class_ in (VolumeAdminController, SnapshotAdminController):
controller = class_()
extension = extensions.ControllerExtension(
self, class_.collection, controller)
exts.append(extension)
return exts
|
{
"content_hash": "6fce1eb5ba724588e79f1a04b71af53a",
"timestamp": "",
"source": "github",
"line_count": 143,
"max_line_length": 79,
"avg_line_length": 33.16783216783217,
"alnum_prop": 0.6251317731393633,
"repo_name": "rnirmal/cinder",
"id": "8aa6863c0350d8315631b0acc11a6bb9913a1372",
"size": "5343",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "cinder/api/openstack/volume/contrib/admin_actions.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "JavaScript",
"bytes": "7403"
},
{
"name": "Python",
"bytes": "1888971"
},
{
"name": "Shell",
"bytes": "7441"
}
],
"symlink_target": ""
}
|
from __future__ import absolute_import
from __future__ import print_function
from pyface.qt.QtCore import QTimer
from pyface.qt.QtGui import QLabel, QImage, QPixmap, QSizePolicy
from traits.api import Int, Instance
# ============= standard library imports ========================
# ============= local library imports ==========================
from traitsui.basic_editor_factory import BasicEditorFactory
from traitsui.qt4.editor import Editor
class _CameraEditor(Editor):
timer = Instance(QTimer)
swap = False
def init(self, parent):
self.control = self._create_control(parent)
self._setup_loop()
def update_editor(self):
self._setup_loop()
def dispose(self):
if self.timer:
self.timer.stop()
def _setup_loop(self):
# if self.value is not None:
self.timer = QTimer(self.control)
self.timer.timeout.connect(self._update)
print("fps", self.factory.fps)
if self.factory.fps:
self.timer.setInterval(1000 / self.factory.fps)
self.timer.start()
def _update(self):
if self.value:
# w, h = self.control.width(), self.control.height()
# img = self.value.get_image_data(size=(w, h))
img = self.value.get_image_data()
if img is not None:
s = img.shape
if s:
im = QImage(img, s[1], s[0], QImage.Format_RGB32)
# im = QImage(img, s[1], s[0], QImage.Format_RGB16)
if self.swap:
im = QImage.rgbSwapped(im)
pix = QPixmap.fromImage(im)
self.control.setPixmap(pix)
def _create_control(self, parent):
label = QLabel()
width, height = self.item.width, self.item.height
if self.item.width != -1.0:
label.setSizePolicy(QSizePolicy.Fixed, QSizePolicy.Fixed)
label.setFixedWidth(abs(width))
label.setFixedHeight(abs(height))
return label
class CameraEditor(BasicEditorFactory):
klass = _CameraEditor
fps = Int(24)
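    # Usage sketch (added; the trait and view names are assumptions, not part
    # of this module). Bind the editor to a trait whose value exposes
    # get_image_data(), e.g.:
    #   from traitsui.api import View, Item
    #   traits_view = View(Item('camera', editor=CameraEditor(fps=10),
    #                           width=640, height=480))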
# ============= EOF =============================================
|
{
"content_hash": "83628611e7f0bcced894f6f90cb9a039",
"timestamp": "",
"source": "github",
"line_count": 68,
"max_line_length": 71,
"avg_line_length": 32.30882352941177,
"alnum_prop": 0.5539371870732818,
"repo_name": "NMGRL/pychron",
"id": "511ee1a9d8ae2591e660caebf47721b95ea8a826",
"size": "2997",
"binary": false,
"copies": "2",
"ref": "refs/heads/main",
"path": "pychron/core/ui/qt/camera_editor.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "128"
},
{
"name": "C++",
"bytes": "3706"
},
{
"name": "CSS",
"bytes": "263"
},
{
"name": "Cython",
"bytes": "1692"
},
{
"name": "Fortran",
"bytes": "455875"
},
{
"name": "HTML",
"bytes": "46796"
},
{
"name": "Mako",
"bytes": "412"
},
{
"name": "Processing",
"bytes": "11421"
},
{
"name": "Python",
"bytes": "10773692"
},
{
"name": "Shell",
"bytes": "1003"
}
],
"symlink_target": ""
}
|
from django import template
from ..models import Story, StoryCategory
register = template.Library()
@register.assignment_tag
def get_story_categories():
return StoryCategory.objects.all()
@register.assignment_tag
def get_stories_featured():
return Story.objects.featured()
@register.assignment_tag
def get_stories_by_category(category_slug, limit=5):
return Story.objects.published().filter(category__slug__exact=category_slug)[:limit]
@register.assignment_tag
def get_stories_latest(limit=5):
return Story.objects.published()[:limit]
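# Example template usage (added; the tag arguments are illustrative):
#   {% load successstories %}
#   {% get_stories_latest 3 as latest_stories %}
#   {% for story in latest_stories %}{{ story }}{% endfor %}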
|
{
"content_hash": "eecc62fa49445964cdc3c5247b82857b",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 88,
"avg_line_length": 21.615384615384617,
"alnum_prop": 0.7597864768683275,
"repo_name": "lepture/pythondotorg",
"id": "4b0310e5dd10eedc1b25f9b6a5408de2d61f03aa",
"size": "562",
"binary": false,
"copies": "10",
"ref": "refs/heads/master",
"path": "successstories/templatetags/successstories.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "707898"
},
{
"name": "HTML",
"bytes": "379774"
},
{
"name": "JavaScript",
"bytes": "306174"
},
{
"name": "PostScript",
"bytes": "19072"
},
{
"name": "Python",
"bytes": "943320"
},
{
"name": "Ruby",
"bytes": "218314"
},
{
"name": "Shell",
"bytes": "696"
}
],
"symlink_target": ""
}
|
import newrelic.agent
import os.path
newrelic.agent.initialize(os.path.join(os.path.dirname(__file__), 'config', 'newrelic.ini'))
import pymongo.errors
from flask.ext import cors
from flask.ext.pymongo import PyMongo
from flask import Flask, g, render_template, request, flash, abort
import mail
from config import config
from linky import bookmarklet, forms
app = Flask(__name__)
app.config.from_object(config)
mongo = PyMongo(app)
BASE_URL = config.BASE_URL
@app.route('/')
def index():
return render_template('index.html')
def _gen_uuid():
import uuid
return uuid.uuid4().get_hex()
@app.route('/signup', methods=['GET', 'POST'])
def signup():
form = forms.SignupForm()
if form.validate_on_submit():
# app.logger.debug('Form validate')
email = request.form.get('email')
if email:
uuid = _gen_uuid()
try:
g.db.users.insert({
'_id': email,
'signup_key': uuid
})
except pymongo.errors.DuplicateKeyError:
flash('<strong>Hey!</strong> This email '
'has already been used.', 'danger')
return render_template('signup.html', form=form)
else:
mail.send_signup_email(uuid, email)
return render_template('signup-success.html')
else:
pass
return render_template('signup.html', form=form)
@app.route('/user/verify/<uuid>')
def verify(uuid):
result = g.db.users.find_one_or_404({'signup_key': uuid})
acct_key = _gen_uuid()
doc = {
'$set': {
'verified': True,
'acct_key': acct_key,
'signup_key': None,
'send_limit': 10
}
}
try:
update = g.db.users.update(result, doc)
app.logger.debug(update)
app.logger.debug(doc)
except Exception as e:
app.logger.exception(e)
raise
else:
mail.send_verified_email(acct_key, result.get('_id'))
return render_template('signup-verified.html', user=result,
update_status=update, acct_key=acct_key)
@app.route('/user/<uuid>')
def user(uuid):
result = g.db.users.find_one_or_404({'acct_key': uuid})
url = "%s/user/%s/send" % (BASE_URL, uuid)
bmk = bookmarklet.bookmarklet(url)
return render_template('user.html', result=result,
url=url, bookmarklet=bmk)
@app.route('/user/<uuid>/send', methods=['POST'])
@cors.origin('*', methods=['POST', 'OPTIONS'])
def send_link(uuid):
result = g.db.users.find_one_or_404({'acct_key': uuid})
to_addr = result.get('_id')
req_url = request.form.get('url')
req_title = request.form.get('title')
app.logger.debug(request.form)
    # require both url and title; abort with 400 if either is missing
    if not (req_url and req_title):
return abort(400)
try:
mail.send_link(req_title, req_url, to_addr)
except Exception as e:
return e.message, 500, {}
else:
return "", 200, {}
@app.before_request
def before_request():
mongo.db.authenticate(config.MONGO_USERNAME, config.MONGO_PASSWORD)
g.db = mongo.db
|
{
"content_hash": "59a8a62974de062884143900c9c00937",
"timestamp": "",
"source": "github",
"line_count": 116,
"max_line_length": 92,
"avg_line_length": 27.301724137931036,
"alnum_prop": 0.5822544995263657,
"repo_name": "angstwad/linky",
"id": "02f2d3fe765426c0db931e7f005271120b106c31",
"size": "3845",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "linky/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "68"
},
{
"name": "JavaScript",
"bytes": "327"
},
{
"name": "Python",
"bytes": "9387"
}
],
"symlink_target": ""
}
|
from genMixins import subjectMixin, updateCallsExecuteModuleMixin
class transformStackClass(list,
subjectMixin,
updateCallsExecuteModuleMixin):
def __init__(self, d3Module):
# call base ctors
subjectMixin.__init__(self)
updateCallsExecuteModuleMixin.__init__(self, d3Module)
def close(self):
subjectMixin.close(self)
updateCallsExecuteModuleMixin.close(self)
|
{
"content_hash": "1f60b3aaec8da1cd30ecdbb27ec1dbb1",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 65,
"avg_line_length": 31.133333333333333,
"alnum_prop": 0.6359743040685225,
"repo_name": "nagyistoce/devide",
"id": "0b62e3a1aa3b7aed503684509b8623eba5d84fe5",
"size": "475",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "modules/insight/typeModules/transformStackClass.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "NSIS",
"bytes": "2786"
},
{
"name": "Python",
"bytes": "3104368"
},
{
"name": "Shell",
"bytes": "7369"
}
],
"symlink_target": ""
}
|
import random
import unittest
from exporters.groupers.base_grouper import BaseGrouper
from exporters.groupers.file_key_grouper import FileKeyGrouper
from exporters.groupers.python_exp_grouper import PythonExpGrouper
from exporters.records.base_record import BaseRecord
country_codes = ['es', 'uk', 'us']
states = ['valencia', 'madrid', 'barcelona']
cities = ['alicante', 'lleida', 'somecity']
def get_batch(batch_size=1000):
batch = []
for i in range(0, batch_size):
item = BaseRecord()
item['key'] = i
item['country_code'] = random.choice(country_codes)
item['state'] = random.choice(states)
item['city'] = random.choice(cities)
item['value'] = random.randint(0, 10000)
batch.append(item)
return batch
class BaseGrouperTest(unittest.TestCase):
def setUp(self):
self.options = {
'exporter_options': {
'LOG_LEVEL': 'DEBUG',
'LOGGER_NAME': 'export-pipeline'
},
'grouper': {
'name': 'exporters.groupers.base_grouper.BaseGrouper',
'options': {
}
}
}
def test_raise_exception(self):
grouper = BaseGrouper(self.options)
with self.assertRaises(NotImplementedError):
grouper.group_batch([])
class FileKeyGrouperTest(unittest.TestCase):
def setUp(self):
self.options_ok = {
'exporter_options': {
'LOG_LEVEL': 'DEBUG',
'LOGGER_NAME': 'export-pipeline'
},
'grouper': {
'name': 'exporters.groupers.file_key_grouper.FileKeyGrouper',
'options': {
'keys': ['country_code', 'state', 'city']
}
}
}
self.options_unknown_key = {
'exporter_options': {
'LOG_LEVEL': 'DEBUG',
'LOGGER_NAME': 'export-pipeline'
},
'grouper': {
'name': 'exporters.groupers.file_key_grouper.FileKeyGrouper',
'options': {
'keys': ['country_code', 'not_a_key', 'city']
}
}
}
def test_group_batch(self):
grouper = FileKeyGrouper(self.options_ok['grouper'])
batch = get_batch()
grouped = grouper.group_batch(batch)
for item in grouped:
country, state, city = item.group_membership
self.assertTrue(country in country_codes)
self.assertTrue(state in states)
self.assertTrue(city in cities)
def test_unknown_keys_batch(self):
grouper = FileKeyGrouper(self.options_unknown_key['grouper'])
batch = get_batch()
grouped = grouper.group_batch(batch)
for item in grouped:
country, state, city = item.group_membership
self.assertTrue(state == 'unknown')
class PythonExpGrouperTest(unittest.TestCase):
def setUp(self):
self.options_exp_in = {
'exporter_options': {
'LOG_LEVEL': 'DEBUG',
'LOGGER_NAME': 'export-pipeline'
},
'grouper': {
'name': 'exporters.groupers.python_exp_grouper.PythonExpGrouper',
'options': {
'python_expressions': ['item[\'country_code\'] in [\'uk\', \'us\']']
}
}
}
self.options_value_modulo = {
'exporter_options': {
'LOG_LEVEL': 'DEBUG',
'LOGGER_NAME': 'export-pipeline'
},
'grouper': {
'name': 'exporters.groupers.python_exp_grouper.PythonExpGrouper',
'options': {
'python_expressions': ['item[\'value\'] % 5']
}
}
}
self.options_multiple = {
'exporter_options': {
'LOG_LEVEL': 'DEBUG',
'LOGGER_NAME': 'export-pipeline'
},
'grouper': {
'name': 'exporters.groupers.python_exp_grouper.PythonExpGrouper',
'options': {
'python_expressions': [
'item[\'country_code\'] in [\'uk\', \'us\']',
'item[\'value\'] % 5'
]
}
}
}
self.options_invalid = {
'exporter_options': {
'LOG_LEVEL': 'DEBUG',
'LOGGER_NAME': 'export-pipeline'
},
'grouper': {
'name': 'exporters.groupers.python_exp_grouper.PythonExpGrouper',
'options': {
'python_expressions': ['item[\'description\'] % 5']
}
}
}
def test_group_batch_is_in(self):
grouper = PythonExpGrouper(self.options_exp_in['grouper'])
batch = get_batch()
grouped = grouper.group_batch(batch)
for item in grouped:
is_in = item.group_membership[0]
self.assertTrue((item['country_code'] in ['uk', 'us']) == is_in)
def test_group_batch_modulo(self):
grouper = PythonExpGrouper(self.options_value_modulo['grouper'])
batch = get_batch()
grouped = grouper.group_batch(batch)
for item in grouped:
modulo = item.group_membership[0]
self.assertTrue(item['value'] % 5 == modulo)
def test_group_batch_multiple(self):
grouper = PythonExpGrouper(self.options_multiple['grouper'])
batch = get_batch()
grouped = grouper.group_batch(batch)
for item in grouped:
is_in, modulo = item.group_membership
self.assertTrue((item['country_code'] in ['uk', 'us']) == is_in)
self.assertTrue(item['value'] % 5 == modulo)
def test_group_batch_invalid(self):
grouper = PythonExpGrouper(self.options_invalid['grouper'])
batch = get_batch()
grouped = grouper.group_batch(batch)
with self.assertRaises(Exception):
next(grouped)
|
{
"content_hash": "f516b992b3922380dcee6423104ba582",
"timestamp": "",
"source": "github",
"line_count": 180,
"max_line_length": 88,
"avg_line_length": 33.90555555555556,
"alnum_prop": 0.513845649680485,
"repo_name": "scrapinghub/exporters",
"id": "c530c6809a3e8d8fb658c25b75772d8ec96beac1",
"size": "6103",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_groupers.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "1715"
},
{
"name": "Python",
"bytes": "501414"
}
],
"symlink_target": ""
}
|
from django import forms
from django.conf import settings
from django.conf.urls.defaults import patterns, url
from django.contrib import admin
from django.contrib import messages
from django.contrib.admin.widgets import FilteredSelectMultiple
from django.contrib.auth.models import User, Group
from django.core.urlresolvers import reverse
from django.shortcuts import render_to_response, get_object_or_404, redirect
from django.template import RequestContext
from django.utils.datastructures import SortedDict
from django.utils.translation import ugettext, ugettext_lazy as _
from guardian.forms import UserObjectPermissionsForm
from guardian.forms import GroupObjectPermissionsForm
from guardian.shortcuts import get_perms
from guardian.shortcuts import get_users_with_perms
from guardian.shortcuts import get_groups_with_perms
from guardian.shortcuts import get_perms_for_model
class AdminUserObjectPermissionsForm(UserObjectPermissionsForm):
"""
Extends :form:`UserObjectPermissionsForm`. It only overrides
    ``get_obj_perms_field_widget`` method so it returns the
``django.contrib.admin.widgets.FilteredSelectMultiple`` widget.
"""
def get_obj_perms_field_widget(self):
return FilteredSelectMultiple(_("Permissions"), False)
class AdminGroupObjectPermissionsForm(GroupObjectPermissionsForm):
"""
Extends :form:`GroupObjectPermissionsForm`. It only overrides
    ``get_obj_perms_field_widget`` method so it returns the
``django.contrib.admin.widgets.FilteredSelectMultiple`` widget.
"""
def get_obj_perms_field_widget(self):
return FilteredSelectMultiple(_("Permissions"), False)
class GuardedModelAdmin(admin.ModelAdmin):
"""
Extends ``django.contrib.admin.ModelAdmin`` class. Provides some extra
views for object permissions management at admin panel. It also changes
default ``change_form_template`` option to
``'admin/guardian/model/change_form.html'`` which is required for proper
url (object permissions related) being shown at the model pages.
**Extra options**
``GuardedModelAdmin.obj_perms_manage_template``
*Default*: ``admin/guardian/model/obj_perms_manage.html``
``GuardedModelAdmin.obj_perms_manage_user_template``
*Default*: ``admin/guardian/model/obj_perms_manage_user.html``
``GuardedModelAdmin.obj_perms_manage_group_template``
*Default*: ``admin/guardian/model/obj_perms_manage_group.html``
``GuardedModelAdmin.user_can_access_owned_objects_only``
*Default*: ``False``
        If this is set to ``True``, ``request.user`` is used to filter out
        objects the user doesn't own (checking the ``user`` field of the
        model; the field name may be overridden by the
        ``user_owned_objects_field`` option).
.. note::
Please remember that this will **NOT** affect superusers!
Admins would still see all items.
``GuardedModelAdmin.user_owned_objects_field``
*Default*: ``user``
**Usage example**
Just use :admin:`GuardedModelAdmin` instead of
``django.contrib.admin.ModelAdmin``.
.. code-block:: python
from django.contrib import admin
from guardian.admin import GuardedModelAdmin
from myapp.models import Author
class AuthorAdmin(GuardedModelAdmin):
pass
admin.site.register(Author, AuthorAdmin)
"""
change_form_template = \
'admin/guardian/model/change_form.html'
obj_perms_manage_template = \
'admin/guardian/model/obj_perms_manage.html'
obj_perms_manage_user_template = \
'admin/guardian/model/obj_perms_manage_user.html'
obj_perms_manage_group_template = \
'admin/guardian/model/obj_perms_manage_group.html'
user_can_access_owned_objects_only = False
user_owned_objects_field = 'user'
def queryset(self, request):
qs = super(GuardedModelAdmin, self).queryset(request)
if self.user_can_access_owned_objects_only and \
not request.user.is_superuser:
filters = {self.user_owned_objects_field: request.user}
qs = qs.filter(**filters)
return qs
def get_urls(self):
"""
Extends standard admin model urls with the following:
- ``.../permissions/``
- ``.../permissions/user-manage/<user_id>/``
- ``.../permissions/group-manage/<group_id>/``
.. note::
``...`` above are standard, instance detail url (i.e.
``/admin/flatpages/1/``)
"""
urls = super(GuardedModelAdmin, self).get_urls()
info = self.model._meta.app_label, self.model._meta.module_name
myurls = patterns('',
url(r'^(?P<object_pk>.+)/permissions/$',
view=self.admin_site.admin_view(self.obj_perms_manage_view),
name='%s_%s_permissions' % info),
url(r'^(?P<object_pk>.+)/permissions/user-manage/(?P<user_id>\-?\d+)/$',
view=self.admin_site.admin_view(
self.obj_perms_manage_user_view),
name='%s_%s_permissions_manage_user' % info),
url(r'^(?P<object_pk>.+)/permissions/group-manage/(?P<group_id>\-?\d+)/$',
view=self.admin_site.admin_view(
self.obj_perms_manage_group_view),
name='%s_%s_permissions_manage_group' % info),
)
return myurls + urls
def get_obj_perms_base_context(self, request, obj):
"""
Returns context dictionary with common admin and object permissions
related content.
"""
context = {
'adminform': {'model_admin': self},
'object': obj,
'app_label': self.model._meta.app_label,
'opts': self.model._meta,
'original': hasattr(obj, '__unicode__') and obj.__unicode__() or\
str(obj),
'has_change_permission': self.has_change_permission(request, obj),
'model_perms': get_perms_for_model(obj),
'title': _("Object permissions"),
}
return context
def obj_perms_manage_view(self, request, object_pk):
"""
Main object permissions view. Presents all users and groups with any
object permissions for the current model *instance*. Users or groups
without object permissions for related *instance* would **not** be
shown. In order to add or manage user or group one should use links or
forms presented within the page.
"""
obj = get_object_or_404(self.queryset(request), pk=object_pk)
users_perms = SortedDict(
get_users_with_perms(obj, attach_perms=True,
with_group_users=False))
users_perms.keyOrder.sort(key=lambda user: user.username)
groups_perms = SortedDict(
get_groups_with_perms(obj, attach_perms=True))
groups_perms.keyOrder.sort(key=lambda group: group.name)
if request.method == 'POST' and 'submit_manage_user' in request.POST:
user_form = UserManage(request.POST)
group_form = GroupManage()
info = (
self.admin_site.name,
self.model._meta.app_label,
self.model._meta.module_name
)
if user_form.is_valid():
user_id = user_form.cleaned_data['user'].id
url = reverse(
'%s:%s_%s_permissions_manage_user' % info,
args=[obj.pk, user_id]
)
return redirect(url)
elif request.method == 'POST' and 'submit_manage_group' in request.POST:
user_form = UserManage()
group_form = GroupManage(request.POST)
info = (
self.admin_site.name,
self.model._meta.app_label,
self.model._meta.module_name
)
if group_form.is_valid():
group_id = group_form.cleaned_data['group'].id
url = reverse(
'%s:%s_%s_permissions_manage_group' % info,
args=[obj.pk, group_id]
)
return redirect(url)
else:
user_form = UserManage()
group_form = GroupManage()
context = self.get_obj_perms_base_context(request, obj)
context['users_perms'] = users_perms
context['groups_perms'] = groups_perms
context['user_form'] = user_form
context['group_form'] = group_form
return render_to_response(self.get_obj_perms_manage_template(),
context, RequestContext(request, current_app=self.admin_site.name))
def get_obj_perms_manage_template(self):
"""
Returns main object permissions admin template. May be overridden if
need to change it dynamically.
.. note::
If ``INSTALLED_APPS`` contains ``grappelli`` this function would
return ``"admin/guardian/grappelli/obj_perms_manage.html"``.
"""
if 'grappelli' in settings.INSTALLED_APPS:
return 'admin/guardian/contrib/grappelli/obj_perms_manage.html'
return self.obj_perms_manage_template
def obj_perms_manage_user_view(self, request, object_pk, user_id):
"""
Manages selected users' permissions for current object.
"""
user = get_object_or_404(User, id=user_id)
obj = get_object_or_404(self.queryset(request), pk=object_pk)
form_class = self.get_obj_perms_manage_user_form()
form = form_class(user, obj, request.POST or None)
if request.method == 'POST' and form.is_valid():
form.save_obj_perms()
msg = ugettext("Permissions saved.")
messages.success(request, msg)
info = (
self.admin_site.name,
self.model._meta.app_label,
self.model._meta.module_name
)
url = reverse(
'%s:%s_%s_permissions_manage_user' % info,
args=[obj.pk, user.id]
)
return redirect(url)
context = self.get_obj_perms_base_context(request, obj)
context['user_obj'] = user
context['user_perms'] = get_perms(user, obj)
context['form'] = form
return render_to_response(self.get_obj_perms_manage_user_template(),
context, RequestContext(request, current_app=self.admin_site.name))
def get_obj_perms_manage_user_template(self):
"""
Returns object permissions for user admin template. May be overridden
if need to change it dynamically.
.. note::
If ``INSTALLED_APPS`` contains ``grappelli`` this function would
return ``"admin/guardian/grappelli/obj_perms_manage_user.html"``.
"""
if 'grappelli' in settings.INSTALLED_APPS:
return 'admin/guardian/contrib/grappelli/obj_perms_manage_user.html'
return self.obj_perms_manage_user_template
def get_obj_perms_manage_user_form(self):
"""
Returns form class for user object permissions management. By default
:form:`AdminUserObjectPermissionsForm` is returned.
"""
return AdminUserObjectPermissionsForm
def obj_perms_manage_group_view(self, request, object_pk, group_id):
"""
Manages selected groups' permissions for current object.
"""
group = get_object_or_404(Group, id=group_id)
obj = get_object_or_404(self.queryset(request), pk=object_pk)
form_class = self.get_obj_perms_manage_group_form()
form = form_class(group, obj, request.POST or None)
if request.method == 'POST' and form.is_valid():
form.save_obj_perms()
msg = ugettext("Permissions saved.")
messages.success(request, msg)
info = (
self.admin_site.name,
self.model._meta.app_label,
self.model._meta.module_name
)
url = reverse(
'%s:%s_%s_permissions_manage_group' % info,
args=[obj.pk, group.id]
)
return redirect(url)
context = self.get_obj_perms_base_context(request, obj)
context['group_obj'] = group
context['group_perms'] = get_perms(group, obj)
context['form'] = form
return render_to_response(self.get_obj_perms_manage_group_template(),
context, RequestContext(request, current_app=self.admin_site.name))
def get_obj_perms_manage_group_template(self):
"""
Returns object permissions for group admin template. May be overridden
if need to change it dynamically.
.. note::
If ``INSTALLED_APPS`` contains ``grappelli`` this function would
return ``"admin/guardian/grappelli/obj_perms_manage_group.html"``.
"""
if 'grappelli' in settings.INSTALLED_APPS:
return 'admin/guardian/contrib/grappelli/obj_perms_manage_group.html'
return self.obj_perms_manage_group_template
def get_obj_perms_manage_group_form(self):
"""
Returns form class for group object permissions management. By default
:form:`AdminGroupObjectPermissionsForm` is returned.
"""
return AdminGroupObjectPermissionsForm
class UserManage(forms.Form):
user = forms.ModelChoiceField(queryset=User.objects.filter(is_active=True))
class GroupManage(forms.Form):
group = forms.ModelChoiceField(queryset=Group.objects.all())
|
{
"content_hash": "694eddbd2597f400383bd0d8e2c39cb6",
"timestamp": "",
"source": "github",
"line_count": 352,
"max_line_length": 86,
"avg_line_length": 38.58806818181818,
"alnum_prop": 0.6160641978944269,
"repo_name": "boardman/django-guardian",
"id": "d0f54e6e67a5d4f647801f586ac195c48d03db47",
"size": "13583",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "guardian/admin.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "JavaScript",
"bytes": "498"
},
{
"name": "Python",
"bytes": "217126"
},
{
"name": "Shell",
"bytes": "235"
}
],
"symlink_target": ""
}
|
import sklearn.metrics
import sklearn.preprocessing
import sklearn.svm
from autosklearn.pipeline.components.classification.libsvm_svc import LibSVM_SVC
from autosklearn.pipeline.util import _test_classifier_predict_proba, get_dataset
from .test_base import BaseClassificationComponentTest
class LibSVM_SVCComponentTest(BaseClassificationComponentTest):
__test__ = True
res = dict()
res["default_iris"] = 0.96
res["default_iris_iterative"] = -1
res["default_iris_proba"] = 0.36298338197586716
res["default_iris_sparse"] = 0.64
res["default_digits"] = 0.096539162112932606
res["default_digits_iterative"] = -1
res["default_digits_binary"] = 0.90103217972070426
res["default_digits_multilabel"] = -1
res["default_digits_multilabel_proba"] = -1
sk_mod = sklearn.svm.SVC
module = LibSVM_SVC
def test_default_configuration_predict_proba_individual(self):
# Leave this additional test here
for i in range(2):
predictions, targets = _test_classifier_predict_proba(
LibSVM_SVC, sparse=True, dataset="digits", train_size_maximum=500
)
self.assertAlmostEqual(
5.273502056835706, sklearn.metrics.log_loss(targets, predictions)
)
for i in range(2):
predictions, targets = _test_classifier_predict_proba(
LibSVM_SVC, sparse=True, dataset="iris"
)
self.assertAlmostEqual(
0.8408320837510618, sklearn.metrics.log_loss(targets, predictions)
)
# 2 class
for i in range(2):
X_train, Y_train, X_test, Y_test = get_dataset(dataset="iris")
remove_training_data = Y_train == 2
remove_test_data = Y_test == 2
X_train = X_train[~remove_training_data]
Y_train = Y_train[~remove_training_data]
X_test = X_test[~remove_test_data]
Y_test = Y_test[~remove_test_data]
ss = sklearn.preprocessing.StandardScaler()
X_train = ss.fit_transform(X_train)
configuration_space = LibSVM_SVC.get_hyperparameter_search_space()
default = configuration_space.get_default_configuration()
cls = LibSVM_SVC(
random_state=1,
**{
hp_name: default[hp_name]
for hp_name in default
if default[hp_name] is not None
},
)
cls = cls.fit(X_train, Y_train)
prediction = cls.predict_proba(X_test)
self.assertAlmostEqual(
sklearn.metrics.log_loss(Y_test, prediction),
0.6927962762794081,
places=4,
)
|
{
"content_hash": "10d848243dbb5568cb092836e07f29a3",
"timestamp": "",
"source": "github",
"line_count": 75,
"max_line_length": 82,
"avg_line_length": 36.50666666666667,
"alnum_prop": 0.589481373265157,
"repo_name": "automl/auto-sklearn",
"id": "6fe95f5b62930042543ef85eb8b0f387d5e1a2e8",
"size": "2738",
"binary": false,
"copies": "1",
"ref": "refs/heads/development",
"path": "test/test_pipeline/components/classification/test_libsvm_svc.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Dockerfile",
"bytes": "950"
},
{
"name": "Makefile",
"bytes": "3513"
},
{
"name": "Python",
"bytes": "2008151"
},
{
"name": "Shell",
"bytes": "4744"
}
],
"symlink_target": ""
}
|
from distutils.core import Extension, setup
mikktspace = Extension(
'_mikktspace',
sources=['mikktpy.i', 'mikktpy.cpp', 'mikktspace.c'],
swig_opts=['-modern', '-c++'],
extra_compile_args=['/EHsc'])
setup (name = 'MikkTSpace',
version = '1.0',
description = 'This is a demo package',
ext_modules = [mikktspace])
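# Typical build invocation (added note; assumes SWIG and a C++ toolchain are
# available -- the /EHsc flag above implies MSVC on Windows):
#   python setup.py build_ext --inplace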
|
{
"content_hash": "d8834509c35a2a7de38ecf6486825b81",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 55,
"avg_line_length": 27.46153846153846,
"alnum_prop": 0.5994397759103641,
"repo_name": "ambrusc/mikktpy",
"id": "07e4c766f5d9ea578af83c03688c071538b35ad8",
"size": "400",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "57351"
},
{
"name": "C++",
"bytes": "6471"
},
{
"name": "Objective-C",
"bytes": "8209"
},
{
"name": "Python",
"bytes": "2480"
}
],
"symlink_target": ""
}
|
from django.test import TestCase
from .. forms import (
BinaryQuestionForm,
LikertQuestionForm,
MultipleChoiceQuestionForm,
OpenEndedQuestionForm,
)
class TestBinaryQuestionForm(TestCase):
def test_unbound(self):
form = BinaryQuestionForm()
expected_fields = [
'available', 'instructions', 'instruments', 'labels', 'order',
'subscale', 'text',
]
self.assertEqual(
sorted(list(form.fields.keys())),
expected_fields
)
def test_bound(self):
form = BinaryQuestionForm({
'available': True,
'instructions': 'To do',
'instruments': '',
'labels': 'family,health',
'order': 1,
'subscale': 0,
'text': 'Test Question',
})
self.assertTrue(form.is_valid())
self.assertEqual(form.cleaned_data['labels'], ['family', 'health'])
class TestLikertQuestionForm(TestCase):
def test_unbound(self):
form = LikertQuestionForm()
expected_fields = [
'available', 'instructions', 'instruments', 'labels', 'order',
'priority', 'scale', 'subscale', 'text',
]
self.assertEqual(
sorted(list(form.fields.keys())),
expected_fields
)
def test_bound(self):
form = LikertQuestionForm({
'available': True,
'instructions': 'To do',
'instruments': '',
'labels': 'family,health',
'order': 1,
'priority': 0,
'scale': '5_point_agreement',
'subscale': 0,
'text': 'Test Question',
})
self.assertTrue(form.is_valid())
self.assertEqual(form.cleaned_data['labels'], ['family', 'health'])
class TestMultipleChoiceQuestionForm(TestCase):
def test_unbound(self):
form = MultipleChoiceQuestionForm()
expected_fields = [
'available', 'instructions', 'instruments', 'labels', 'order',
'subscale', 'text',
]
self.assertEqual(
sorted(list(form.fields.keys())),
expected_fields
)
def test_bound(self):
form = MultipleChoiceQuestionForm({
'available': True,
'instructions': 'To do',
'instruments': '',
'labels': 'family,health',
'order': 1,
'subscale': 0,
'text': 'Test Question',
})
self.assertTrue(form.is_valid())
self.assertEqual(form.cleaned_data['labels'], ['family', 'health'])
class TestOpenEndedQuestionForm(TestCase):
def test_unbound(self):
form = OpenEndedQuestionForm()
expected_fields = [
'available', 'input_type', 'instructions', 'instruments', 'labels',
'order', 'subscale', 'text',
]
self.assertEqual(
sorted(list(form.fields.keys())),
expected_fields
)
def test_bound(self):
form = OpenEndedQuestionForm({
'available': True,
'input_type': 'text',
'instructions': 'To do',
'instruments': '',
'labels': 'family,health',
'order': 1,
'subscale': 0,
'text': 'Test Question',
})
self.assertTrue(form.is_valid())
self.assertEqual(form.cleaned_data['labels'], ['family', 'health'])
|
{
"content_hash": "e525c167d12a2dc1f6b4de95c897fe2d",
"timestamp": "",
"source": "github",
"line_count": 119,
"max_line_length": 79,
"avg_line_length": 29.016806722689076,
"alnum_prop": 0.5244714740805096,
"repo_name": "izzyalonso/tndata_backend",
"id": "abde430cb55ac8e0a4239152aa9095f551727fc0",
"size": "3453",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tndata_backend/survey/tests/test_forms.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "29078"
},
{
"name": "HTML",
"bytes": "680433"
},
{
"name": "JavaScript",
"bytes": "186991"
},
{
"name": "Makefile",
"bytes": "393"
},
{
"name": "Python",
"bytes": "2023392"
},
{
"name": "Shell",
"bytes": "2282"
}
],
"symlink_target": ""
}
|
"""
Exception classes for pyRserve
"""
class PyRserveError(Exception):
pass
class REvalError(PyRserveError):
"""Indicates an error raised by R itself (not by Rserve)"""
pass
class RConnectionRefused(PyRserveError):
pass
class RResponseError(PyRserveError):
pass
class RSerializationError(PyRserveError):
pass
class PyRserveClosed(PyRserveError):
pass
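# Handling sketch (added; the connection object is an assumption about the
# surrounding pyRserve API, not part of this module):
#   try:
#       conn.eval('stop("boom")')
#   except REvalError:
#       ...  # the error was raised inside R itself, not by Rserve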
|
{
"content_hash": "099153de6ceb47f981e9a9ef28d31d32",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 63,
"avg_line_length": 13.964285714285714,
"alnum_prop": 0.7289002557544757,
"repo_name": "flying-sheep/pyRserve",
"id": "edc385ed5ae8e6aedd741fae10865d265f35acb3",
"size": "391",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "pyRserve/rexceptions.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "1163"
},
{
"name": "Python",
"bytes": "117350"
},
{
"name": "R",
"bytes": "521"
}
],
"symlink_target": ""
}
|
from django.test import TestCase
# Create your tests here.
# Unit Tests
# System integration test.
class SystemIntegrationTestCase(TestCase):
"""
Complete system integration test that tests the system from
initialization to final score prediction.
0. Assumes database is set up with a small corpus of 10 documents.
1. Load a submission on this corpus.
2. Check that the submission generates a sample.
3. Check that the submission generates evaluation questions.
    4. Check that the submission generates MTurk HITs (through an MTurk mock).
    5. Request MTurk tasks.
6. Mock interface responses.
7. Check postprocessing of data and loading into evaluation.
8. Check final score and output.
"""
def setUp(self):
"""
0. Set up with a small corpus of 10 documents.
"""
|
{
"content_hash": "27655f37f5c46cb48f1eea66da88f917",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 77,
"avg_line_length": 27.322580645161292,
"alnum_prop": 0.7001180637544274,
"repo_name": "arunchaganty/kbp-online",
"id": "448f4deef649a780d78ba7222f4c5176f3f60992",
"size": "847",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/web/tests.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "6524"
},
{
"name": "HTML",
"bytes": "130435"
},
{
"name": "JavaScript",
"bytes": "309886"
},
{
"name": "PLpgSQL",
"bytes": "44305"
},
{
"name": "Python",
"bytes": "521294"
},
{
"name": "Shell",
"bytes": "5038"
}
],
"symlink_target": ""
}
|
import binascii
try:
    from StringIO import StringIO
except ImportError:
    from io import StringIO
class PKCS7Encoder(object):
'''
RFC 2315: PKCS#7 page 21
Some content-encryption algorithms assume the
input length is a multiple of k octets, where k > 1, and
let the application define a method for handling inputs
whose lengths are not a multiple of k octets. For such
algorithms, the method shall be to pad the input at the
trailing end with k - (l mod k) octets all having value k -
(l mod k), where l is the length of the input. In other
words, the input is padded at the trailing end with one of
the following strings:
01 -- if l mod k = k-1
02 02 -- if l mod k = k-2
.
.
.
k k ... k k -- if l mod k = 0
The padding can be removed unambiguously since all input is
padded and no padding string is a suffix of another. This
padding method is well-defined if and only if k < 256;
methods for larger k are an open issue for further study.
'''
def __init__(self, k=16):
self.k = k
# @param text The padded text for which the padding is to be removed.
# @exception ValueError Raised when the input padding is missing or corrupt.
def decode(self, text):
'''
Remove the PKCS#7 padding from a text string
'''
nl = len(text)
try:
val = int(binascii.hexlify(text[-1]), 16)
except TypeError:
val = text[-1]
if val > self.k:
raise ValueError('Input is not padded or padding is corrupt')
l = nl - val
return text[:l]
# @param text The text to encode.
def encode(self, text):
'''
Pad an input string according to PKCS#7
'''
l = len(text)
        output = StringIO()
        val = self.k - (l % self.k)
        for _ in range(val):
            output.write('%02x' % val)
return text + binascii.unhexlify(output.getvalue())
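# Round-trip sketch (added): the default block size k=16 matches AES.
if __name__ == '__main__':
    coder = PKCS7Encoder()
    padded = coder.encode(b'hello world')  # 11 bytes padded to 16
    assert len(padded) % coder.k == 0
    assert coder.decode(padded) == b'hello world'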
|
{
"content_hash": "0f05fe2593148484b5e5533cc1e5dc16",
"timestamp": "",
"source": "github",
"line_count": 65,
"max_line_length": 80,
"avg_line_length": 31.876923076923077,
"alnum_prop": 0.5815637065637066,
"repo_name": "wileyyugioh/kissanime_dl",
"id": "ffb6322e12945f1fabd191709462b097c2297711",
"size": "2149",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "kissanime_dl/pkcs7.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "45943"
}
],
"symlink_target": ""
}
|
"""
Single label image classification solution. Typical usage is:
Run preprocess() or preprocess_async() to preprocess data for training.
Run train() or train_async() to train models.
Run predict(), batch_predict(), batch_predict_async() to perform predictions.
The trained model can also be deployed online with google.datalab.ml.ModelVersions.deploy() call.
"""
from ._api import preprocess, preprocess_async, train, train_async, predict, batch_predict, \
batch_predict_async
__all__ = ['preprocess', 'preprocess_async', 'train', 'train_async', 'predict', 'batch_predict',
'batch_predict_async']
|
{
"content_hash": "446e8469b922084d5050a48a4b8a98a6",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 97,
"avg_line_length": 41.13333333333333,
"alnum_prop": 0.7277147487844409,
"repo_name": "yebrahim/pydatalab",
"id": "e81a13d2aa864ff598c0c65cdd73c79b37535b64",
"size": "1206",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "solutionbox/image_classification/mltoolbox/image/classification/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "7596"
},
{
"name": "Python",
"bytes": "2406009"
},
{
"name": "Shell",
"bytes": "4256"
},
{
"name": "TypeScript",
"bytes": "105309"
}
],
"symlink_target": ""
}
|
from django.contrib import admin
from .models import Customer
class CustomerAdmin(admin.ModelAdmin):
list_display = ['name', 'is_active']
admin.site.register(Customer, CustomerAdmin)
|
{
"content_hash": "fc3f5deb1da4b494603d252e5c58097b",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 44,
"avg_line_length": 21.22222222222222,
"alnum_prop": 0.7643979057591623,
"repo_name": "moshthepitt/answers",
"id": "f6983e4d9416135e5526436f2c362f9d5b6e39f8",
"size": "191",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "saas/admin.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "21826"
},
{
"name": "HTML",
"bytes": "78587"
},
{
"name": "JavaScript",
"bytes": "8141"
},
{
"name": "Python",
"bytes": "157546"
}
],
"symlink_target": ""
}
|
import sys
from oslo_config import cfg
from oslo_log import log as logging
from neutron.agent import dhcp_agent
from neutron.cmd.sanity import checks
from neutron.common import config
from neutron.db import l3_hamode_db
from neutron.i18n import _LE, _LW
LOG = logging.getLogger(__name__)
def setup_conf():
cfg.CONF.import_group('AGENT', 'neutron.plugins.ml2.drivers.openvswitch.'
'agent.common.config')
cfg.CONF.import_group('OVS', 'neutron.plugins.ml2.drivers.openvswitch.'
'agent.common.config')
cfg.CONF.import_group('VXLAN', 'neutron.plugins.ml2.drivers.linuxbridge.'
'agent.common.config')
cfg.CONF.import_group('ml2', 'neutron.plugins.ml2.config')
cfg.CONF.import_group('ml2_sriov',
'neutron.plugins.ml2.drivers.mech_sriov.mech_driver.'
'mech_driver')
dhcp_agent.register_options()
cfg.CONF.register_opts(l3_hamode_db.L3_HA_OPTS)
class BoolOptCallback(cfg.BoolOpt):
def __init__(self, name, callback, **kwargs):
if 'default' not in kwargs:
kwargs['default'] = False
self.callback = callback
super(BoolOptCallback, self).__init__(name, **kwargs)
def check_ovs_vxlan():
result = checks.ovs_vxlan_supported()
if not result:
LOG.error(_LE('Check for Open vSwitch VXLAN support failed. '
'Please ensure that the version of openvswitch '
'being used has VXLAN support.'))
return result
def check_ovs_geneve():
result = checks.ovs_geneve_supported()
if not result:
LOG.error(_LE('Check for Open vSwitch Geneve support failed. '
'Please ensure that the version of openvswitch '
'and kernel being used has Geneve support.'))
return result
def check_iproute2_vxlan():
result = checks.iproute2_vxlan_supported()
if not result:
LOG.error(_LE('Check for iproute2 VXLAN support failed. Please ensure '
'that the iproute2 has VXLAN support.'))
return result
def check_ovs_patch():
result = checks.patch_supported()
if not result:
LOG.error(_LE('Check for Open vSwitch patch port support failed. '
'Please ensure that the version of openvswitch '
'being used has patch port support or disable features '
'requiring patch ports (gre/vxlan, etc.).'))
return result
def check_read_netns():
required = checks.netns_read_requires_helper()
if not required and cfg.CONF.AGENT.use_helper_for_ns_read:
LOG.warning(_LW("The user that is executing neutron can read the "
"namespaces without using the root_helper. Disable "
"the use_helper_for_ns_read option to avoid a "
"performance impact."))
# Don't fail because nothing is actually broken. Just not optimal.
result = True
elif required and not cfg.CONF.AGENT.use_helper_for_ns_read:
LOG.error(_LE("The user that is executing neutron does not have "
"permissions to read the namespaces. Enable the "
"use_helper_for_ns_read configuration option."))
result = False
else:
# everything is configured appropriately
result = True
return result
# NOTE(ihrachyshka): since the minimal version is currently capped due to
# missing hwaddr matching in dnsmasq < 2.67, a better version of the check
# would actually start a dnsmasq server and issue a DHCP request using an IPv6
# DHCP client.
def check_dnsmasq_version():
result = checks.dnsmasq_version_supported()
if not result:
LOG.error(_LE('The installed version of dnsmasq is too old. '
'Please update to at least version %s.'),
checks.get_minimal_dnsmasq_version_supported())
return result
def check_keepalived_ipv6_support():
result = checks.keepalived_ipv6_supported()
if not result:
LOG.error(_LE('The installed version of keepalived does not support '
'IPv6. Please update to at least version 1.2.10 for '
'IPv6 support.'))
return result
def check_dibbler_version():
result = checks.dibbler_version_supported()
if not result:
LOG.error(_LE('The installed version of dibbler-client is too old. '
'Please update to at least version %s.'),
checks.get_minimal_dibbler_version_supported())
return result
def check_nova_notify():
result = checks.nova_notify_supported()
if not result:
LOG.error(_LE('Nova notifications are enabled, but novaclient is not '
'installed. Either disable nova notifications or '
'install python-novaclient.'))
return result
def check_arp_responder():
result = checks.arp_responder_supported()
if not result:
LOG.error(_LE('Check for Open vSwitch ARP responder support failed. '
'Please ensure that the version of openvswitch '
'being used has ARP flows support.'))
return result
def check_arp_header_match():
result = checks.arp_header_match_supported()
if not result:
LOG.error(_LE('Check for Open vSwitch support of ARP header matching '
'failed. ARP spoofing suppression will not work. A '
'newer version of OVS is required.'))
return result
def check_vf_management():
result = checks.vf_management_supported()
if not result:
LOG.error(_LE('Check for VF management support failed. '
'Please ensure that the version of ip link '
'being used has VF support.'))
return result
def check_ovsdb_native():
cfg.CONF.set_override('ovsdb_interface', 'native', group='OVS')
result = checks.ovsdb_native_supported()
if not result:
LOG.error(_LE('Check for native OVSDB support failed.'))
return result
def check_ebtables():
result = checks.ebtables_supported()
if not result:
LOG.error(_LE('Cannot run ebtables. Please ensure that it '
'is installed.'))
return result
# Define CLI opts to test specific features, with a callback for the test
OPTS = [
BoolOptCallback('ovs_vxlan', check_ovs_vxlan, default=False,
help=_('Check for OVS vxlan support')),
BoolOptCallback('ovs_geneve', check_ovs_geneve, default=False,
help=_('Check for OVS Geneve support')),
BoolOptCallback('iproute2_vxlan', check_iproute2_vxlan, default=False,
help=_('Check for iproute2 vxlan support')),
BoolOptCallback('ovs_patch', check_ovs_patch, default=False,
help=_('Check for patch port support')),
BoolOptCallback('nova_notify', check_nova_notify,
help=_('Check for nova notification support')),
BoolOptCallback('arp_responder', check_arp_responder,
help=_('Check for ARP responder support')),
BoolOptCallback('arp_header_match', check_arp_header_match,
help=_('Check for ARP header match support')),
BoolOptCallback('vf_management', check_vf_management,
help=_('Check for VF management support')),
BoolOptCallback('read_netns', check_read_netns,
help=_('Check netns permission settings')),
BoolOptCallback('dnsmasq_version', check_dnsmasq_version,
help=_('Check minimal dnsmasq version')),
BoolOptCallback('ovsdb_native', check_ovsdb_native,
help=_('Check ovsdb native interface support')),
BoolOptCallback('ebtables_installed', check_ebtables,
help=_('Check ebtables installation')),
BoolOptCallback('keepalived_ipv6_support', check_keepalived_ipv6_support,
help=_('Check keepalived IPv6 support')),
BoolOptCallback('dibbler_version', check_dibbler_version,
help=_('Check minimal dibbler version')),
]
def enable_tests_from_config():
"""If a test can depend on configuration, use this function to set the
appropriate CLI option to enable that test. It will then be possible to
run all necessary tests, just by passing in the appropriate configs.
"""
if 'vxlan' in cfg.CONF.AGENT.tunnel_types:
cfg.CONF.set_override('ovs_vxlan', True)
if 'geneve' in cfg.CONF.AGENT.tunnel_types:
cfg.CONF.set_override('ovs_geneve', True)
if ('vxlan' in cfg.CONF.ml2.type_drivers or
cfg.CONF.VXLAN.enable_vxlan):
cfg.CONF.set_override('iproute2_vxlan', True)
if cfg.CONF.AGENT.tunnel_types:
cfg.CONF.set_override('ovs_patch', True)
if not cfg.CONF.OVS.use_veth_interconnection:
cfg.CONF.set_override('ovs_patch', True)
if (cfg.CONF.notify_nova_on_port_status_changes or
cfg.CONF.notify_nova_on_port_data_changes):
cfg.CONF.set_override('nova_notify', True)
if cfg.CONF.AGENT.arp_responder:
cfg.CONF.set_override('arp_responder', True)
if cfg.CONF.AGENT.prevent_arp_spoofing:
cfg.CONF.set_override('arp_header_match', True)
if cfg.CONF.ml2_sriov.agent_required:
cfg.CONF.set_override('vf_management', True)
if not cfg.CONF.AGENT.use_helper_for_ns_read:
cfg.CONF.set_override('read_netns', True)
if cfg.CONF.dhcp_driver == 'neutron.agent.linux.dhcp.Dnsmasq':
cfg.CONF.set_override('dnsmasq_version', True)
if cfg.CONF.OVS.ovsdb_interface == 'native':
cfg.CONF.set_override('ovsdb_native', True)
if cfg.CONF.l3_ha:
cfg.CONF.set_override('keepalived_ipv6_support', True)
def all_tests_passed():
return all(opt.callback() for opt in OPTS if cfg.CONF.get(opt.name))
def main():
setup_conf()
cfg.CONF.register_cli_opts(OPTS)
cfg.CONF.set_override('use_stderr', True)
config.setup_logging()
config.init(sys.argv[1:], default_config_files=[])
if cfg.CONF.config_file:
enable_tests_from_config()
return 0 if all_tests_passed() else 1
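# Example (illustrative, not part of the upstream CLI): the individual checks
# can also be driven programmatically once the config is set up; this assumes
# neutron and its sanity-check dependencies are importable on the host.
#
#     from neutron.cmd import sanity_check
#     sanity_check.setup_conf()
#     ok = sanity_check.check_ovs_patch()  # logs an error and returns False on failure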
|
{
"content_hash": "5bbfd9439d7886234e95cdf58a3cae23",
"timestamp": "",
"source": "github",
"line_count": 261,
"max_line_length": 79,
"avg_line_length": 39.26053639846743,
"alnum_prop": 0.6304284180735825,
"repo_name": "mmnelemane/neutron",
"id": "6d3b3dfe7ae47eb433880ac0abf97913f6c23b15",
"size": "10888",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "neutron/cmd/sanity_check.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Mako",
"bytes": "1047"
},
{
"name": "Python",
"bytes": "7831412"
},
{
"name": "Shell",
"bytes": "13830"
}
],
"symlink_target": ""
}
|
from __future__ import print_function, division, unicode_literals, absolute_import
from builtins import map, range
def get_vox_dims(volume):
import nibabel as nb
from nipype.utils import NUMPY_MMAP
if isinstance(volume, list):
volume = volume[0]
nii = nb.load(volume, mmap=NUMPY_MMAP)
hdr = nii.header
voxdims = hdr.get_zooms()
return [float(voxdims[0]), float(voxdims[1]), float(voxdims[2])]
def get_data_dims(volume):
import nibabel as nb
from nipype.utils import NUMPY_MMAP
if isinstance(volume, list):
volume = volume[0]
nii = nb.load(volume, mmap=NUMPY_MMAP)
hdr = nii.header
datadims = hdr.get_data_shape()
return [int(datadims[0]), int(datadims[1]), int(datadims[2])]
def get_affine(volume):
import nibabel as nb
from nipype.utils import NUMPY_MMAP
nii = nb.load(volume, mmap=NUMPY_MMAP)
return nii.affine
def select_aparc(list_of_files):
    # Return the first aparc+aseg.mgz volume found in the list.
    for in_file in list_of_files:
        if 'aparc+aseg.mgz' in in_file:
            return in_file
def select_aparc_annot(list_of_files):
    # Return the first .aparc.annot file found in the list.
    for in_file in list_of_files:
        if '.aparc.annot' in in_file:
            return in_file
def region_list_from_volume(in_file):
import nibabel as nb
import numpy as np
from nipype.utils import NUMPY_MMAP
segmentation = nb.load(in_file, mmap=NUMPY_MMAP)
segmentationdata = segmentation.get_data()
rois = np.unique(segmentationdata)
region_list = list(rois)
region_list.sort()
region_list.remove(0)
region_list = list(map(int, region_list))
return region_list
def id_list_from_lookup_table(lookup_file, region_list):
import numpy as np
LUTlabelsRGBA = np.loadtxt(lookup_file, skiprows=4, usecols=[0, 1, 2, 3, 4, 5], comments='#',
dtype={'names': ('index', 'label', 'R', 'G', 'B', 'A'), 'formats': ('int', '|S30', 'int', 'int', 'int', 'int')})
numLUTLabels = np.size(LUTlabelsRGBA)
LUTlabelDict = {}
for labels in range(0, numLUTLabels):
LUTlabelDict[LUTlabelsRGBA[labels][0]] = [LUTlabelsRGBA[labels][1],
LUTlabelsRGBA[labels][2], LUTlabelsRGBA[labels][3],
LUTlabelsRGBA[labels][4], LUTlabelsRGBA[labels][5]]
id_list = []
for region in region_list:
label = LUTlabelDict[region][0]
id_list.append(label)
id_list = list(map(str, id_list))
return id_list
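# Example (hypothetical file paths): chaining the helpers above to map a
# FreeSurfer segmentation onto lookup-table label names:
#
#     regions = region_list_from_volume('aparc+aseg.nii.gz')
#     labels = id_list_from_lookup_table('FreeSurferColorLUT.txt', regions)
#     # labels is a list of label-name strings, one per nonzero region id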
|
{
"content_hash": "a0ed683f33ecc29080dbe64224e2a943",
"timestamp": "",
"source": "github",
"line_count": 78,
"max_line_length": 143,
"avg_line_length": 33.11538461538461,
"alnum_prop": 0.6209833526906697,
"repo_name": "mick-d/nipype",
"id": "dba7fb909396a95203acd9d5d815643c21f1a882",
"size": "2721",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "nipype/workflows/misc/utils.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "9823"
},
{
"name": "KiCad",
"bytes": "3797"
},
{
"name": "Makefile",
"bytes": "1854"
},
{
"name": "Matlab",
"bytes": "1999"
},
{
"name": "Python",
"bytes": "4607773"
},
{
"name": "Shell",
"bytes": "380"
},
{
"name": "Tcl",
"bytes": "43408"
}
],
"symlink_target": ""
}
|
r"""
This code was generated by
\ / _ _ _| _ _
| (_)\/(_)(_|\/| |(/_ v1.0.0
/ /
"""
from twilio.base import values
from twilio.base.instance_context import InstanceContext
from twilio.base.instance_resource import InstanceResource
from twilio.base.list_resource import ListResource
from twilio.base.page import Page
class DialogueList(ListResource):
""" PLEASE NOTE that this class contains preview products that are subject
to change. Use them with caution. If you currently do not have developer
preview access, please contact help@twilio.com. """
def __init__(self, version, assistant_sid):
"""
Initialize the DialogueList
:param Version version: Version that contains the resource
:param assistant_sid: The SID of the Assistant that is the parent of the resource
:returns: twilio.rest.autopilot.v1.assistant.dialogue.DialogueList
:rtype: twilio.rest.autopilot.v1.assistant.dialogue.DialogueList
"""
super(DialogueList, self).__init__(version)
# Path Solution
self._solution = {'assistant_sid': assistant_sid, }
def get(self, sid):
"""
Constructs a DialogueContext
:param sid: The unique string that identifies the resource
:returns: twilio.rest.autopilot.v1.assistant.dialogue.DialogueContext
:rtype: twilio.rest.autopilot.v1.assistant.dialogue.DialogueContext
"""
return DialogueContext(self._version, assistant_sid=self._solution['assistant_sid'], sid=sid, )
def __call__(self, sid):
"""
Constructs a DialogueContext
:param sid: The unique string that identifies the resource
:returns: twilio.rest.autopilot.v1.assistant.dialogue.DialogueContext
:rtype: twilio.rest.autopilot.v1.assistant.dialogue.DialogueContext
"""
return DialogueContext(self._version, assistant_sid=self._solution['assistant_sid'], sid=sid, )
def __repr__(self):
"""
Provide a friendly representation
:returns: Machine friendly representation
:rtype: str
"""
return '<Twilio.Autopilot.V1.DialogueList>'
class DialoguePage(Page):
""" PLEASE NOTE that this class contains preview products that are subject
to change. Use them with caution. If you currently do not have developer
preview access, please contact help@twilio.com. """
def __init__(self, version, response, solution):
"""
Initialize the DialoguePage
:param Version version: Version that contains the resource
:param Response response: Response from the API
:param assistant_sid: The SID of the Assistant that is the parent of the resource
:returns: twilio.rest.autopilot.v1.assistant.dialogue.DialoguePage
:rtype: twilio.rest.autopilot.v1.assistant.dialogue.DialoguePage
"""
super(DialoguePage, self).__init__(version, response)
# Path Solution
self._solution = solution
def get_instance(self, payload):
"""
Build an instance of DialogueInstance
:param dict payload: Payload response from the API
:returns: twilio.rest.autopilot.v1.assistant.dialogue.DialogueInstance
:rtype: twilio.rest.autopilot.v1.assistant.dialogue.DialogueInstance
"""
return DialogueInstance(self._version, payload, assistant_sid=self._solution['assistant_sid'], )
def __repr__(self):
"""
Provide a friendly representation
:returns: Machine friendly representation
:rtype: str
"""
return '<Twilio.Autopilot.V1.DialoguePage>'
class DialogueContext(InstanceContext):
""" PLEASE NOTE that this class contains preview products that are subject
to change. Use them with caution. If you currently do not have developer
preview access, please contact help@twilio.com. """
def __init__(self, version, assistant_sid, sid):
"""
Initialize the DialogueContext
:param Version version: Version that contains the resource
:param assistant_sid: The SID of the Assistant that is the parent of the resource to fetch
:param sid: The unique string that identifies the resource
:returns: twilio.rest.autopilot.v1.assistant.dialogue.DialogueContext
:rtype: twilio.rest.autopilot.v1.assistant.dialogue.DialogueContext
"""
super(DialogueContext, self).__init__(version)
# Path Solution
self._solution = {'assistant_sid': assistant_sid, 'sid': sid, }
self._uri = '/Assistants/{assistant_sid}/Dialogues/{sid}'.format(**self._solution)
def fetch(self):
"""
Fetch the DialogueInstance
:returns: The fetched DialogueInstance
:rtype: twilio.rest.autopilot.v1.assistant.dialogue.DialogueInstance
"""
payload = self._version.fetch(method='GET', uri=self._uri, )
return DialogueInstance(
self._version,
payload,
assistant_sid=self._solution['assistant_sid'],
sid=self._solution['sid'],
)
def __repr__(self):
"""
Provide a friendly representation
:returns: Machine friendly representation
:rtype: str
"""
context = ' '.join('{}={}'.format(k, v) for k, v in self._solution.items())
return '<Twilio.Autopilot.V1.DialogueContext {}>'.format(context)
class DialogueInstance(InstanceResource):
""" PLEASE NOTE that this class contains preview products that are subject
to change. Use them with caution. If you currently do not have developer
preview access, please contact help@twilio.com. """
def __init__(self, version, payload, assistant_sid, sid=None):
"""
Initialize the DialogueInstance
:returns: twilio.rest.autopilot.v1.assistant.dialogue.DialogueInstance
:rtype: twilio.rest.autopilot.v1.assistant.dialogue.DialogueInstance
"""
super(DialogueInstance, self).__init__(version)
# Marshaled Properties
self._properties = {
'account_sid': payload.get('account_sid'),
'assistant_sid': payload.get('assistant_sid'),
'sid': payload.get('sid'),
'data': payload.get('data'),
'url': payload.get('url'),
}
# Context
self._context = None
self._solution = {'assistant_sid': assistant_sid, 'sid': sid or self._properties['sid'], }
@property
def _proxy(self):
"""
Generate an instance context for the instance, the context is capable of
performing various actions. All instance actions are proxied to the context
:returns: DialogueContext for this DialogueInstance
:rtype: twilio.rest.autopilot.v1.assistant.dialogue.DialogueContext
"""
if self._context is None:
self._context = DialogueContext(
self._version,
assistant_sid=self._solution['assistant_sid'],
sid=self._solution['sid'],
)
return self._context
@property
def account_sid(self):
"""
:returns: The SID of the Account that created the resource
:rtype: unicode
"""
return self._properties['account_sid']
@property
def assistant_sid(self):
"""
:returns: The SID of the Assistant that is the parent of the resource
:rtype: unicode
"""
return self._properties['assistant_sid']
@property
def sid(self):
"""
:returns: The unique string that identifies the resource
:rtype: unicode
"""
return self._properties['sid']
@property
def data(self):
"""
:returns: The JSON string that describes the dialogue session object
:rtype: dict
"""
return self._properties['data']
@property
def url(self):
"""
:returns: The absolute URL of the Dialogue resource
:rtype: unicode
"""
return self._properties['url']
def fetch(self):
"""
Fetch the DialogueInstance
:returns: The fetched DialogueInstance
:rtype: twilio.rest.autopilot.v1.assistant.dialogue.DialogueInstance
"""
return self._proxy.fetch()
def __repr__(self):
"""
Provide a friendly representation
:returns: Machine friendly representation
:rtype: str
"""
context = ' '.join('{}={}'.format(k, v) for k, v in self._solution.items())
return '<Twilio.Autopilot.V1.DialogueInstance {}>'.format(context)
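# Example (sketch, assuming valid credentials; the SID values below are
# placeholders): fetching a Dialogue through the top-level REST client.
#
#     from twilio.rest import Client
#     client = Client('ACCOUNT_SID', 'AUTH_TOKEN')
#     dialogue = client.autopilot.v1 \
#         .assistants('UAXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX') \
#         .dialogues('UKXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX') \
#         .fetch()
#     print(dialogue.data)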
|
{
"content_hash": "45c5c617aefb2edb0b4bfdf9fc45d4ec",
"timestamp": "",
"source": "github",
"line_count": 259,
"max_line_length": 104,
"avg_line_length": 33.50579150579151,
"alnum_prop": 0.630560036874856,
"repo_name": "twilio/twilio-python",
"id": "464ba35d2ebbb5fe771a2184249c11b4c9b97d64",
"size": "8693",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "twilio/rest/autopilot/v1/assistant/dialogue.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "234"
},
{
"name": "Makefile",
"bytes": "2157"
},
{
"name": "Python",
"bytes": "11241545"
}
],
"symlink_target": ""
}
|
import pytest
import uuid
import os
import contextlib
from ipyimport import gather
@contextlib.contextmanager
def stub_file(basedir):
if not os.path.exists(basedir):
os.mkdir(basedir)
filename = os.path.join(basedir, str(uuid.uuid4()) + '.py')
with open(filename, 'w') as outf:
yield outf
# Clean up
os.remove(filename)
try:
os.rmdir(basedir)
    except OSError:
pass
def write_imports_to_file(file_obj, modules_to_import):
content = '\n'.join(['import %s' % m for m in modules_to_import])
file_obj.write(content)
file_obj.flush()
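# Example (illustrative): the two helpers above build a throwaway module whose
# imports deliberately fail, which is what the tests below exercise:
#
#     with stub_file('testing') as f:
#         write_imports_to_file(f, ['os', 'not_a_real_package'])
#         # f.name is a temporary .py file under testing/ containing the imports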
@pytest.mark.parametrize("modules, expected", [
(['foobar', 're'], ['foobar']),
(['foobar', 'foobar.baz'], ['foobar']),
(['foo', 'bar'], ['foo', 'bar']),
])
def test_find_imported_modules(modules, expected):
with stub_file('testing') as file1:
write_imports_to_file(file1, modules)
failed_imports = gather.find_failed_imports(file1.name)
assert set(expected) == set(failed_imports)
@pytest.mark.parametrize("modules, expected", [
(['foobar', 're'], ['foobar']),
(['foobar', 'foobar.baz'], ['foobar']),
(['foo', 'bar'], ['foo', 'bar']),
])
def test_find_imported_modules_for_subdir(modules, expected):
basedir = 'testing'
num_modules = len(modules)
with stub_file(basedir) as file1:
with stub_file(basedir) as file2:
write_imports_to_file(file1, modules[:num_modules // 2])
write_imports_to_file(file2, modules[num_modules // 2:])
failed_imports = gather.find_failed_imports_by_directory(basedir)
assert set(expected) == failed_imports
|
{
"content_hash": "8d206e5bab3ea73acbd76149e82aedef",
"timestamp": "",
"source": "github",
"line_count": 55,
"max_line_length": 77,
"avg_line_length": 29.87272727272727,
"alnum_prop": 0.6269020085209982,
"repo_name": "striglia/ipyimport",
"id": "3a375aa11b4295fc125201fc37ab67cc234dadb1",
"size": "1643",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_gather.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "3669"
}
],
"symlink_target": ""
}
|
from flask import Flask, jsonify, request
import CHIP_IO.GPIO as GPIO
import CHIP_IO.PWM as PWM
import CHIP_IO.SOFTPWM as SPWM
import CHIP_IO.SERVO as SERVO
import CHIP_IO.LRADC as LRADC
import CHIP_IO.OverlayManager as OM
import CHIP_IO.Utilities as UT
import random
import math
import copy
import sys
import signal
import paho.mqtt.client as mqtt
# Signal Handling
def sig_handler(signum, frame):
if not UT.is_chip_pro():
OM.unload("PWM0")
UT.disable_1v8_pin()
GPIO.cleanup()
PWM.cleanup()
SPWM.cleanup()
SERVO.cleanup()
sys.exit(0)
signal.signal(signal.SIGTERM, sig_handler)
# Classes
class CHIP_RestAPI(Flask):
def __init__(self, *args, **kwargs):
super(CHIP_RestAPI, self).__init__(*args, **kwargs)
# Figure out if we're a CHIP or CHIP Pro automatically
hw = "chip"
if UT.is_chip_pro():
hw = "chip pro"
# Build initial CHIP_INFO
self.CHIP_INFO = {
"id" : "001",
"name" : "CHIPDEV",
"hardware" : hw,
"connected" : False
}
self.VARIABLES = {}
self.FUNCTIONS = {}
self.PINS_IN_USE = []
def make_id(self,mylen):
text = ""
possible = "abcdefghijklmnopqrstuvwxyz0123456789"
        for i in range(mylen):
text += possible[int(math.floor(random.random() * len(possible)))]
return text
def set_id(self,id):
self.CHIP_INFO["id"] = id
def get_id(self):
return self.CHIP_INFO["id"]
def set_name(self,name):
self.CHIP_INFO["name"] = name
def get_name(self):
return self.CHIP_INFO["name"]
def set_hardware(self,hw):
self.CHIP_INFO["hardware"] = hw
def get_hardware(self):
return self.CHIP_INFO["hardware"]
def set_variable(self,name,value):
self.VARIABLES[name] = value
def get_variable(self,name):
if name in self.VARIABLES:
return self.VARIABLES[name]
else:
return -1
def set_function(self,name,funct):
self.FUNCTIONS[name] = funct
def api_debug(self):
resp = copy.deepcopy(self.CHIP_INFO)
resp["connected"] = True
resp["message"] = "Debug Data"
resp["variables"] = self.VARIABLES
resp["functions"] = self.FUNCTIONS
resp["pins in use"] = self.PINS_IN_USE
return jsonify(resp)
def api_chipio_version(self):
resp = copy.deepcopy(self.CHIP_INFO)
resp["connected"] = True
resp["message"] = GPIO.VERSION
return jsonify(resp)
def api_get_variables(self,variable,req_method,req_args):
resp = copy.deepcopy(self.CHIP_INFO)
resp["connected"] = True
if variable in self.VARIABLES:
if req_method == 'GET':
resp["connected"] = True
resp[variable] = self.VARIABLES[variable]
elif req_method in ['PUT','POST']:
value = req_args.get('value')
resp["connected"] = True
self.VARIABLES[variable] = value
resp[variable] = value
elif req_method == 'DELETE':
tmp = self.VARIABLES.pop(variable,None)
resp["message"] = "Variable {0} deleted".format(variable)
if variable in self.FUNCTIONS:
# Handle function arguments
if req_args:
ddict = req_args.to_dict()
rtn = self.FUNCTIONS[variable](**ddict)
resp["return_value"] = rtn
else:
rtn = self.FUNCTIONS[variable]()
resp["return_value"] = rtn
return jsonify(resp)
def api_digital_cleanup(self):
resp = copy.deepcopy(self.CHIP_INFO)
resp["connected"] = True
resp["message"] = "All GPIO pins cleaned up"
self.PINS_IN_USE = []
GPIO.cleanup()
return jsonify(resp)
def api_digital_pin_cleanup(self,pin):
resp = copy.deepcopy(self.CHIP_INFO)
resp["connected"] = True
if not self.PINS_IN_USE:
resp["message"] = "No pins currently setup"
else:
pin = pin.upper()
if pin not in self.PINS_IN_USE:
resp["message"] = "Pin not previously in use"
else:
resp["message"] = "Cleaning up %s" % pin
GPIO.cleanup(pin)
self.PINS_IN_USE.remove(pin)
return jsonify(resp)
def api_digital_write(self,pin,value):
resp = copy.deepcopy(self.CHIP_INFO)
resp["connected"] = True
pin = pin.upper()
# Setup pin if it isn't already and then add it
if pin not in self.PINS_IN_USE:
GPIO.setup(pin,GPIO.OUT)
self.PINS_IN_USE.append(pin)
# Write data to the pin
if value == 0:
resp["message"] = "Writing 0 to " + pin
GPIO.output(pin,GPIO.LOW)
elif value == 1:
resp["message"] = "Writing 1 to " + pin
GPIO.output(pin,GPIO.HIGH)
else:
resp["message"] = "Invalid value specified for " + pin
return jsonify(resp)
def api_digital_read(self,pin):
resp = copy.deepcopy(self.CHIP_INFO)
resp["connected"] = True
pin = pin.upper()
# Setup pin if it isn't already and then add it
if pin not in self.PINS_IN_USE:
GPIO.setup(pin,GPIO.IN)
self.PINS_IN_USE.append(pin)
# Read the pin
resp["message"] = GPIO.input(pin)
return jsonify(resp)
def api_lradc_data(self,mode,dat,req_method,req_args):
resp = copy.deepcopy(self.CHIP_INFO)
resp["connected"] = True
resp["mode"] = mode
# Get Sample Rate
if mode == "sample_rate" and dat == None and req_method == 'GET':
resp["message"] = LRADC.get_sample_rate()
# Set Sample Rate
elif mode == "sample_rate" and dat != None and req_method in ['GET','PUT','POST']:
dat = float(dat)
if dat in [32.25,62.5,125,250]:
resp["message"] = "Setting LRADC Sample Rate to " + str(dat)
LRADC.set_sample_rate(dat)
# Scale Factor
elif mode == "scale_factor" and req_method == 'GET':
resp["message"] = LRADC.get_scale_factor()
# Get Data
elif (mode == "full" or mode == "raw") and req_method == 'GET':
dat = int(dat)
if dat not in [0,1]:
resp["message"] = "Invalid ADC Channel Specified"
elif dat == 0:
if mode == "full":
resp["message"] = LRADC.get_chan0()
elif mode == "raw":
resp["message"] = LRADC.get_chan0_raw()
elif dat == 1:
if mode == "full":
resp["message"] = LRADC.get_chan1()
elif mode == "raw":
resp["message"] = LRADC.get_chan1_raw()
else:
resp["message"] = "invalid command"
return jsonify(resp)
    def api_unexport_all_pins(self):
UT.unexport_all()
resp = copy.deepcopy(self.CHIP_INFO)
resp["connected"] = True
resp["message"] = "Unexporting all the Pins"
return jsonify(resp)
    def api_handle_1v8pin(self, command, voltage=None):
resp = copy.deepcopy(self.CHIP_INFO)
resp["connected"] = True
# If the command is "voltage" we are requesting the current voltage setting
if command == "voltage" and not UT.is_chip_pro():
resp["message"] = "1.8V Pin Current Voltage: " + str(UT.get_1v8_pin_voltage())
# Disable the 1v8 Pin
elif command == "disable" and not UT.is_chip_pro():
resp["message"] = "Disabling the 1.8V Pin"
UT.disable_1v8_pin()
elif command == "enable" and not UT.is_chip_pro():
# Enable the 1v8 Pin
voltage = float(voltage)
if voltage not in [1.8, 2.0, 2.6, 3.3]:
resp["message"] = "invalid voltage specified"
else:
resp["message"] = "Enabling the 1.8V Pin to " + str(voltage)
UT.set_1v8_pin_voltage(voltage)
else:
resp["message"] = "invalid command"
return jsonify(resp)
def api_pwm(self,chan,command,option,req_method,req_args):
resp = copy.deepcopy(self.CHIP_INFO)
resp["connected"] = True
# Default the channel to PWM0
# CHIP Pro will support PWM1
cname = "PWM0"
chan = int(chan)
        if chan not in [0]:  # CHIP Pro will add channel 1, i.e. [0, 1]
resp["message"] = "Invalid PWM Channel Specified"
return jsonify(resp)
else:
if chan == 0:
cname = "PWM0"
elif chan == 1:
cname = "PWM1"
# Figure out our command
if command == "start" and req_method == 'GET':
# Load the overlay
if not UT.is_chip_pro():
OM.load(cname)
# Get the arguments
            # Query-string values arrive as strings, so coerce them explicitly
            duty_cycle = float(req_args.get('duty_cycle', 25.0))
            frequency = float(req_args.get('frequency', 200.0))
            polarity = int(req_args.get('polarity', 0))
# Start the PWM
PWM.start(cname,duty_cycle,frequency,polarity)
resp["message"] = "Setting {0} to duty cycle: {1}, frequency: {2}, and polarity {3}".format(cname,duty_cycle,frequency,polarity)
elif command == "stop" and req_method == 'GET':
            PWM.stop(cname)
resp["message"] = "Stopping {0}".format(cname)
elif command == "cleanup" and req_method == 'GET':
# TODO: Handle per channel cleanup
PWM.cleanup()
if not UT.is_chip_pro():
OM.unload(cname)
resp["message"] = "Cleaning up and unloading {0}".format(cname)
elif command == "duty_cycle" and req_method in ['GET','PUT','POST']:
PWM.set_duty_cycle(cname, float(option))
resp["message"] = "Changing duty cycle on {0} to {1}".format(cname,option)
elif command == "frequency" and req_method in ['GET','PUT','POST']:
PWM.set_frequency(cname, float(option))
resp["message"] = "Changing frequency on {0} to {1}".format(cname,option)
return jsonify(resp)
def api_softpwm(self,pin,command,option,req_method,req_args):
resp = copy.deepcopy(self.CHIP_INFO)
resp["connected"] = True
# Figure out our command
if command == "start" and req_method == 'GET':
# Get the arguments
            # Query-string values arrive as strings, so coerce them explicitly
            duty_cycle = float(req_args.get('duty_cycle', 25.0))
            frequency = float(req_args.get('frequency', 35.0))
            polarity = int(req_args.get('polarity', 0))
# Start the SoftPWM
SPWM.start(pin,duty_cycle,frequency,polarity)
resp["message"] = "Setting {0} to duty cycle: {1}, frequency: {2}, and polarity {3}".format(pin,duty_cycle,frequency,polarity)
elif command == "stop" and req_method == 'GET':
SPWM.stop(pin)
resp["message"] = "Stopping {0}".format(pin)
elif command == "cleanup" and req_method == 'GET':
SPWM.cleanup(pin)
resp["message"] = "Cleaning up {0}".format(pin)
elif command == "duty_cycle" and req_method in ['GET','PUT','POST']:
SPWM.set_duty_cycle(pin, float(option))
resp["message"] = "Changing duty cycle on {0} to {1}".format(pin,option)
elif command == "frequency" and req_method in ['GET','PUT','POST']:
SPWM.set_frequency(pin, float(option))
resp["message"] = "Changing frequency on {0} to {1}".format(pin,option)
return jsonify(resp)
def api_servo(self,pin,command,option,req_method,req_args):
resp = copy.deepcopy(self.CHIP_INFO)
resp["connected"] = True
# Figure out the command
if command == "start" and req_method == 'GET':
# Get the arguments
            # Query-string values arrive as strings, so coerce them explicitly
            angle = float(req_args.get('angle', 0.0))
            max_range = float(req_args.get('max_range', 18.0))
# Start the servo
SERVO.start(pin,angle,max_range)
resp["message"] = "Setting {0} to angle: {1}, max_range: {2},".format(pin,angle,max_range)
elif command == "stop" and req_method == 'GET':
SERVO.stop(pin)
resp["message"] = "Stopping {0}".format(pin)
elif command == "cleanup" and req_method == 'GET':
SERVO.cleanup(pin)
resp["message"] = "Cleaning up {0}".format(pin)
elif command == "angle" and req_method in ['GET','PUT','POST']:
SERVO.set_angle(pin, float(option))
resp["message"] = "Changing angle on {0} to {1}".format(pin,option)
elif command == "frequency" and req_method in ['GET','PUT','POST']:
SERVO.set_range(pin, float(option))
resp["message"] = "Changing max range on {0} to {1}".format(pin,option)
return jsonify(resp)
# Flask App
app = CHIP_RestAPI(__name__)
# User Functions
def set_id(id):
app.set_id(id)
def set_name(name):
app.set_name(name)
def set_hardware(hw):
app.set_hardware(hw)
def variable(name,value):
app.set_variable(name, value)
def function(name,funct):
app.set_function(name,funct)
# ==== The REST App ====
# Actual Rest App
def RestApp(host="0.0.0.0",port=1883,debug=False):
try:
app.run(host=host,port=port,debug=debug)
except KeyboardInterrupt:
if not UT.is_chip_pro():
OM.unload("PWM0")
UT.disable_1v8_pin()
GPIO.cleanup()
PWM.cleanup()
SPWM.cleanup()
sys.exit(0)
# ==== API Basic Data ====
# Get the basic data
# GET: /
# GET: /id
@app.route('/', methods=['GET'])
@app.route('/id', methods=['GET'])
def index():
app.CHIP_INFO["connected"] = True
return jsonify(app.CHIP_INFO)
# Get and Set Variables
# Execute functions
# GET: /<variablename>
# PUT,POST: /<variablename>?value=<value>
# GET: /<functionname>
# GET: /<functionname>?value=<value>
@app.route('/<string:variable>', methods=['GET','PUT','POST','DELETE'])
def get_variables(variable=None):
return app.api_get_variables(variable,request.method,request.args)
# ==== API DEBUG ====
# GET: /debug
@app.route('/debug', methods=['GET'])
def get_api_debug():
return app.api_debug()
# ==== CHIP_IO Basics ====
# Get the CHIP_IO Version
# GET: /version
@app.route('/version', methods=['GET'])
def get_chipio_version():
return app.api_chipio_version()
# ==== GPIO ====
# Digital Read and Cleanup
# GET: /digital/cleanup
# GET: /digital/cleanup/<pinname>
# GET: /digital/<pinname>/r
# GET: /digital/<pinname>
@app.route('/digital/<string:command>', defaults={'pin' : None}, methods=['GET'])
@app.route('/digital/<string:command>/<string:pin>', methods=['GET'])
def digital_read_cleanup(command,pin=None):
# We want to cleanup
if command == "cleanup":
# If we have no mode specified (mode == pin)
        if pin is None:
return app.api_digital_cleanup()
# Do the pin cleanup, if the pin value isn't "r" for a read
elif pin != "r":
return app.api_digital_pin_cleanup(pin)
else:
# For the read, the pin name is the command
return app.api_digital_read(command)
# Digital Write
# GET,PUT,POST: /digital/<pinname>/[0,1]
@app.route('/digital/<string:pin>/<int:value>', methods=['GET','PUT','POST'])
def digital_write_command(pin,value):
return app.api_digital_write(pin,value)
# ==== PWM ====
# GET: /pwm/[0,1]/start?duty_cycle=<dutycycle>&frequency=<frequency>&polarity=[0,1]
# GET: /pwm/[0,1]/stop
# GET: /pwm/[0,1]/cleanup
# GET, PUT, POST: /pwm/[0,1]/duty_cycle/<dutycycle>
# GET, PUT, POST: /pwm/[0,1]/frequency/<frequency>
@app.route('/pwm/<int:chan>/<string:command>', methods=['GET','PUT','POST'])
@app.route('/pwm/<int:chan>/<string:command>/<string:option>', methods=['GET','PUT','POST'])
def pwm_all_commands(chan,command,option=None):
return app.api_pwm(chan,command,option,request.method,request.args)
# ==== SOFTPWM ====
# GET: /softpwm/<pinname>/start?duty_cycle=<dutycycle>&frequency=<frequency>&polarity=[0,1]
# GET: /softpwm/<pinname>/stop
# GET: /softpwm/<pinname>/cleanup
# GET, PUT, POST: /softpwm/<pinname>/duty_cycle/<dutycycle>
# GET, PUT, POST: /softpwm/<pinname>/frequency/<frequency>
@app.route('/softpwm/<string:pin>/<string:command>', methods=['GET','PUT','POST'])
@app.route('/softpwm/<string:pin>/<string:command>/<string:option>', methods=['GET','PUT','POST'])
def softpwm_all_commands(pin,command,option=None):
return app.api_softpwm(pin,command,option,request.method,request.args)
# ==== SERVO ====
# Methods
# GET: /servo/<pinname>/start?angle=<angle>&max_range=<range>
# GET: /servo/<pinname>/stop
# GET: /servo/<pinname>/cleanup
# GET, PUT, POST: /servo/<pinname>/angle/<angle>
# GET, PUT, POST: /servo/<pinname>/max_range/<range>
@app.route('/servo/<string:pin>/<string:command>', methods=['GET','PUT','POST'])
@app.route('/servo/<string:pin>/<string:command>/<string:option>', methods=['GET','PUT','POST'])
def servo_all_commands(pin,command,option=None):
    return app.api_servo(pin,command,option,request.method,request.args)
# ==== LRADC ====
# Methods
# GET: /analog/sample_rate
# GET: /analog/scale_factor
# GET: /analog/full/[0,1]
# GET: /analog/raw/[0,1]
# GET,PUT,POST: /analog/sample_rate/[32.25,62.5,125,250]
@app.route('/analog/<mode>', methods=['GET'])
@app.route('/analog/<string:mode>/<string:dat>', methods=['GET','PUT','POST'])
def get_lradc_data(mode,dat=None):
return app.api_lradc_data(mode,dat,request.method,request.args)
# ==== Utilities ====
# Methods
# GET: /unexport_all
@app.route('/unexport_all', methods=['GET'])
def unexport_all_pins():
return app.api_unexport_all_pins()
# GET: /1v8_pin/voltage
# GET: /1v8_pin/disable
# GET,PUT,POST: /1v8_pin/enable/[1.8,2.0,2.6,3.3]
@app.route('/1v8_pin/<string:command>', methods=['GET'])
@app.route('/1v8_pin/<string:command>/<float:voltage>', methods=['GET','PUT','POST'])
def handler_1v8pin(command,voltage=None):
    return app.api_handle_1v8pin(command, voltage)
# DEBUG Testing
if __name__ == '__main__':
RestApp(debug=True)
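# Example requests (illustrative; host/port assume the RestApp defaults above,
# and XIO-P0 stands in for any valid CHIP GPIO pin name):
#
#     curl http://0.0.0.0:1883/version
#     curl http://0.0.0.0:1883/digital/XIO-P0/1
#     curl "http://0.0.0.0:1883/pwm/0/start?duty_cycle=50&frequency=100&polarity=0"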
|
{
"content_hash": "7464d525bd6cadd34f5dbb60beb5df15",
"timestamp": "",
"source": "github",
"line_count": 519,
"max_line_length": 140,
"avg_line_length": 35.20616570327553,
"alnum_prop": 0.5729531523642732,
"repo_name": "xtacocorex/chip-python-aREST",
"id": "f041cb95e511a91ffcbbc46f1d75dc8d79d1d20b",
"size": "18650",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "CHIP_aREST/aREST.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "1228"
},
{
"name": "Python",
"bytes": "38562"
}
],
"symlink_target": ""
}
|
from __future__ import print_function
import os
import sys
sys.path.insert(1, os.path.join("..","..",".."))
import h2o
from h2o.exceptions import H2OValueError
from h2o.automl import H2OAutoML
from tests import pyunit_utils as pu
from _automl_utils import import_dataset
def test_get_best_model_per_family():
ds = import_dataset('binary')
aml = H2OAutoML(project_name="py_aml_best_model_per_family_test",
max_models=12,
seed=42)
aml.train(y=ds.target, training_frame=ds.train)
def _check_best_models(model_ids, criterion):
# test case insensitivity in algo specification
top_models = [aml.get_best_model(mtype, criterion) for mtype in ["deeplearning", "drf", "gbm", "GLM",
"STaCKeDEnsEmblE", "xgboost"]]
nones = [v is None for v in top_models]
assert sum(nones) <= 1 and len(nones) >= 6
seen = set()
top_model_ids = [m.model_id for m in top_models if m is not None]
for model_id in model_ids:
model_type = model_id.split("_")[0]
if model_type not in seen:
assert model_id in top_model_ids, "%s not found in top models %s" % (model_id, top_model_ids)
if model_type in ("DRF", "XRT"):
seen.update(["DRF", "XRT"])
else:
seen.add(model_type)
# Check default criterion
model_ids = aml.leaderboard.as_data_frame()["model_id"]
_check_best_models(model_ids, None)
# Check AUC criterion (the higher the better) and check case insensitivity
model_ids = aml.leaderboard.sort(by="auc", ascending=False).as_data_frame()["model_id"]
_check_best_models(model_ids, "AUC")
    # Check it works for a custom criterion (MSE)
model_ids = aml.leaderboard.sort(by="mse").as_data_frame()["model_id"]
_check_best_models(model_ids, "mse")
    # Check it works without specifying a model type
assert aml.get_best_model().model_id == aml.leaderboard[0, "model_id"]
# Check it works with just criterion
assert aml.get_best_model(criterion="mse").model_id == aml.leaderboard.sort(by="mse")[0, "model_id"]
# Check it works with extra_cols
top_model = h2o.automl.get_leaderboard(aml, extra_columns=["training_time_ms"]).sort(by="training_time_ms")[0, "model_id"]
assert aml.get_best_model(criterion="training_time_ms").model_id == top_model
# Check validation works
try:
aml.get_best_model(algorithm="GXboost")
assert False, "Algorithm validation does not work!"
except H2OValueError:
pass
try:
aml.get_best_model(criterion="lorem_ipsum_dolor_sit_amet")
assert False, "Criterion validation does not work!"
except H2OValueError:
pass
pu.run_tests([
test_get_best_model_per_family,
])
|
{
"content_hash": "52df16247b0542792bd8c0ae05986646",
"timestamp": "",
"source": "github",
"line_count": 74,
"max_line_length": 126,
"avg_line_length": 39.08108108108108,
"alnum_prop": 0.6189488243430152,
"repo_name": "h2oai/h2o-3",
"id": "1250481ee98b5f5ad8d64fab09ad7416bf6a383d",
"size": "2892",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "h2o-py/tests/testdir_algos/automl/pyunit_automl_get_best_model.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "12803"
},
{
"name": "CSS",
"bytes": "882321"
},
{
"name": "CoffeeScript",
"bytes": "7550"
},
{
"name": "DIGITAL Command Language",
"bytes": "106"
},
{
"name": "Dockerfile",
"bytes": "10459"
},
{
"name": "Emacs Lisp",
"bytes": "2226"
},
{
"name": "Groovy",
"bytes": "205646"
},
{
"name": "HCL",
"bytes": "36232"
},
{
"name": "HTML",
"bytes": "8018117"
},
{
"name": "HiveQL",
"bytes": "3985"
},
{
"name": "Java",
"bytes": "15981357"
},
{
"name": "JavaScript",
"bytes": "148426"
},
{
"name": "Jupyter Notebook",
"bytes": "20638329"
},
{
"name": "Makefile",
"bytes": "46043"
},
{
"name": "PHP",
"bytes": "800"
},
{
"name": "Python",
"bytes": "8188608"
},
{
"name": "R",
"bytes": "4149977"
},
{
"name": "Ruby",
"bytes": "64"
},
{
"name": "Sass",
"bytes": "23790"
},
{
"name": "Scala",
"bytes": "4845"
},
{
"name": "Shell",
"bytes": "214495"
},
{
"name": "Smarty",
"bytes": "1792"
},
{
"name": "TeX",
"bytes": "554940"
}
],
"symlink_target": ""
}
|
import datetime
import itertools
import json
import os
import random
import re
class JobData(object):
def __init__(self, jobkind, jobid, module, jobargs):
self.jobkind = jobkind
self.jobid = jobid
self.module = module
self.jobargs = jobargs
(self.input, self.triple, self.out, self.opt) = jobargs[0:4]
def is_driver_job(self):
"""Return true iff self measures a driver job"""
return self.jobkind == 'driver'
def is_frontend_job(self):
"""Return true iff self measures a frontend job"""
return self.jobkind == 'frontend'
class JobProfs(JobData):
"""Object denoting the profile of a single job run during a compilation,
corresponding to a single directory of profiles produced by a single
job process passed -stats-output-dir."""
def __init__(self, jobkind, jobid, module, jobargs, profiles):
self.profiles = profiles
super(JobProfs, self).__init__(jobkind, jobid, module, jobargs)
class JobStats(JobData):
"""Object holding the stats of a single job run during a compilation,
corresponding to a single JSON file produced by a single job process
passed -stats-output-dir."""
def __init__(self, jobkind, jobid, module, start_usec, dur_usec,
jobargs, stats):
self.start_usec = start_usec
self.dur_usec = dur_usec
self.stats = stats
super(JobStats, self).__init__(jobkind, jobid, module, jobargs)
def driver_jobs_ran(self):
"""Return the count of a driver job's ran sub-jobs"""
assert(self.is_driver_job())
return self.stats.get("Driver.NumDriverJobsRun", 0)
def driver_jobs_skipped(self):
"""Return the count of a driver job's skipped sub-jobs"""
assert(self.is_driver_job())
return self.stats.get("Driver.NumDriverJobsSkipped", 0)
def driver_jobs_total(self):
"""Return the total count of a driver job's ran + skipped sub-jobs"""
assert(self.is_driver_job())
return self.driver_jobs_ran() + self.driver_jobs_skipped()
def merged_with(self, other, merge_by="sum"):
"""Return a new JobStats, holding the merger of self and other"""
merged_stats = {}
ops = {"sum": lambda a, b: a + b,
# Because 0 is also a sentinel on counters we do a modified
# "nonzero-min" here. Not ideal but best we can do.
"min": lambda a, b: (min(a, b)
if a != 0 and b != 0
else max(a, b)),
"max": lambda a, b: max(a, b)}
op = ops[merge_by]
        for k, v in itertools.chain(self.stats.items(), other.stats.items()):
if k in merged_stats:
merged_stats[k] = op(v, merged_stats[k])
else:
merged_stats[k] = v
merged_kind = self.jobkind
if other.jobkind != merged_kind:
merged_kind = "<merged>"
merged_module = self.module
if other.module != merged_module:
merged_module = "<merged>"
merged_start = min(self.start_usec, other.start_usec)
merged_end = max(self.start_usec + self.dur_usec,
other.start_usec + other.dur_usec)
merged_dur = merged_end - merged_start
return JobStats(merged_kind, random.randint(0, 1000000000),
merged_module, merged_start, merged_dur,
self.jobargs + other.jobargs, merged_stats)
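    # Illustrative merge semantics (hypothetical values): with merge_by="min",
    # a zero counter is treated as "no sample" rather than a true minimum (see
    # the ops table above), so:
    #
    #     args = ['in.swift', 'x86_64', 'out.o', 'O0']   # placeholder jobargs
    #     a = JobStats('frontend', 1, 'M', 0, 10, args, {'C': 0})
    #     b = JobStats('frontend', 2, 'M', 5, 10, args, {'C': 7})
    #     a.merged_with(b, merge_by='min').stats['C']    # -> 7, not 0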
def prefixed_by(self, prefix):
prefixed_stats = dict([((prefix + "." + k), v)
for (k, v) in self.stats.items()])
return JobStats(self.jobkind, random.randint(0, 1000000000),
self.module, self.start_usec, self.dur_usec,
self.jobargs, prefixed_stats)
def divided_by(self, n):
divided_stats = dict([(k, v / n)
for (k, v) in self.stats.items()])
return JobStats(self.jobkind, random.randint(0, 1000000000),
self.module, self.start_usec, self.dur_usec,
self.jobargs, divided_stats)
def incrementality_percentage(self):
"""Assuming the job is a driver job, return the amount of
jobs that actually ran, as a percentage of the total number."""
assert(self.is_driver_job())
ran = self.driver_jobs_ran()
total = self.driver_jobs_total()
return round((float(ran) / float(total)) * 100.0, 2)
def to_catapult_trace_obj(self):
"""Return a JSON-formattable object fitting chrome's
'catapult' trace format"""
return {"name": self.module,
"cat": self.jobkind,
"ph": "X", # "X" == "complete event"
"pid": self.jobid,
"tid": 1,
"ts": self.start_usec,
"dur": self.dur_usec,
"args": self.jobargs}
def start_timestr(self):
"""Return a formatted timestamp of the job's start-time"""
t = datetime.datetime.fromtimestamp(self.start_usec / 1000000.0)
return t.strftime("%Y-%m-%d %H:%M:%S")
def end_timestr(self):
"""Return a formatted timestamp of the job's end-time"""
t = datetime.datetime.fromtimestamp((self.start_usec +
self.dur_usec) / 1000000.0)
return t.strftime("%Y-%m-%d %H:%M:%S")
def pick_lnt_metric_suffix(self, metric_name):
"""Guess an appropriate LNT metric type for a given metric name"""
if "BytesOutput" in metric_name:
return "code_size"
if "RSS" in metric_name or "BytesAllocated" in metric_name:
return "mem"
return "compile"
def to_lnt_test_obj(self, args):
"""Return a JSON-formattable object fitting LNT's 'submit' format"""
run_info = {
"run_order": str(args.lnt_order),
"tag": str(args.lnt_tag),
}
run_info.update(dict(args.lnt_run_info))
stats = self.stats
return {
"Machine":
{
"Name": args.lnt_machine,
"Info": dict(args.lnt_machine_info)
},
"Run":
{
"Start Time": self.start_timestr(),
"End Time": self.end_timestr(),
"Info": run_info
},
"Tests":
[
{
"Data": [v],
"Info": {},
"Name": "%s.%s.%s.%s" % (args.lnt_tag, self.module,
k, self.pick_lnt_metric_suffix(k))
}
for (k, v) in stats.items()
]
}
AUXPATSTR = (r"(?P<module>[^-]+)-(?P<input>[^-]+)-(?P<triple>[^-]+)" +
r"-(?P<out>[^-]*)-(?P<opt>[^-]+)")
AUXPAT = re.compile(AUXPATSTR)
TIMERPATSTR = (r"time\.swift-(?P<jobkind>\w+)\." + AUXPATSTR +
r"\.(?P<timerkind>\w+)$")
TIMERPAT = re.compile(TIMERPATSTR)
FILEPATSTR = (r"^stats-(?P<start>\d+)-swift-(?P<kind>\w+)-" +
AUXPATSTR +
r"-(?P<pid>\d+)(-.*)?.json$")
FILEPAT = re.compile(FILEPATSTR)
PROFILEPATSTR = (r"^profile-(?P<start>\d+)-swift-(?P<kind>\w+)-" +
AUXPATSTR +
r"-(?P<pid>\d+)(-.*)?.dir$")
PROFILEPAT = re.compile(PROFILEPATSTR)
def match_auxpat(s):
m = AUXPAT.match(s)
if m is not None:
return m.groupdict()
else:
return None
def match_timerpat(s):
m = TIMERPAT.match(s)
if m is not None:
return m.groupdict()
else:
return None
def match_filepat(s):
m = FILEPAT.match(s)
if m is not None:
return m.groupdict()
else:
return None
def match_profilepat(s):
m = PROFILEPAT.match(s)
if m is not None:
return m.groupdict()
else:
return None
def find_profiles_in(profiledir, select_stat=[]):
sre = re.compile('.*' if len(select_stat) == 0 else
'|'.join(select_stat))
profiles = None
for profile in os.listdir(profiledir):
if profile.endswith(".svg"):
continue
if sre.search(profile) is None:
continue
fullpath = os.path.join(profiledir, profile)
s = os.stat(fullpath)
if s.st_size != 0:
if profiles is None:
profiles = dict()
try:
(counter, profiletype) = os.path.splitext(profile)
# drop leading period from extension
profiletype = profiletype[1:]
if profiletype not in profiles:
profiles[profiletype] = dict()
profiles[profiletype][counter] = fullpath
except Exception:
pass
return profiles
def list_stats_dir_profiles(path, select_module=[], select_stat=[], **kwargs):
"""Finds all stats-profiles in path, returning list of JobProfs objects"""
jobprofs = []
for root, dirs, files in os.walk(path):
for d in dirs:
mg = match_profilepat(d)
if not mg:
continue
# NB: "pid" in fpat is a random number, not unix pid.
jobkind = mg['kind']
jobid = int(mg['pid'])
module = mg["module"]
if len(select_module) != 0 and module not in select_module:
continue
jobargs = [mg["input"], mg["triple"], mg["out"], mg["opt"]]
e = JobProfs(jobkind=jobkind, jobid=jobid,
module=module, jobargs=jobargs,
profiles=find_profiles_in(os.path.join(root, d),
select_stat))
jobprofs.append(e)
return jobprofs
def load_stats_dir(path, select_module=[], select_stat=[],
exclude_timers=False, merge_timers=False, **kwargs):
"""Loads all stats-files found in path into a list of JobStats objects"""
jobstats = []
sre = re.compile('.*' if len(select_stat) == 0 else
'|'.join(select_stat))
for root, dirs, files in os.walk(path):
for f in files:
mg = match_filepat(f)
if not mg:
continue
# NB: "pid" in fpat is a random number, not unix pid.
jobkind = mg['kind']
jobid = int(mg['pid'])
start_usec = int(mg['start'])
module = mg["module"]
if len(select_module) != 0 and module not in select_module:
continue
jobargs = [mg["input"], mg["triple"], mg["out"], mg["opt"]]
with open(os.path.join(root, f)) as fp:
j = json.load(fp)
dur_usec = 1
stats = dict()
for (k, v) in j.items():
if sre.search(k) is None:
continue
if k.startswith('time.') and exclude_timers:
continue
tm = match_timerpat(k)
if tm:
v = int(1000000.0 * float(v))
if tm['jobkind'] == jobkind and \
tm['timerkind'] == 'wall':
dur_usec = v
if merge_timers:
k = "time.swift-%s.%s" % (tm['jobkind'],
tm['timerkind'])
stats[k] = v
e = JobStats(jobkind=jobkind, jobid=jobid,
module=module, start_usec=start_usec,
dur_usec=dur_usec, jobargs=jobargs,
stats=stats)
jobstats.append(e)
return jobstats
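# Example (hypothetical path): load every stats JSON under a build directory
# and collapse it to a single summed JobStats:
#
#     stats = load_stats_dir('/path/to/stats-output-dir')
#     merged = merge_all_jobstats(stats)        # None if no stats were found
#     if merged is not None:
#         print(merged.stats.get('Driver.NumDriverJobsRun', 0))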
def merge_all_jobstats(jobstats, select_module=[], group_by_module=False,
merge_by="sum", divide_by=1, **kwargs):
"""Does a pairwise merge of the elements of list of jobs"""
m = None
if len(select_module) > 0:
jobstats = filter(lambda j: j.module in select_module, jobstats)
if group_by_module:
def keyfunc(j):
return j.module
jobstats = list(jobstats)
jobstats.sort(key=keyfunc)
prefixed = []
for mod, group in itertools.groupby(jobstats, keyfunc):
groupmerge = merge_all_jobstats(group, merge_by=merge_by,
divide_by=divide_by)
prefixed.append(groupmerge.prefixed_by(mod))
jobstats = prefixed
for j in jobstats:
if m is None:
m = j
else:
m = m.merged_with(j, merge_by=merge_by)
if m is None:
return m
return m.divided_by(divide_by)
|
{
"content_hash": "956efa88bea567849326df2e48b0e70a",
"timestamp": "",
"source": "github",
"line_count": 353,
"max_line_length": 79,
"avg_line_length": 36.1586402266289,
"alnum_prop": 0.5184895017235976,
"repo_name": "austinzheng/swift",
"id": "f7797ea8112962e4e5efc5238ac29ba9a0f2f57c",
"size": "13422",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "utils/jobstats/jobstats.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "34"
},
{
"name": "C",
"bytes": "205632"
},
{
"name": "C++",
"bytes": "31254111"
},
{
"name": "CMake",
"bytes": "476182"
},
{
"name": "D",
"bytes": "1107"
},
{
"name": "DTrace",
"bytes": "2438"
},
{
"name": "Emacs Lisp",
"bytes": "57215"
},
{
"name": "LLVM",
"bytes": "71817"
},
{
"name": "Makefile",
"bytes": "1841"
},
{
"name": "Objective-C",
"bytes": "400197"
},
{
"name": "Objective-C++",
"bytes": "253762"
},
{
"name": "Perl",
"bytes": "2211"
},
{
"name": "Python",
"bytes": "1380097"
},
{
"name": "Ruby",
"bytes": "2091"
},
{
"name": "Shell",
"bytes": "222400"
},
{
"name": "Swift",
"bytes": "26027332"
},
{
"name": "Vim script",
"bytes": "16273"
}
],
"symlink_target": ""
}
|
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import ObjectDoesNotExist
from django.db import connection
from django.db.models import Prefetch, QuerySet
from django.db.models.query import get_prefetcher, prefetch_related_objects
from django.test import TestCase, override_settings
from django.test.utils import CaptureQueriesContext
from .models import (
Article, Author, Author2, AuthorAddress, AuthorWithAge, Bio, Book,
Bookmark, BookReview, BookWithYear, Comment, Department, Employee,
FavoriteAuthors, House, LessonEntry, ModelIterableSubclass, Person,
Qualification, Reader, Room, TaggedItem, Teacher, WordEntry,
)
class TestDataMixin:
@classmethod
def setUpTestData(cls):
cls.book1 = Book.objects.create(title='Poems')
cls.book2 = Book.objects.create(title='Jane Eyre')
cls.book3 = Book.objects.create(title='Wuthering Heights')
cls.book4 = Book.objects.create(title='Sense and Sensibility')
cls.author1 = Author.objects.create(name='Charlotte', first_book=cls.book1)
cls.author2 = Author.objects.create(name='Anne', first_book=cls.book1)
cls.author3 = Author.objects.create(name='Emily', first_book=cls.book1)
cls.author4 = Author.objects.create(name='Jane', first_book=cls.book4)
cls.book1.authors.add(cls.author1, cls.author2, cls.author3)
cls.book2.authors.add(cls.author1)
cls.book3.authors.add(cls.author3)
cls.book4.authors.add(cls.author4)
cls.reader1 = Reader.objects.create(name='Amy')
cls.reader2 = Reader.objects.create(name='Belinda')
cls.reader1.books_read.add(cls.book1, cls.book4)
cls.reader2.books_read.add(cls.book2, cls.book4)
class PrefetchRelatedTests(TestDataMixin, TestCase):
def assertWhereContains(self, sql, needle):
where_idx = sql.index('WHERE')
self.assertEqual(
sql.count(str(needle), where_idx), 1,
msg="WHERE clause doesn't contain %s, actual SQL: %s" % (needle, sql[where_idx:])
)
def test_m2m_forward(self):
with self.assertNumQueries(2):
lists = [list(b.authors.all()) for b in Book.objects.prefetch_related('authors')]
normal_lists = [list(b.authors.all()) for b in Book.objects.all()]
self.assertEqual(lists, normal_lists)
def test_m2m_reverse(self):
with self.assertNumQueries(2):
lists = [list(a.books.all()) for a in Author.objects.prefetch_related('books')]
normal_lists = [list(a.books.all()) for a in Author.objects.all()]
self.assertEqual(lists, normal_lists)
def test_foreignkey_forward(self):
with self.assertNumQueries(2):
books = [a.first_book for a in Author.objects.prefetch_related('first_book')]
normal_books = [a.first_book for a in Author.objects.all()]
self.assertEqual(books, normal_books)
def test_foreignkey_reverse(self):
with self.assertNumQueries(2):
[list(b.first_time_authors.all())
for b in Book.objects.prefetch_related('first_time_authors')]
self.assertQuerysetEqual(self.book2.authors.all(), ["<Author: Charlotte>"])
def test_onetoone_reverse_no_match(self):
# Regression for #17439
with self.assertNumQueries(2):
book = Book.objects.prefetch_related('bookwithyear').all()[0]
with self.assertNumQueries(0):
with self.assertRaises(BookWithYear.DoesNotExist):
book.bookwithyear
def test_onetoone_reverse_with_to_field_pk(self):
"""
A model (Bio) with a OneToOneField primary key (author) that references
a non-pk field (name) on the related model (Author) is prefetchable.
"""
Bio.objects.bulk_create([
Bio(author=self.author1),
Bio(author=self.author2),
Bio(author=self.author3),
])
authors = Author.objects.filter(
name__in=[self.author1, self.author2, self.author3],
).prefetch_related('bio')
with self.assertNumQueries(2):
for author in authors:
self.assertEqual(author.name, author.bio.author.name)
def test_survives_clone(self):
with self.assertNumQueries(2):
[list(b.first_time_authors.all())
for b in Book.objects.prefetch_related('first_time_authors').exclude(id=1000)]
def test_len(self):
with self.assertNumQueries(2):
qs = Book.objects.prefetch_related('first_time_authors')
len(qs)
[list(b.first_time_authors.all()) for b in qs]
def test_bool(self):
with self.assertNumQueries(2):
qs = Book.objects.prefetch_related('first_time_authors')
bool(qs)
[list(b.first_time_authors.all()) for b in qs]
def test_count(self):
with self.assertNumQueries(2):
qs = Book.objects.prefetch_related('first_time_authors')
[b.first_time_authors.count() for b in qs]
def test_exists(self):
with self.assertNumQueries(2):
qs = Book.objects.prefetch_related('first_time_authors')
[b.first_time_authors.exists() for b in qs]
def test_in_and_prefetch_related(self):
"""
Regression test for #20242 - QuerySet "in" didn't work the first time
when using prefetch_related. This was fixed by the removal of chunked
reads from QuerySet iteration in
70679243d1786e03557c28929f9762a119e3ac14.
"""
qs = Book.objects.prefetch_related('first_time_authors')
self.assertIn(qs[0], qs)
def test_clear(self):
with self.assertNumQueries(5):
with_prefetch = Author.objects.prefetch_related('books')
without_prefetch = with_prefetch.prefetch_related(None)
[list(a.books.all()) for a in without_prefetch]
def test_m2m_then_m2m(self):
"""A m2m can be followed through another m2m."""
with self.assertNumQueries(3):
qs = Author.objects.prefetch_related('books__read_by')
lists = [[[str(r) for r in b.read_by.all()]
for b in a.books.all()]
for a in qs]
self.assertEqual(lists, [
[["Amy"], ["Belinda"]], # Charlotte - Poems, Jane Eyre
[["Amy"]], # Anne - Poems
[["Amy"], []], # Emily - Poems, Wuthering Heights
[["Amy", "Belinda"]], # Jane - Sense and Sense
])
def test_overriding_prefetch(self):
with self.assertNumQueries(3):
qs = Author.objects.prefetch_related('books', 'books__read_by')
lists = [[[str(r) for r in b.read_by.all()]
for b in a.books.all()]
for a in qs]
self.assertEqual(lists, [
[["Amy"], ["Belinda"]], # Charlotte - Poems, Jane Eyre
[["Amy"]], # Anne - Poems
[["Amy"], []], # Emily - Poems, Wuthering Heights
[["Amy", "Belinda"]], # Jane - Sense and Sense
])
with self.assertNumQueries(3):
qs = Author.objects.prefetch_related('books__read_by', 'books')
lists = [[[str(r) for r in b.read_by.all()]
for b in a.books.all()]
for a in qs]
self.assertEqual(lists, [
[["Amy"], ["Belinda"]], # Charlotte - Poems, Jane Eyre
[["Amy"]], # Anne - Poems
[["Amy"], []], # Emily - Poems, Wuthering Heights
[["Amy", "Belinda"]], # Jane - Sense and Sense
])
def test_get(self):
"""
Objects retrieved with .get() get the prefetch behavior.
"""
        # 'books__read_by' spans two relations: the .get() plus two prefetch queries.
with self.assertNumQueries(3):
author = Author.objects.prefetch_related('books__read_by').get(name="Charlotte")
lists = [[str(r) for r in b.read_by.all()] for b in author.books.all()]
self.assertEqual(lists, [["Amy"], ["Belinda"]]) # Poems, Jane Eyre
def test_foreign_key_then_m2m(self):
"""
A m2m relation can be followed after a relation like ForeignKey that
doesn't have many objects.
"""
with self.assertNumQueries(2):
qs = Author.objects.select_related('first_book').prefetch_related('first_book__read_by')
lists = [[str(r) for r in a.first_book.read_by.all()]
for a in qs]
self.assertEqual(lists, [["Amy"], ["Amy"], ["Amy"], ["Amy", "Belinda"]])
def test_reverse_one_to_one_then_m2m(self):
"""
        A m2m relation can be followed after going through the select_related
reverse of an o2o.
"""
qs = Author.objects.prefetch_related('bio__books').select_related('bio')
with self.assertNumQueries(1):
list(qs.all())
Bio.objects.create(author=self.author1)
with self.assertNumQueries(2):
list(qs.all())
def test_attribute_error(self):
qs = Reader.objects.all().prefetch_related('books_read__xyz')
msg = (
"Cannot find 'xyz' on Book object, 'books_read__xyz' "
"is an invalid parameter to prefetch_related()"
)
with self.assertRaisesMessage(AttributeError, msg) as cm:
list(qs)
self.assertIn('prefetch_related', str(cm.exception))
def test_invalid_final_lookup(self):
qs = Book.objects.prefetch_related('authors__name')
msg = (
"'authors__name' does not resolve to an item that supports "
"prefetching - this is an invalid parameter to prefetch_related()."
)
with self.assertRaisesMessage(ValueError, msg) as cm:
list(qs)
self.assertIn('prefetch_related', str(cm.exception))
self.assertIn("name", str(cm.exception))
def test_forward_m2m_to_attr_conflict(self):
msg = 'to_attr=authors conflicts with a field on the Book model.'
authors = Author.objects.all()
with self.assertRaisesMessage(ValueError, msg):
list(Book.objects.prefetch_related(
Prefetch('authors', queryset=authors, to_attr='authors'),
))
# Without the ValueError, an author was deleted due to the implicit
# save of the relation assignment.
self.assertEqual(self.book1.authors.count(), 3)
def test_reverse_m2m_to_attr_conflict(self):
msg = 'to_attr=books conflicts with a field on the Author model.'
poems = Book.objects.filter(title='Poems')
with self.assertRaisesMessage(ValueError, msg):
list(Author.objects.prefetch_related(
Prefetch('books', queryset=poems, to_attr='books'),
))
# Without the ValueError, a book was deleted due to the implicit
# save of reverse relation assignment.
self.assertEqual(self.author1.books.count(), 2)
def test_m2m_then_reverse_fk_object_ids(self):
with CaptureQueriesContext(connection) as queries:
list(Book.objects.prefetch_related('authors__addresses'))
sql = queries[-1]['sql']
self.assertWhereContains(sql, self.author1.name)
def test_m2m_then_m2m_object_ids(self):
with CaptureQueriesContext(connection) as queries:
list(Book.objects.prefetch_related('authors__favorite_authors'))
sql = queries[-1]['sql']
self.assertWhereContains(sql, self.author1.name)
def test_m2m_then_reverse_one_to_one_object_ids(self):
with CaptureQueriesContext(connection) as queries:
list(Book.objects.prefetch_related('authors__authorwithage'))
sql = queries[-1]['sql']
self.assertWhereContains(sql, self.author1.id)
class RawQuerySetTests(TestDataMixin, TestCase):
def test_basic(self):
with self.assertNumQueries(2):
books = Book.objects.raw(
"SELECT * FROM prefetch_related_book WHERE id = %s",
(self.book1.id,)
).prefetch_related('authors')
book1 = list(books)[0]
with self.assertNumQueries(0):
self.assertCountEqual(book1.authors.all(), [self.author1, self.author2, self.author3])
def test_prefetch_before_raw(self):
with self.assertNumQueries(2):
books = Book.objects.prefetch_related('authors').raw(
"SELECT * FROM prefetch_related_book WHERE id = %s",
(self.book1.id,)
)
book1 = list(books)[0]
with self.assertNumQueries(0):
self.assertCountEqual(book1.authors.all(), [self.author1, self.author2, self.author3])
def test_clear(self):
with self.assertNumQueries(5):
with_prefetch = Author.objects.raw(
"SELECT * FROM prefetch_related_author"
).prefetch_related('books')
without_prefetch = with_prefetch.prefetch_related(None)
[list(a.books.all()) for a in without_prefetch]
class CustomPrefetchTests(TestCase):
@classmethod
def traverse_qs(cls, obj_iter, path):
"""
        Helper method that returns a list of (object, related objects) pairs
        for the objects in obj_iter. For each object, the given path is
        traversed recursively and the objects found along it are added to the
        return value.
"""
ret_val = []
if hasattr(obj_iter, 'all'):
obj_iter = obj_iter.all()
try:
iter(obj_iter)
except TypeError:
obj_iter = [obj_iter]
for obj in obj_iter:
rel_objs = []
for part in path:
if not part:
continue
try:
related = getattr(obj, part[0])
except ObjectDoesNotExist:
continue
if related is not None:
rel_objs.extend(cls.traverse_qs(related, [part[1:]]))
ret_val.append((obj, rel_objs))
return ret_val
@classmethod
def setUpTestData(cls):
cls.person1 = Person.objects.create(name='Joe')
cls.person2 = Person.objects.create(name='Mary')
# Set main_room for each house before creating the next one for
# databases where supports_nullable_unique_constraints is False.
cls.house1 = House.objects.create(name='House 1', address='123 Main St', owner=cls.person1)
cls.room1_1 = Room.objects.create(name='Dining room', house=cls.house1)
cls.room1_2 = Room.objects.create(name='Lounge', house=cls.house1)
cls.room1_3 = Room.objects.create(name='Kitchen', house=cls.house1)
cls.house1.main_room = cls.room1_1
cls.house1.save()
cls.person1.houses.add(cls.house1)
cls.house2 = House.objects.create(name='House 2', address='45 Side St', owner=cls.person1)
cls.room2_1 = Room.objects.create(name='Dining room', house=cls.house2)
cls.room2_2 = Room.objects.create(name='Lounge', house=cls.house2)
cls.room2_3 = Room.objects.create(name='Kitchen', house=cls.house2)
cls.house2.main_room = cls.room2_1
cls.house2.save()
cls.person1.houses.add(cls.house2)
cls.house3 = House.objects.create(name='House 3', address='6 Downing St', owner=cls.person2)
cls.room3_1 = Room.objects.create(name='Dining room', house=cls.house3)
cls.room3_2 = Room.objects.create(name='Lounge', house=cls.house3)
cls.room3_3 = Room.objects.create(name='Kitchen', house=cls.house3)
cls.house3.main_room = cls.room3_1
cls.house3.save()
cls.person2.houses.add(cls.house3)
        cls.house4 = House.objects.create(name='House 4', address="7 Regents St", owner=cls.person2)
cls.room4_1 = Room.objects.create(name='Dining room', house=cls.house4)
cls.room4_2 = Room.objects.create(name='Lounge', house=cls.house4)
cls.room4_3 = Room.objects.create(name='Kitchen', house=cls.house4)
cls.house4.main_room = cls.room4_1
cls.house4.save()
cls.person2.houses.add(cls.house4)
def test_traverse_qs(self):
qs = Person.objects.prefetch_related('houses')
        related_objs_normal = [list(p.houses.all()) for p in qs]
        related_objs_from_traverse = [[inner[0] for inner in o[1]]
                                      for o in self.traverse_qs(qs, [['houses']])]
        self.assertEqual(related_objs_normal, related_objs_from_traverse)
def test_ambiguous(self):
# Ambiguous: Lookup was already seen with a different queryset.
msg = (
"'houses' lookup was already seen with a different queryset. You "
"may need to adjust the ordering of your lookups."
)
# lookup.queryset shouldn't be evaluated.
with self.assertNumQueries(3):
with self.assertRaisesMessage(ValueError, msg):
self.traverse_qs(
Person.objects.prefetch_related(
'houses__rooms',
Prefetch('houses', queryset=House.objects.all()),
),
[['houses', 'rooms']],
)
# Ambiguous: Lookup houses_lst doesn't yet exist when performing houses_lst__rooms.
msg = (
"Cannot find 'houses_lst' on Person object, 'houses_lst__rooms' is "
"an invalid parameter to prefetch_related()"
)
with self.assertRaisesMessage(AttributeError, msg):
self.traverse_qs(
Person.objects.prefetch_related(
'houses_lst__rooms',
Prefetch('houses', queryset=House.objects.all(), to_attr='houses_lst')
),
[['houses', 'rooms']]
)
# Not ambiguous.
self.traverse_qs(
Person.objects.prefetch_related('houses__rooms', 'houses'),
[['houses', 'rooms']]
)
self.traverse_qs(
Person.objects.prefetch_related(
'houses__rooms',
Prefetch('houses', queryset=House.objects.all(), to_attr='houses_lst')
),
[['houses', 'rooms']]
)
def test_m2m(self):
# Control lookups.
with self.assertNumQueries(2):
lst1 = self.traverse_qs(
Person.objects.prefetch_related('houses'),
[['houses']]
)
# Test lookups.
with self.assertNumQueries(2):
lst2 = self.traverse_qs(
Person.objects.prefetch_related(Prefetch('houses')),
[['houses']]
)
self.assertEqual(lst1, lst2)
with self.assertNumQueries(2):
lst2 = self.traverse_qs(
Person.objects.prefetch_related(Prefetch('houses', to_attr='houses_lst')),
[['houses_lst']]
)
self.assertEqual(lst1, lst2)
def test_reverse_m2m(self):
# Control lookups.
with self.assertNumQueries(2):
lst1 = self.traverse_qs(
House.objects.prefetch_related('occupants'),
[['occupants']]
)
# Test lookups.
with self.assertNumQueries(2):
lst2 = self.traverse_qs(
House.objects.prefetch_related(Prefetch('occupants')),
[['occupants']]
)
self.assertEqual(lst1, lst2)
with self.assertNumQueries(2):
lst2 = self.traverse_qs(
House.objects.prefetch_related(Prefetch('occupants', to_attr='occupants_lst')),
[['occupants_lst']]
)
self.assertEqual(lst1, lst2)
def test_m2m_through_fk(self):
# Control lookups.
with self.assertNumQueries(3):
lst1 = self.traverse_qs(
Room.objects.prefetch_related('house__occupants'),
[['house', 'occupants']]
)
# Test lookups.
with self.assertNumQueries(3):
lst2 = self.traverse_qs(
Room.objects.prefetch_related(Prefetch('house__occupants')),
[['house', 'occupants']]
)
self.assertEqual(lst1, lst2)
with self.assertNumQueries(3):
lst2 = self.traverse_qs(
Room.objects.prefetch_related(Prefetch('house__occupants', to_attr='occupants_lst')),
[['house', 'occupants_lst']]
)
self.assertEqual(lst1, lst2)
def test_m2m_through_gfk(self):
TaggedItem.objects.create(tag="houses", content_object=self.house1)
TaggedItem.objects.create(tag="houses", content_object=self.house2)
# Control lookups.
with self.assertNumQueries(3):
lst1 = self.traverse_qs(
TaggedItem.objects.filter(tag='houses').prefetch_related('content_object__rooms'),
[['content_object', 'rooms']]
)
# Test lookups.
with self.assertNumQueries(3):
lst2 = self.traverse_qs(
TaggedItem.objects.prefetch_related(
Prefetch('content_object'),
Prefetch('content_object__rooms', to_attr='rooms_lst')
),
[['content_object', 'rooms_lst']]
)
self.assertEqual(lst1, lst2)
def test_o2m_through_m2m(self):
# Control lookups.
with self.assertNumQueries(3):
lst1 = self.traverse_qs(
Person.objects.prefetch_related('houses', 'houses__rooms'),
[['houses', 'rooms']]
)
# Test lookups.
with self.assertNumQueries(3):
lst2 = self.traverse_qs(
Person.objects.prefetch_related(Prefetch('houses'), 'houses__rooms'),
[['houses', 'rooms']]
)
self.assertEqual(lst1, lst2)
with self.assertNumQueries(3):
lst2 = self.traverse_qs(
Person.objects.prefetch_related(Prefetch('houses'), Prefetch('houses__rooms')),
[['houses', 'rooms']]
)
self.assertEqual(lst1, lst2)
with self.assertNumQueries(3):
lst2 = self.traverse_qs(
Person.objects.prefetch_related(Prefetch('houses', to_attr='houses_lst'), 'houses_lst__rooms'),
[['houses_lst', 'rooms']]
)
self.assertEqual(lst1, lst2)
with self.assertNumQueries(3):
lst2 = self.traverse_qs(
Person.objects.prefetch_related(
Prefetch('houses', to_attr='houses_lst'),
Prefetch('houses_lst__rooms', to_attr='rooms_lst')
),
[['houses_lst', 'rooms_lst']]
)
self.assertEqual(lst1, lst2)
def test_generic_rel(self):
bookmark = Bookmark.objects.create(url='http://www.djangoproject.com/')
TaggedItem.objects.create(content_object=bookmark, tag='django')
TaggedItem.objects.create(content_object=bookmark, favorite=bookmark, tag='python')
# Control lookups.
with self.assertNumQueries(4):
lst1 = self.traverse_qs(
Bookmark.objects.prefetch_related('tags', 'tags__content_object', 'favorite_tags'),
[['tags', 'content_object'], ['favorite_tags']]
)
# Test lookups.
with self.assertNumQueries(4):
lst2 = self.traverse_qs(
Bookmark.objects.prefetch_related(
Prefetch('tags', to_attr='tags_lst'),
Prefetch('tags_lst__content_object'),
Prefetch('favorite_tags'),
),
[['tags_lst', 'content_object'], ['favorite_tags']]
)
self.assertEqual(lst1, lst2)
def test_traverse_single_item_property(self):
# Control lookups.
with self.assertNumQueries(5):
lst1 = self.traverse_qs(
Person.objects.prefetch_related(
'houses__rooms',
'primary_house__occupants__houses',
),
[['primary_house', 'occupants', 'houses']]
)
# Test lookups.
with self.assertNumQueries(5):
lst2 = self.traverse_qs(
Person.objects.prefetch_related(
'houses__rooms',
Prefetch('primary_house__occupants', to_attr='occupants_lst'),
'primary_house__occupants_lst__houses',
),
[['primary_house', 'occupants_lst', 'houses']]
)
self.assertEqual(lst1, lst2)
def test_traverse_multiple_items_property(self):
# Control lookups.
with self.assertNumQueries(4):
lst1 = self.traverse_qs(
Person.objects.prefetch_related(
'houses',
'all_houses__occupants__houses',
),
[['all_houses', 'occupants', 'houses']]
)
# Test lookups.
with self.assertNumQueries(4):
lst2 = self.traverse_qs(
Person.objects.prefetch_related(
'houses',
Prefetch('all_houses__occupants', to_attr='occupants_lst'),
'all_houses__occupants_lst__houses',
),
[['all_houses', 'occupants_lst', 'houses']]
)
self.assertEqual(lst1, lst2)
def test_custom_qs(self):
# Test basic.
with self.assertNumQueries(2):
lst1 = list(Person.objects.prefetch_related('houses'))
with self.assertNumQueries(2):
lst2 = list(Person.objects.prefetch_related(
Prefetch('houses', queryset=House.objects.all(), to_attr='houses_lst')))
self.assertEqual(
self.traverse_qs(lst1, [['houses']]),
self.traverse_qs(lst2, [['houses_lst']])
)
# Test queryset filtering.
with self.assertNumQueries(2):
lst2 = list(
Person.objects.prefetch_related(
Prefetch(
'houses',
queryset=House.objects.filter(pk__in=[self.house1.pk, self.house3.pk]),
to_attr='houses_lst',
)
)
)
self.assertEqual(len(lst2[0].houses_lst), 1)
self.assertEqual(lst2[0].houses_lst[0], self.house1)
self.assertEqual(len(lst2[1].houses_lst), 1)
self.assertEqual(lst2[1].houses_lst[0], self.house3)
# Test flattened.
with self.assertNumQueries(3):
lst1 = list(Person.objects.prefetch_related('houses__rooms'))
with self.assertNumQueries(3):
lst2 = list(Person.objects.prefetch_related(
Prefetch('houses__rooms', queryset=Room.objects.all(), to_attr='rooms_lst')))
self.assertEqual(
self.traverse_qs(lst1, [['houses', 'rooms']]),
self.traverse_qs(lst2, [['houses', 'rooms_lst']])
)
# Test inner select_related.
with self.assertNumQueries(3):
lst1 = list(Person.objects.prefetch_related('houses__owner'))
with self.assertNumQueries(2):
lst2 = list(Person.objects.prefetch_related(
Prefetch('houses', queryset=House.objects.select_related('owner'))))
self.assertEqual(
self.traverse_qs(lst1, [['houses', 'owner']]),
self.traverse_qs(lst2, [['houses', 'owner']])
)
# Test inner prefetch.
inner_rooms_qs = Room.objects.filter(pk__in=[self.room1_1.pk, self.room1_2.pk])
houses_qs_prf = House.objects.prefetch_related(
Prefetch('rooms', queryset=inner_rooms_qs, to_attr='rooms_lst'))
with self.assertNumQueries(4):
lst2 = list(Person.objects.prefetch_related(
Prefetch('houses', queryset=houses_qs_prf.filter(pk=self.house1.pk), to_attr='houses_lst'),
Prefetch('houses_lst__rooms_lst__main_room_of')
))
self.assertEqual(len(lst2[0].houses_lst[0].rooms_lst), 2)
self.assertEqual(lst2[0].houses_lst[0].rooms_lst[0], self.room1_1)
self.assertEqual(lst2[0].houses_lst[0].rooms_lst[1], self.room1_2)
self.assertEqual(lst2[0].houses_lst[0].rooms_lst[0].main_room_of, self.house1)
self.assertEqual(len(lst2[1].houses_lst), 0)
# Test ForwardManyToOneDescriptor.
houses = House.objects.select_related('owner')
with self.assertNumQueries(6):
rooms = Room.objects.all().prefetch_related('house')
lst1 = self.traverse_qs(rooms, [['house', 'owner']])
with self.assertNumQueries(2):
rooms = Room.objects.all().prefetch_related(Prefetch('house', queryset=houses.all()))
lst2 = self.traverse_qs(rooms, [['house', 'owner']])
self.assertEqual(lst1, lst2)
with self.assertNumQueries(2):
houses = House.objects.select_related('owner')
rooms = Room.objects.all().prefetch_related(Prefetch('house', queryset=houses.all(), to_attr='house_attr'))
lst2 = self.traverse_qs(rooms, [['house_attr', 'owner']])
self.assertEqual(lst1, lst2)
room = Room.objects.all().prefetch_related(
Prefetch('house', queryset=houses.filter(address='DoesNotExist'))
).first()
with self.assertRaises(ObjectDoesNotExist):
getattr(room, 'house')
room = Room.objects.all().prefetch_related(
Prefetch('house', queryset=houses.filter(address='DoesNotExist'), to_attr='house_attr')
).first()
self.assertIsNone(room.house_attr)
rooms = Room.objects.all().prefetch_related(Prefetch('house', queryset=House.objects.only('name')))
with self.assertNumQueries(2):
getattr(rooms.first().house, 'name')
with self.assertNumQueries(3):
getattr(rooms.first().house, 'address')
# Test ReverseOneToOneDescriptor.
houses = House.objects.select_related('owner')
with self.assertNumQueries(6):
rooms = Room.objects.all().prefetch_related('main_room_of')
lst1 = self.traverse_qs(rooms, [['main_room_of', 'owner']])
with self.assertNumQueries(2):
rooms = Room.objects.all().prefetch_related(Prefetch('main_room_of', queryset=houses.all()))
lst2 = self.traverse_qs(rooms, [['main_room_of', 'owner']])
self.assertEqual(lst1, lst2)
with self.assertNumQueries(2):
rooms = list(
Room.objects.all().prefetch_related(
Prefetch('main_room_of', queryset=houses.all(), to_attr='main_room_of_attr')
)
)
lst2 = self.traverse_qs(rooms, [['main_room_of_attr', 'owner']])
self.assertEqual(lst1, lst2)
room = Room.objects.filter(main_room_of__isnull=False).prefetch_related(
Prefetch('main_room_of', queryset=houses.filter(address='DoesNotExist'))
).first()
with self.assertRaises(ObjectDoesNotExist):
getattr(room, 'main_room_of')
room = Room.objects.filter(main_room_of__isnull=False).prefetch_related(
Prefetch('main_room_of', queryset=houses.filter(address='DoesNotExist'), to_attr='main_room_of_attr')
).first()
self.assertIsNone(room.main_room_of_attr)
# The custom queryset filters should be applied to the queryset
# instance returned by the manager.
person = Person.objects.prefetch_related(
Prefetch('houses', queryset=House.objects.filter(name='House 1')),
).get(pk=self.person1.pk)
self.assertEqual(
list(person.houses.all()),
list(person.houses.all().all()),
)
def test_nested_prefetch_related_are_not_overwritten(self):
# Regression test for #24873
houses_2 = House.objects.prefetch_related(Prefetch('rooms'))
persons = Person.objects.prefetch_related(Prefetch('houses', queryset=houses_2))
houses = House.objects.prefetch_related(Prefetch('occupants', queryset=persons))
list(houses) # queryset must be evaluated once to reproduce the bug.
self.assertEqual(
houses.all()[0].occupants.all()[0].houses.all()[1].rooms.all()[0],
self.room2_1
)
def test_nested_prefetch_related_with_duplicate_prefetcher(self):
"""
Nested prefetches whose name clashes with descriptor names
(Person.houses here) are allowed.
"""
occupants = Person.objects.prefetch_related(
Prefetch('houses', to_attr='some_attr_name'),
Prefetch('houses', queryset=House.objects.prefetch_related('main_room')),
)
houses = House.objects.prefetch_related(Prefetch('occupants', queryset=occupants))
with self.assertNumQueries(5):
self.traverse_qs(list(houses), [['occupants', 'houses', 'main_room']])
def test_values_queryset(self):
with self.assertRaisesMessage(ValueError, 'Prefetch querysets cannot use values().'):
Prefetch('houses', House.objects.values('pk'))
# That error doesn't affect managers with custom ModelIterable subclasses
self.assertIs(Teacher.objects_custom.all()._iterable_class, ModelIterableSubclass)
Prefetch('teachers', Teacher.objects_custom.all())
def test_to_attr_doesnt_cache_through_attr_as_list(self):
house = House.objects.prefetch_related(
Prefetch('rooms', queryset=Room.objects.all(), to_attr='to_rooms'),
).get(pk=self.house3.pk)
self.assertIsInstance(house.rooms.all(), QuerySet)
def test_to_attr_cached_property(self):
persons = Person.objects.prefetch_related(
Prefetch('houses', House.objects.all(), to_attr='cached_all_houses'),
)
for person in persons:
# To bypass caching at the related descriptor level, don't use
# person.houses.all() here.
all_houses = list(House.objects.filter(occupants=person))
with self.assertNumQueries(0):
self.assertEqual(person.cached_all_houses, all_houses)
class DefaultManagerTests(TestCase):
def setUp(self):
self.qual1 = Qualification.objects.create(name="BA")
self.qual2 = Qualification.objects.create(name="BSci")
self.qual3 = Qualification.objects.create(name="MA")
self.qual4 = Qualification.objects.create(name="PhD")
self.teacher1 = Teacher.objects.create(name="Mr Cleese")
self.teacher2 = Teacher.objects.create(name="Mr Idle")
self.teacher3 = Teacher.objects.create(name="Mr Chapman")
self.teacher1.qualifications.add(self.qual1, self.qual2, self.qual3, self.qual4)
self.teacher2.qualifications.add(self.qual1)
self.teacher3.qualifications.add(self.qual2)
self.dept1 = Department.objects.create(name="English")
self.dept2 = Department.objects.create(name="Physics")
self.dept1.teachers.add(self.teacher1, self.teacher2)
self.dept2.teachers.add(self.teacher1, self.teacher3)
def test_m2m_then_m2m(self):
with self.assertNumQueries(3):
# When we prefetch the teachers, and force the query, we don't want
# the default manager on teachers to immediately get all the related
# qualifications, since this will do one query per teacher.
qs = Department.objects.prefetch_related('teachers')
depts = "".join("%s department: %s\n" %
(dept.name, ", ".join(str(t) for t in dept.teachers.all()))
for dept in qs)
self.assertEqual(depts,
"English department: Mr Cleese (BA, BSci, MA, PhD), Mr Idle (BA)\n"
"Physics department: Mr Cleese (BA, BSci, MA, PhD), Mr Chapman (BSci)\n")
class GenericRelationTests(TestCase):
@classmethod
def setUpTestData(cls):
book1 = Book.objects.create(title="Winnie the Pooh")
book2 = Book.objects.create(title="Do you like green eggs and spam?")
book3 = Book.objects.create(title="Three Men In A Boat")
reader1 = Reader.objects.create(name="me")
reader2 = Reader.objects.create(name="you")
reader3 = Reader.objects.create(name="someone")
book1.read_by.add(reader1, reader2)
book2.read_by.add(reader2)
book3.read_by.add(reader3)
cls.book1, cls.book2, cls.book3 = book1, book2, book3
cls.reader1, cls.reader2, cls.reader3 = reader1, reader2, reader3
def test_prefetch_GFK(self):
TaggedItem.objects.create(tag="awesome", content_object=self.book1)
TaggedItem.objects.create(tag="great", content_object=self.reader1)
TaggedItem.objects.create(tag="outstanding", content_object=self.book2)
TaggedItem.objects.create(tag="amazing", content_object=self.reader3)
# 1 for TaggedItem table, 1 for Book table, 1 for Reader table
with self.assertNumQueries(3):
qs = TaggedItem.objects.prefetch_related('content_object')
list(qs)
def test_prefetch_GFK_nonint_pk(self):
Comment.objects.create(comment="awesome", content_object=self.book1)
# 1 for Comment table, 1 for Book table
with self.assertNumQueries(2):
qs = Comment.objects.prefetch_related('content_object')
[c.content_object for c in qs]
def test_prefetch_GFK_uuid_pk(self):
article = Article.objects.create(name='Django')
Comment.objects.create(comment='awesome', content_object_uuid=article)
qs = Comment.objects.prefetch_related('content_object_uuid')
self.assertEqual([c.content_object_uuid for c in qs], [article])
def test_prefetch_GFK_fk_pk(self):
book = Book.objects.create(title='Poems')
book_with_year = BookWithYear.objects.create(book=book, published_year=2019)
Comment.objects.create(comment='awesome', content_object=book_with_year)
qs = Comment.objects.prefetch_related('content_object')
self.assertEqual([c.content_object for c in qs], [book_with_year])
def test_traverse_GFK(self):
"""
A 'content_object' can be traversed with prefetch_related() and
get to related objects on the other side (assuming it is suitably
filtered)
"""
TaggedItem.objects.create(tag="awesome", content_object=self.book1)
TaggedItem.objects.create(tag="awesome", content_object=self.book2)
TaggedItem.objects.create(tag="awesome", content_object=self.book3)
TaggedItem.objects.create(tag="awesome", content_object=self.reader1)
TaggedItem.objects.create(tag="awesome", content_object=self.reader2)
ct = ContentType.objects.get_for_model(Book)
# We get 3 queries - 1 for main query, 1 for content_objects since they
# all use the same table, and 1 for the 'read_by' relation.
with self.assertNumQueries(3):
# If we limit to books, we know that they will have 'read_by'
# attributes, so the following makes sense:
qs = TaggedItem.objects.filter(content_type=ct, tag='awesome').prefetch_related('content_object__read_by')
readers_of_awesome_books = {r.name for tag in qs
for r in tag.content_object.read_by.all()}
self.assertEqual(readers_of_awesome_books, {"me", "you", "someone"})
def test_nullable_GFK(self):
TaggedItem.objects.create(tag="awesome", content_object=self.book1,
created_by=self.reader1)
TaggedItem.objects.create(tag="great", content_object=self.book2)
TaggedItem.objects.create(tag="rubbish", content_object=self.book3)
with self.assertNumQueries(2):
result = [t.created_by for t in TaggedItem.objects.prefetch_related('created_by')]
self.assertEqual(result,
[t.created_by for t in TaggedItem.objects.all()])
def test_generic_relation(self):
bookmark = Bookmark.objects.create(url='http://www.djangoproject.com/')
TaggedItem.objects.create(content_object=bookmark, tag='django')
TaggedItem.objects.create(content_object=bookmark, tag='python')
with self.assertNumQueries(2):
tags = [t.tag for b in Bookmark.objects.prefetch_related('tags')
for t in b.tags.all()]
self.assertEqual(sorted(tags), ["django", "python"])
def test_charfield_GFK(self):
b = Bookmark.objects.create(url='http://www.djangoproject.com/')
TaggedItem.objects.create(content_object=b, tag='django')
TaggedItem.objects.create(content_object=b, favorite=b, tag='python')
with self.assertNumQueries(3):
bookmark = Bookmark.objects.filter(pk=b.pk).prefetch_related('tags', 'favorite_tags')[0]
self.assertEqual(sorted(i.tag for i in bookmark.tags.all()), ["django", "python"])
self.assertEqual([i.tag for i in bookmark.favorite_tags.all()], ["python"])
def test_custom_queryset(self):
bookmark = Bookmark.objects.create(url='http://www.djangoproject.com/')
django_tag = TaggedItem.objects.create(content_object=bookmark, tag='django')
TaggedItem.objects.create(content_object=bookmark, tag='python')
with self.assertNumQueries(2):
bookmark = Bookmark.objects.prefetch_related(
Prefetch('tags', TaggedItem.objects.filter(tag='django')),
).get()
with self.assertNumQueries(0):
self.assertEqual(list(bookmark.tags.all()), [django_tag])
# The custom queryset filters should be applied to the queryset
# instance returned by the manager.
self.assertEqual(list(bookmark.tags.all()), list(bookmark.tags.all().all()))
class MultiTableInheritanceTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.book1 = BookWithYear.objects.create(title='Poems', published_year=2010)
cls.book2 = BookWithYear.objects.create(title='More poems', published_year=2011)
cls.author1 = AuthorWithAge.objects.create(name='Jane', first_book=cls.book1, age=50)
cls.author2 = AuthorWithAge.objects.create(name='Tom', first_book=cls.book1, age=49)
cls.author3 = AuthorWithAge.objects.create(name='Robert', first_book=cls.book2, age=48)
cls.author_address = AuthorAddress.objects.create(author=cls.author1, address='SomeStreet 1')
cls.book2.aged_authors.add(cls.author2, cls.author3)
cls.br1 = BookReview.objects.create(book=cls.book1, notes='review book1')
cls.br2 = BookReview.objects.create(book=cls.book2, notes='review book2')
def test_foreignkey(self):
with self.assertNumQueries(2):
qs = AuthorWithAge.objects.prefetch_related('addresses')
addresses = [[str(address) for address in obj.addresses.all()] for obj in qs]
self.assertEqual(addresses, [[str(self.author_address)], [], []])
def test_foreignkey_to_inherited(self):
with self.assertNumQueries(2):
qs = BookReview.objects.prefetch_related('book')
titles = [obj.book.title for obj in qs]
self.assertEqual(titles, ["Poems", "More poems"])
def test_m2m_to_inheriting_model(self):
qs = AuthorWithAge.objects.prefetch_related('books_with_year')
with self.assertNumQueries(2):
lst = [[str(book) for book in author.books_with_year.all()] for author in qs]
qs = AuthorWithAge.objects.all()
lst2 = [[str(book) for book in author.books_with_year.all()] for author in qs]
self.assertEqual(lst, lst2)
qs = BookWithYear.objects.prefetch_related('aged_authors')
with self.assertNumQueries(2):
lst = [[str(author) for author in book.aged_authors.all()] for book in qs]
qs = BookWithYear.objects.all()
lst2 = [[str(author) for author in book.aged_authors.all()] for book in qs]
self.assertEqual(lst, lst2)
def test_parent_link_prefetch(self):
with self.assertNumQueries(2):
[a.author for a in AuthorWithAge.objects.prefetch_related('author')]
@override_settings(DEBUG=True)
def test_child_link_prefetch(self):
with self.assertNumQueries(2):
authors = [a.authorwithage for a in Author.objects.prefetch_related('authorwithage')]
# Regression for #18090: the prefetching query must include an IN clause.
# Note that on Oracle the table name is upper case in the generated SQL,
# thus the .lower() call.
self.assertIn('authorwithage', connection.queries[-1]['sql'].lower())
self.assertIn(' IN ', connection.queries[-1]['sql'])
self.assertEqual(authors, [a.authorwithage for a in Author.objects.all()])
class ForeignKeyToFieldTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.book = Book.objects.create(title='Poems')
cls.author1 = Author.objects.create(name='Jane', first_book=cls.book)
cls.author2 = Author.objects.create(name='Tom', first_book=cls.book)
cls.author3 = Author.objects.create(name='Robert', first_book=cls.book)
cls.author_address = AuthorAddress.objects.create(author=cls.author1, address='SomeStreet 1')
FavoriteAuthors.objects.create(author=cls.author1, likes_author=cls.author2)
FavoriteAuthors.objects.create(author=cls.author2, likes_author=cls.author3)
FavoriteAuthors.objects.create(author=cls.author3, likes_author=cls.author1)
def test_foreignkey(self):
with self.assertNumQueries(2):
qs = Author.objects.prefetch_related('addresses')
addresses = [[str(address) for address in obj.addresses.all()]
for obj in qs]
self.assertEqual(addresses, [[str(self.author_address)], [], []])
def test_m2m(self):
with self.assertNumQueries(3):
qs = Author.objects.all().prefetch_related('favorite_authors', 'favors_me')
favorites = [(
[str(i_like) for i_like in author.favorite_authors.all()],
[str(likes_me) for likes_me in author.favors_me.all()]
) for author in qs]
self.assertEqual(
favorites,
[
([str(self.author2)], [str(self.author3)]),
([str(self.author3)], [str(self.author1)]),
([str(self.author1)], [str(self.author2)])
]
)
class LookupOrderingTest(TestCase):
"""
Test cases that demonstrate that ordering of lookups is important, and
ensure it is preserved.
"""
def setUp(self):
self.person1 = Person.objects.create(name="Joe")
self.person2 = Person.objects.create(name="Mary")
# Set main_room for each house before creating the next one for
# databases where supports_nullable_unique_constraints is False.
self.house1 = House.objects.create(address="123 Main St")
self.room1_1 = Room.objects.create(name="Dining room", house=self.house1)
self.room1_2 = Room.objects.create(name="Lounge", house=self.house1)
self.room1_3 = Room.objects.create(name="Kitchen", house=self.house1)
self.house1.main_room = self.room1_1
self.house1.save()
self.person1.houses.add(self.house1)
self.house2 = House.objects.create(address="45 Side St")
self.room2_1 = Room.objects.create(name="Dining room", house=self.house2)
self.room2_2 = Room.objects.create(name="Lounge", house=self.house2)
self.house2.main_room = self.room2_1
self.house2.save()
self.person1.houses.add(self.house2)
self.house3 = House.objects.create(address="6 Downing St")
self.room3_1 = Room.objects.create(name="Dining room", house=self.house3)
self.room3_2 = Room.objects.create(name="Lounge", house=self.house3)
self.room3_3 = Room.objects.create(name="Kitchen", house=self.house3)
self.house3.main_room = self.room3_1
self.house3.save()
self.person2.houses.add(self.house3)
self.house4 = House.objects.create(address="7 Regents St")
self.room4_1 = Room.objects.create(name="Dining room", house=self.house4)
self.room4_2 = Room.objects.create(name="Lounge", house=self.house4)
self.house4.main_room = self.room4_1
self.house4.save()
self.person2.houses.add(self.house4)
def test_order(self):
with self.assertNumQueries(4):
# The following two queries must be done in the same order as written,
# otherwise 'primary_house' will cause non-prefetched lookups
qs = Person.objects.prefetch_related('houses__rooms',
'primary_house__occupants')
[list(p.primary_house.occupants.all()) for p in qs]
class NullableTest(TestCase):
@classmethod
def setUpTestData(cls):
boss = Employee.objects.create(name="Peter")
Employee.objects.create(name="Joe", boss=boss)
Employee.objects.create(name="Angela", boss=boss)
def test_traverse_nullable(self):
# Because we use select_related() for 'boss', it doesn't need to be
# prefetched, but we can still traverse it although it contains some nulls
with self.assertNumQueries(2):
qs = Employee.objects.select_related('boss').prefetch_related('boss__serfs')
co_serfs = [list(e.boss.serfs.all()) if e.boss is not None else []
for e in qs]
qs2 = Employee.objects.select_related('boss')
co_serfs2 = [list(e.boss.serfs.all()) if e.boss is not None else [] for e in qs2]
self.assertEqual(co_serfs, co_serfs2)
def test_prefetch_nullable(self):
# One for main employee, one for boss, one for serfs
with self.assertNumQueries(3):
qs = Employee.objects.prefetch_related('boss__serfs')
co_serfs = [list(e.boss.serfs.all()) if e.boss is not None else []
for e in qs]
qs2 = Employee.objects.all()
co_serfs2 = [list(e.boss.serfs.all()) if e.boss is not None else [] for e in qs2]
self.assertEqual(co_serfs, co_serfs2)
def test_in_bulk(self):
"""
        in_bulk() correctly prefetches objects by not using .iterator()
        directly.
"""
boss1 = Employee.objects.create(name="Peter")
boss2 = Employee.objects.create(name="Jack")
with self.assertNumQueries(2):
# Prefetch is done and it does not cause any errors.
bulk = Employee.objects.prefetch_related('serfs').in_bulk([boss1.pk, boss2.pk])
for b in bulk.values():
list(b.serfs.all())
class MultiDbTests(TestCase):
databases = {'default', 'other'}
def test_using_is_honored_m2m(self):
B = Book.objects.using('other')
A = Author.objects.using('other')
book1 = B.create(title="Poems")
book2 = B.create(title="Jane Eyre")
book3 = B.create(title="Wuthering Heights")
book4 = B.create(title="Sense and Sensibility")
author1 = A.create(name="Charlotte", first_book=book1)
author2 = A.create(name="Anne", first_book=book1)
author3 = A.create(name="Emily", first_book=book1)
author4 = A.create(name="Jane", first_book=book4)
book1.authors.add(author1, author2, author3)
book2.authors.add(author1)
book3.authors.add(author3)
book4.authors.add(author4)
# Forward
qs1 = B.prefetch_related('authors')
with self.assertNumQueries(2, using='other'):
books = "".join("%s (%s)\n" %
(book.title, ", ".join(a.name for a in book.authors.all()))
for book in qs1)
self.assertEqual(books,
"Poems (Charlotte, Anne, Emily)\n"
"Jane Eyre (Charlotte)\n"
"Wuthering Heights (Emily)\n"
"Sense and Sensibility (Jane)\n")
# Reverse
qs2 = A.prefetch_related('books')
with self.assertNumQueries(2, using='other'):
authors = "".join("%s: %s\n" %
(author.name, ", ".join(b.title for b in author.books.all()))
for author in qs2)
self.assertEqual(authors,
"Charlotte: Poems, Jane Eyre\n"
"Anne: Poems\n"
"Emily: Poems, Wuthering Heights\n"
"Jane: Sense and Sensibility\n")
def test_using_is_honored_fkey(self):
B = Book.objects.using('other')
A = Author.objects.using('other')
book1 = B.create(title="Poems")
book2 = B.create(title="Sense and Sensibility")
A.create(name="Charlotte Bronte", first_book=book1)
A.create(name="Jane Austen", first_book=book2)
# Forward
with self.assertNumQueries(2, using='other'):
books = ", ".join(a.first_book.title for a in A.prefetch_related('first_book'))
self.assertEqual("Poems, Sense and Sensibility", books)
# Reverse
with self.assertNumQueries(2, using='other'):
books = "".join("%s (%s)\n" %
(b.title, ", ".join(a.name for a in b.first_time_authors.all()))
for b in B.prefetch_related('first_time_authors'))
self.assertEqual(books,
"Poems (Charlotte Bronte)\n"
"Sense and Sensibility (Jane Austen)\n")
def test_using_is_honored_inheritance(self):
B = BookWithYear.objects.using('other')
A = AuthorWithAge.objects.using('other')
book1 = B.create(title="Poems", published_year=2010)
B.create(title="More poems", published_year=2011)
A.create(name='Jane', first_book=book1, age=50)
A.create(name='Tom', first_book=book1, age=49)
# parent link
with self.assertNumQueries(2, using='other'):
authors = ", ".join(a.author.name for a in A.prefetch_related('author'))
self.assertEqual(authors, "Jane, Tom")
# child link
with self.assertNumQueries(2, using='other'):
ages = ", ".join(str(a.authorwithage.age) for a in A.prefetch_related('authorwithage'))
self.assertEqual(ages, "50, 49")
def test_using_is_honored_custom_qs(self):
B = Book.objects.using('other')
A = Author.objects.using('other')
book1 = B.create(title="Poems")
book2 = B.create(title="Sense and Sensibility")
A.create(name="Charlotte Bronte", first_book=book1)
A.create(name="Jane Austen", first_book=book2)
# Implicit hinting
with self.assertNumQueries(2, using='other'):
prefetch = Prefetch('first_time_authors', queryset=Author.objects.all())
books = "".join("%s (%s)\n" %
(b.title, ", ".join(a.name for a in b.first_time_authors.all()))
for b in B.prefetch_related(prefetch))
self.assertEqual(books,
"Poems (Charlotte Bronte)\n"
"Sense and Sensibility (Jane Austen)\n")
# Explicit using on the same db.
with self.assertNumQueries(2, using='other'):
prefetch = Prefetch('first_time_authors', queryset=Author.objects.using('other'))
books = "".join("%s (%s)\n" %
(b.title, ", ".join(a.name for a in b.first_time_authors.all()))
for b in B.prefetch_related(prefetch))
self.assertEqual(books,
"Poems (Charlotte Bronte)\n"
"Sense and Sensibility (Jane Austen)\n")
# Explicit using on a different db.
with self.assertNumQueries(1, using='default'), self.assertNumQueries(1, using='other'):
prefetch = Prefetch('first_time_authors', queryset=Author.objects.using('default'))
books = "".join("%s (%s)\n" %
(b.title, ", ".join(a.name for a in b.first_time_authors.all()))
for b in B.prefetch_related(prefetch))
self.assertEqual(books,
"Poems ()\n"
"Sense and Sensibility ()\n")
class Ticket19607Tests(TestCase):
def setUp(self):
for id, name1, name2 in [
(1, 'einfach', 'simple'),
(2, 'schwierig', 'difficult'),
]:
LessonEntry.objects.create(id=id, name1=name1, name2=name2)
for id, lesson_entry_id, name in [
(1, 1, 'einfach'),
(2, 1, 'simple'),
(3, 2, 'schwierig'),
(4, 2, 'difficult'),
]:
WordEntry.objects.create(id=id, lesson_entry_id=lesson_entry_id, name=name)
def test_bug(self):
list(WordEntry.objects.prefetch_related('lesson_entry', 'lesson_entry__wordentry_set'))
class Ticket21410Tests(TestCase):
def setUp(self):
self.book1 = Book.objects.create(title="Poems")
self.book2 = Book.objects.create(title="Jane Eyre")
self.book3 = Book.objects.create(title="Wuthering Heights")
self.book4 = Book.objects.create(title="Sense and Sensibility")
self.author1 = Author2.objects.create(name="Charlotte", first_book=self.book1)
self.author2 = Author2.objects.create(name="Anne", first_book=self.book1)
self.author3 = Author2.objects.create(name="Emily", first_book=self.book1)
self.author4 = Author2.objects.create(name="Jane", first_book=self.book4)
self.author1.favorite_books.add(self.book1, self.book2, self.book3)
self.author2.favorite_books.add(self.book1)
self.author3.favorite_books.add(self.book2)
self.author4.favorite_books.add(self.book3)
def test_bug(self):
list(Author2.objects.prefetch_related('first_book', 'favorite_books'))
class Ticket21760Tests(TestCase):
def setUp(self):
self.rooms = []
for _ in range(3):
house = House.objects.create()
for _ in range(3):
self.rooms.append(Room.objects.create(house=house))
# Set main_room for each house before creating the next one for
# databases where supports_nullable_unique_constraints is False.
house.main_room = self.rooms[-3]
house.save()
def test_bug(self):
prefetcher = get_prefetcher(self.rooms[0], 'house', 'house')[0]
queryset = prefetcher.get_prefetch_queryset(list(Room.objects.all()))[0]
self.assertNotIn(' JOIN ', str(queryset.query))
class DirectPrefetchedObjectCacheReuseTests(TestCase):
"""
prefetch_related() reuses objects fetched in _prefetched_objects_cache.
When objects are prefetched and not stored as an instance attribute (often
intermediary relationships), they are saved to the
_prefetched_objects_cache attribute. prefetch_related() takes
_prefetched_objects_cache into account when determining whether an object
    has been fetched [1] and retrieves results from it when it is populated [2].
[1]: #25546 (duplicate queries on nested Prefetch)
[2]: #27554 (queryset evaluation fails with a mix of nested and flattened
prefetches)
"""
@classmethod
def setUpTestData(cls):
cls.book1, cls.book2 = [
Book.objects.create(title='book1'),
Book.objects.create(title='book2'),
]
cls.author11, cls.author12, cls.author21 = [
Author.objects.create(first_book=cls.book1, name='Author11'),
Author.objects.create(first_book=cls.book1, name='Author12'),
Author.objects.create(first_book=cls.book2, name='Author21'),
]
cls.author1_address1, cls.author1_address2, cls.author2_address1 = [
AuthorAddress.objects.create(author=cls.author11, address='Happy place'),
AuthorAddress.objects.create(author=cls.author12, address='Haunted house'),
AuthorAddress.objects.create(author=cls.author21, address='Happy place'),
]
cls.bookwithyear1 = BookWithYear.objects.create(title='Poems', published_year=2010)
cls.bookreview1 = BookReview.objects.create(book=cls.bookwithyear1)
def test_detect_is_fetched(self):
"""
Nested prefetch_related() shouldn't trigger duplicate queries for the same
lookup.
"""
with self.assertNumQueries(3):
books = Book.objects.filter(
title__in=['book1', 'book2'],
).prefetch_related(
Prefetch(
'first_time_authors',
Author.objects.prefetch_related(
Prefetch(
'addresses',
AuthorAddress.objects.filter(address='Happy place'),
)
),
),
)
book1, book2 = list(books)
with self.assertNumQueries(0):
self.assertSequenceEqual(book1.first_time_authors.all(), [self.author11, self.author12])
self.assertSequenceEqual(book2.first_time_authors.all(), [self.author21])
self.assertSequenceEqual(book1.first_time_authors.all()[0].addresses.all(), [self.author1_address1])
self.assertSequenceEqual(book1.first_time_authors.all()[1].addresses.all(), [])
self.assertSequenceEqual(book2.first_time_authors.all()[0].addresses.all(), [self.author2_address1])
self.assertEqual(
list(book1.first_time_authors.all()), list(book1.first_time_authors.all().all())
)
self.assertEqual(
list(book2.first_time_authors.all()), list(book2.first_time_authors.all().all())
)
self.assertEqual(
list(book1.first_time_authors.all()[0].addresses.all()),
list(book1.first_time_authors.all()[0].addresses.all().all())
)
self.assertEqual(
list(book1.first_time_authors.all()[1].addresses.all()),
list(book1.first_time_authors.all()[1].addresses.all().all())
)
self.assertEqual(
list(book2.first_time_authors.all()[0].addresses.all()),
list(book2.first_time_authors.all()[0].addresses.all().all())
)
def test_detect_is_fetched_with_to_attr(self):
with self.assertNumQueries(3):
books = Book.objects.filter(
title__in=['book1', 'book2'],
).prefetch_related(
Prefetch(
'first_time_authors',
Author.objects.prefetch_related(
Prefetch(
'addresses',
AuthorAddress.objects.filter(address='Happy place'),
to_attr='happy_place',
)
),
to_attr='first_authors',
),
)
book1, book2 = list(books)
with self.assertNumQueries(0):
self.assertEqual(book1.first_authors, [self.author11, self.author12])
self.assertEqual(book2.first_authors, [self.author21])
self.assertEqual(book1.first_authors[0].happy_place, [self.author1_address1])
self.assertEqual(book1.first_authors[1].happy_place, [])
self.assertEqual(book2.first_authors[0].happy_place, [self.author2_address1])
def test_prefetch_reverse_foreign_key(self):
with self.assertNumQueries(2):
bookwithyear1, = BookWithYear.objects.prefetch_related('bookreview_set')
with self.assertNumQueries(0):
self.assertCountEqual(bookwithyear1.bookreview_set.all(), [self.bookreview1])
with self.assertNumQueries(0):
prefetch_related_objects([bookwithyear1], 'bookreview_set')
def test_add_clears_prefetched_objects(self):
bookwithyear = BookWithYear.objects.get(pk=self.bookwithyear1.pk)
prefetch_related_objects([bookwithyear], 'bookreview_set')
self.assertCountEqual(bookwithyear.bookreview_set.all(), [self.bookreview1])
new_review = BookReview.objects.create()
bookwithyear.bookreview_set.add(new_review)
self.assertCountEqual(bookwithyear.bookreview_set.all(), [self.bookreview1, new_review])
def test_remove_clears_prefetched_objects(self):
bookwithyear = BookWithYear.objects.get(pk=self.bookwithyear1.pk)
prefetch_related_objects([bookwithyear], 'bookreview_set')
self.assertCountEqual(bookwithyear.bookreview_set.all(), [self.bookreview1])
bookwithyear.bookreview_set.remove(self.bookreview1)
self.assertCountEqual(bookwithyear.bookreview_set.all(), [])
class ReadPrefetchedObjectsCacheTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.book1 = Book.objects.create(title='Les confessions Volume I')
cls.book2 = Book.objects.create(title='Candide')
cls.author1 = AuthorWithAge.objects.create(name='Rousseau', first_book=cls.book1, age=70)
cls.author2 = AuthorWithAge.objects.create(name='Voltaire', first_book=cls.book2, age=65)
cls.book1.authors.add(cls.author1)
cls.book2.authors.add(cls.author2)
FavoriteAuthors.objects.create(author=cls.author1, likes_author=cls.author2)
def test_retrieves_results_from_prefetched_objects_cache(self):
"""
When intermediary results are prefetched without a destination
attribute, they are saved in the RelatedManager's cache
(_prefetched_objects_cache). prefetch_related() uses this cache
(#27554).
"""
authors = AuthorWithAge.objects.prefetch_related(
Prefetch(
'author',
queryset=Author.objects.prefetch_related(
# Results are saved in the RelatedManager's cache
# (_prefetched_objects_cache) and do not replace the
# RelatedManager on Author instances (favorite_authors)
Prefetch('favorite_authors__first_book'),
),
),
)
with self.assertNumQueries(4):
# AuthorWithAge -> Author -> FavoriteAuthors, Book
self.assertQuerysetEqual(authors, ['<AuthorWithAge: Rousseau>', '<AuthorWithAge: Voltaire>'])
"""Statistics helper for sensor."""
from __future__ import annotations
import datetime
import itertools
import logging
from typing import Callable
from homeassistant.components.recorder import history, statistics
from homeassistant.components.sensor import (
ATTR_STATE_CLASS,
DEVICE_CLASS_ENERGY,
DEVICE_CLASS_GAS,
DEVICE_CLASS_MONETARY,
DEVICE_CLASS_PRESSURE,
DEVICE_CLASS_TEMPERATURE,
STATE_CLASS_MEASUREMENT,
STATE_CLASS_TOTAL_INCREASING,
STATE_CLASSES,
)
from homeassistant.const import (
ATTR_DEVICE_CLASS,
ATTR_UNIT_OF_MEASUREMENT,
DEVICE_CLASS_POWER,
ENERGY_KILO_WATT_HOUR,
ENERGY_WATT_HOUR,
POWER_KILO_WATT,
POWER_WATT,
PRESSURE_BAR,
PRESSURE_HPA,
PRESSURE_INHG,
PRESSURE_MBAR,
PRESSURE_PA,
PRESSURE_PSI,
TEMP_CELSIUS,
TEMP_FAHRENHEIT,
TEMP_KELVIN,
VOLUME_CUBIC_FEET,
VOLUME_CUBIC_METERS,
)
from homeassistant.core import HomeAssistant, State
from homeassistant.helpers.entity import entity_sources
import homeassistant.util.dt as dt_util
import homeassistant.util.pressure as pressure_util
import homeassistant.util.temperature as temperature_util
import homeassistant.util.volume as volume_util
from . import ATTR_LAST_RESET, DOMAIN
_LOGGER = logging.getLogger(__name__)
DEVICE_CLASS_STATISTICS: dict[str, dict[str, set[str]]] = {
STATE_CLASS_MEASUREMENT: {
# Deprecated, support will be removed in Home Assistant 2021.11
DEVICE_CLASS_ENERGY: {"sum"},
DEVICE_CLASS_GAS: {"sum"},
DEVICE_CLASS_MONETARY: {"sum"},
},
STATE_CLASS_TOTAL_INCREASING: {},
}
DEFAULT_STATISTICS = {
STATE_CLASS_MEASUREMENT: {"mean", "min", "max"},
STATE_CLASS_TOTAL_INCREASING: {"sum"},
}
# Normalized units which will be stored in the statistics table
DEVICE_CLASS_UNITS = {
DEVICE_CLASS_ENERGY: ENERGY_KILO_WATT_HOUR,
DEVICE_CLASS_POWER: POWER_WATT,
DEVICE_CLASS_PRESSURE: PRESSURE_PA,
DEVICE_CLASS_TEMPERATURE: TEMP_CELSIUS,
DEVICE_CLASS_GAS: VOLUME_CUBIC_METERS,
}
UNIT_CONVERSIONS: dict[str, dict[str, Callable]] = {
# Convert energy to kWh
DEVICE_CLASS_ENERGY: {
ENERGY_KILO_WATT_HOUR: lambda x: x,
ENERGY_WATT_HOUR: lambda x: x / 1000,
},
    # Convert power to W
DEVICE_CLASS_POWER: {
POWER_WATT: lambda x: x,
POWER_KILO_WATT: lambda x: x * 1000,
},
# Convert pressure to Pa
# Note: pressure_util.convert is bypassed to avoid redundant error checking
DEVICE_CLASS_PRESSURE: {
PRESSURE_BAR: lambda x: x / pressure_util.UNIT_CONVERSION[PRESSURE_BAR],
PRESSURE_HPA: lambda x: x / pressure_util.UNIT_CONVERSION[PRESSURE_HPA],
PRESSURE_INHG: lambda x: x / pressure_util.UNIT_CONVERSION[PRESSURE_INHG],
PRESSURE_MBAR: lambda x: x / pressure_util.UNIT_CONVERSION[PRESSURE_MBAR],
PRESSURE_PA: lambda x: x / pressure_util.UNIT_CONVERSION[PRESSURE_PA],
PRESSURE_PSI: lambda x: x / pressure_util.UNIT_CONVERSION[PRESSURE_PSI],
},
# Convert temperature to °C
# Note: temperature_util.convert is bypassed to avoid redundant error checking
DEVICE_CLASS_TEMPERATURE: {
TEMP_CELSIUS: lambda x: x,
TEMP_FAHRENHEIT: temperature_util.fahrenheit_to_celsius,
TEMP_KELVIN: temperature_util.kelvin_to_celsius,
},
# Convert volume to cubic meter
DEVICE_CLASS_GAS: {
VOLUME_CUBIC_METERS: lambda x: x,
VOLUME_CUBIC_FEET: volume_util.cubic_feet_to_cubic_meter,
},
}
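# For example (illustrative): a power sensor reporting 1.5 kW is normalized as
# UNIT_CONVERSIONS[DEVICE_CLASS_POWER][POWER_KILO_WATT](1.5) -> 1500.0 (W).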
# Keep track of entities for which a warning about decreasing value has been logged
SEEN_DIP = "sensor_seen_total_increasing_dip"
WARN_DIP = "sensor_warn_total_increasing_dip"
# Keep track of entities for which a warning about unsupported unit has been logged
WARN_UNSUPPORTED_UNIT = "sensor_warn_unsupported_unit"
WARN_UNSTABLE_UNIT = "sensor_warn_unstable_unit"
def _get_entities(hass: HomeAssistant) -> list[tuple[str, str, str | None]]:
"""Get (entity_id, state_class, device_class) of all sensors for which to compile statistics."""
all_sensors = hass.states.all(DOMAIN)
entity_ids = []
for state in all_sensors:
if (state_class := state.attributes.get(ATTR_STATE_CLASS)) not in STATE_CLASSES:
continue
device_class = state.attributes.get(ATTR_DEVICE_CLASS)
entity_ids.append((state.entity_id, state_class, device_class))
return entity_ids
# Faster than try/except
# From https://stackoverflow.com/a/23639915
def _is_number(s: str) -> bool: # pylint: disable=invalid-name
"""Return True if string is a number."""
return s.replace(".", "", 1).isdigit()
def _time_weighted_average(
fstates: list[tuple[float, State]], start: datetime.datetime, end: datetime.datetime
) -> float:
"""Calculate a time weighted average.
    The average is calculated by weighting the states by the duration in
    seconds between state changes.
Note: there's no interpolation of values between state changes.
"""
old_fstate: float | None = None
old_start_time: datetime.datetime | None = None
accumulated = 0.0
for fstate, state in fstates:
# The recorder will give us the last known state, which may be well
# before the requested start time for the statistics
start_time = start if state.last_updated < start else state.last_updated
if old_start_time is None:
# Adjust start time, if there was no last known state
start = start_time
else:
duration = start_time - old_start_time
# Accumulate the value, weighted by duration until next state change
assert old_fstate is not None
accumulated += old_fstate * duration.total_seconds()
old_fstate = fstate
old_start_time = start_time
if old_fstate is not None:
# Accumulate the value, weighted by duration until end of the period
assert old_start_time is not None
duration = end - old_start_time
accumulated += old_fstate * duration.total_seconds()
return accumulated / (end - start).total_seconds()
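# Worked example for _time_weighted_average (illustrative): states 10.0 at
# t=0 s and 20.0 at t=60 s over a 120 s window give
# (10 * 60 + 20 * 60) / 120 = 15.0; each value is held constant until the next
# state change, with no interpolation.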
def _get_units(fstates: list[tuple[float, State]]) -> set[str | None]:
"""Return True if all states have the same unit."""
return {item[1].attributes.get(ATTR_UNIT_OF_MEASUREMENT) for item in fstates}
def _normalize_states(
hass: HomeAssistant,
entity_history: list[State],
device_class: str | None,
entity_id: str,
) -> tuple[str | None, list[tuple[float, State]]]:
"""Normalize units."""
unit = None
if device_class not in UNIT_CONVERSIONS:
        # We're not normalizing this device class, return the states as they are
fstates = [
(float(el.state), el) for el in entity_history if _is_number(el.state)
]
if fstates:
all_units = _get_units(fstates)
if len(all_units) > 1:
if WARN_UNSTABLE_UNIT not in hass.data:
hass.data[WARN_UNSTABLE_UNIT] = set()
if entity_id not in hass.data[WARN_UNSTABLE_UNIT]:
hass.data[WARN_UNSTABLE_UNIT].add(entity_id)
extra = ""
if old_metadata := statistics.get_metadata(hass, entity_id):
extra = (
" and matches the unit of already compiled statistics "
f"({old_metadata['unit_of_measurement']})"
)
_LOGGER.warning(
"The unit of %s is changing, got multiple %s, generation of long term "
"statistics will be suppressed unless the unit is stable%s",
entity_id,
all_units,
extra,
)
return None, []
unit = fstates[0][1].attributes.get(ATTR_UNIT_OF_MEASUREMENT)
return unit, fstates
fstates = []
for state in entity_history:
# Exclude non numerical states from statistics
if not _is_number(state.state):
continue
fstate = float(state.state)
unit = state.attributes.get(ATTR_UNIT_OF_MEASUREMENT)
# Exclude unsupported units from statistics
if unit not in UNIT_CONVERSIONS[device_class]:
if WARN_UNSUPPORTED_UNIT not in hass.data:
hass.data[WARN_UNSUPPORTED_UNIT] = set()
if entity_id not in hass.data[WARN_UNSUPPORTED_UNIT]:
hass.data[WARN_UNSUPPORTED_UNIT].add(entity_id)
_LOGGER.warning("%s has unknown unit %s", entity_id, unit)
continue
fstates.append((UNIT_CONVERSIONS[device_class][unit](fstate), state))
return DEVICE_CLASS_UNITS[device_class], fstates
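# Usage sketch for _normalize_states (illustrative): for a temperature sensor
# whose history was recorded in TEMP_FAHRENHEIT, it returns
# (TEMP_CELSIUS, [(value_in_celsius, state), ...]); non-numeric states and
# states with unsupported units are dropped.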
def warn_dip(hass: HomeAssistant, entity_id: str) -> None:
"""Log a warning once if a sensor with state_class_total has a decreasing value.
    The log will be suppressed until two dips have been seen to prevent warnings due to
rounding issues with databases storing the state as a single precision float, which
was fixed in recorder DB version 20.
"""
if SEEN_DIP not in hass.data:
hass.data[SEEN_DIP] = set()
if entity_id not in hass.data[SEEN_DIP]:
hass.data[SEEN_DIP].add(entity_id)
return
if WARN_DIP not in hass.data:
hass.data[WARN_DIP] = set()
if entity_id not in hass.data[WARN_DIP]:
hass.data[WARN_DIP].add(entity_id)
domain = entity_sources(hass).get(entity_id, {}).get("domain")
if domain in ["energy", "growatt_server", "solaredge"]:
return
_LOGGER.warning(
"Entity %s %shas state class total_increasing, but its state is "
"not strictly increasing. Please create a bug report at %s",
entity_id,
f"from integration {domain} " if domain else "",
"https://github.com/home-assistant/core/issues?q=is%3Aopen+is%3Aissue"
"+label%3A%22integration%3A+recorder%22",
)
def reset_detected(
hass: HomeAssistant, entity_id: str, state: float, previous_state: float | None
) -> bool:
"""Test if a total_increasing sensor has been reset."""
if previous_state is None:
return False
if 0.9 * previous_state <= state < previous_state:
warn_dip(hass, entity_id)
return state < 0.9 * previous_state
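# Example for reset_detected (illustrative): with previous_state=100.0, a new
# state of 95.0 stays within 90% of the previous value, so it only triggers
# warn_dip and is not a reset; a new state of 5.0 (below 90.0) is a reset.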
def _wanted_statistics(
entities: list[tuple[str, str, str | None]]
) -> dict[str, set[str]]:
"""Prepare a dict with wanted statistics for entities."""
wanted_statistics = {}
for entity_id, state_class, device_class in entities:
if device_class in DEVICE_CLASS_STATISTICS[state_class]:
wanted_statistics[entity_id] = DEVICE_CLASS_STATISTICS[state_class][
device_class
]
else:
wanted_statistics[entity_id] = DEFAULT_STATISTICS[state_class]
return wanted_statistics
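# Example for _wanted_statistics (illustrative): a STATE_CLASS_MEASUREMENT
# temperature sensor gets DEFAULT_STATISTICS -> {"mean", "min", "max"}, while
# a (deprecated) STATE_CLASS_MEASUREMENT energy sensor gets {"sum"} from
# DEVICE_CLASS_STATISTICS.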
def compile_statistics( # noqa: C901
hass: HomeAssistant, start: datetime.datetime, end: datetime.datetime
) -> dict:
"""Compile statistics for all entities during start-end.
Note: This will query the database and must not be run in the event loop
"""
result: dict = {}
entities = _get_entities(hass)
wanted_statistics = _wanted_statistics(entities)
# Get history between start and end
entities_full_history = [i[0] for i in entities if "sum" in wanted_statistics[i[0]]]
history_list = {}
if entities_full_history:
history_list = history.get_significant_states( # type: ignore
hass,
start - datetime.timedelta.resolution,
end,
entity_ids=entities_full_history,
significant_changes_only=False,
)
entities_significant_history = [
i[0] for i in entities if "sum" not in wanted_statistics[i[0]]
]
if entities_significant_history:
_history_list = history.get_significant_states( # type: ignore
hass,
start - datetime.timedelta.resolution,
end,
entity_ids=entities_significant_history,
)
history_list = {**history_list, **_history_list}
for entity_id, state_class, device_class in entities:
if entity_id not in history_list:
continue
entity_history = history_list[entity_id]
unit, fstates = _normalize_states(hass, entity_history, device_class, entity_id)
if not fstates:
continue
# Check metadata
if old_metadata := statistics.get_metadata(hass, entity_id):
if old_metadata["unit_of_measurement"] != unit:
if WARN_UNSTABLE_UNIT not in hass.data:
hass.data[WARN_UNSTABLE_UNIT] = set()
if entity_id not in hass.data[WARN_UNSTABLE_UNIT]:
hass.data[WARN_UNSTABLE_UNIT].add(entity_id)
_LOGGER.warning(
"The unit of %s (%s) does not match the unit of already "
"compiled statistics (%s). Generation of long term statistics "
"will be suppressed unless the unit changes back to %s",
entity_id,
unit,
old_metadata["unit_of_measurement"],
old_metadata["unit_of_measurement"],
)
continue
result[entity_id] = {}
# Set meta data
result[entity_id]["meta"] = {
"unit_of_measurement": unit,
"has_mean": "mean" in wanted_statistics[entity_id],
"has_sum": "sum" in wanted_statistics[entity_id],
}
# Make calculations
stat: dict = {}
if "max" in wanted_statistics[entity_id]:
stat["max"] = max(*itertools.islice(zip(*fstates), 1))
if "min" in wanted_statistics[entity_id]:
stat["min"] = min(*itertools.islice(zip(*fstates), 1))
if "mean" in wanted_statistics[entity_id]:
stat["mean"] = _time_weighted_average(fstates, start, end)
if "sum" in wanted_statistics[entity_id]:
last_reset = old_last_reset = None
new_state = old_state = None
_sum = 0
last_stats = statistics.get_last_statistics(hass, 1, entity_id)
if entity_id in last_stats:
# We have compiled history for this sensor before, use that as a starting point
last_reset = old_last_reset = last_stats[entity_id][0]["last_reset"]
new_state = old_state = last_stats[entity_id][0]["state"]
_sum = last_stats[entity_id][0]["sum"] or 0
for fstate, state in fstates:
# Deprecated, will be removed in Home Assistant 2021.10
if (
"last_reset" not in state.attributes
and state_class == STATE_CLASS_MEASUREMENT
):
continue
reset = False
if (
state_class != STATE_CLASS_TOTAL_INCREASING
and (last_reset := state.attributes.get("last_reset"))
!= old_last_reset
):
if old_state is None:
_LOGGER.info(
"Compiling initial sum statistics for %s, zero point set to %s",
entity_id,
fstate,
)
else:
_LOGGER.info(
"Detected new cycle for %s, last_reset set to %s (old last_reset %s)",
entity_id,
last_reset,
old_last_reset,
)
reset = True
elif old_state is None and last_reset is None:
reset = True
_LOGGER.info(
"Compiling initial sum statistics for %s, zero point set to %s",
entity_id,
fstate,
)
elif state_class == STATE_CLASS_TOTAL_INCREASING and (
old_state is None
or reset_detected(hass, entity_id, fstate, new_state)
):
reset = True
_LOGGER.info(
"Detected new cycle for %s, value dropped from %s to %s",
entity_id,
                        new_state,
                        fstate,
)
if reset:
# The sensor has been reset, update the sum
if old_state is not None:
_sum += new_state - old_state
# ..and update the starting point
new_state = fstate
old_last_reset = last_reset
# Force a new cycle for an existing sensor to start at 0
if old_state is not None:
old_state = 0.0
else:
old_state = new_state
else:
new_state = fstate
# Deprecated, will be removed in Home Assistant 2021.11
if last_reset is None and state_class == STATE_CLASS_MEASUREMENT:
# No valid updates
result.pop(entity_id)
continue
if new_state is None or old_state is None:
# No valid updates
result.pop(entity_id)
continue
# Update the sum with the last state
_sum += new_state - old_state
if last_reset is not None:
stat["last_reset"] = dt_util.parse_datetime(last_reset)
stat["sum"] = _sum
stat["state"] = new_state
result[entity_id]["stat"] = stat
return result
def list_statistic_ids(hass: HomeAssistant, statistic_type: str | None = None) -> dict:
"""Return statistic_ids and meta data."""
entities = _get_entities(hass)
statistic_ids = {}
for entity_id, state_class, device_class in entities:
if device_class in DEVICE_CLASS_STATISTICS[state_class]:
provided_statistics = DEVICE_CLASS_STATISTICS[state_class][device_class]
else:
provided_statistics = DEFAULT_STATISTICS[state_class]
if statistic_type is not None and statistic_type not in provided_statistics:
continue
state = hass.states.get(entity_id)
assert state
if (
"sum" in provided_statistics
and ATTR_LAST_RESET not in state.attributes
and state.attributes.get(ATTR_STATE_CLASS) == STATE_CLASS_MEASUREMENT
):
continue
metadata = statistics.get_metadata(hass, entity_id)
if metadata:
native_unit: str | None = metadata["unit_of_measurement"]
else:
native_unit = state.attributes.get(ATTR_UNIT_OF_MEASUREMENT)
if device_class not in UNIT_CONVERSIONS:
statistic_ids[entity_id] = native_unit
continue
if native_unit not in UNIT_CONVERSIONS[device_class]:
continue
statistics_unit = DEVICE_CLASS_UNITS[device_class]
statistic_ids[entity_id] = statistics_unit
return statistic_ids
|
{
"content_hash": "615991fd3ae7662c853048af21439751",
"timestamp": "",
"source": "github",
"line_count": 521,
"max_line_length": 100,
"avg_line_length": 37.39923224568138,
"alnum_prop": 0.5846548627149089,
"repo_name": "FreekingDean/home-assistant",
"id": "0054b01abd2c87a103eb39ff42a00538056430c3",
"size": "19486",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "homeassistant/components/sensor/recorder.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2335"
},
{
"name": "Python",
"bytes": "36746639"
},
{
"name": "Shell",
"bytes": "4910"
}
],
"symlink_target": ""
}
|
import io
import json
def read_docs(f, field=1):
input_stream = io.TextIOWrapper(f.buffer, encoding='utf-8')
for line in input_stream:
yield json.loads(line.strip().split("\t")[field-1])
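# Input format (inferred from the parsing above): one tab-separated record per
# line with a JSON document in the selected 1-indexed field, e.g. for field=2:
#
#   page_12345<TAB>{"id": 12345, "title": "Example"}
#
# yields the dict {"id": 12345, "title": "Example"}.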
def revision2doc(revision, page):
"""
Implements RevisionDocument v0.0.2
"""
redirect = None
if page.redirect is not None:
redirect = page.redirect.title
page_doc = {
'id': page.id,
'title': page.title,
'namespace': page.namespace,
'redirect_title': redirect,
'restrictions': page.restrictions
}
if revision.contributor is not None:
contributor_doc = {
'id': revision.contributor.id,
'user_text': revision.contributor.user_text
}
else:
contributor_doc = None
revision_doc = {
'page': page_doc,
'id': revision.id,
'timestamp': revision.timestamp.long_format(),
'contributor': contributor_doc,
'minor': revision.minor,
'comment': str(revision.comment) \
if revision.comment is not None \
else None,
'text': str(revision.text) \
if revision.text is not None \
else None,
'bytes': revision.bytes,
'sha1': revision.sha1,
'parent_id': revision.parent_id,
'model': revision.model,
'format': revision.format
}
return revision_doc
def op2doc(operation, a, b):
name, a1, a2, b1, b2 = operation
doc = {
'name': name,
'a1': a1,
'a2': a2,
'b1': b1,
'b2': b2
}
if name == "insert": doc['tokens'] = b[b1:b2]
elif name == "delete": doc['tokens'] = a[a1:a2]
else: pass
return doc
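# Example (hypothetical operation tuple, following the unpacking above):
#
#   a = ["the", "quick", "fox"]
#   b = ["the", "slow", "fox"]
#   op2doc(("insert", 1, 1, 1, 2), a, b)
#   # -> {'name': 'insert', 'a1': 1, 'a2': 1, 'b1': 1, 'b2': 2,
#   #     'tokens': ['slow']}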
|
{
"content_hash": "be393564917509df6399fc1e6ee5bbb0",
"timestamp": "",
"source": "github",
"line_count": 69,
"max_line_length": 63,
"avg_line_length": 25.73913043478261,
"alnum_prop": 0.535472972972973,
"repo_name": "halfak/MediaWiki-Streaming",
"id": "a526860d3142443bb2133e0e13694280427ca725",
"size": "1776",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mwstreaming/utilities/util.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "48140"
}
],
"symlink_target": ""
}
|
from qutip.about import about
from qutip import settings as qset
def run(full=False):
"""
Run the test scripts for QuTiP.
Parameters
----------
full: bool
        If True, run all tests (~30 min). Otherwise skip a few variants of
        the slowest tests.
"""
# Call about to get all version info printed with tests
about()
import pytest
real_num_cpu = qset.num_cpus
real_thresh = qset.openmp_thresh
if qset.has_openmp:
        # For Travis CI, whose VMs have only 1 CPU.
# Make sure the openmp version of the functions are tested.
qset.num_cpus = 2
qset.openmp_thresh = 100
test_options = ["--verbosity=1", "--disable-pytest-warnings", "--pyargs"]
if not full:
test_options += ['-m', 'not slow']
pytest.main(test_options + ["qutip"])
# runs tests in qutip.tests module only
# Restore previous settings
if qset.has_openmp:
qset.num_cpus = real_num_cpu
qset.openmp_thresh = real_thresh
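# Typical usage (a sketch):
#
#   import qutip.testing
#   qutip.testing.run()           # skip the slowest test variants
#   qutip.testing.run(full=True)  # full suite, roughly 30 min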
|
{
"content_hash": "7810e95a80a4bc273fd4b14e2a073ab8",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 77,
"avg_line_length": 29.441176470588236,
"alnum_prop": 0.6213786213786214,
"repo_name": "qutip/qutip",
"id": "0281a2c1472a94d6854039cb7f0eb0411a21099d",
"size": "1001",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "qutip/testing.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C++",
"bytes": "13979"
},
{
"name": "Cython",
"bytes": "354994"
},
{
"name": "OpenQASM",
"bytes": "1718"
},
{
"name": "Python",
"bytes": "2810040"
}
],
"symlink_target": ""
}
|
'''
(c) 2011, 2012 Georgia Tech Research Corporation
This source code is released under the New BSD license. Please see
http://wiki.quantsoftware.org/index.php?title=QSTK_License
for license details.
Created on Jan 1, 2011
@author:Drew Bratcher
@contact: dbratcher@gatech.edu
@summary: Contains tutorial for backtester and report.
'''
#
# MonthlyRebalancingExample.py
#
# Usage: python MonthlyRebalancingExample.py '1-1-2004' '1-1-2009' 'alloc.pkl'
#
# A strategy script which creates a monthly allocation table using
# start date and end date along with the first 20 symbols of S&P500.
# It then dumps the allocation table to a pickle file.
#
#
#python imports
import cPickle
from pylab import *
from pandas import *
import matplotlib.pyplot as plt
import datetime as dt
import os
import sys
#qstk imports
import qstkutil.DataAccess as da
import qstkutil.qsdateutil as du
if __name__ == "__main__":
print "Running Monthly Rebalancing strategy starting "+sys.argv[1]+" and ending "+sys.argv[2]+"."
#Get first 20 S&P Symbols
    symbols = list(np.loadtxt(os.environ['QS']+'/quicksim/strategies/S&P500.csv',dtype='str',delimiter=',',comments='#',skiprows=0))
    symbols = symbols[0:20]
#Set start and end boundary times
t = map(int,sys.argv[1].split('-'))
startday = dt.datetime(t[2],t[0],t[1])
t = map(int,sys.argv[2].split('-'))
endday = dt.datetime(t[2],t[0],t[1])
#Get desired timestamps
timeofday=dt.timedelta(hours=16)
timestamps = du.getNYSEdays(startday,endday,timeofday)
# Get the data from the data store
dataobj = da.DataAccess('Norgate')
historic = dataobj.get_data(timestamps, symbols, "close")
# Setup the allocation table
alloc_vals=.8/(len(historic.values[0,:])-1)*ones((1,len(historic.values[0,:])))
alloc=DataMatrix(index=[historic.index[0]], data=alloc_vals, columns=symbols)
for date in range(1, len(historic.index)):
if(historic.index[date].day==1):
alloc=alloc.append(DataMatrix(index=[historic.index[date]], data=alloc_vals, columns=symbols))
alloc[symbols[0]] = .1
alloc['_CASH'] = .1
#Dump to a pkl file
    output=open(sys.argv[3],"wb")
    cPickle.dump(alloc, output)
    output.close()
|
{
"content_hash": "c4477087964277bcac236163b32bd5db",
"timestamp": "",
"source": "github",
"line_count": 71,
"max_line_length": 129,
"avg_line_length": 30.845070422535212,
"alnum_prop": 0.6981735159817352,
"repo_name": "grahesh/Stock-Market-Event-Analysis",
"id": "8fed4256e35ea9a43c277395dc5300744e7533df",
"size": "2190",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "quicksim/strategies/MonthlyRebalancing.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "5708"
},
{
"name": "Java",
"bytes": "8096"
},
{
"name": "JavaScript",
"bytes": "21455"
},
{
"name": "Python",
"bytes": "1741311"
},
{
"name": "Shell",
"bytes": "2145"
}
],
"symlink_target": ""
}
|
import os
import pipes
import platform
from build_swift.build_swift import argparse
from build_swift.build_swift.constants import BUILD_SCRIPT_IMPL_PATH
from build_swift.build_swift.constants import SWIFT_BUILD_ROOT
from build_swift.build_swift.constants import SWIFT_REPO_NAME
from build_swift.build_swift.constants import SWIFT_SOURCE_ROOT
from swift_build_support.swift_build_support import products
from swift_build_support.swift_build_support import shell
from swift_build_support.swift_build_support import targets
from swift_build_support.swift_build_support import workspace
from swift_build_support.swift_build_support.cmake import CMake
from swift_build_support.swift_build_support.host_specific_configuration \
import HostSpecificConfiguration
from swift_build_support.swift_build_support.productpipeline_list_builder \
import ProductPipelineListBuilder
from swift_build_support.swift_build_support.targets \
import StdlibDeploymentTarget
from swift_build_support.swift_build_support.utils \
import exit_rejecting_arguments
from swift_build_support.swift_build_support.utils import fatal_error
class BuildScriptInvocation(object):
"""Represent a single build script invocation.
"""
def __init__(self, toolchain, args):
self.toolchain = toolchain
self.args = args
self.workspace = workspace.Workspace(
source_root=SWIFT_SOURCE_ROOT,
build_root=os.path.join(SWIFT_BUILD_ROOT, args.build_subdir))
self.build_libparser_only = args.build_libparser_only
@property
def install_all(self):
return self.args.install_all or self.args.infer_dependencies
def build_ninja(self):
if not os.path.exists(self.workspace.source_dir("ninja")):
fatal_error(
"can't find source directory for ninja "
"(tried %s)" % (self.workspace.source_dir("ninja")))
ninja_build = products.Ninja.new_builder(
args=self.args,
toolchain=self.toolchain,
workspace=self.workspace,
host=StdlibDeploymentTarget.get_target_for_name(
self.args.host_target))
ninja_build.build()
self.toolchain.ninja = ninja_build.ninja_bin_path
def convert_to_impl_arguments(self):
"""convert_to_impl_arguments() -> (env, args)
Convert the invocation to an environment and list of arguments suitable
for invoking `build-script-impl`.
"""
# Create local shadows, for convenience.
args = self.args
toolchain = self.toolchain
cmake = CMake(args=args,
toolchain=self.toolchain)
impl_args = [
"--workspace", self.workspace.source_root,
"--build-dir", self.workspace.build_root,
"--install-prefix", args.install_prefix,
"--host-target", args.host_target,
"--stdlib-deployment-targets={}".format(
" ".join(args.stdlib_deployment_targets)),
"--host-cc", toolchain.cc,
"--host-cxx", toolchain.cxx,
"--darwin-xcrun-toolchain", args.darwin_xcrun_toolchain,
"--darwin-deployment-version-osx=%s" % (
args.darwin_deployment_version_osx),
"--darwin-deployment-version-ios=%s" % (
args.darwin_deployment_version_ios),
"--darwin-deployment-version-tvos=%s" % (
args.darwin_deployment_version_tvos),
"--darwin-deployment-version-watchos=%s" % (
args.darwin_deployment_version_watchos),
"--cmake", toolchain.cmake,
"--cmark-build-type", args.cmark_build_variant,
"--llvm-build-type", args.llvm_build_variant,
"--swift-build-type", args.swift_build_variant,
"--swift-stdlib-build-type", args.swift_stdlib_build_variant,
"--lldb-build-type", args.lldb_build_variant,
"--foundation-build-type", args.foundation_build_variant,
"--libdispatch-build-type", args.libdispatch_build_variant,
"--libicu-build-type", args.libicu_build_variant,
"--xctest-build-type", args.build_variant,
"--llbuild-build-type", args.build_variant,
"--swift-enable-assertions", str(args.swift_assertions).lower(),
"--swift-stdlib-enable-assertions", str(
args.swift_stdlib_assertions).lower(),
"--swift-analyze-code-coverage", str(
args.swift_analyze_code_coverage).lower(),
"--llbuild-enable-assertions", str(
args.llbuild_assertions).lower(),
"--lldb-assertions", str(
args.lldb_assertions).lower(),
"--cmake-generator", args.cmake_generator,
"--cross-compile-append-host-target-to-destdir", str(
args.cross_compile_append_host_target_to_destdir).lower(),
"--build-jobs", str(args.build_jobs),
"--common-cmake-options=%s" % ' '.join(
pipes.quote(opt) for opt in cmake.common_options()),
"--build-args=%s" % ' '.join(
pipes.quote(arg) for arg in cmake.build_args()),
"--dsymutil-jobs", str(args.dsymutil_jobs),
]
# Compute any product specific cmake arguments.
#
# NOTE: The sum(list(...)) is b/c compute_product_pipelines returns a
# tuple of lists of which the first is the build-script-impl products
# and the second is the non-build-script-impl-products. It guarantees
# that when we concatenate these two lists together we get a valid
# dependency graph.
for product_class in sum(list(self.compute_product_pipelines()[0]), []):
if not product_class.is_build_script_impl_product():
continue
product_name = product_class.product_name()
product_source_name = product_class.product_source_name()
source_dir = self.workspace.source_dir(product_source_name)
if not os.path.exists(source_dir):
fatal_error(
"can't find source directory for %s "
"(tried %s)" % (product_name, source_dir))
product = product_class(
args=args,
toolchain=self.toolchain,
source_dir=source_dir,
# FIXME: This is incorrect since it always assumes the host
# target I think?
build_dir=self.workspace.build_dir(
args.host_target, product_name))
cmake_opts = product.cmake_options
# FIXME: We should be using pipes.quote here but we run into issues
# with build-script-impl/cmake not being happy with all of the
# extra "'" in the strings. To fix this easily, we really need to
# just invoke cmake from build-script directly rather than futzing
# with build-script-impl. This makes even more sense since there
# really isn't a security issue here.
if cmake_opts:
impl_args += [
"--{}-cmake-options={}".format(
product_name, ' '.join(cmake_opts))
]
if args.build_stdlib_deployment_targets:
impl_args += [
"--build-stdlib-deployment-targets", " ".join(
args.build_stdlib_deployment_targets)]
if args.cross_compile_hosts:
impl_args += [
"--cross-compile-hosts", " ".join(args.cross_compile_hosts)]
if args.cross_compile_deps_path is not None:
impl_args += [
"--cross-compile-deps-path=%s" % args.cross_compile_deps_path
]
if args.test_paths:
impl_args += ["--test-paths", " ".join(args.test_paths)]
if toolchain.ninja:
impl_args += ["--ninja-bin=%s" % toolchain.ninja]
if args.distcc:
impl_args += [
"--distcc",
"--distcc-pump=%s" % toolchain.distcc_pump
]
if args.sccache:
args.cmake_c_launcher = toolchain.sccache
args.cmake_cxx_launcher = toolchain.sccache
# *NOTE* We use normal cmake to pass through tsan/ubsan options. We do
# NOT pass them to build-script-impl.
if args.enable_asan:
impl_args += ["--enable-asan"]
# If we are on linux, disable leak detection when running ASAN. We
# have a separate bot that checks for leaks.
if platform.system() == 'Linux':
os.environ['ASAN_OPTIONS'] = 'detect_leaks=0'
if args.enable_ubsan:
impl_args += ["--enable-ubsan"]
# If we have lsan, we need to export our suppression list. The actual
# passing in of the LSAN flag is done via the normal cmake method. We
# do not pass the flag to build-script-impl.
if args.enable_lsan:
supp_file = os.path.join(SWIFT_SOURCE_ROOT, SWIFT_REPO_NAME,
"utils",
"lsan_leaks_suppression_list.txt")
os.environ['LSAN_OPTIONS'] = 'suppressions={}'.format(supp_file)
if args.verbose_build:
impl_args += ["--verbose-build"]
if args.install_symroot:
impl_args += [
"--install-symroot", os.path.abspath(args.install_symroot)
]
if args.install_destdir:
impl_args += [
"--install-destdir", os.path.abspath(args.install_destdir)
]
if args.skip_build:
impl_args += ["--skip-build"]
if not args.build_benchmarks:
impl_args += ["--skip-build-benchmarks"]
if args.swift_disable_dead_stripping:
args.extra_cmake_options.append('-DSWIFT_DISABLE_DEAD_STRIPPING:BOOL=TRUE')
if args.build_backdeployconcurrency:
args.extra_cmake_options.append(
'-DSWIFT_BACK_DEPLOY_CONCURRENCY:BOOL=TRUE')
# Then add subproject install flags that either skip building them /or/
# if we are going to build them and install_all is set, we also install
# them.
conditional_subproject_configs = [
(args.build_cmark, "cmark"),
(args.build_llvm, "llvm"),
(args.build_swift, "swift"),
(args.build_foundation, "foundation"),
(args.build_xctest, "xctest"),
(args.build_lldb, "lldb"),
(args.build_llbuild, "llbuild"),
(args.build_libcxx, "libcxx"),
(args.build_libdispatch, "libdispatch"),
(args.build_libicu, "libicu")
]
for (should_build, string_name) in conditional_subproject_configs:
if not should_build and not self.args.infer_dependencies:
impl_args += ["--skip-build-{}".format(string_name)]
elif self.install_all:
impl_args += ["--install-{}".format(string_name)]
if args.build_swift_dynamic_stdlib:
impl_args += ["--build-swift-dynamic-stdlib"]
if args.build_swift_static_stdlib:
impl_args += ["--build-swift-static-stdlib"]
if args.build_swift_stdlib_unittest_extra:
impl_args += ["--build-swift-stdlib-unittest-extra"]
if args.build_swift_dynamic_sdk_overlay:
impl_args += ["--build-swift-dynamic-sdk-overlay"]
if args.build_swift_static_sdk_overlay:
impl_args += ["--build-swift-static-sdk-overlay"]
if not args.build_android:
impl_args += ["--skip-build-android"]
if not args.build_clang_tools_extra:
impl_args += ["--skip-build-clang-tools-extra"]
if not args.test and not args.long_test and not args.stress_test:
impl_args += ["--skip-test-swift"]
if not args.test:
impl_args += [
"--skip-test-cmark",
"--skip-test-lldb",
"--skip-test-llbuild",
"--skip-test-xctest",
"--skip-test-foundation",
"--skip-test-libdispatch",
"--skip-test-libicu",
]
if args.build_runtime_with_host_compiler:
impl_args += ["--build-runtime-with-host-compiler"]
if args.validation_test:
impl_args += ["--validation-test"]
if args.long_test:
impl_args += ["--long-test"]
if args.stress_test:
impl_args += ["--stress-test"]
if args.skip_local_build:
impl_args += ["--skip-local-build"]
if args.only_executable_test:
impl_args += ["--only-executable-test"]
if not args.benchmark:
impl_args += ["--skip-test-benchmarks"]
if args.build_libparser_only:
impl_args += ["--build-libparser-only"]
if args.android:
impl_args += [
"--android-arch", args.android_arch,
"--android-ndk", args.android_ndk,
"--android-api-level", args.android_api_level,
]
# If building natively on an Android host, only pass the API level.
if StdlibDeploymentTarget.Android.contains(StdlibDeploymentTarget
.host_target().name):
impl_args += ["--android-api-level", args.android_api_level]
if args.android_deploy_device_path:
impl_args += [
"--android-deploy-device-path",
args.android_deploy_device_path,
]
if platform.system() == 'Darwin':
impl_args += [
"--toolchain-prefix",
targets.darwin_toolchain_prefix(
args.install_prefix),
"--host-lipo", toolchain.lipo,
]
# Isolate build from the system; Darwin toolchains build against SDKs.
# For additional isolation, disable pkg-config. Homebrew's pkg-config
# prioritizes CommandLineTools paths, resulting in compile errors.
args.extra_cmake_options += [
'-DCMAKE_IGNORE_PATH=/usr/lib;/usr/local/lib;/lib',
'-DPKG_CONFIG_EXECUTABLE=/usr/bin/false',
]
if toolchain.libtool is not None:
impl_args += [
"--host-libtool", toolchain.libtool,
]
if args.native_clang_tools_path is not None:
impl_args += [
"--native-clang-tools-path=%s" % args.native_clang_tools_path
]
if args.native_llvm_tools_path is not None:
impl_args += [
"--native-llvm-tools-path=%s" % args.native_llvm_tools_path
]
if args.native_swift_tools_path is not None:
impl_args += [
"--native-swift-tools-path=%s" % args.native_swift_tools_path
]
# If we have extra_swift_args, combine all of them together and then
# add them as one command.
if args.extra_swift_args:
impl_args += [
"--extra-swift-args=%s" % ';'.join(args.extra_swift_args)
]
# Enable macCatalyst
if args.maccatalyst:
(args.extra_cmake_options
.append('-DSWIFT_ENABLE_MACCATALYST:BOOL=TRUE'))
if args.maccatalyst_ios_tests:
impl_args += ["--darwin-test-maccatalyst-ios-like=1"]
# If we have extra_cmake_options, combine all of them together and then
# add them as one command.
if args.extra_cmake_options:
impl_args += [
"--extra-cmake-options=%s" % ' '.join(
pipes.quote(opt) for opt in args.extra_cmake_options)
]
if args.lto_type is not None:
impl_args += [
"--llvm-enable-lto=%s" % args.lto_type,
"--swift-tools-enable-lto=%s" % args.lto_type
]
if args.llvm_max_parallel_lto_link_jobs is not None:
impl_args += [
"--llvm-num-parallel-lto-link-jobs=%s" %
min(args.llvm_max_parallel_lto_link_jobs, args.build_jobs)
]
if args.swift_tools_max_parallel_lto_link_jobs is not None:
impl_args += [
"--swift-tools-num-parallel-lto-link-jobs=%s" %
min(args.swift_tools_max_parallel_lto_link_jobs,
args.build_jobs)
]
if args.bootstrapping_mode is not None:
impl_args += [
"--bootstrapping=%s" % args.bootstrapping_mode,
]
impl_args += args.build_script_impl_args
if args.dry_run:
impl_args += ["--dry-run"]
if args.reconfigure:
impl_args += ["--reconfigure"]
if args.clang_profile_instr_use:
impl_args += [
"--clang-profile-instr-use=%s" %
os.path.abspath(args.clang_profile_instr_use)
]
if args.lit_args:
impl_args += ["--llvm-lit-args=%s" % args.lit_args]
if args.coverage_db:
impl_args += [
"--coverage-db=%s" %
os.path.abspath(args.coverage_db)
]
if args.llvm_install_components:
impl_args += [
"--llvm-install-components=%s" % args.llvm_install_components
]
if not args.clean_libdispatch:
impl_args += [
"--skip-clean-libdispatch"
]
if not args.clean_foundation:
impl_args += [
"--skip-clean-foundation"
]
if not args.clean_xctest:
impl_args += [
"--skip-clean-xctest"
]
if not args.clean_llbuild:
impl_args += [
"--skip-clean-llbuild"
]
if args.llvm_ninja_targets:
impl_args += [
"--llvm-ninja-targets=%s" % ' '.join(args.llvm_ninja_targets)
]
if args.llvm_ninja_targets_for_cross_compile_hosts:
impl_args += [
"--llvm-ninja-targets-for-cross-compile-hosts=%s" %
' '.join(args.llvm_ninja_targets_for_cross_compile_hosts)
]
if args.darwin_symroot_path_filters:
impl_args += [
"--darwin_symroot_path_filters=%s" %
' '.join(args.darwin_symroot_path_filters)
]
# Compute the set of host-specific variables, which we pass through to
# the build script via environment variables.
host_specific_variables = self.compute_host_specific_variables()
impl_env = {}
for (host_target, options) in host_specific_variables.items():
for (name, value) in options.items():
# We mangle into an environment variable we can easily evaluate
# from the `build-script-impl`.
impl_env["HOST_VARIABLE_{}__{}".format(
host_target.replace("-", "_"), name)] = value
return (impl_env, impl_args)
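    # Example of the environment mangling above (illustrative values only):
    # a host-specific option SWIFT_SDKS for host "macosx-x86_64" is
    # exported as
    #
    #   HOST_VARIABLE_macosx_x86_64__SWIFT_SDKS="OSX"
    #
    # which build-script-impl can then evaluate by name.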
def compute_host_specific_variables(self):
"""compute_host_specific_variables(args) -> dict
Compute the host-specific options, organized as a dictionary keyed by
host of options.
"""
args = self.args
args.build_root = self.workspace.build_root
options = {}
for host_target in [args.host_target] + args.cross_compile_hosts:
# Compute the host specific configuration.
try:
config = HostSpecificConfiguration(host_target, args)
except argparse.ArgumentError as e:
exit_rejecting_arguments(str(e))
# Convert into `build-script-impl` style variables.
options[host_target] = {
"SWIFT_SDKS": " ".join(sorted(
config.sdks_to_configure)),
"SWIFT_STDLIB_TARGETS": " ".join(
config.swift_stdlib_build_targets),
"SWIFT_BENCHMARK_TARGETS": " ".join(
config.swift_benchmark_build_targets),
"SWIFT_RUN_BENCHMARK_TARGETS": " ".join(
config.swift_benchmark_run_targets),
"SWIFT_TEST_TARGETS": " ".join(
config.swift_test_run_targets),
"SWIFT_FLAGS": config.swift_flags,
"SWIFT_TARGET_CMAKE_OPTIONS": config.cmake_options,
}
return options
def compute_product_pipelines(self):
"""compute_product_pipelines() -> [[Product]]
A list of lists of products.
Compute lists of product pipelines that we should run. It is guaranteed
that all product pipeline lists consist of solely build-script-impl
products or build-script products. So one can always check the first
element to know if a pipeline returned from the builder is an impl
product or not.
"""
builder = ProductPipelineListBuilder(self.args)
builder.begin_pipeline()
# If --skip-early-swift-driver is passed in, swift will be built
# as usual, but relying on its own C++-based (Legacy) driver.
# Otherwise, we build an "early" swift-driver using the host
# toolchain, which the later-built compiler will forward
# `swiftc` invocations to. That is, if we find a Swift compiler
        # in the host toolchain. If the host toolchain is not equipped with
# a Swift compiler, a warning is emitted. In the future, it may become
# mandatory that the host toolchain come with its own `swiftc`.
builder.add_product(products.EarlySwiftDriver,
is_enabled=self.args.build_early_swift_driver)
builder.add_product(products.CMark,
is_enabled=self.args.build_cmark)
# Begin a build-script-impl pipeline for handling the compiler toolchain
# and a subset of the tools that we build. We build these in this manner
# to preserve current build-script-impl run behavior as we transition
# the build-script code base. The main difference is that these are all
        # built, tested, and installed at once instead of performing build,
# test, install like a normal build-script product.
builder.begin_impl_pipeline(should_run_epilogue_operations=False)
# If --skip-build-llvm is passed in, LLVM cannot be completely disabled, as
# Swift still needs a few LLVM targets like tblgen to be built for it to be
# configured. Instead, handle this in build-script-impl for now.
builder.add_impl_product(products.LLVM,
is_enabled=True)
builder.add_impl_product(products.LibCXX,
is_enabled=self.args.build_libcxx)
builder.add_impl_product(products.LibICU,
is_enabled=self.args.build_libicu)
builder.add_impl_product(products.Swift,
is_enabled=self.args.build_swift)
builder.add_impl_product(products.LLDB,
is_enabled=self.args.build_lldb)
# Begin a new build-script-impl pipeline that builds libraries that we
# build as part of build-script-impl but that we should eventually move
# onto build-script products.
builder.begin_impl_pipeline(should_run_epilogue_operations=True)
builder.add_impl_product(products.LibDispatch,
is_enabled=self.args.build_libdispatch)
builder.add_impl_product(products.Foundation,
is_enabled=self.args.build_foundation)
builder.add_impl_product(products.XCTest,
is_enabled=self.args.build_xctest)
builder.add_impl_product(products.LLBuild,
is_enabled=self.args.build_llbuild)
# Begin the post build-script-impl build phase.
builder.begin_pipeline()
builder.add_product(products.BackDeployConcurrency,
is_enabled=self.args.build_backdeployconcurrency)
builder.add_product(products.SwiftPM,
is_enabled=self.args.build_swiftpm)
builder.add_product(products.SwiftSyntax,
is_enabled=self.args.build_swiftsyntax)
builder.add_product(products.SKStressTester,
is_enabled=self.args.build_skstresstester)
builder.add_product(products.SwiftFormat,
is_enabled=self.args.build_swiftformat)
builder.add_product(products.SwiftEvolve,
is_enabled=self.args.build_swiftevolve)
builder.add_product(products.IndexStoreDB,
is_enabled=self.args.build_indexstoredb)
builder.add_product(products.PlaygroundSupport,
is_enabled=self.args.build_playgroundsupport)
builder.add_product(products.SourceKitLSP,
is_enabled=self.args.build_sourcekitlsp)
builder.add_product(products.Benchmarks,
is_enabled=self.args.build_toolchainbenchmarks)
builder.add_product(products.SwiftInspect,
is_enabled=self.args.build_swift_inspect)
builder.add_product(products.TSanLibDispatch,
is_enabled=self.args.tsan_libdispatch_test)
builder.add_product(products.SwiftDocC,
is_enabled=self.args.build_swiftdocc)
builder.add_product(products.SwiftDocCRender,
is_enabled=self.args.install_swiftdocc)
# Keep SwiftDriver at last.
# swift-driver's integration with the build scripts is not fully
# supported. Using swift-driver to build these products may hit
# failures.
builder.add_product(products.SwiftDriver,
is_enabled=self.args.build_swift_driver
or self.args.install_swift_driver)
        # Now that we have constructed our product pipelines using our builder, get
# the final schedule and finalize the builder.
return builder.finalize(shouldInfer=self.args.infer_dependencies)
def execute(self):
"""Execute the invocation with the configured arguments."""
# Convert to a build-script-impl invocation.
(self.impl_env, self.impl_args) = self.convert_to_impl_arguments()
# If using the legacy implementation, delegate all behavior to
# `build-script-impl`.
if self.args.legacy_impl:
# Execute the underlying build script implementation.
shell.call_without_sleeping([BUILD_SCRIPT_IMPL_PATH] + self.impl_args,
env=self.impl_env, echo=True)
return
# Otherwise, we compute and execute the individual actions ourselves.
# Compute the list of hosts to operate on.
all_host_names = [
self.args.host_target] + self.args.cross_compile_hosts
all_hosts = [StdlibDeploymentTarget.get_target_for_name(name)
for name in all_host_names]
# Compute the list of lists of product classes to operate on.
#
# FIXME: This should really be per-host, but the current structure
# matches that of `build-script-impl`.
(product_pipelines, last_impl_index) = self.compute_product_pipelines()
# Execute each "product pipeline".
for index in range(len(product_pipelines)):
perform_epilogue_opts = last_impl_index == index
pipeline = product_pipelines[index]
# Skip empty pipelines.
if len(pipeline) == 0:
if perform_epilogue_opts:
self._execute_merged_host_lipo_core_action()
continue
is_impl = pipeline[0].is_build_script_impl_product()
if is_impl:
self._execute_impl(pipeline, all_hosts, perform_epilogue_opts)
else:
assert(index != last_impl_index)
if index > last_impl_index:
non_darwin_cross_compile_hostnames = [
target for target in self.args.cross_compile_hosts if not
StdlibDeploymentTarget.get_target_for_name(
target).platform.is_darwin
]
self._execute(pipeline, [self.args.host_target] +
non_darwin_cross_compile_hostnames)
else:
self._execute(pipeline, all_host_names)
# And then perform the rest of the non-core epilogue actions.
# Extract symbols...
for host_target in all_hosts:
self._execute_extract_symbols_action(host_target)
# Package...
for host_target in all_hosts:
self._execute_package_action(host_target)
# Lipo...
self._execute_merged_host_lipo_action()
def _execute_impl(self, pipeline, all_hosts, should_run_epilogue_operations):
# Build...
for host_target in all_hosts:
# FIXME: We should only compute these once.
try:
config = HostSpecificConfiguration(host_target.name, self.args)
except argparse.ArgumentError as e:
exit_rejecting_arguments(str(e))
print("Building the standard library for: {}".format(
" ".join(config.swift_stdlib_build_targets)))
if config.swift_test_run_targets and (
self.args.test or self.args.long_test):
print("Running Swift tests for: {}".format(
" ".join(config.swift_test_run_targets)))
if config.swift_benchmark_run_targets and self.args.benchmark:
print("Running Swift benchmarks for: {}".format(
" ".join(config.swift_benchmark_run_targets)))
for product_class in pipeline:
self._execute_build_action(host_target, product_class)
# Test...
for host_target in all_hosts:
for product_class in pipeline:
self._execute_test_action(host_target, product_class)
# Install...
for host_target in all_hosts:
for product_class in pipeline:
self._execute_install_action(host_target, product_class)
# And then we may be asked to perform several post-processing operations
# on what we have built. If we are not supposed to do so, bail now.
if not should_run_epilogue_operations:
return
# Core Lipo...
self._execute_merged_host_lipo_core_action()
def _execute(self, pipeline, all_host_names):
for host_target in all_host_names:
if self.args.skip_local_build and host_target == self.args.host_target:
continue
for product_class in pipeline:
# Execute clean, build, test, install
self.execute_product_build_steps(product_class, host_target)
def _execute_build_action(self, host_target, product_class):
action_name = "{}-{}-build".format(host_target.name,
product_class.product_name())
self._execute_action(action_name)
def _execute_test_action(self, host_target, product_class):
action_name = "{}-{}-test".format(host_target.name,
product_class.product_name())
self._execute_action(action_name)
def _execute_install_action(self, host_target, product_class):
action_name = "{}-{}-install".format(host_target.name,
product_class.product_name())
self._execute_action(action_name)
def _execute_extract_symbols_action(self, host_target):
action_name = "{}-extractsymbols".format(host_target.name)
self._execute_action(action_name)
def _execute_package_action(self, host_target):
action_name = "{}-package".format(host_target.name)
self._execute_action(action_name)
def _execute_merged_host_lipo_action(self):
self._execute_action("merged-hosts-lipo")
def _execute_merged_host_lipo_core_action(self):
self._execute_action("merged-hosts-lipo-core")
def _execute_action(self, action_name):
shell.call_without_sleeping(
[BUILD_SCRIPT_IMPL_PATH] + self.impl_args +
["--only-execute", action_name],
env=self.impl_env, echo=self.args.verbose_build)
def execute_product_build_steps(self, product_class, host_target):
product_source = product_class.product_source_name()
product_name = product_class.product_name()
if product_class.is_swiftpm_unified_build_product():
build_dir = self.workspace.swiftpm_unified_build_dir(
host_target)
else:
build_dir = self.workspace.build_dir(
host_target, product_name)
product = product_class(
args=self.args,
toolchain=self.toolchain,
source_dir=self.workspace.source_dir(product_source),
build_dir=build_dir)
if product.should_clean(host_target):
print("--- Cleaning %s ---" % product_name)
product.clean(host_target)
if product.should_build(host_target):
print("--- Building %s ---" % product_name)
product.build(host_target)
if product.should_test(host_target):
print("--- Running tests for %s ---" % product_name)
product.test(host_target)
print("--- Finished tests for %s ---" % product_name)
# Install the product if it should be installed specifically, or
# if it should be built and `install_all` is set to True.
        # The exception is select before_build_script_impl products
        # (e.g. EarlySwiftDriver) which set `is_ignore_install_all_product`
        # to True, ensuring they are never installed.
if product.should_install(host_target) or \
(self.install_all and product.should_build(host_target) and
not product.is_ignore_install_all_product()):
print("--- Installing %s ---" % product_name)
product.install(host_target)
|
{
"content_hash": "c1b4a610e9e2f0aeac18a7116fde5297",
"timestamp": "",
"source": "github",
"line_count": 793,
"max_line_length": 87,
"avg_line_length": 43.833543505674655,
"alnum_prop": 0.5720943613348677,
"repo_name": "rudkx/swift",
"id": "5343318ad80fac5670add53c8e268e7e7cf96d17",
"size": "35265",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "utils/swift_build_support/swift_build_support/build_script_invocation.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "45808"
},
{
"name": "C",
"bytes": "5375236"
},
{
"name": "C++",
"bytes": "46670890"
},
{
"name": "CMake",
"bytes": "676617"
},
{
"name": "D",
"bytes": "1107"
},
{
"name": "DTrace",
"bytes": "2593"
},
{
"name": "Emacs Lisp",
"bytes": "57594"
},
{
"name": "LLVM",
"bytes": "74528"
},
{
"name": "Makefile",
"bytes": "2361"
},
{
"name": "Objective-C",
"bytes": "458866"
},
{
"name": "Objective-C++",
"bytes": "159669"
},
{
"name": "Python",
"bytes": "1956421"
},
{
"name": "Roff",
"bytes": "3683"
},
{
"name": "Ruby",
"bytes": "2132"
},
{
"name": "Shell",
"bytes": "211956"
},
{
"name": "Swift",
"bytes": "38081215"
},
{
"name": "Vim script",
"bytes": "20025"
},
{
"name": "sed",
"bytes": "1050"
}
],
"symlink_target": ""
}
|
from conans import python_requires
import os
common = python_requires('llvm-common/0.0.0@orbitdeps/stable')
class LLVMTransformUtils(common.LLVMModulePackage):
version = common.LLVMModulePackage.version
name = 'llvm_transform_utils'
llvm_component = 'llvm'
llvm_module = 'TransformUtils'
llvm_requires = ['llvm_headers', 'llvm_analysis', 'llvm_core', 'llvm_support']
|
{
"content_hash": "9c83919ad5f76d5dc37d533f81039808",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 82,
"avg_line_length": 35.36363636363637,
"alnum_prop": 0.7352185089974294,
"repo_name": "pierricgimmig/orbitprofiler",
"id": "dec17791ecfa6745ee75c1ed6d79a6ea8418ec17",
"size": "389",
"binary": false,
"copies": "1",
"ref": "refs/heads/headless",
"path": "contrib/conan/recipes/llvm_transform_utils/conanfile.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Assembly",
"bytes": "5798"
},
{
"name": "Batchfile",
"bytes": "5600"
},
{
"name": "C",
"bytes": "105310"
},
{
"name": "C++",
"bytes": "1978191"
},
{
"name": "CMake",
"bytes": "55219"
},
{
"name": "Objective-C",
"bytes": "1392"
},
{
"name": "Python",
"bytes": "102532"
},
{
"name": "QMake",
"bytes": "1219"
},
{
"name": "Shell",
"bytes": "8737"
}
],
"symlink_target": ""
}
|
import sqlalchemy_utils
from flask import request, render_template
from flask_babel import get_locale
from stand.app import babel, app
sqlalchemy_utils.i18n.get_locale = get_locale
'''
# @app.before_request
def before():
print request.args
if request.args and 'lang' in request.args:
if request.args['lang'] not in ('es', 'en'):
return abort(404)
'''
@babel.localeselector
def get_locale():
return request.args.get('lang', 'en')
@app.route('/')
def index():
"""Serve the client-side application."""
return render_template('index.html')
"""
def main(container=False):
parser = argparse.ArgumentParser()
parser.add_argument("-c", "--config", help="Config file")
args = parser.parse_args()
if args.config:
with open(args.config) as f:
config = json.load(f)
app.config["RESTFUL_JSON"] = {
'cls': app.json_encoder,
'sort_keys': False,
}
server_config = config.get('servers', {})
app.config['SQLALCHEMY_DATABASE_URI'] = server_config.get(
'database_url')
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
app.config['SQLALCHEMY_POOL_SIZE'] = 10
app.config['SQLALCHEMY_POOL_RECYCLE'] = 240
db.init_app(app)
with app.app_context():
db.create_all()
if server_config.get('environment', 'dev') == 'dev':
if not container:
app.run(debug=True, host='0.0.0.0', port=3320)
else:
app.debug = True
socketio_app = socketio.Middleware(sio, app)
return True, socketio_app
else:
parser.print_usage()
return False, None
# main()
"""
|
{
"content_hash": "a3d470ba2f74e0899886b65a1718b460",
"timestamp": "",
"source": "github",
"line_count": 69,
"max_line_length": 66,
"avg_line_length": 25.014492753623188,
"alnum_prop": 0.5886442641946698,
"repo_name": "eubr-bigsea/stand",
"id": "cca13072673072bc7e276b6cbcd9471a49a3398a",
"size": "1773",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "stand/views.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "608"
},
{
"name": "Mako",
"bytes": "494"
},
{
"name": "Python",
"bytes": "193876"
},
{
"name": "Shell",
"bytes": "2807"
}
],
"symlink_target": ""
}
|
import re
import urllib
RESERVED = ":/?#[]@!$&'()*+,;="
OPERATOR = "+./;?|!@"
EXPLODE = "*+"
MODIFIER = ":^"
TEMPLATE = re.compile("{([^\}]+)}")
def _tostring(varname, value, explode, operator, safe=""):
if type(value) == type([]):
if explode == "+":
return ",".join([varname + "." + urllib.quote(x, safe) for x in value])
else:
return ",".join([urllib.quote(x, safe) for x in value])
if type(value) == type({}):
keys = value.keys()
keys.sort()
if explode == "+":
return ",".join([varname + "." + urllib.quote(key, safe) + "," + urllib.quote(value[key], safe) for key in keys])
else:
return ",".join([urllib.quote(key, safe) + "," + urllib.quote(value[key], safe) for key in keys])
else:
return urllib.quote(value, safe)
def _tostring_path(varname, value, explode, operator, safe=""):
joiner = operator
if type(value) == type([]):
if explode == "+":
return joiner.join([varname + "." + urllib.quote(x, safe) for x in value])
elif explode == "*":
return joiner.join([urllib.quote(x, safe) for x in value])
else:
return ",".join([urllib.quote(x, safe) for x in value])
elif type(value) == type({}):
keys = value.keys()
keys.sort()
if explode == "+":
return joiner.join([varname + "." + urllib.quote(key, safe) + joiner + urllib.quote(value[key], safe) for key in keys])
elif explode == "*":
return joiner.join([urllib.quote(key, safe) + joiner + urllib.quote(value[key], safe) for key in keys])
else:
return ",".join([urllib.quote(key, safe) + "," + urllib.quote(value[key], safe) for key in keys])
else:
if value:
return urllib.quote(value, safe)
else:
return ""
def _tostring_semi(varname, value, explode, operator, safe=""):
joiner = operator
if operator == "?":
joiner = "&"
if type(value) == type([]):
if explode == "+":
return joiner.join([varname + "=" + urllib.quote(x, safe) for x in value])
elif explode == "*":
return joiner.join([urllib.quote(x, safe) for x in value])
else:
return ",".join([urllib.quote(x, safe) for x in value])
elif type(value) == type({}):
keys = value.keys()
keys.sort()
if explode == "+":
return joiner.join([varname + "." + urllib.quote(key, safe) + "=" + urllib.quote(value[key], safe) for key in keys])
elif explode == "*":
return joiner.join([urllib.quote(key, safe) + "=" + urllib.quote(value[key], safe) for key in keys])
else:
return ",".join([urllib.quote(key, safe) + "," + urllib.quote(value[key], safe) for key in keys])
else:
if value:
return varname + "=" + urllib.quote(value, safe)
else:
return varname
def _tostring_query(varname, value, explode, operator, safe=""):
joiner = operator
if operator == "?":
joiner = "&"
if type(value) == type([]):
if 0 == len(value):
return ""
if explode == "+":
return joiner.join([varname + "=" + urllib.quote(x, safe) for x in value])
elif explode == "*":
return joiner.join([urllib.quote(x, safe) for x in value])
else:
return varname + "=" + ",".join([urllib.quote(x, safe) for x in value])
elif type(value) == type({}):
if 0 == len(value):
return ""
keys = value.keys()
keys.sort()
if explode == "+":
return joiner.join([varname + "." + urllib.quote(key, safe) + "=" + urllib.quote(value[key], safe) for key in keys])
elif explode == "*":
return joiner.join([urllib.quote(key, safe) + "=" + urllib.quote(value[key], safe) for key in keys])
else:
return varname + "=" + ",".join([urllib.quote(key, safe) + "," + urllib.quote(value[key], safe) for key in keys])
else:
if value:
return varname + "=" + urllib.quote(value, safe)
else:
return varname
TOSTRING = {
"" : _tostring,
"+": _tostring,
";": _tostring_semi,
"?": _tostring_query,
"/": _tostring_path,
".": _tostring_path,
}
def expand(template, vars):
def _sub(match):
expression = match.group(1)
operator = ""
if expression[0] in OPERATOR:
operator = expression[0]
varlist = expression[1:]
else:
varlist = expression
safe = ""
explode = ""
if operator == '+':
safe = RESERVED
varspecs = varlist.split(",")
varnames = []
defaults = {}
for varspec in varspecs:
default = None
if "=" in varspec:
varname, default = tuple(varspec.split("=", 1))
else:
varname = varspec
if varname[-1] in EXPLODE:
explode = varname[-1]
varname = varname[:-1]
if default:
defaults[varname] = default
varnames.append((varname, explode))
retval = []
joiner = operator
prefix = operator
if operator == "+":
prefix = ""
joiner = ","
if operator == "?":
joiner = "&"
if operator == "":
joiner = ","
for varname, explode in varnames:
if varname in vars:
value = vars[varname]
#if not value and (type(value) == type({}) or type(value) == type([])) and varname in defaults:
if not value and value != "" and varname in defaults:
value = defaults[varname]
elif varname in defaults:
value = defaults[varname]
else:
continue
retval.append(TOSTRING[operator](varname, value, explode, operator, safe=safe))
if "".join(retval):
return prefix + joiner.join(retval)
else:
return ""
return TEMPLATE.sub(_sub, template)
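# Example expansions produced by the implementation above (illustrative):
#
#   expand("http://example.com/{path}{?q}", {"path": "foo", "q": "bar"})
#   # -> "http://example.com/foo?q=bar"
#   expand("{?list+}", {"list": ["a", "b"]})
#   # -> "?list=a&list=b"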
|
{
"content_hash": "efbeb5c9ae740a58948bad725c3cb831",
"timestamp": "",
"source": "github",
"line_count": 175,
"max_line_length": 125,
"avg_line_length": 31.64,
"alnum_prop": 0.5658298717717175,
"repo_name": "KaranToor/MA450",
"id": "c04363e59a1741ca1616f293b52f83db3f596e48",
"size": "5608",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "google-cloud-sdk/platform/bq/third_party/uritemplate/uritemplate.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "3162"
},
{
"name": "CSS",
"bytes": "1930"
},
{
"name": "HTML",
"bytes": "13381"
},
{
"name": "Java",
"bytes": "151442"
},
{
"name": "JavaScript",
"bytes": "4906"
},
{
"name": "Makefile",
"bytes": "1636"
},
{
"name": "Objective-C",
"bytes": "13335"
},
{
"name": "PHP",
"bytes": "9086"
},
{
"name": "Pascal",
"bytes": "62"
},
{
"name": "Python",
"bytes": "19710731"
},
{
"name": "Roff",
"bytes": "2069494"
},
{
"name": "Ruby",
"bytes": "690"
},
{
"name": "Shell",
"bytes": "32272"
},
{
"name": "Smarty",
"bytes": "4968"
},
{
"name": "SourcePawn",
"bytes": "616"
},
{
"name": "Swift",
"bytes": "14225"
}
],
"symlink_target": ""
}
|
"""
This modules loads a JSON file from the etc/ directory and provides it as a
Python dictionary for other scripts to utilize.
"""
__all__ = [
'json2py',
'load',
'sanity_check',
'CONFIG_DIRECTORY',
]
import json
import os
CONFIG_DIRECTORY = 'etc'
def json2py(data):
if isinstance(data, dict):
        return {json2py(key): json2py(value) for key, value in data.items()}
elif isinstance(data, list):
return [json2py(element) for element in data]
else:
return data
def load(filename):
old_cwd = os.getcwd()
for directory in ('../%s', './%s'):
directory = directory % CONFIG_DIRECTORY
if os.path.isdir(directory):
os.chdir(directory)
break
with open(filename, 'r') as fh:
try:
raw = json.load(fh)
except json.decoder.JSONDecodeError as err:
print("Unable to read config file '%s': %s" % (filename, err))
return None
os.chdir(old_cwd)
dictionary = json2py(raw)
dictionary["filename"] = filename
return dictionary
def sanity_check(dictionary, fields):
success = True
for field, ftype in fields.items():
if field not in dictionary.keys():
print("%s: Config file does not have a '%s' value." % (dictionary['filename'], field))
success = False
        elif isinstance(ftype, dict):
            # recurse into nested config sections
            success &= sanity_check(dictionary[field], ftype)
else:
try:
if type(dictionary[field]) not in ftype:
print("%s: Config file has invalid type for '%s': %s (expected one of %s)." %
(dictionary['filename'], field, type(dictionary[field]), ", ".join(ftype)))
success = False
            except TypeError:
                # ftype is a single type rather than an iterable of types
if type(dictionary[field]) != ftype:
print("%s: Config file has invalid type for '%s': %s (expected %s)." %
(dictionary['filename'], field, type(dictionary[field]), ftype))
success = False
    if not success:
        exit(1)
    return success
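# Example usage (a sketch; the file name and keys are hypothetical):
#
#   config = load("server.json")
#   sanity_check(config, {
#       "host": str,
#       "port": int,
#       "timeout": (int, float),  # a tuple lists the accepted types
#   })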
|
{
"content_hash": "128250802d401fc3013990a857664f50",
"timestamp": "",
"source": "github",
"line_count": 73,
"max_line_length": 101,
"avg_line_length": 28.698630136986303,
"alnum_prop": 0.5560859188544153,
"repo_name": "ammongit/ucr-class-map",
"id": "73d08b1313bc21735e2ef3cc7699196355a22eeb",
"size": "2206",
"binary": false,
"copies": "3",
"ref": "refs/heads/update-build-regex",
"path": "scripts/do_target/jsonconfig.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "7621"
},
{
"name": "HTML",
"bytes": "7758"
},
{
"name": "JavaScript",
"bytes": "197137"
},
{
"name": "Python",
"bytes": "9849"
},
{
"name": "Shell",
"bytes": "125"
}
],
"symlink_target": ""
}
|
import time
import os
import socket
import threading
import SocketServer
import uuid
VERSION = '0.1'
class ThreadedTCPRequestHandler(SocketServer.BaseRequestHandler):
def handle(self):
while(1):
data = self.request.recv(1024)
if not data:
break
cur_thread = threading.current_thread()
print cur_thread
self.processData(data)
def processData(self, data):
print 'Got:', data
try:
client_time = data.split(":")[0]
client_id = data.split(":")[1]
client_build = data.split(":")[2]
client_server_id_flag = data.split(":")[3]
client_status = data.split(":")[4]
if client_status == "50":
# request more information from the client
self.client_HANDSHAKE()
elif client_status == "100":
self.client_HEARTBEAT()
else:
                self.request.sendall(self.protocolFormat('1','0'))
except IndexError:
print "broken protocol format"
#print "Bot_time = %s\nBot_id = %s\nBot_build = %s\nBot_data = %s\n" %(bot_time,bot_id,bot_build,bot_data)
return 0
def client_HEARTBEAT(self):
        self.request.sendall(self.protocolFormat('1','0'))
    def client_HANDSHAKE(self):
        self.request.sendall(self.protocolFormat('1','0'))
    def protocolFormat(self, status, data):
# See py0live_protocol_documentation.txt
global VERSION
protocol = "%s:%s:%s:%s:%s:%s" % (self.timeNow(), self.getID(), VERSION, 's', status, data)
return str(protocol)
def timeNow(self):
return int(round(time.time() * 1000))
def getID(self):
if os.path.isfile("IDF"):
f = open("IDF", 'r')
reading = f.readline()
return str(reading)
else:
f = open("IDF", 'w')
ident = uuid.uuid4()
f.write(str(ident))
return str(ident)
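# Wire format produced by protocolFormat() above (see
# py0live_protocol_documentation.txt, not included here); values below are
# illustrative:
#
#   <millis>:<server-uuid>:<version>:s:<status>:<data>
#   e.g. "1500000000000:0b1e4e5a-...:0.1:s:1:0"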
class ThreadedTCPServer(SocketServer.ThreadingMixIn, SocketServer.TCPServer):
pass
if __name__ == "__main__":
# Port 0 means to select an arbitrary unused port
HOST, PORT = "127.0.0.1", 4069
server = ThreadedTCPServer((HOST, PORT), ThreadedTCPRequestHandler)
ip, port = server.server_address
# Start a thread with the server -- that thread will then start one
# more thread for each request
    server_thread = threading.Thread(target=server.serve_forever)
    # Exit the server thread when the main thread terminates
    server_thread.daemon = True
    server_thread.start()
    print "Server loop running in thread:", server_thread.name
    # Keep the main thread alive; the server thread is a daemon and would
    # otherwise be killed as soon as the main thread exits.
    server_thread.join()
'''
try:
#server.serve_forever()
except KeyboardInterrupt:
print 'KeyboardInterrupt'
server.shutdown()
server.server_close()
except:
server.shutdown()
server.server_close()
'''
|
{
"content_hash": "8ff3c98817b9d03f1a425d4e38016d9d",
"timestamp": "",
"source": "github",
"line_count": 109,
"max_line_length": 108,
"avg_line_length": 23.174311926605505,
"alnum_prop": 0.6797307996832938,
"repo_name": "GBHsl/py0live",
"id": "7f3a2be7a5293ff369131257bed6aa408276d3c4",
"size": "2526",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "server/cncserver.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "3893"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
from django import forms
from django.utils import six
from .conf import settings
#==============================================================================
class Fieldline(object):
def __init__(self, form, fields, layout, labels):
self.form = form
if not hasattr(fields, "__iter__") or isinstance(fields, six.text_type):
self.fields = [fields]
else:
self.fields = fields
if not hasattr(layout, "__iter__") or isinstance(layout, six.integer_types):
self.layout = [layout]
else:
self.layout = layout
if not hasattr(labels, "__iter__") or isinstance(labels, six.integer_types):
self.labels = [labels]
else:
self.labels = labels
self.__index = 0
self.__len = len(self.fields)
def __iter__(self):
return self
def __next__(self):
return self.next()
def next(self):
if self.__index >= self.__len:
raise StopIteration()
field = self.form[self.fields[self.__index]]
field_size = self.layout[self.__index] if self.__index < len(self.layout) else None
if field_size is None:
field_size = settings.DEFAULT_FIELD_COLUMN_SIZE
label_size = self.labels[self.__index] if self.__index < len(self.labels) else None
if label_size is None:
label_size = settings.FIRST_LABEL_COLUMN_SIZE if self.__index == 0 else settings.DEFAULT_LABEL_COLUMN_SIZE
self.__index += 1
return field, field_size, label_size
#==============================================================================
class Fieldset(object):
def __init__(self, form, legend, description, classes, fields, layout, labels):
self.form = form
self.legend = legend
self.description = description
self.classes = classes
self.fields = fields
self.layout = layout
self.labels = labels
self.__index = 0
self.__len = len(self.fields)
def __iter__(self):
return self
def __next__(self):
return self.next()
def next(self):
if self.__index >= self.__len:
raise StopIteration()
fieldline = Fieldline(
form=self.form,
fields=self.fields[self.__index] if self.__index < len(self.fields) else None,
layout=self.layout[self.__index] if self.__index < len(self.layout) else None,
labels=self.labels[self.__index] if self.__index < len(self.labels) else None,
)
self.__index += 1
return fieldline
#==============================================================================
class FieldsetFormMixin(object):
def __init__(self, *args, **kwargs):
super(FieldsetFormMixin, self).__init__(*args, **kwargs)
assert(isinstance(self, forms.BaseForm))
self.meta = getattr(self, 'MetaForm', None)
self.fieldsets = None
if self.meta and self.meta.fieldsets:
self.fieldsets = self._fieldsets
def _fieldsets(self):
if self.meta and self.meta.fieldsets:
for legend, data in self.meta.fieldsets:
yield Fieldset(
form=self,
legend=legend,
description=data.get('description', ''),
classes=data.get('classes', ''),
fields=data.get('fields', tuple()),
layout=data.get('layout', tuple()),
labels=data.get('labels', tuple()),
)
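#==============================================================================
# Example form using the mixin (a sketch; the form and field names are
# hypothetical). MetaForm.fieldsets mirrors the tuple structure unpacked in
# _fieldsets() above:
#
#   class ContactForm(FieldsetFormMixin, forms.Form):
#       name = forms.CharField()
#       email = forms.EmailField()
#       class MetaForm:
#           fieldsets = (
#               ('Contact', {
#                   'description': 'How to reach you',
#                   'classes': 'collapse',
#                   'fields': (('name', 'email'),),
#                   'layout': ((4, 4),),
#                   'labels': ((2, 2),),
#               }),
#           )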
|
{
"content_hash": "45ec58541e454169bb136f986e606a6b",
"timestamp": "",
"source": "github",
"line_count": 116,
"max_line_length": 118,
"avg_line_length": 31.370689655172413,
"alnum_prop": 0.5237702665567464,
"repo_name": "kunitoki/django-formaldehyde",
"id": "df1fb5512c8a4cb2c0914ed81d5d609c4709c026",
"size": "3639",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "formaldehyde/fieldsets.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "17728"
}
],
"symlink_target": ""
}
|
import gym
import time
import torch
from torch import nn, autograd, optim
import torch.nn.functional as F
import numpy as np
num_hidden = 4
print_every = 100
class Policy(nn.Module):
"""
Given parameters, calculates probability of any action,
given any state as input
samples from these probabilities
"""
def __init__(self, num_inputs, num_hidden, num_actions):
super().__init__()
self.num_actions = num_actions
self.num_inputs = num_inputs
self.num_hidden = num_hidden
# self.h1 = nn.Linear(num_inputs, num_hidden)
# self.h2 = nn.Linear(num_hidden, num_actions)
self.h1 = nn.Linear(num_inputs, num_actions)
def forward(self, x):
# print('x.data.shape', x.data.shape)
# print('self.num_inputs', self.num_inputs)
# print('self.num_actions', self.num_actions)
x = self.h1(x)
# x = F.tanh(x)
# x = self.h2(x)
x = F.softmax(x)
multinomial_res = torch.multinomial(x, num_samples=1)
# x =
# _, x = x.max(dim=1)
# print('x', x)
return multinomial_res
def run_episode(policy, render):
x = env.reset()
reward = 0
multinomial_res_nodes = []
for _ in range(1000):
if render:
env.render()
a_idx = policy(autograd.Variable(torch.from_numpy(x.astype(np.float32)).view(1, -1)))
multinomial_res_nodes.append(a_idx)
# a_idx = a_idx.data[0]
# print('a_idx', a_idx.data[0])
# a = env.action_space.sample()
x, r, done, info = env.step(a_idx.data[0][0])
# print('a', a, 'x', x, 'r', r, 'done', done)
reward += r
if done:
break
# time.sleep(0.02)
# avg_reward = reward / len(multinomial_res_nodes)
# reward = reward - 18
reward = reward * reward
    for node in multinomial_res_nodes:
        node.reinforce(reward)
    # propagate the REINFORCE gradients through the sampled actions; without
    # this backward pass opt.step() has no gradients to apply (legacy
    # stochastic-autograd API, removed in PyTorch 0.4)
    autograd.backward(multinomial_res_nodes, [None] * len(multinomial_res_nodes))
    return reward
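# Illustrative sketch (not part of the original script): the equivalent of the
# .reinforce() call above in PyTorch >= 0.4, where the stochastic-autograd API
# was removed in favor of explicit log-probabilities. Names are hypothetical.
#
# from torch.distributions import Categorical
#
# probs = F.softmax(policy_logits, dim=1)
# dist = Categorical(probs)
# action = dist.sample()
# loss = -dist.log_prob(action) * reward  # REINFORCE loss
# loss.backward()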
env = gym.make('CartPole-v0')
print('action_space', env.action_space)
print(dir(env.action_space))
print('num_actions', env.action_space.n)
print('num_inputs', env.observation_space.shape[0])
policy = Policy(
num_inputs=env.observation_space.shape[0],
num_hidden=num_hidden,
num_actions=env.action_space.n)
opt = optim.Adam(params=policy.parameters(), lr=0.001)
print('observation_space', env.observation_space)
episode = 0
sum_reward = 0
while True:
opt.zero_grad()
render = episode % print_every == 0
reward = run_episode(policy=policy, render=render)
sum_reward += reward
    # gradients were populated inside run_episode via node.reinforce() and
    # autograd.backward()
    opt.step()
if render:
print('episode %s avg_reward %s reward %s' % (episode, sum_reward / print_every, reward))
sum_reward = 0
episode += 1
|
{
"content_hash": "bb42c5553e86800230f3b8812392f221",
"timestamp": "",
"source": "github",
"line_count": 96,
"max_line_length": 97,
"avg_line_length": 28.947916666666668,
"alnum_prop": 0.5980568549838071,
"repo_name": "hughperkins/pub-prototyping",
"id": "2bce9c499b1ac6a3ff58a5c3b249440806238dcc",
"size": "2779",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "py/reinf/test_reinf1.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ActionScript",
"bytes": "122"
},
{
"name": "Batchfile",
"bytes": "157"
},
{
"name": "C",
"bytes": "201959"
},
{
"name": "C++",
"bytes": "1891943"
},
{
"name": "CMake",
"bytes": "94913"
},
{
"name": "Cool",
"bytes": "21"
},
{
"name": "Cuda",
"bytes": "11657"
},
{
"name": "Cython",
"bytes": "10071"
},
{
"name": "Dockerfile",
"bytes": "82"
},
{
"name": "Fortran",
"bytes": "1365"
},
{
"name": "HTML",
"bytes": "37898"
},
{
"name": "Java",
"bytes": "50309"
},
{
"name": "Jupyter Notebook",
"bytes": "803282"
},
{
"name": "LLVM",
"bytes": "132606"
},
{
"name": "Lua",
"bytes": "7881"
},
{
"name": "Makefile",
"bytes": "1629"
},
{
"name": "Perl",
"bytes": "198"
},
{
"name": "Python",
"bytes": "192911"
},
{
"name": "SWIG",
"bytes": "71"
},
{
"name": "Scala",
"bytes": "32521"
},
{
"name": "Shell",
"bytes": "19865"
},
{
"name": "Starlark",
"bytes": "1286"
},
{
"name": "TeX",
"bytes": "7125"
}
],
"symlink_target": ""
}
|
import os
import urlparse
from redis import Redis
from rq import Worker, Queue, Connection
from recruit_app.app import create_app
from recruit_app.settings import DevConfig, ProdConfig
listen = ['high', 'medium', 'low']
redis_url = os.getenv('REDISTOGO_URL')
# if not redis_url:
# raise RuntimeError('Set up Redis To Go first.')
if redis_url:
urlparse.uses_netloc.append('redis')
url = urlparse.urlparse(redis_url)
conn = Redis(host=url.hostname, port=url.port, db=0, password=url.password)
else:
conn = Redis()
if __name__ == '__main__':
if os.environ.get("RECRUIT_APP_ENV") == 'prod':
app = create_app(ProdConfig)
else:
app = create_app(DevConfig)
with app.app_context():
with Connection(conn):
worker = Worker(map(Queue, listen))
worker.work()
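# Illustrative sketch (not part of the original script): how a producer
# process could enqueue work for this worker, reusing the same Redis
# connection. `send_welcome_email` and its argument are hypothetical.
#
# from rq import Queue
# from run_worker import conn
#
# q = Queue('high', connection=conn)
# job = q.enqueue(send_welcome_email, 'user@example.com')
# print(job.id)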
|
{
"content_hash": "b7c52b3507e5f29791aba97734404c52",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 79,
"avg_line_length": 26.870967741935484,
"alnum_prop": 0.6542617046818727,
"repo_name": "tyler274/Recruitment-App",
"id": "da50f1be4db364713c2860f2467388d118632e1d",
"size": "833",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "run_worker.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "2845"
},
{
"name": "HTML",
"bytes": "55851"
},
{
"name": "JavaScript",
"bytes": "222936"
},
{
"name": "Python",
"bytes": "140234"
}
],
"symlink_target": ""
}
|
import bson.json_util
from girder import events
from girder.constants import AccessType
from girder.utility.model_importer import ModelImporter
from girder.api.describe import Description, describeRoute
from girder.api.rest import Resource, RestException
from girder.api import access
class ResourceExt(Resource):
@access.public
@describeRoute(
Description('Run any search against a set of mongo collections.')
.notes('Results will be filtered by permissions.')
.param('type', 'The name of the collection to search, e.g. "item".')
.param('q', 'The search query as a JSON object.')
.param('limit', "Result set size limit (default=50).", required=False,
dataType='int')
.param('offset', "Offset into result set (default=0).", required=False,
dataType='int')
.errorResponse()
)
def mongoSearch(self, params):
self.requireParams(('type', 'q'), params)
allowed = {
'collection': ['_id', 'name', 'description'],
'folder': ['_id', 'name', 'description'],
'item': ['_id', 'name', 'description', 'folderId'],
'user': ['_id', 'firstName', 'lastName', 'login']
}
limit, offset, sort = self.getPagingParameters(params, 'name')
coll = params['type']
events.trigger('mongo_search.allowed_collections', info=allowed)
if coll not in allowed:
raise RestException('Invalid resource type: %s' % coll)
try:
query = bson.json_util.loads(params['q'])
except ValueError:
raise RestException('The query parameter must be a JSON object.')
model = ModelImporter().model(coll)
if hasattr(model, 'filterResultsByPermission'):
cursor = model.find(
query, fields=allowed[coll] + ['public', 'access'])
return list(model.filterResultsByPermission(
cursor, user=self.getCurrentUser(), level=AccessType.READ,
limit=limit, offset=offset, removeKeys=('public', 'access')))
else:
return list(model.find(query, fields=allowed[coll], limit=limit,
offset=offset))
def load(info):
ext = ResourceExt()
info['apiRoot'].resource.route('GET', ('mongo_search',), ext.mongoSearch)
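# Illustrative sketch (not part of the plugin): calling the endpoint from a
# client. The server URL and query are hypothetical; `q` must be a JSON
# object and `type` one of the allowed collections.
#
# import json
# import requests
#
# resp = requests.get(
#     'http://localhost:8080/api/v1/resource/mongo_search',
#     params={'type': 'item', 'q': json.dumps({'name': 'foo'}), 'limit': 10})
# print(resp.json())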
|
{
"content_hash": "6e0f1c8c2d4c05020c88c31fc4686af6",
"timestamp": "",
"source": "github",
"line_count": 59,
"max_line_length": 79,
"avg_line_length": 39.86440677966102,
"alnum_prop": 0.6075680272108843,
"repo_name": "essamjoubori/girder",
"id": "58061e23a82c6c727b8a2ee96ffb753b7bbaaab3",
"size": "3141",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "plugins/mongo_search/server/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CMake",
"bytes": "36635"
},
{
"name": "CSS",
"bytes": "156740"
},
{
"name": "HTML",
"bytes": "161646"
},
{
"name": "JavaScript",
"bytes": "1358011"
},
{
"name": "Mako",
"bytes": "1483"
},
{
"name": "Python",
"bytes": "1202837"
},
{
"name": "Ruby",
"bytes": "9923"
},
{
"name": "Shell",
"bytes": "3298"
}
],
"symlink_target": ""
}
|
"""Generate docs for the TensorFlow Python API."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import fnmatch
import os
import shutil
import tempfile
import six
from tensorflow.python.util import tf_inspect
from tensorflow.tools.common import public_api
from tensorflow.tools.common import traverse
from tensorflow.tools.docs import doc_controls
from tensorflow.tools.docs import doc_generator_visitor
from tensorflow.tools.docs import parser
from tensorflow.tools.docs import pretty_docs
from tensorflow.tools.docs import py_guide_parser
def write_docs(output_dir,
parser_config,
yaml_toc,
root_title='TensorFlow',
search_hints=True,
site_api_path='api_docs/python'):
"""Write previously extracted docs to disk.
Write a docs page for each symbol included in the indices of parser_config to
a tree of docs at `output_dir`.
Symbols with multiple aliases will have only one page written about
them, which is referenced for all aliases.
Args:
output_dir: Directory to write documentation markdown files to. Will be
created if it doesn't exist.
parser_config: A `parser.ParserConfig` object, containing all the necessary
indices.
yaml_toc: Set to `True` to generate a "_toc.yaml" file.
root_title: The title name for the root level index.md.
search_hints: (bool) include meta-data search hints at the top of each
output file.
site_api_path: The output path relative to the site root. Used in the
`_toc.yaml` and `_redirects.yaml` files.
Raises:
ValueError: if `output_dir` is not an absolute path
"""
# Make output_dir.
if not os.path.isabs(output_dir):
raise ValueError("'output_dir' must be an absolute path.\n"
" output_dir='%s'" % output_dir)
if not os.path.exists(output_dir):
os.makedirs(output_dir)
  # These dictionaries are used for table-of-contents generation below.
  # After the for-loop below they will contain:
  # - module name (string): classes and functions the module contains (list)
  module_children = {}
  # - symbol name (string): pathname (string)
  symbol_to_file = {}
# Collect redirects for an api _redirects.yaml file.
redirects = []
# Parse and write Markdown pages, resolving cross-links (@{symbol}).
for full_name, py_object in six.iteritems(parser_config.index):
parser_config.reference_resolver.current_doc_full_name = full_name
if full_name in parser_config.duplicate_of:
continue
# Methods and some routines are documented only as part of their class.
if not (tf_inspect.ismodule(py_object) or tf_inspect.isclass(py_object) or
parser.is_free_function(py_object, full_name, parser_config.index)):
continue
sitepath = os.path.join(parser.documentation_path(full_name)[:-3])
# For TOC, we need to store a mapping from full_name to the file
# we're generating
symbol_to_file[full_name] = sitepath
# For a module, remember the module for the table-of-contents
if tf_inspect.ismodule(py_object):
if full_name in parser_config.tree:
module_children.setdefault(full_name, [])
# For something else that's documented,
# figure out what module it lives in
else:
subname = str(full_name)
while True:
subname = subname[:subname.rindex('.')]
if tf_inspect.ismodule(parser_config.index[subname]):
module_children.setdefault(subname, []).append(full_name)
break
# Generate docs for `py_object`, resolving references.
page_info = parser.docs_for_object(full_name, py_object, parser_config)
path = os.path.join(output_dir, parser.documentation_path(full_name))
directory = os.path.dirname(path)
try:
if not os.path.exists(directory):
os.makedirs(directory)
# This function returns raw bytes in PY2 or unicode in PY3.
if search_hints:
content = [page_info.get_metadata_html()]
else:
content = ['']
content.append(pretty_docs.build_md_page(page_info))
text = '\n'.join(content)
if six.PY3:
text = text.encode('utf-8')
with open(path, 'wb') as f:
f.write(text)
except OSError:
raise OSError(
'Cannot write documentation for %s to %s' % (full_name, directory))
duplicates = parser_config.duplicates.get(full_name, [])
if not duplicates:
continue
duplicates = [item for item in duplicates if item != full_name]
for dup in duplicates:
from_path = os.path.join(site_api_path,
six.ensure_str(dup).replace('.', '/'))
to_path = os.path.join(site_api_path,
six.ensure_str(full_name).replace('.', '/'))
redirects.append((
os.path.join('/', from_path),
os.path.join('/', to_path)))
if redirects:
redirects = sorted(redirects)
template = ('- from: {}\n'
' to: {}\n')
redirects = [template.format(f, t) for f, t in redirects]
api_redirects_path = os.path.join(output_dir, '_redirects.yaml')
with open(api_redirects_path, 'w') as redirect_file:
redirect_file.write('redirects:\n')
redirect_file.write(''.join(redirects))
if yaml_toc:
# Generate table of contents
# Put modules in alphabetical order, case-insensitive
modules = sorted(list(module_children.keys()), key=lambda a: a.upper())
leftnav_path = os.path.join(output_dir, '_toc.yaml')
with open(leftnav_path, 'w') as f:
# Generate header
f.write('# Automatically generated file; please do not edit\ntoc:\n')
for module in modules:
indent_num = module.count('.')
# Don't list `tf.submodule` inside `tf`
indent_num = max(indent_num, 1)
indent = ' '*indent_num
if indent_num > 1:
          # tf.contrib.bayesflow.entropy will be under
          # tf.contrib->bayesflow->entropy
title = six.ensure_str(module).split('.')[-1]
else:
title = module
header = [
'- title: ' + six.ensure_str(title), ' section:',
' - title: Overview', ' path: ' +
os.path.join('/', site_api_path, symbol_to_file[module])
]
header = ''.join([indent+line+'\n' for line in header])
f.write(header)
symbols_in_module = module_children.get(module, [])
# Sort case-insensitive, if equal sort case sensitive (upper first)
symbols_in_module.sort(key=lambda a: (a.upper(), a))
for full_name in symbols_in_module:
item = [
' - title: ' + full_name[len(module) + 1:],
' path: ' + os.path.join('/', site_api_path,
symbol_to_file[full_name])]
item = ''.join([indent+line+'\n' for line in item])
f.write(item)
# Write a global index containing all full names with links.
with open(os.path.join(output_dir, 'index.md'), 'w') as f:
f.write(
six.ensure_str(
parser.generate_global_index(root_title, parser_config.index,
parser_config.reference_resolver)))
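# For reference, a hypothetical `tf.foo` module containing one symbol
# `tf.foo.bar` would yield roughly this `_toc.yaml` from the writer above:
#
#   # Automatically generated file; please do not edit
#   toc:
#    - title: tf.foo
#      section:
#      - title: Overview
#        path: /api_docs/python/tf/foo
#      - title: bar
#        path: /api_docs/python/tf/foo/bar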
def add_dict_to_dict(add_from, add_to):
for key in add_from:
if key in add_to:
add_to[key].extend(add_from[key])
else:
add_to[key] = add_from[key]
# Exclude some libraries in contrib from the documentation altogether.
def _get_default_private_map():
return {
'tf.contrib.autograph': ['utils', 'operators'],
'tf.test': ['mock'],
'tf.compat': ['v1', 'v2'],
'tf.contrib.estimator': ['python'],
}
# Exclude members of some libraries.
def _get_default_do_not_descend_map():
# TODO(markdaoust): Use docs_controls decorators, locally, instead.
return {
'tf': ['cli', 'lib', 'wrappers'],
'tf.contrib': [
'compiler',
'grid_rnn',
# Block contrib.keras to de-clutter the docs
'keras',
'labeled_tensor',
'quantization',
'session_bundle',
'slim',
'solvers',
'specs',
'tensor_forest',
'tensorboard',
'testing',
'tfprof',
],
'tf.contrib.bayesflow': [
'special_math', 'stochastic_gradient_estimators',
'stochastic_variables'
],
'tf.contrib.ffmpeg': ['ffmpeg_ops'],
'tf.contrib.graph_editor': [
'edit', 'match', 'reroute', 'subgraph', 'transform', 'select', 'util'
],
'tf.contrib.keras': ['api', 'python'],
'tf.contrib.layers': ['feature_column', 'summaries'],
'tf.contrib.learn': [
'datasets',
'head',
'graph_actions',
'io',
'models',
'monitors',
'ops',
'preprocessing',
'utils',
],
'tf.contrib.util': ['loader'],
}
class DocControlsAwareCrawler(public_api.PublicAPIVisitor):
"""A `docs_controls` aware API-crawler."""
def _is_private(self, path, name, obj):
if doc_controls.should_skip(obj):
return True
return super(DocControlsAwareCrawler, self)._is_private(path, name, obj)
def extract(py_modules,
private_map,
do_not_descend_map,
visitor_cls=doc_generator_visitor.DocGeneratorVisitor):
"""Extract docs from tf namespace and write them to disk."""
# Traverse the first module.
visitor = visitor_cls(py_modules[0][0])
api_visitor = DocControlsAwareCrawler(visitor)
api_visitor.set_root_name(py_modules[0][0])
add_dict_to_dict(private_map, api_visitor.private_map)
add_dict_to_dict(do_not_descend_map, api_visitor.do_not_descend_map)
traverse.traverse(py_modules[0][1], api_visitor)
# Traverse all py_modules after the first:
for module_name, module in py_modules[1:]:
visitor.set_root_name(module_name)
api_visitor.set_root_name(module_name)
traverse.traverse(module, api_visitor)
return visitor
class _GetMarkdownTitle(py_guide_parser.PyGuideParser):
"""Extract the title from a .md file."""
def __init__(self):
self.title = None
py_guide_parser.PyGuideParser.__init__(self)
def process_title(self, _, title):
if self.title is None: # only use the first title
self.title = title
class _DocInfo(object):
"""A simple struct for holding a doc's url and title."""
def __init__(self, url, title):
self.url = url
self.title = title
def build_doc_index(src_dir):
"""Build an index from a keyword designating a doc to _DocInfo objects."""
doc_index = {}
if not os.path.isabs(src_dir):
raise ValueError("'src_dir' must be an absolute path.\n"
" src_dir='%s'" % src_dir)
if not os.path.exists(src_dir):
raise ValueError("'src_dir' path must exist.\n"
" src_dir='%s'" % src_dir)
for dirpath, _, filenames in os.walk(src_dir):
suffix = os.path.relpath(path=dirpath, start=src_dir)
for base_name in filenames:
if not six.ensure_str(base_name).endswith('.md'):
continue
title_parser = _GetMarkdownTitle()
title_parser.process(os.path.join(dirpath, base_name))
if title_parser.title is None:
msg = ('`{}` has no markdown title (# title)'.format(
os.path.join(dirpath, base_name)))
raise ValueError(msg)
key_parts = six.ensure_str(os.path.join(suffix,
base_name[:-3])).split('/')
if key_parts[-1] == 'index':
key_parts = key_parts[:-1]
doc_info = _DocInfo(os.path.join(suffix, base_name), title_parser.title)
doc_index[key_parts[-1]] = doc_info
if len(key_parts) > 1:
doc_index['/'.join(key_parts[-2:])] = doc_info
return doc_index
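# For example, given a hypothetical `<src_dir>/guides/datasets.md` whose first
# markdown title is "Datasets", the index above maps both 'datasets' and
# 'guides/datasets' to a _DocInfo('guides/datasets.md', 'Datasets').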
class _GuideRef(object):
def __init__(self, base_name, title, section_title, section_tag):
self.url = 'api_guides/python/' + six.ensure_str(
(('%s#%s' % (base_name, section_tag)) if section_tag else base_name))
self.link_text = (('%s > %s' % (title, section_title))
if section_title else title)
def make_md_link(self, url_prefix):
return '[%s](%s%s)' % (self.link_text, url_prefix, self.url)
class _GenerateGuideIndex(py_guide_parser.PyGuideParser):
"""Turn guide files into an index from symbol name to a list of _GuideRefs."""
def __init__(self):
self.index = {}
py_guide_parser.PyGuideParser.__init__(self)
def process(self, full_path, base_name):
"""Index a file, reading from `full_path`, with `base_name` as the link."""
self.full_path = full_path
self.base_name = base_name
self.title = None
self.section_title = None
self.section_tag = None
py_guide_parser.PyGuideParser.process(self, full_path)
def process_title(self, _, title):
if self.title is None: # only use the first title
self.title = title
def process_section(self, _, section_title, tag):
self.section_title = section_title
self.section_tag = tag
def process_line(self, _, line):
"""Index the file and section of each `symbol` reference."""
for match in parser.AUTO_REFERENCE_RE.finditer(line):
val = self.index.get(match.group(1), [])
val.append(
_GuideRef(self.base_name, self.title, self.section_title,
self.section_tag))
self.index[match.group(1)] = val
def _build_guide_index(guide_src_dir):
"""Return dict: symbol name -> _GuideRef from the files in `guide_src_dir`."""
index_generator = _GenerateGuideIndex()
if os.path.exists(guide_src_dir):
for full_path, base_name in py_guide_parser.md_files_in_dir(guide_src_dir):
index_generator.process(full_path, base_name)
return index_generator.index
class _UpdateTags(py_guide_parser.PyGuideParser):
"""Rewrites a Python guide so that each section has an explicit id tag.
"section" here refers to blocks delimited by second level headings.
"""
def process_section(self, line_number, section_title, tag):
self.replace_line(line_number, '<h2 id="%s">%s</h2>' % (tag, section_title))
def update_id_tags_inplace(src_dir):
"""Set explicit ids on all second-level headings to ensure back-links work.
Args:
src_dir: The directory of md-files to convert (inplace).
"""
tag_updater = _UpdateTags()
for dirpath, _, filenames in os.walk(src_dir):
for base_name in filenames:
if not base_name.endswith('.md'):
continue
full_path = os.path.join(src_dir, dirpath, base_name)
# Tag updater loads the file, makes the replacements, and returns the
# modified file contents
content = tag_updater.process(full_path)
with open(full_path, 'w') as f:
f.write(six.ensure_str(content))
EXCLUDED = set(['__init__.py', 'OWNERS', 'README.txt'])
def replace_refs(src_dir,
output_dir,
reference_resolver,
file_pattern='*.md',
api_docs_relpath='api_docs'):
"""Fix @{} references in all files under `src_dir` matching `file_pattern`.
A matching directory structure, with the modified files is
written to `output_dir`.
`{"__init__.py","OWNERS","README.txt"}` are skipped.
Files not matching `file_pattern` (using `fnmatch`) are copied with no change.
Also, files in the `api_guides/python` directory get explicit ids set on all
heading-2s to ensure back-links work.
Args:
src_dir: The directory to convert files from.
output_dir: The root directory to write the resulting files to.
reference_resolver: A `parser.ReferenceResolver` to make the replacements.
    file_pattern: Only replace references in files matching file_pattern,
using fnmatch. Non-matching files are copied unchanged.
api_docs_relpath: Relative-path string to the api_docs, from the src_dir.
"""
# Iterate through all the source files and process them.
for dirpath, _, filenames in os.walk(src_dir):
depth = os.path.relpath(src_dir, start=dirpath)
# How to get from `dirpath` to api_docs/python/
relative_path_to_root = os.path.join(depth, api_docs_relpath, 'python')
# Make the directory under output_dir.
new_dir = os.path.join(output_dir,
os.path.relpath(path=dirpath, start=src_dir))
if not os.path.exists(new_dir):
os.makedirs(new_dir)
for base_name in filenames:
if base_name in EXCLUDED:
continue
full_in_path = os.path.join(dirpath, base_name)
# Set the `current_doc_full_name` so bad files can be reported on errors.
reference_resolver.current_doc_full_name = full_in_path
suffix = os.path.relpath(path=full_in_path, start=src_dir)
full_out_path = os.path.join(output_dir, suffix)
# Copy files that do not match the file_pattern, unmodified.
if not fnmatch.fnmatch(base_name, file_pattern):
if full_in_path != full_out_path:
shutil.copyfile(full_in_path, full_out_path)
continue
with open(full_in_path, 'rb') as f:
content = f.read().decode('utf-8')
content = reference_resolver.replace_references(content,
relative_path_to_root)
with open(full_out_path, 'wb') as f:
f.write(six.ensure_binary(content, 'utf-8'))
class DocGenerator(object):
"""Main entry point for generating docs."""
def __init__(self):
self.argument_parser = argparse.ArgumentParser()
self._py_modules = None
self._private_map = _get_default_private_map()
self._do_not_descend_map = _get_default_do_not_descend_map()
self.yaml_toc = True
self.argument_parser.add_argument(
'--no_search_hints',
dest='search_hints',
action='store_false',
default=True)
self.argument_parser.add_argument(
'--site_api_path',
type=str, default='api_docs/python',
        help='The path from the site-root to api_docs '
        'directory for this project')
self.argument_parser.add_argument(
'--api_cache_out_path',
type=str,
default=None,
help='Path to store a json-serialized api-index, so links can be '
'inserted into docs without rebuilding the api_docs')
def add_output_dir_argument(self):
self.argument_parser.add_argument(
'--output_dir',
type=str,
default=None,
required=True,
help='Directory to write docs to.')
def add_src_dir_argument(self):
self.argument_parser.add_argument(
'--src_dir',
type=str,
default=tempfile.mkdtemp(),
required=False,
help='Optional directory of source docs to add api_docs links to')
def add_base_dir_argument(self, default_base_dir):
self.argument_parser.add_argument(
'--base_dir',
type=str,
default=default_base_dir,
help='Base directory to strip from file names referenced in docs.')
def parse_known_args(self):
flags, _ = self.argument_parser.parse_known_args()
return flags
def add_to_private_map(self, d):
add_dict_to_dict(d, self._private_map)
def add_to_do_not_descend_map(self, d):
add_dict_to_dict(d, self._do_not_descend_map)
def set_private_map(self, d):
self._private_map = d
def set_do_not_descend_map(self, d):
self._do_not_descend_map = d
def set_py_modules(self, py_modules):
self._py_modules = py_modules
def py_module_names(self):
if self._py_modules is None:
raise RuntimeError(
'Must call set_py_modules() before running py_module_names().')
return [name for (name, _) in self._py_modules]
def make_reference_resolver(self, visitor, doc_index):
return parser.ReferenceResolver.from_visitor(
visitor, doc_index, py_module_names=self.py_module_names())
def make_parser_config(self, visitor, reference_resolver, guide_index,
base_dir):
return parser.ParserConfig(
reference_resolver=reference_resolver,
duplicates=visitor.duplicates,
duplicate_of=visitor.duplicate_of,
tree=visitor.tree,
index=visitor.index,
reverse_index=visitor.reverse_index,
guide_index=guide_index,
base_dir=base_dir)
def run_extraction(self):
return extract(self._py_modules, self._private_map,
self._do_not_descend_map)
def build(self, flags):
"""Build all the docs.
This produces two outputs
python api docs:
* generated from modules set with `set_py_modules`.
* written to '{FLAGS.output_dir}/api_docs/python/'
non-api docs:
* Everything in '{FLAGS.src_dir}' is copied to '{FLAGS.output_dir}'.
* '@{}' references in '.md' files are replaced with links.
* '.md' files under 'api_guides/python' have explicit ids set for their
second level headings.
Args:
flags:
* src_dir: Where to fetch the non-api-docs.
* base_dir: Base of the docs directory (Used to build correct
relative links).
* output_dir: Where to write the resulting docs.
Returns:
The number of errors encountered while processing.
"""
# Extract the python api from the _py_modules
doc_index = build_doc_index(flags.src_dir)
visitor = self.run_extraction()
reference_resolver = self.make_reference_resolver(visitor, doc_index)
if getattr(flags, 'api_cache_out_path', None):
reference_resolver.to_json_file(flags.api_cache_out_path)
# Build the guide_index for the api_docs back links.
root_title = getattr(flags, 'root_title', 'TensorFlow')
guide_index = _build_guide_index(
os.path.join(flags.src_dir, 'api_guides/python'))
# Write the api docs.
parser_config = self.make_parser_config(visitor, reference_resolver,
guide_index, flags.base_dir)
output_dir = os.path.join(flags.output_dir, 'api_docs/python')
write_docs(
output_dir,
parser_config,
yaml_toc=self.yaml_toc,
root_title=root_title,
search_hints=getattr(flags, 'search_hints', True),
site_api_path=getattr(flags, 'site_api_path', ''))
# Replace all the @{} references in files under `FLAGS.src_dir`
replace_refs(flags.src_dir, flags.output_dir, reference_resolver, '*.md')
# Fix the tags in the guide dir.
guide_dir = os.path.join(flags.output_dir, 'api_guides/python')
if os.path.exists(guide_dir):
update_id_tags_inplace(guide_dir)
# Report all errors found by the reference resolver, and return the error
# code.
parser_config.reference_resolver.log_errors()
return parser_config.reference_resolver.num_errors()
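# Illustrative sketch (not part of this module): a minimal driver in the
# spirit of the `generate.py` entry points that use this class. The module
# list and base dir are hypothetical.
#
# if __name__ == '__main__':
#     import sys
#     import tensorflow as tf
#
#     doc_generator = DocGenerator()
#     doc_generator.add_output_dir_argument()
#     doc_generator.add_src_dir_argument()
#     doc_generator.add_base_dir_argument(os.path.dirname(tf.__file__))
#     flags = doc_generator.parse_known_args()
#     doc_generator.set_py_modules([('tf', tf)])
#     sys.exit(doc_generator.build(flags))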
|
{
"content_hash": "a166afe9403932732462741ef6563c62",
"timestamp": "",
"source": "github",
"line_count": 668,
"max_line_length": 80,
"avg_line_length": 34.14071856287425,
"alnum_prop": 0.6285626589493993,
"repo_name": "ppwwyyxx/tensorflow",
"id": "77a685062aec1e6440c72484517938f66e150343",
"size": "23523",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "tensorflow/tools/docs/generate_lib.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "5003"
},
{
"name": "Batchfile",
"bytes": "45318"
},
{
"name": "C",
"bytes": "796611"
},
{
"name": "C#",
"bytes": "8562"
},
{
"name": "C++",
"bytes": "76521274"
},
{
"name": "CMake",
"bytes": "6545"
},
{
"name": "Dockerfile",
"bytes": "81136"
},
{
"name": "Go",
"bytes": "1679107"
},
{
"name": "HTML",
"bytes": "4686483"
},
{
"name": "Java",
"bytes": "952883"
},
{
"name": "Jupyter Notebook",
"bytes": "567243"
},
{
"name": "LLVM",
"bytes": "6536"
},
{
"name": "MLIR",
"bytes": "1254789"
},
{
"name": "Makefile",
"bytes": "61284"
},
{
"name": "Objective-C",
"bytes": "104706"
},
{
"name": "Objective-C++",
"bytes": "297774"
},
{
"name": "PHP",
"bytes": "24055"
},
{
"name": "Pascal",
"bytes": "3752"
},
{
"name": "Pawn",
"bytes": "17546"
},
{
"name": "Perl",
"bytes": "7536"
},
{
"name": "Python",
"bytes": "38709528"
},
{
"name": "RobotFramework",
"bytes": "891"
},
{
"name": "Ruby",
"bytes": "7469"
},
{
"name": "Shell",
"bytes": "643731"
},
{
"name": "Smarty",
"bytes": "34743"
},
{
"name": "Swift",
"bytes": "62814"
}
],
"symlink_target": ""
}
|
import gevxlpy.util.configuration as cfg
import gevxl.gui_wx as gui_wx
import os
import errno
import wx
import wx.lib.mixins.inspection
import wx.lib.masked.timectrl
import wx.lib.masked.numctrl
import wx.gizmos
import wx.calendar
import wx.lib.scrolledpanel
import sys
import util
import subprocess
from wx.lib.agw.aui import AuiNotebook
import wx.lib.agw.aui as aui
from pprint import pprint
from datetime import datetime
from datetime import time
from datetime import timedelta
from ObjectListView import ObjectListView, ColumnDefn
import matplotlib
matplotlib.use('WXAgg')
from matplotlib.backends.backend_wxagg import FigureCanvasWxAgg as FigureCanvas
from matplotlib.figure import Figure
import ConfigParser
from change_password_dialog import ChangePasswordDialog
from logon import *
from database import *
from olv_dialog import *
from olv_dialog_controller import OlvDialogController
from patient_identification import *
from patient_identification_olv import *
from system_configuration import SystemConfiguration
from system_configuration_olv import OlvSystemConfiguration, OlvSystemConfigurationCols
from system_configuration_controller import SystemConfigurationController
from wound_assessment import WoundAssessment
from wound_assessment_controller import WoundAssessmentController
from assessment_session import AssessmentSession
from assessment_session_olv import OlvAssessmentSession
from assessment_session_olv import OlvAssessmentSessionCols
from assessment_session_controller import AssessmentSessionController
from add_modify_wound_dialog import AddModifyWoundDialog
from wound_assessment_dialog import WoundAssessmentDialog
from algorithm import Algorithm
from algorithm_olv import OlvAlgorithm, OlvAlgorithmCols
from algorithm_dialog import AlgorithmDialog
from experiment_dialog import ExperimentDialog
from experiment import Experiment
from experiment_controller import ExperimentController
from wound_assessment_controller import WoundAssessmentController
from clinical_wound_controller import ClinicalWoundController
from clinical_wound_assessment import ClinicalWoundAssessment
from assessment_measurements import AssessmentMeasurements
from assessment_measurements_olv import OlvAssessmentMeasurements, OlvAssessmentMeasurementsCols
from assessment_measurements_controller import AssessmentMeasurementsController
from assessment_segmentation import AssessmentSegmentation
from assessment_segmentation_olv import OlvAssessmentSegmentation, OlvAssessmentSegmentationCols
from assessment_segmentation_controller import AssessmentSegmentationController
from assessment_temperature import AssessmentTemperature
from assessment_temperature_olv import OlvAssessmentTemperature, OlvAssessmentTemperatureCols
from assessment_temperature_controller import AssessmentTemperatureController
from pu_file_director import PUFileDirector
from biochemical_collection import BiochemicalCollection
from assessment_multi_camera_canvases import AssessmentMultiCameraCanvases
from touchscreen import TouchFrame
# begin wxGlade: dependencies
import gettext
# end wxGlade
# begin wxGlade: extracode
# end wxGlade
db = ""
cameraAlgorithm = "assessment camera"
measurementAlgorithm = "assessment measure"
class MainFrame(wx.Frame):
"""
This class implements the main window (or frame) for the Assessment System GUI.
It defines the window, the menubars, the tabbed notebook panes, and a status line.
    For the Assessment GUI the notebook panes consist of several collection panes
    (chemical, visual, and multi-spectral), several analysis panes (measurements,
    segmentation, thermal, and chemical), and a clinical pane. The visual collection
    pane contains canvases which display the video feeds from the cameras.
Methods:
__init__(*args, **kwds) - creates the widgets in the frame and performs initialization
__set_properties() - set various properties of the widgets
__do_layout() - lays out the widgets
DoNewPatient(event) - Menu handler for adding new patients
DoSelectPatient(event) - Menu handler for selecting a patient to record
"""
def __init__(self, *args, **kwds):
"""
Creates the widgets in the frame and performs initialization.
Also, creates and attaches an OpenGL canvas for the camera process to use.
"""
# begin wxGlade: MainFrame.__init__
kwds["style"] = wx.CAPTION | wx.CLOSE_BOX | wx.MINIMIZE_BOX | wx.MAXIMIZE_BOX | wx.SYSTEM_MENU | wx.RESIZE_BORDER | wx.TAB_TRAVERSAL | wx.CLIP_CHILDREN
wx.Frame.__init__(self, *args, **kwds)
# Menu Bar
self.main_frame_menubar = wx.MenuBar()
self.mainFileMenu = wx.Menu()
self.exitFileMenuItem = wx.MenuItem(self.mainFileMenu, wx.ID_ANY, _("Exit"), "", wx.ITEM_NORMAL)
self.mainFileMenu.AppendItem(self.exitFileMenuItem)
self.main_frame_menubar.Append(self.mainFileMenu, _("File"))
self.mainPatientMenu = wx.Menu()
self.newPatientMenuItem = wx.MenuItem(self.mainPatientMenu, wx.ID_ANY, _("New/Modify..."), "", wx.ITEM_NORMAL)
self.mainPatientMenu.AppendItem(self.newPatientMenuItem)
self.selectPatientMenuItem = wx.MenuItem(self.mainPatientMenu, wx.ID_ANY, _("Select..."), "", wx.ITEM_NORMAL)
self.mainPatientMenu.AppendItem(self.selectPatientMenuItem)
self.newWoundMenuItem = wx.MenuItem(self.mainPatientMenu, wx.ID_ANY, _("New/Modify/Delete Wound..."), "", wx.ITEM_NORMAL)
self.mainPatientMenu.AppendItem(self.newWoundMenuItem)
self.newAssessmentMenuItem = wx.MenuItem(self.mainPatientMenu, wx.ID_ANY, _("New/Modify/Delete Assessment..."), "", wx.ITEM_NORMAL)
self.mainPatientMenu.AppendItem(self.newAssessmentMenuItem)
self.main_frame_menubar.Append(self.mainPatientMenu, _("Patient"))
self.mainAlgorithmMenu = wx.Menu()
self.newAlgorithmItem = wx.MenuItem(self.mainAlgorithmMenu, wx.ID_ANY, _("New/Modify/Delete Algorithm"), "", wx.ITEM_NORMAL)
self.mainAlgorithmMenu.AppendItem(self.newAlgorithmItem)
self.newExperimentItem = wx.MenuItem(self.mainAlgorithmMenu, wx.ID_ANY, _("New/Modify/Delete Experiment..."), "", wx.ITEM_NORMAL)
self.mainAlgorithmMenu.AppendItem(self.newExperimentItem)
self.main_frame_menubar.Append(self.mainAlgorithmMenu, _("Algorithm"))
self.mainAdminMenu = wx.Menu()
self.changePasswordItem = wx.MenuItem(self.mainAdminMenu, wx.ID_ANY, _("Change Password..."), "", wx.ITEM_NORMAL)
self.mainAdminMenu.AppendItem(self.changePasswordItem)
self.manageUsersItem = wx.MenuItem(self.mainAdminMenu, wx.ID_ANY, _("Manage Users..."), "", wx.ITEM_NORMAL)
self.mainAdminMenu.AppendItem(self.manageUsersItem)
self.accessControlItem = wx.MenuItem(self.mainAdminMenu, wx.ID_ANY, _("Access Control..."), "", wx.ITEM_NORMAL)
self.mainAdminMenu.AppendItem(self.accessControlItem)
self.newSystemConfiguration = wx.MenuItem(self.mainAdminMenu, wx.ID_ANY, _("System Configuration..."), "", wx.ITEM_NORMAL)
self.mainAdminMenu.AppendItem(self.newSystemConfiguration)
self.main_frame_menubar.Append(self.mainAdminMenu, _("Admin"))
self.mainHelpMenu = wx.Menu()
self.main_frame_menubar.Append(self.mainHelpMenu, _("Help"))
self.SetMenuBar(self.main_frame_menubar)
# Menu Bar end
self.main_frame_statusbar = self.CreateStatusBar(1, 0)
self.panel_1 = wx.Panel(self, wx.ID_ANY)
self.assessment_notebook = AuiNotebook(self.panel_1, wx.ID_ANY, style=0)
self.chemical_collection_pane = wx.Panel(self.assessment_notebook, wx.ID_ANY)
self.chem_plot_panel = wx.Panel(self.chemical_collection_pane, wx.ID_ANY)
self.ch_sampling_label = wx.StaticText(self.chemical_collection_pane, wx.ID_ANY, _("Sampling Plot (Impedance vs. Frequency)"), style=wx.ALIGN_CENTRE)
self.ch_patient_label = wx.StaticText(self.chemical_collection_pane, wx.ID_ANY, _("Patient:"))
self.ch_patient_name = wx.TextCtrl(self.chemical_collection_pane, wx.ID_ANY, _("None"), style=wx.TE_READONLY)
self.ch_patient_select_button = wx.Button(self.chemical_collection_pane, wx.ID_ANY, _("Select"))
self.ch_wound_dropdown_label_copy = wx.StaticText(self.chemical_collection_pane, wx.ID_ANY, _("Wound Location:"))
        self.ch_wound_loc = wx.ComboBox(self.chemical_collection_pane, wx.ID_ANY, choices=[], style=wx.CB_DROPDOWN)
self.ch_wound_new_button = wx.Button(self.chemical_collection_pane, wx.ID_ANY, _("New"))
self.ch_assessement_dropdown_label = wx.StaticText(self.chemical_collection_pane, wx.ID_ANY, _("Assessment Number:"))
self.ch_assessment_id = wx.ComboBox(self.chemical_collection_pane, wx.ID_ANY, choices=[], style=wx.CB_DROPDOWN)
self.ch_assessment_new_button = wx.Button(self.chemical_collection_pane, wx.ID_NEW, "")
self.baseline_label = wx.StaticText(self.chemical_collection_pane, wx.ID_ANY, _("Baseline Sampling"), style=wx.ALIGN_CENTRE)
self.ch_baseline_msg = wx.TextCtrl(self.chemical_collection_pane, wx.ID_ANY, "", style=wx.TE_CENTRE)
self.baseline_start_stop_button = wx.Button(self.chemical_collection_pane, wx.ID_ANY, _("Start Baseline Sampling"))
self.baseline_reset_button = wx.Button(self.chemical_collection_pane, wx.ID_ANY, _("Reset"))
self.chemical_wound_label = wx.StaticText(self.chemical_collection_pane, wx.ID_ANY, _("Wound Sampling"), style=wx.ALIGN_CENTRE)
self.ch_wound_msg = wx.TextCtrl(self.chemical_collection_pane, wx.ID_ANY, "", style=wx.TE_CENTRE)
self.wound_start_stop_button = wx.Button(self.chemical_collection_pane, wx.ID_ANY, _("Start Wound Sampling"))
self.wound_reset_button = wx.Button(self.chemical_collection_pane, wx.ID_ANY, _("Reset"))
self.visual_collection_pane = wx.Panel(self.assessment_notebook, wx.ID_ANY)
self.rgb_video_panel = wx.Panel(self.visual_collection_pane, wx.ID_ANY)
self.rgb_video_panel_label = wx.StaticText(self.visual_collection_pane, wx.ID_ANY, _("RGB Video Feed"), style=wx.ALIGN_CENTRE)
self.depth_video_panel = wx.Panel(self.visual_collection_pane, wx.ID_ANY)
self.thermal_video_panel = wx.Panel(self.visual_collection_pane, wx.ID_ANY)
self.visual_depth_label = wx.StaticText(self.visual_collection_pane, wx.ID_ANY, _("Depth Video Feed"))
self.visual_thermal_label = wx.StaticText(self.visual_collection_pane, wx.ID_ANY, _("Thermal Video Feed"))
self.cv_patient_label = wx.StaticText(self.visual_collection_pane, wx.ID_ANY, _("Patient:"))
self.patient_name = wx.TextCtrl(self.visual_collection_pane, wx.ID_ANY, _("None"), style=wx.TE_READONLY)
self.cv_patient_select_button = wx.Button(self.visual_collection_pane, wx.ID_ANY, _("Select"))
self.cv_wound_dropdown_label = wx.StaticText(self.visual_collection_pane, wx.ID_ANY, _("Wound Location:"))
        self.wound_loc = wx.ComboBox(self.visual_collection_pane, wx.ID_ANY, choices=[], style=wx.CB_DROPDOWN)
self.cv_wound_new_button = wx.Button(self.visual_collection_pane, wx.ID_ANY, _("New"))
self.cv_assessement_dropdown_label = wx.StaticText(self.visual_collection_pane, wx.ID_ANY, _("Assessment Number:"))
self.cv_assessment_id = wx.ComboBox(self.visual_collection_pane, wx.ID_ANY, choices=[], style=wx.CB_DROPDOWN)
self.cv_assessment_new_button = wx.Button(self.visual_collection_pane, wx.ID_NEW, "")
self.start_stop_button = wx.Button(self.visual_collection_pane, wx.ID_ANY, _("Record"))
self.cv_reset_button = wx.Button(self.visual_collection_pane, wx.ID_ANY, _("Reset"))
self.preview_button = wx.Button(self.visual_collection_pane, wx.ID_ANY, _("Preview"))
self.visual_msg = wx.TextCtrl(self.visual_collection_pane, wx.ID_ANY, "")
self.cv_grid_uc = wx.Button(self.visual_collection_pane, 801, _("Upper\n(Not Taken)"))
self.cv_grid_ml = wx.Button(self.visual_collection_pane, 803, _("Left\n(Not Taken)"))
self.cv_grid_mc = wx.Button(self.visual_collection_pane, 804, _("Center\n(Not Taken)"))
self.cv_grid_mr = wx.Button(self.visual_collection_pane, 805, _("Right\n(Not Taken)"))
self.cv_grid_lc = wx.Button(self.visual_collection_pane, 807, _("Lower\n(Not Taken)"))
self.multi_spectral_collection_pane = wx.Panel(self.assessment_notebook, wx.ID_ANY)
self.rgb_video_panel_copy = wx.Panel(self.multi_spectral_collection_pane, wx.ID_ANY)
self.rgb_video_panel_label_copy = wx.StaticText(self.multi_spectral_collection_pane, wx.ID_ANY, _("RGB Video Feed"), style=wx.ALIGN_CENTRE)
self.multi_video_panel = wx.Panel(self.multi_spectral_collection_pane, wx.ID_ANY)
self.multi_video_label = wx.StaticText(self.multi_spectral_collection_pane, wx.ID_ANY, _("Multi-Spectral Camera Image"), style=wx.ALIGN_CENTRE)
self.ms_patient_label = wx.StaticText(self.multi_spectral_collection_pane, wx.ID_ANY, _("Patient:"))
self.ms_patient_name = wx.TextCtrl(self.multi_spectral_collection_pane, wx.ID_ANY, _("None"), style=wx.TE_READONLY)
self.ms_patient_select_button = wx.Button(self.multi_spectral_collection_pane, wx.ID_ANY, _("Select"))
self.ms_wound_dropdown_label = wx.StaticText(self.multi_spectral_collection_pane, wx.ID_ANY, _("Wound Location:"))
        self.ms_wound_loc = wx.ComboBox(self.multi_spectral_collection_pane, wx.ID_ANY, choices=[], style=wx.CB_DROPDOWN)
self.ms_wound_new_button = wx.Button(self.multi_spectral_collection_pane, wx.ID_ANY, _("New"))
self.ms_assessement_dropdown_label = wx.StaticText(self.multi_spectral_collection_pane, wx.ID_ANY, _("Assessment Number:"))
self.ms_assessment_id = wx.ComboBox(self.multi_spectral_collection_pane, wx.ID_ANY, choices=[], style=wx.CB_DROPDOWN)
self.ms_assessment_new_button = wx.Button(self.multi_spectral_collection_pane, wx.ID_NEW, "")
self.ms_snapshot_button = wx.Button(self.multi_spectral_collection_pane, wx.ID_ANY, _("Snapshot"))
self.ms_reset_button = wx.Button(self.multi_spectral_collection_pane, wx.ID_ANY, _("Reset"))
self.ms_preview_button = wx.Button(self.multi_spectral_collection_pane, wx.ID_ANY, _("Preview"))
self.ms_msg = wx.TextCtrl(self.multi_spectral_collection_pane, wx.ID_ANY, "")
self.analysis_measurements_pane = wx.Panel(self.assessment_notebook, wx.ID_ANY)
self.wm_3d_panel = wx.Panel(self.analysis_measurements_pane, wx.ID_ANY)
self.wm3d_label = wx.StaticText(self.analysis_measurements_pane, wx.ID_ANY, _("Wound 3-D Reconstruction"), style=wx.ALIGN_CENTRE)
self.wm_no_overlay_radio_btn = wx.RadioButton(self.analysis_measurements_pane, wx.ID_ANY, _("No Overlay"), style=wx.RB_GROUP)
self.wm_rgb_radio_btn = wx.RadioButton(self.analysis_measurements_pane, wx.ID_ANY, _("Tissue Type Texture Mapping"))
self.wm_thermal_radio_btn = wx.RadioButton(self.analysis_measurements_pane, wx.ID_ANY, _("Thermal Texture Mapping"))
self.wound_measurement_notebook = wx.Notebook(self.analysis_measurements_pane, wx.ID_ANY, style=0)
self.wm_config_pane = wx.Panel(self.wound_measurement_notebook, wx.ID_ANY)
self.wm_results_pane = wx.Panel(self.wound_measurement_notebook, wx.ID_ANY)
self.wm_patient_label = wx.StaticText(self.wm_results_pane, wx.ID_ANY, _("Patient:"))
self.wm_patient_name = wx.TextCtrl(self.wm_results_pane, wx.ID_ANY, _("None"), style=wx.TE_READONLY)
self.wm_patient_select_button = wx.Button(self.wm_results_pane, wx.ID_ANY, _("Select"))
self.wm_wound_dropdown_label = wx.StaticText(self.wm_results_pane, wx.ID_ANY, _("Wound Location:"))
        self.wm_wound_loc = wx.ComboBox(self.wm_results_pane, wx.ID_ANY, choices=[], style=wx.CB_DROPDOWN)
self.wm_assessement_dropdown_label = wx.StaticText(self.wm_results_pane, wx.ID_ANY, _("Assessment Number:"))
self.wm_assessment_id = wx.ComboBox(self.wm_results_pane, wx.ID_ANY, choices=[], style=wx.CB_DROPDOWN)
self.wm_reslts_label = wx.StaticText(self.wm_results_pane, wx.ID_ANY, _("Computed Results"))
self.wm_l_label = wx.StaticText(self.wm_results_pane, wx.ID_ANY, _("Length (cm.):"))
self.wm_length = wx.lib.masked.NumCtrl(self.wm_results_pane, wx.ID_ANY, _("0.00"), style=wx.TE_RIGHT)
self.wm_w_label = wx.StaticText(self.wm_results_pane, wx.ID_ANY, _("Width (cm.):"))
self.wm_width = wx.lib.masked.NumCtrl(self.wm_results_pane, wx.ID_ANY, _("0.00"), style=wx.TE_RIGHT)
self.wm_d_label = wx.StaticText(self.wm_results_pane, wx.ID_ANY, _("Depth (cm.):"))
self.wm_depth = wx.lib.masked.NumCtrl(self.wm_results_pane, wx.ID_ANY, _("0.00"), style=wx.TE_RIGHT)
self.wm_lw_label = wx.StaticText(self.wm_results_pane, wx.ID_ANY, _("Length*Width (cm**2):"))
self.wm_lw = wx.lib.masked.NumCtrl(self.wm_results_pane, wx.ID_ANY, _("0.00"), style=wx.TE_RIGHT)
self.wm_a_label = wx.StaticText(self.wm_results_pane, wx.ID_ANY, _("Surface Area (cm**2):"))
self.wm_area = wx.lib.masked.NumCtrl(self.wm_results_pane, wx.ID_ANY, _("0.00"), style=wx.TE_RIGHT)
self.wm_v_label = wx.StaticText(self.wm_results_pane, wx.ID_ANY, _("Wound Volume (cm**3):"))
self.wm_volume = wx.lib.masked.NumCtrl(self.wm_results_pane, wx.ID_ANY, _("0.00"), style=wx.TE_RIGHT)
self.wm_push_label = wx.StaticText(self.wm_results_pane, wx.ID_ANY, _("PUSH Score:"))
self.wm_push = wx.lib.masked.NumCtrl(self.wm_results_pane, wx.ID_ANY, _("0"), style=wx.TE_RIGHT)
self.wm_historical_pane = wx.Panel(self.wound_measurement_notebook, wx.ID_ANY, style=wx.TAB_TRAVERSAL | wx.FULL_REPAINT_ON_RESIZE)
self.wmh_patient_label = wx.StaticText(self.wm_historical_pane, wx.ID_ANY, _("Patient:"))
self.wmh_patient_name = wx.TextCtrl(self.wm_historical_pane, wx.ID_ANY, _("None"), style=wx.TE_READONLY)
self.wmh_patient_select_button = wx.Button(self.wm_historical_pane, wx.ID_ANY, _("Select"))
self.wmh_wound_dropdown_label = wx.StaticText(self.wm_historical_pane, wx.ID_ANY, _("Wound Location:"))
        self.wmh_wound_loc = wx.ComboBox(self.wm_historical_pane, wx.ID_ANY, choices=[], style=wx.CB_DROPDOWN)
self.wm_historical_label = wx.StaticText(self.wm_historical_pane, wx.ID_ANY, _("Historical Measurements"))
self.wm_historical_list = ObjectListView(self.wm_historical_pane, wx.ID_ANY, style=wx.LC_REPORT|wx.SUNKEN_BORDER)
self.analysis_segmentation_pane = wx.Panel(self.assessment_notebook, wx.ID_ANY)
self.ts_2d_panel = wx.Panel(self.analysis_segmentation_pane, wx.ID_ANY)
self.ts_2d_label = wx.StaticText(self.analysis_segmentation_pane, wx.ID_ANY, _("2-D Tissue Segmentation"))
self.ts_3d_panel = wx.Panel(self.analysis_segmentation_pane, wx.ID_ANY)
self.ts_3d_label = wx.StaticText(self.analysis_segmentation_pane, wx.ID_ANY, _("3-D Tissue Segmentation"), style=wx.ALIGN_CENTRE)
self.tissue_segmentation_notebook = wx.Notebook(self.analysis_segmentation_pane, wx.ID_ANY, style=0)
self.ts_config_pane = wx.Panel(self.tissue_segmentation_notebook, wx.ID_ANY)
self.ts_results_pane = wx.Panel(self.tissue_segmentation_notebook, wx.ID_ANY)
self.ts_patient_label = wx.StaticText(self.ts_results_pane, wx.ID_ANY, _("Patient:"))
self.ts_patient_name = wx.TextCtrl(self.ts_results_pane, wx.ID_ANY, _("None"), style=wx.TE_READONLY)
self.ts_patient_select_button = wx.Button(self.ts_results_pane, wx.ID_ANY, _("Select"))
self.ts_wound_dropdown_label = wx.StaticText(self.ts_results_pane, wx.ID_ANY, _("Wound Location:"))
        self.ts_wound_loc = wx.ComboBox(self.ts_results_pane, wx.ID_ANY, choices=[], style=wx.CB_DROPDOWN)
self.ts_assessement_dropdown_label = wx.StaticText(self.ts_results_pane, wx.ID_ANY, _("Assessment Number:"))
self.ts_assessment_id = wx.ComboBox(self.ts_results_pane, wx.ID_ANY, choices=[], style=wx.CB_DROPDOWN)
self.ts_results_label = wx.StaticText(self.ts_results_pane, wx.ID_ANY, _("Computed Results"))
self.ts_granulation_label = wx.StaticText(self.ts_results_pane, wx.ID_ANY, _("Granulation (%):"))
self.ts_granulation_percentage = wx.lib.masked.NumCtrl(self.ts_results_pane, wx.ID_ANY, _("0.0"), style=wx.TE_RIGHT)
self.ts_slough_label = wx.StaticText(self.ts_results_pane, wx.ID_ANY, _("Slough (%):"))
self.ts_slough_percentage = wx.lib.masked.NumCtrl(self.ts_results_pane, wx.ID_ANY, _("0.0"), style=wx.TE_RIGHT)
self.ts_eschar_label = wx.StaticText(self.ts_results_pane, wx.ID_ANY, _("Eschar (%):"))
self.ts_eschar_percentage = wx.lib.masked.NumCtrl(self.ts_results_pane, wx.ID_ANY, _("0.0"), style=wx.TE_RIGHT)
self.ts_bone_label = wx.StaticText(self.ts_results_pane, wx.ID_ANY, _("Bone/Tendon (%):"))
self.ts_bone_percentage = wx.lib.masked.NumCtrl(self.ts_results_pane, wx.ID_ANY, _("0.0"), style=wx.TE_RIGHT)
self.ts_staging_label = wx.StaticText(self.ts_results_pane, wx.ID_ANY, _("Ulcer Staging (est.):"))
self.ts_staging = wx.lib.masked.NumCtrl(self.ts_results_pane, wx.ID_ANY, _("0"), style=wx.TE_RIGHT)
self.ts_historical_pane = wx.Panel(self.tissue_segmentation_notebook, wx.ID_ANY)
self.tsh_patient_label = wx.StaticText(self.ts_historical_pane, wx.ID_ANY, _("Patient:"))
self.tsh_patient_name = wx.TextCtrl(self.ts_historical_pane, wx.ID_ANY, _("None"), style=wx.TE_READONLY)
self.tsh_patient_select_button = wx.Button(self.ts_historical_pane, wx.ID_ANY, _("Select"))
self.tsh_wound_dropdown_label = wx.StaticText(self.ts_historical_pane, wx.ID_ANY, _("Wound Location:"))
        self.tsh_wound_loc = wx.ComboBox(self.ts_historical_pane, wx.ID_ANY, choices=[], style=wx.CB_DROPDOWN)
self.ts_historical_label = wx.StaticText(self.ts_historical_pane, wx.ID_ANY, _("Historical Measurements"))
self.ts_historical_list = ObjectListView(self.ts_historical_pane, wx.ID_ANY, style=wx.LC_REPORT|wx.SUNKEN_BORDER)
self.analysis_thermal_pane = wx.Panel(self.assessment_notebook, wx.ID_ANY)
self.thermal_2d_pane = wx.Panel(self.analysis_thermal_pane, wx.ID_ANY)
self.th_2d_label = wx.StaticText(self.analysis_thermal_pane, wx.ID_ANY, _("Thermal 2-D Hot Spot Map"))
self.thermal_3d_panel = wx.Panel(self.analysis_thermal_pane, wx.ID_ANY)
self.th_3d_label = wx.StaticText(self.analysis_thermal_pane, wx.ID_ANY, _("Thermal 3-D Hot Spot Map"))
self.thermal_cont_radio = wx.RadioButton(self.analysis_thermal_pane, wx.ID_ANY, _("Continuous Temperature"), style=wx.RB_GROUP)
        self.thermal_segmented_radio = wx.RadioButton(self.analysis_thermal_pane, wx.ID_ANY, _("Segmented Temperature"))
self.thermal_segment_size = wx.ComboBox(self.analysis_thermal_pane, wx.ID_ANY, choices=[_("0.25 C"), _("0.50 C"), _("0.75 C"), _("1.00 C"), _("1.25 C"), _("1.50 C"), _("1.75 C"), _("2.00 C")], style=wx.CB_DROPDOWN)
self.thermal_profile_notebook = wx.Notebook(self.analysis_thermal_pane, wx.ID_ANY, style=0)
self.th_config_pane = wx.Panel(self.thermal_profile_notebook, wx.ID_ANY)
self.th_results_pane = wx.Panel(self.thermal_profile_notebook, wx.ID_ANY)
self.th_patient_label = wx.StaticText(self.th_results_pane, wx.ID_ANY, _("Patient:"))
self.th_patient_name = wx.TextCtrl(self.th_results_pane, wx.ID_ANY, _("None"), style=wx.TE_READONLY)
self.th_patient_select_button = wx.Button(self.th_results_pane, wx.ID_ANY, _("Select"))
self.th_wound_dropdown_label = wx.StaticText(self.th_results_pane, wx.ID_ANY, _("Wound Location:"))
        self.th_wound_loc = wx.ComboBox(self.th_results_pane, wx.ID_ANY, choices=[], style=wx.CB_DROPDOWN)
self.th_assessement_dropdown_label = wx.StaticText(self.th_results_pane, wx.ID_ANY, _("Assessment Number:"))
self.th_assessment_id = wx.ComboBox(self.th_results_pane, wx.ID_ANY, choices=[], style=wx.CB_DROPDOWN)
self.th_results_label = wx.StaticText(self.th_results_pane, wx.ID_ANY, _("Thermal Profile"))
self.t_max_label = wx.StaticText(self.th_results_pane, wx.ID_ANY, _("Maximum Temperature (C):"))
self.t_max = wx.lib.masked.NumCtrl(self.th_results_pane, wx.ID_ANY, _("0.0"), style=wx.TE_RIGHT)
self.t_min_label = wx.StaticText(self.th_results_pane, wx.ID_ANY, _("Minimum Temperature (C):"))
self.t_min = wx.lib.masked.NumCtrl(self.th_results_pane, wx.ID_ANY, _("0.0"), style=wx.TE_RIGHT)
self.t_base_label = wx.StaticText(self.th_results_pane, wx.ID_ANY, _("Baseline Temperature (C):"))
self.t_baseline = wx.lib.masked.NumCtrl(self.th_results_pane, wx.ID_ANY, _("0.0"), style=wx.TE_RIGHT)
self.t_sigma_label = wx.StaticText(self.th_results_pane, wx.ID_ANY, _("Temperature Variation Sigma:"))
self.t_sigma = wx.lib.masked.NumCtrl(self.th_results_pane, wx.ID_ANY, _("0.000"), style=wx.TE_RIGHT)
self.t_s1_label = wx.StaticText(self.th_results_pane, wx.ID_ANY, _("Temperature Segment #1 (%):"))
self.t_segment_1 = wx.lib.masked.NumCtrl(self.th_results_pane, wx.ID_ANY, _("0.0"), style=wx.TE_RIGHT)
self.t_s2_label = wx.StaticText(self.th_results_pane, wx.ID_ANY, _("Temperature Segment #2 (%):"))
self.t_segment_2 = wx.lib.masked.NumCtrl(self.th_results_pane, wx.ID_ANY, _("0.0"), style=wx.TE_RIGHT)
self.t_s3_label = wx.StaticText(self.th_results_pane, wx.ID_ANY, _("Temperature Segment #3 (%):"))
self.t_segment_3 = wx.lib.masked.NumCtrl(self.th_results_pane, wx.ID_ANY, _("0.0"), style=wx.TE_RIGHT)
self.t_s4_label = wx.StaticText(self.th_results_pane, wx.ID_ANY, _("Temperature Segment #4 (%):"))
self.t_segment_4 = wx.lib.masked.NumCtrl(self.th_results_pane, wx.ID_ANY, _("0.0"), style=wx.TE_RIGHT)
self.t_s5_label = wx.StaticText(self.th_results_pane, wx.ID_ANY, _("Temperature Segment #5 (%):"))
self.t_segment_5 = wx.lib.masked.NumCtrl(self.th_results_pane, wx.ID_ANY, _("0.0"), style=wx.TE_RIGHT)
self.th_historical_pane = wx.Panel(self.thermal_profile_notebook, wx.ID_ANY)
self.thh_patient_label = wx.StaticText(self.th_historical_pane, wx.ID_ANY, _("Patient:"))
self.thh_patient_name = wx.TextCtrl(self.th_historical_pane, wx.ID_ANY, _("None"), style=wx.TE_READONLY)
self.thh_patient_select_button = wx.Button(self.th_historical_pane, wx.ID_ANY, _("Select"))
self.thh_wound_dropdown_label = wx.StaticText(self.th_historical_pane, wx.ID_ANY, _("Wound Location:"))
        self.thh_wound_loc = wx.ComboBox(self.th_historical_pane, wx.ID_ANY, choices=[], style=wx.CB_DROPDOWN)
self.th_historical_label = wx.StaticText(self.th_historical_pane, wx.ID_ANY, _("Historical Measurements"))
self.th_historical_list = ObjectListView(self.th_historical_pane, wx.ID_ANY, style=wx.LC_REPORT|wx.SUNKEN_BORDER)
self.analysis_chemical_pane = wx.Panel(self.assessment_notebook, wx.ID_ANY)
self.chemical_panel = wx.Panel(self.analysis_chemical_pane, wx.ID_ANY)
self.cs_panel_label = wx.StaticText(self.analysis_chemical_pane, wx.ID_ANY, _("Frequency / Impedance Curve"))
self.chemical_sensing_notebook = wx.Notebook(self.analysis_chemical_pane, wx.ID_ANY, style=0)
self.cs_config_pane = wx.Panel(self.chemical_sensing_notebook, wx.ID_ANY)
self.cs_results_pane = wx.Panel(self.chemical_sensing_notebook, wx.ID_ANY)
self.cs_patient_label = wx.StaticText(self.cs_results_pane, wx.ID_ANY, _("Patient:"))
self.cs_patient_name = wx.TextCtrl(self.cs_results_pane, wx.ID_ANY, _("None"), style=wx.TE_READONLY)
self.cs_patient_select_button = wx.Button(self.cs_results_pane, wx.ID_ANY, _("Select"))
self.cs_wound_dropdown_label = wx.StaticText(self.cs_results_pane, wx.ID_ANY, _("Wound Location:"))
        self.cs_wound_loc = wx.ComboBox(self.cs_results_pane, wx.ID_ANY, choices=[], style=wx.CB_DROPDOWN)
self.cs_assessement_dropdown_label = wx.StaticText(self.cs_results_pane, wx.ID_ANY, _("Assessment Number:"))
self.cs_assessment_id = wx.ComboBox(self.cs_results_pane, wx.ID_ANY, choices=[], style=wx.CB_DROPDOWN)
self.cs_results_label = wx.StaticText(self.cs_results_pane, wx.ID_ANY, _("Chemical Sensing Results"))
self.cs_historical_pane = wx.Panel(self.chemical_sensing_notebook, wx.ID_ANY)
self.csh_patient_label = wx.StaticText(self.cs_historical_pane, wx.ID_ANY, _("Patient:"))
self.csh_patient_name = wx.TextCtrl(self.cs_historical_pane, wx.ID_ANY, _("None"), style=wx.TE_READONLY)
self.csh_patient_select_button = wx.Button(self.cs_historical_pane, wx.ID_ANY, _("Select"))
self.csh_wound_dropdown_label = wx.StaticText(self.cs_historical_pane, wx.ID_ANY, _("Wound Location:"))
        self.csh_wound_loc = wx.ComboBox(self.cs_historical_pane, wx.ID_ANY, choices=[], style=wx.CB_DROPDOWN)
self.cs_historical_label = wx.StaticText(self.cs_historical_pane, wx.ID_ANY, _("Historical Measurements"))
self.cs_historical_list = ObjectListView(self.cs_historical_pane, wx.ID_ANY)
self.clinical_collection_pane = wx.lib.scrolledpanel.ScrolledPanel(self.assessment_notebook, wx.ID_ANY)
self.cc_label = wx.StaticText(self.clinical_collection_pane, wx.ID_ANY, _("Clinical Assessment Reporting"), style=wx.ALIGN_CENTRE)
self.cc_patient_label = wx.StaticText(self.clinical_collection_pane, wx.ID_ANY, _("Patient:"))
self.cc_patient_name = wx.TextCtrl(self.clinical_collection_pane, wx.ID_ANY, "")
self.patient_select_button = wx.Button(self.clinical_collection_pane, wx.ID_ANY, _("Select"))
self.cc_wound_label = wx.StaticText(self.clinical_collection_pane, wx.ID_ANY, _("Wound:"))
self.cc_wound_loc = wx.ComboBox(self.clinical_collection_pane, wx.ID_ANY, choices=[], style=wx.CB_DROPDOWN)
self.cc_wound_button = wx.Button(self.clinical_collection_pane, wx.ID_ANY, _("New"))
self.assessment_no_label = wx.StaticText(self.clinical_collection_pane, wx.ID_ANY, _("Assessment Number:"))
self.cc_assessment_no = wx.ComboBox(self.clinical_collection_pane, wx.ID_ANY, choices=[], style=wx.CB_DROPDOWN)
self.cc_assessment_button = wx.Button(self.clinical_collection_pane, wx.ID_ANY, _("New"))
self.cc_datetime_label = wx.StaticText(self.clinical_collection_pane, wx.ID_ANY, _("Assessment Date & Time:"))
self.cc_datepicker = wx.DatePickerCtrl(self.clinical_collection_pane, wx.ID_ANY, style=wx.DP_DROPDOWN)
self.cc_timepicker = wx.lib.masked.timectrl.TimeCtrl(self.clinical_collection_pane, wx.ID_ANY)
self.cc_length_label = wx.StaticText(self.clinical_collection_pane, wx.ID_ANY, _("Wound Length (cm.):"))
self.cc_length = wx.lib.masked.NumCtrl(self.clinical_collection_pane, wx.ID_ANY, _("0.00"), style=wx.TE_RIGHT)
self.cc_width_label = wx.StaticText(self.clinical_collection_pane, wx.ID_ANY, _("Wound Width (cm.):"))
self.cc_width = wx.lib.masked.NumCtrl(self.clinical_collection_pane, wx.ID_ANY, _("0.00"), style=wx.TE_RIGHT)
self.cc_depth_label = wx.StaticText(self.clinical_collection_pane, wx.ID_ANY, _("Wound Depth (cm.):"))
self.cc_depth = wx.lib.masked.NumCtrl(self.clinical_collection_pane, wx.ID_ANY, _("0.00"), style=wx.TE_RIGHT)
self.cc_u_depth_label = wx.StaticText(self.clinical_collection_pane, wx.ID_ANY, _("Undermining Depth (cm.):"))
self.cc_u_depth = wx.lib.masked.NumCtrl(self.clinical_collection_pane, wx.ID_ANY, _("0.00"), style=wx.TE_RIGHT)
self.cc_u_desc_label = wx.StaticText(self.clinical_collection_pane, wx.ID_ANY, _("Undermining Description:"))
self.cc_u_desc = wx.TextCtrl(self.clinical_collection_pane, wx.ID_ANY, "", style=wx.TE_MULTILINE)
self.cc_staging_label = wx.StaticText(self.clinical_collection_pane, wx.ID_ANY, _("Ulcer Staging:"))
self.cc_staging = wx.ComboBox(self.clinical_collection_pane, wx.ID_ANY, choices=[_("1"), _("2"), _("3"), _("4")], style=wx.CB_DROPDOWN)
self.cc_bed_label = wx.StaticText(self.clinical_collection_pane, wx.ID_ANY, _("Bed Color:"))
self.cc_bed_color = wx.TextCtrl(self.clinical_collection_pane, wx.ID_ANY, "")
self.cc_ex_amt_label = wx.StaticText(self.clinical_collection_pane, wx.ID_ANY, _("Exudate Amount:"))
self.cc_ex_amt = wx.ComboBox(self.clinical_collection_pane, wx.ID_ANY, choices=[_("None"), _("Scant"), _("Small"), _("Moderate"), _("Large")], style=wx.CB_DROPDOWN)
self.cc_ex_type_label = wx.StaticText(self.clinical_collection_pane, wx.ID_ANY, _("Exudate Type:"))
self.cc_ex_type = wx.ComboBox(self.clinical_collection_pane, wx.ID_ANY, choices=[_("None"), _("Bloody"), _("Serosanguineous"), _("Serous"), _("Purulent")], style=wx.CB_DROPDOWN)
self.cc_granulation_label = wx.StaticText(self.clinical_collection_pane, wx.ID_ANY, _("Granulation Percentage:"))
self.cc_granulation_percentage = wx.lib.masked.NumCtrl(self.clinical_collection_pane, wx.ID_ANY, _("0"), style=wx.TE_RIGHT)
self.cc_slough_label = wx.StaticText(self.clinical_collection_pane, wx.ID_ANY, _("Slough Percentage:"))
self.cc_slough_percentage = wx.lib.masked.NumCtrl(self.clinical_collection_pane, wx.ID_ANY, _("0"), style=wx.TE_RIGHT)
self.cc_eschar_label = wx.StaticText(self.clinical_collection_pane, wx.ID_ANY, _("Eschar Percentage:"))
self.cc_eschar_percentage = wx.lib.masked.NumCtrl(self.clinical_collection_pane, wx.ID_ANY, _("0"), style=wx.TE_RIGHT)
self.cc_bone_label = wx.StaticText(self.clinical_collection_pane, wx.ID_ANY, _("Bone / Tendon Percentage:"))
self.cc_bone_percentage = wx.lib.masked.NumCtrl(self.clinical_collection_pane, wx.ID_ANY, _("0"), style=wx.TE_RIGHT)
self.cc_peri_desc_label = wx.StaticText(self.clinical_collection_pane, wx.ID_ANY, _("Peri-Ulcer Area Description:"))
self.cc_peri_desc = wx.TextCtrl(self.clinical_collection_pane, wx.ID_ANY, "", style=wx.TE_MULTILINE)
self.cc_blanching_label = wx.StaticText(self.clinical_collection_pane, wx.ID_ANY, _("Blanching Exists?"))
self.cc_blanching_exists = wx.CheckBox(self.clinical_collection_pane, wx.ID_ANY, "")
self.cc_infection_label = wx.StaticText(self.clinical_collection_pane, wx.ID_ANY, _("Infection Notes:"))
self.cc_infection = wx.TextCtrl(self.clinical_collection_pane, wx.ID_ANY, "", style=wx.TE_MULTILINE)
self.cc_odor_intensity_label = wx.StaticText(self.clinical_collection_pane, wx.ID_ANY, _("Odor Intensity:"))
self.cc_odor_intensity = wx.ComboBox(self.clinical_collection_pane, wx.ID_ANY, choices=[_("No odor"), _("Slight"), _("Moderate"), _("Strong")], style=wx.CB_DROPDOWN)
self.cc_odor_description_label = wx.StaticText(self.clinical_collection_pane, wx.ID_ANY, _("Odor Description:"))
self.cc_odor_description = wx.TextCtrl(self.clinical_collection_pane, wx.ID_ANY, "", style=wx.TE_MULTILINE)
self.cc_save_button = wx.Button(self.clinical_collection_pane, wx.ID_SAVE, "")
self.cc_reset_button = wx.Button(self.clinical_collection_pane, wx.ID_ANY, _("Reset"))
self.__set_properties()
self.__do_layout()
self.Bind(wx.EVT_MENU, self.OnCloseWindow, self.exitFileMenuItem)
self.Bind(wx.EVT_MENU, self.DoWoundList, self.newWoundMenuItem)
self.Bind(wx.EVT_MENU, self.DoAssessmentList, self.newAssessmentMenuItem)
self.Bind(wx.EVT_MENU, self.DoAlgorithmList, self.newAlgorithmItem)
self.Bind(wx.EVT_MENU, self.DoExperimentList, self.newExperimentItem)
self.Bind(wx.EVT_MENU, self.DoChangePassword, self.changePasswordItem)
self.Bind(wx.EVT_MENU, self.DoConfigList, self.newSystemConfiguration)
self.Bind(wx.EVT_BUTTON, self.DoSelectPatient, self.ch_patient_select_button)
self.Bind(wx.EVT_COMBOBOX, self.DoWoundSelect, self.ch_wound_loc)
self.Bind(wx.EVT_BUTTON, self.DoNewWound, self.ch_wound_new_button)
self.Bind(wx.EVT_COMBOBOX, self.DoSelectAssessment, self.ch_assessment_id)
self.Bind(wx.EVT_BUTTON, self.DoNewAssessment, self.ch_assessment_new_button)
self.Bind(wx.EVT_BUTTON, self.OnStartChemBaseline, self.baseline_start_stop_button)
self.Bind(wx.EVT_BUTTON, self.OnResetChemBaseline, self.baseline_reset_button)
self.Bind(wx.EVT_BUTTON, self.OnStartChemWound, self.wound_start_stop_button)
self.Bind(wx.EVT_BUTTON, self.OnResetChemWound, self.wound_reset_button)
self.Bind(wx.EVT_BUTTON, self.DoSelectPatient, self.cv_patient_select_button)
self.Bind(wx.EVT_COMBOBOX, self.DoWoundSelect, self.wound_loc)
self.Bind(wx.EVT_BUTTON, self.DoNewWound, self.cv_wound_new_button)
self.Bind(wx.EVT_COMBOBOX, self.DoSelectAssessment, self.cv_assessment_id)
self.Bind(wx.EVT_BUTTON, self.DoNewAssessment, self.cv_assessment_new_button)
self.Bind(wx.EVT_BUTTON, self.OnRecordClick, self.start_stop_button)
self.Bind(wx.EVT_BUTTON, self.DoVisualReset, self.cv_reset_button)
self.Bind(wx.EVT_BUTTON, self.OnPreviewClick, self.preview_button)
self.Bind(wx.EVT_BUTTON, self.OnSelectSnapshot, id=801)
self.Bind(wx.EVT_BUTTON, self.OnSelectSnapshot, id=803)
self.Bind(wx.EVT_BUTTON, self.OnSelectSnapshot, id=804)
self.Bind(wx.EVT_BUTTON, self.OnSelectSnapshot, id=805)
self.Bind(wx.EVT_BUTTON, self.OnSelectSnapshot, id=807)
self.Bind(wx.EVT_BUTTON, self.DoSelectPatient, self.ms_patient_select_button)
self.Bind(wx.EVT_COMBOBOX, self.DoWoundSelect, self.ms_wound_loc)
self.Bind(wx.EVT_BUTTON, self.DoNewWound, self.ms_wound_new_button)
self.Bind(wx.EVT_COMBOBOX, self.DoSelectAssessment, self.ms_assessment_id)
self.Bind(wx.EVT_BUTTON, self.DoNewAssessment, self.ms_assessment_new_button)
self.Bind(wx.EVT_BUTTON, self.OnMSSnapshotClick, self.ms_snapshot_button)
self.Bind(wx.EVT_BUTTON, self.OnMSResetClick, self.ms_reset_button)
self.Bind(wx.EVT_BUTTON, self.OnMSPreviewClick, self.ms_preview_button)
self.Bind(wx.EVT_BUTTON, self.DoSelectPatient, self.wm_patient_select_button)
self.Bind(wx.EVT_COMBOBOX, self.DoWoundSelect, self.wm_wound_loc)
self.Bind(wx.EVT_COMBOBOX, self.DoSelectAssessment, self.wm_assessment_id)
self.Bind(wx.EVT_BUTTON, self.DoSelectPatient, self.wmh_patient_select_button)
self.Bind(wx.EVT_COMBOBOX, self.DoWoundSelect, self.wmh_wound_loc)
self.Bind(wx.EVT_BUTTON, self.DoSelectPatient, self.ts_patient_select_button)
self.Bind(wx.EVT_COMBOBOX, self.DoWoundSelect, self.ts_wound_loc)
self.Bind(wx.EVT_COMBOBOX, self.DoSelectAssessment, self.ts_assessment_id)
self.Bind(wx.EVT_BUTTON, self.DoSelectPatient, self.tsh_patient_select_button)
self.Bind(wx.EVT_COMBOBOX, self.DoWoundSelect, self.tsh_wound_loc)
self.Bind(wx.EVT_BUTTON, self.DoSelectPatient, self.th_patient_select_button)
self.Bind(wx.EVT_COMBOBOX, self.DoWoundSelect, self.th_wound_loc)
self.Bind(wx.EVT_COMBOBOX, self.DoSelectAssessment, self.th_assessment_id)
self.Bind(wx.EVT_BUTTON, self.DoSelectPatient, self.thh_patient_select_button)
self.Bind(wx.EVT_COMBOBOX, self.DoWoundSelect, self.thh_wound_loc)
self.Bind(wx.EVT_BUTTON, self.DoSelectPatient, self.cs_patient_select_button)
self.Bind(wx.EVT_COMBOBOX, self.DoWoundSelect, self.cs_wound_loc)
self.Bind(wx.EVT_COMBOBOX, self.DoSelectAssessment, self.cs_assessment_id)
self.Bind(wx.EVT_BUTTON, self.DoSelectPatient, self.csh_patient_select_button)
self.Bind(wx.EVT_COMBOBOX, self.DoWoundSelect, self.csh_wound_loc)
self.Bind(wx.EVT_BUTTON, self.DoSelectPatient, self.patient_select_button)
self.Bind(wx.EVT_COMBOBOX, self.DoWoundSelect, self.cc_wound_loc)
self.Bind(wx.EVT_BUTTON, self.DoNewWound, self.cc_wound_button)
self.Bind(wx.EVT_COMBOBOX, self.DoSelectAssessment, self.cc_assessment_no)
self.Bind(wx.EVT_BUTTON, self.DoNewAssessment, self.cc_assessment_button)
self.Bind(wx.EVT_BUTTON, self.OnClinicalSave, self.cc_save_button)
self.Bind(wx.EVT_BUTTON, self.OnClinicalReset, self.cc_reset_button)
# end wxGlade
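# Manually-maintained runtime state (everything below sits outside the
# wxGlade-generated block): recording/playback/preview flags plus the
# currently selected patient, wound, and assessment (-1 / None = none).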
self.recordingExists = False
self.recordOn = False
self.playbackOn = False
self.previewOn = False
self.previewPaused = False
self.currentPatient = None
self.patient_id = -1
self.woundId = -1
self.assessmentId = -1
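# The record and reset controls start disabled; the selection handlers
# presumably re-enable them once a patient/wound/assessment is chosen.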
self.start_stop_button.Disable()
self.cv_reset_button.Disable()
self.Bind(wx.EVT_CLOSE, self.OnCloseWindow)
self.Bind(wx.EVT_MENU, self.DoNewPatient, self.newPatientMenuItem)
self.Bind(wx.EVT_MENU, self.DoSelectPatient, self.selectPatientMenuItem)
self.ch_wound_msg.SetValue('No assessment selected')
self.ch_baseline_msg.SetValue('No assessment selected')
self.activeVisualBtn = -1
self.VisualControl("")
self.clinical_collection_pane.SetAutoLayout(True)
self.clinical_collection_pane.SetupScrolling()
# set up biochemical plot panel
self.chem_figure = Figure(figsize=(7.92, 7.92))
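# figsize is in inches; the on-screen pixel size depends on figure.dpi
# (roughly 792x792 px at the common matplotlib default of 100 dpi).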
self.chem_axes = self.chem_figure.add_subplot(111)
self.chem_axes.set_xlabel('Frequency')
self.chem_axes.set_ylabel('Impedance')
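# FigureCanvas (the wxAgg backend widget) embeds the matplotlib figure in
# the wx panel; the sizer below lets the canvas track panel resizes.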
self.chem_canvas = FigureCanvas(self.chem_plot_panel, wx.ID_ANY, self.chem_figure)
chem_panel_sizer = wx.BoxSizer(wx.VERTICAL)
chem_panel_sizer.Add(self.chem_canvas, 1, wx.EXPAND)
self.chem_plot_panel.SetSizer(chem_panel_sizer)
self.chem_plot_panel.Fit()
# Hide analysis tabs
#self.assessment_notebook.EnableTab(3, False)
#self.assessment_notebook.EnableTab(4, False)
#self.assessment_notebook.EnableTab(5, False)
#self.assessment_notebook.EnableTab(6, False)
def __set_properties(self):
"""
Set fonts, sizes, and initial values for the widgets
"""
# begin wxGlade: MainFrame.__set_properties
self.SetTitle(_("Multi-Modality Portable System for Pressure Ulcer Prevention - Assessment"))
self.main_frame_statusbar.SetStatusWidths([-1])
# statusbar fields
main_frame_statusbar_fields = [_("frame_1_statusbar")]
for i in range(len(main_frame_statusbar_fields)):
self.main_frame_statusbar.SetStatusText(main_frame_statusbar_fields[i], i)
self.chem_plot_panel.SetMinSize((640, 640))
self.ch_sampling_label.SetFont(wx.Font(10, wx.DEFAULT, wx.NORMAL, wx.BOLD, 0, ""))
self.ch_patient_label.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.ch_patient_name.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.ch_patient_name.Enable(False)
self.ch_patient_select_button.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.ch_wound_dropdown_label_copy.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.ch_wound_loc.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.ch_wound_new_button.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.ch_assessement_dropdown_label.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.ch_assessment_id.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.ch_assessment_new_button.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.baseline_label.SetMinSize((149, 19))
self.baseline_label.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.BOLD, 0, ""))
self.ch_baseline_msg.SetMinSize((600, 27))
self.ch_baseline_msg.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.chemical_wound_label.SetMinSize((136, 19))
self.chemical_wound_label.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.BOLD, 0, ""))
self.ch_wound_msg.SetMinSize((600, 27))
self.ch_wound_msg.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.rgb_video_panel.SetMinSize((640, 360))
self.rgb_video_panel_label.SetMinSize((640, 20))
self.rgb_video_panel_label.SetFont(wx.Font(10, wx.DEFAULT, wx.NORMAL, wx.BOLD, 0, ""))
self.depth_video_panel.SetMinSize((320, 240))
self.thermal_video_panel.SetMinSize((320, 240))
self.visual_depth_label.SetFont(wx.Font(10, wx.DEFAULT, wx.NORMAL, wx.BOLD, 0, ""))
self.visual_thermal_label.SetFont(wx.Font(10, wx.DEFAULT, wx.NORMAL, wx.BOLD, 0, ""))
self.cv_patient_label.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.patient_name.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.patient_name.Enable(False)
self.cv_patient_select_button.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.cv_wound_dropdown_label.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.wound_loc.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.cv_wound_new_button.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.cv_assessement_dropdown_label.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.cv_assessment_id.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.cv_assessment_new_button.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.start_stop_button.SetMinSize((75, 29))
self.start_stop_button.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.cv_reset_button.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.preview_button.SetMinSize((75, 29))
self.preview_button.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.visual_msg.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.cv_grid_mc.SetMinSize((75, 37))
self.visual_collection_pane.SetFocus()
self.rgb_video_panel_copy.SetMinSize((640, 360))
self.rgb_video_panel_label_copy.SetFont(wx.Font(10, wx.DEFAULT, wx.NORMAL, wx.BOLD, 0, ""))
self.multi_video_panel.SetMinSize((256, 256))
self.multi_video_label.SetFont(wx.Font(10, wx.DEFAULT, wx.NORMAL, wx.BOLD, 0, ""))
self.ms_patient_label.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.ms_patient_name.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.ms_patient_name.Enable(False)
self.ms_patient_select_button.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.ms_wound_dropdown_label.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.ms_wound_loc.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.ms_wound_new_button.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.ms_assessement_dropdown_label.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.ms_assessment_id.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.ms_assessment_new_button.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.ms_snapshot_button.SetMinSize((75, 29))
self.ms_snapshot_button.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.ms_reset_button.SetMinSize((75, 29))
self.ms_reset_button.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.ms_preview_button.SetMinSize((75, 29))
self.ms_preview_button.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.ms_msg.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.wm_3d_panel.SetMinSize((640, 480))
self.wm3d_label.SetMinSize((175, 20))
self.wm3d_label.SetFont(wx.Font(10, wx.DEFAULT, wx.NORMAL, wx.BOLD, 0, ""))
self.wm_rgb_radio_btn.SetMinSize((166, 39))
self.wm_patient_label.SetFont(wx.Font(10, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.wm_patient_name.SetFont(wx.Font(10, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.wm_patient_name.Enable(False)
self.wm_patient_select_button.SetFont(wx.Font(10, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.wm_wound_dropdown_label.SetFont(wx.Font(10, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.wm_wound_loc.SetFont(wx.Font(10, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.wm_assessement_dropdown_label.SetFont(wx.Font(10, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.wm_assessment_id.SetFont(wx.Font(10, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.wm_reslts_label.SetFont(wx.Font(14, wx.DEFAULT, wx.NORMAL, wx.BOLD, 0, ""))
self.wm_l_label.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.wm_length.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.wm_length.SetAllowNegative(False)
self.wm_length.SetFractionWidth(2)
self.wm_length.SetMin(0.00)
self.wm_w_label.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.wm_width.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.wm_width.SetAllowNegative(False)
self.wm_width.SetFractionWidth(2)
self.wm_width.SetMin(0.00)
self.wm_d_label.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.wm_depth.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.wm_depth.SetAllowNegative(False)
self.wm_depth.SetFractionWidth(2)
self.wm_depth.SetMin(0.00)
self.wm_lw_label.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.wm_lw.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.wm_lw.SetAllowNegative(False)
self.wm_lw.SetFractionWidth(2)
self.wm_lw.SetMin(0.00)
self.wm_a_label.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.wm_area.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.wm_area.SetAllowNegative(False)
self.wm_area.SetFractionWidth(2)
self.wm_area.SetMin(0.00)
self.wm_v_label.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.wm_volume.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.wm_volume.SetAllowNegative(False)
self.wm_volume.SetFractionWidth(2)
self.wm_volume.SetMin(0.00)
self.wm_push_label.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.wm_push.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.wm_push.SetAllowNegative(False)
self.wm_push.SetFractionWidth(0)
self.wm_push.SetMin(0)
self.wmh_patient_label.SetFont(wx.Font(10, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.wmh_patient_name.SetFont(wx.Font(10, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.wmh_patient_name.Enable(False)
self.wmh_patient_select_button.SetFont(wx.Font(10, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.wmh_wound_dropdown_label.SetFont(wx.Font(10, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.wmh_wound_loc.SetFont(wx.Font(10, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.wm_historical_label.SetFont(wx.Font(14, wx.DEFAULT, wx.NORMAL, wx.BOLD, 0, ""))
self.ts_2d_panel.SetMinSize((480, 360))
self.ts_2d_label.SetFont(wx.Font(10, wx.DEFAULT, wx.NORMAL, wx.BOLD, 0, ""))
self.ts_3d_panel.SetMinSize((480, 360))
self.ts_3d_label.SetMinSize((161, 16))
self.ts_3d_label.SetFont(wx.Font(10, wx.DEFAULT, wx.NORMAL, wx.BOLD, 0, ""))
self.ts_patient_label.SetFont(wx.Font(10, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.ts_patient_name.SetFont(wx.Font(10, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.ts_patient_name.Enable(False)
self.ts_patient_select_button.SetFont(wx.Font(10, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.ts_wound_dropdown_label.SetFont(wx.Font(10, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.ts_wound_loc.SetFont(wx.Font(10, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.ts_assessement_dropdown_label.SetFont(wx.Font(10, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.ts_assessment_id.SetFont(wx.Font(10, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.ts_results_label.SetFont(wx.Font(14, wx.DEFAULT, wx.NORMAL, wx.BOLD, 0, ""))
self.ts_granulation_label.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.ts_granulation_percentage.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.ts_granulation_percentage.SetAllowNegative(False)
self.ts_granulation_percentage.SetFractionWidth(1)
self.ts_granulation_percentage.SetMin(0)
self.ts_slough_label.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.ts_slough_percentage.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.ts_slough_percentage.SetAllowNegative(False)
self.ts_slough_percentage.SetFractionWidth(1)
self.ts_slough_percentage.SetMin(0)
self.ts_eschar_label.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.ts_eschar_percentage.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.ts_eschar_percentage.SetAllowNegative(False)
self.ts_eschar_percentage.SetFractionWidth(1)
self.ts_eschar_percentage.SetMin(0)
self.ts_bone_label.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.ts_bone_percentage.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.ts_bone_percentage.SetAllowNegative(False)
self.ts_bone_percentage.SetFractionWidth(1)
self.ts_bone_percentage.SetMin(0)
self.ts_staging_label.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.ts_staging.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.ts_staging.SetAllowNegative(False)
self.ts_staging.SetFractionWidth(0)
self.ts_staging.SetMin(0)
self.tsh_patient_label.SetFont(wx.Font(10, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.tsh_patient_name.SetFont(wx.Font(10, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.tsh_patient_name.Enable(False)
self.tsh_patient_select_button.SetFont(wx.Font(10, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.tsh_wound_dropdown_label.SetFont(wx.Font(10, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.tsh_wound_loc.SetFont(wx.Font(10, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.ts_historical_label.SetFont(wx.Font(14, wx.DEFAULT, wx.NORMAL, wx.BOLD, 0, ""))
self.thermal_2d_pane.SetMinSize((384, 288))
self.th_2d_label.SetFont(wx.Font(10, wx.DEFAULT, wx.NORMAL, wx.BOLD, 0, ""))
self.thermal_3d_panel.SetMinSize((384, 288))
self.th_3d_label.SetFont(wx.Font(10, wx.DEFAULT, wx.NORMAL, wx.BOLD, 0, ""))
self.thermal_cont_radio.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.thermal_segmented_radio.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.thermal_segment_size.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.thermal_segment_size.SetSelection(1)
self.th_patient_label.SetFont(wx.Font(10, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.th_patient_name.SetFont(wx.Font(10, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.th_patient_name.Enable(False)
self.th_patient_select_button.SetFont(wx.Font(10, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.th_wound_dropdown_label.SetFont(wx.Font(10, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.th_wound_loc.SetFont(wx.Font(10, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.th_assessement_dropdown_label.SetFont(wx.Font(10, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.th_assessment_id.SetFont(wx.Font(10, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.th_results_label.SetFont(wx.Font(14, wx.DEFAULT, wx.NORMAL, wx.BOLD, 0, ""))
self.t_max_label.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.t_max.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.t_max.SetAllowNegative(False)
self.t_max.SetFractionWidth(1)
self.t_max.SetMin(0.0)
self.t_min_label.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.t_min.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.t_min.SetAllowNegative(False)
self.t_min.SetFractionWidth(1)
self.t_min.SetMin(0.0)
self.t_base_label.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.t_baseline.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.t_baseline.SetAllowNegative(False)
self.t_baseline.SetFractionWidth(1)
self.t_baseline.SetMin(0.0)
self.t_sigma_label.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.t_sigma.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.t_sigma.SetAllowNegative(False)
self.t_sigma.SetFractionWidth(3)
self.t_sigma.SetMin(0.00)
self.t_s1_label.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.t_segment_1.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.t_segment_1.SetAllowNegative(False)
self.t_segment_1.SetFractionWidth(1)
self.t_segment_1.SetMin(0.0)
self.t_s2_label.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.t_segment_2.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.t_segment_2.SetAllowNegative(False)
self.t_segment_2.SetFractionWidth(1)
self.t_segment_2.SetMin(0.0)
self.t_s3_label.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.t_segment_3.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.t_segment_3.SetAllowNegative(False)
self.t_segment_3.SetFractionWidth(1)
self.t_segment_3.SetMin(0.0)
self.t_s4_label.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.t_segment_4.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.t_segment_4.SetAllowNegative(False)
self.t_segment_4.SetFractionWidth(1)
self.t_segment_4.SetMin(0.0)
self.t_s5_label.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.t_segment_5.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.t_segment_5.SetAllowNegative(False)
self.t_segment_5.SetFractionWidth(1)
self.t_segment_5.SetMin(0.0)
self.thh_patient_label.SetFont(wx.Font(10, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.thh_patient_name.SetFont(wx.Font(10, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.thh_patient_name.Enable(False)
self.thh_patient_select_button.SetFont(wx.Font(10, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.thh_wound_dropdown_label.SetFont(wx.Font(10, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.thh_wound_loc.SetFont(wx.Font(10, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.th_historical_label.SetFont(wx.Font(14, wx.DEFAULT, wx.NORMAL, wx.BOLD, 0, ""))
self.chemical_panel.SetMinSize((640, 480))
self.cs_panel_label.SetFont(wx.Font(10, wx.DEFAULT, wx.NORMAL, wx.BOLD, 0, ""))
self.cs_patient_label.SetFont(wx.Font(10, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.cs_patient_name.SetFont(wx.Font(10, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.cs_patient_name.Enable(False)
self.cs_patient_select_button.SetFont(wx.Font(10, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.cs_wound_dropdown_label.SetFont(wx.Font(10, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.cs_wound_loc.SetFont(wx.Font(10, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.cs_assessement_dropdown_label.SetFont(wx.Font(10, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.cs_assessment_id.SetFont(wx.Font(10, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.cs_results_label.SetFont(wx.Font(14, wx.DEFAULT, wx.NORMAL, wx.BOLD, 0, ""))
self.csh_patient_label.SetFont(wx.Font(10, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.csh_patient_name.SetFont(wx.Font(10, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.csh_patient_name.Enable(False)
self.csh_patient_select_button.SetFont(wx.Font(10, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.csh_wound_dropdown_label.SetFont(wx.Font(10, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.csh_wound_loc.SetFont(wx.Font(10, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.cs_historical_label.SetFont(wx.Font(14, wx.DEFAULT, wx.NORMAL, wx.BOLD, 0, ""))
self.cc_label.SetFont(wx.Font(16, wx.DEFAULT, wx.NORMAL, wx.BOLD, 0, ""))
self.cc_patient_label.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.cc_patient_name.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.cc_patient_name.Enable(False)
self.patient_select_button.SetFont(wx.Font(10, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.cc_wound_label.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.cc_wound_loc.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.cc_wound_button.SetFont(wx.Font(10, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.assessment_no_label.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.cc_assessment_no.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.cc_assessment_button.SetFont(wx.Font(10, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.cc_datetime_label.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.cc_datepicker.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.cc_timepicker.SetMinSize((92, 29))
self.cc_timepicker.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.cc_length_label.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.cc_length.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.cc_length.SetAllowNegative(False)
self.cc_length.SetFractionWidth(2)
self.cc_length.SetMin(0.00)
self.cc_width_label.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.cc_width.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.cc_width.SetAllowNegative(False)
self.cc_width.SetFractionWidth(2)
self.cc_width.SetMin(0.00)
self.cc_depth_label.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.cc_depth.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.cc_depth.SetAllowNegative(False)
self.cc_depth.SetFractionWidth(2)
self.cc_depth.SetMin(0.00)
self.cc_u_depth_label.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.cc_u_depth.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.cc_u_depth.SetAllowNegative(False)
self.cc_u_depth.SetFractionWidth(2)
self.cc_u_depth.SetMin(0.00)
self.cc_u_desc_label.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.cc_u_desc.SetMinSize((300, 34))
self.cc_u_desc.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.cc_staging_label.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.cc_staging.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.cc_staging.SetSelection(0)
self.cc_bed_label.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.cc_bed_color.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.cc_ex_amt_label.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.cc_ex_amt.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.cc_ex_amt.SetSelection(0)
self.cc_ex_type_label.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.cc_ex_type.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.cc_ex_type.SetSelection(-1)
self.cc_granulation_label.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.cc_granulation_percentage.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.cc_granulation_percentage.SetAllowNegative(False)
self.cc_granulation_percentage.SetMin(0)
self.cc_slough_label.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.cc_slough_percentage.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.cc_slough_percentage.SetAllowNegative(False)
self.cc_slough_percentage.SetMin(0)
self.cc_eschar_label.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.cc_eschar_percentage.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.cc_eschar_percentage.SetAllowNegative(False)
self.cc_eschar_percentage.SetMin(0)
self.cc_bone_label.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.cc_bone_percentage.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.cc_bone_percentage.SetAllowNegative(False)
self.cc_bone_percentage.SetMin(0)
self.cc_peri_desc_label.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.cc_peri_desc.SetMinSize((300, 34))
self.cc_peri_desc.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.cc_blanching_label.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.cc_blanching_exists.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.cc_infection_label.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.cc_infection.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.cc_odor_intensity_label.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.cc_odor_intensity.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.cc_odor_intensity.SetSelection(-1)
self.cc_odor_description_label.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.cc_odor_description.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.cc_save_button.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.cc_reset_button.SetFont(wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
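# AUI_NB_TOP places the AGW AUI notebook's tabs along its top edge.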
self.assessment_notebook.SetAGWWindowStyleFlag(aui.AUI_NB_TOP)
# end wxGlade
def __do_layout(self):
# begin wxGlade: MainFrame.__do_layout
sizer_1 = wx.BoxSizer(wx.VERTICAL)
sizer_2 = wx.BoxSizer(wx.VERTICAL)
clinical_sizer = wx.BoxSizer(wx.VERTICAL)
clinical_button_sizer = wx.BoxSizer(wx.HORIZONTAL)
grid_sizer_2 = wx.FlexGridSizer(22, 3, 6, 12)
sizer_4 = wx.BoxSizer(wx.HORIZONTAL)
cs_sizer = wx.BoxSizer(wx.HORIZONTAL)
cs_historical_top_sizer = wx.BoxSizer(wx.VERTICAL)
csh_pwa_grid_sizer = wx.FlexGridSizer(3, 3, 5, 4)
cs_results_top_sizer = wx.BoxSizer(wx.VERTICAL)
cs_pwa_grid_sizer = wx.FlexGridSizer(3, 3, 5, 4)
cs_sizer_l_v = wx.BoxSizer(wx.VERTICAL)
th_sizer = wx.BoxSizer(wx.HORIZONTAL)
th_historical_top_sizer = wx.BoxSizer(wx.VERTICAL)
thh_pwa_grid_sizer = wx.FlexGridSizer(3, 3, 5, 4)
th_results_top_sizer = wx.BoxSizer(wx.VERTICAL)
grid_sizer_1 = wx.GridSizer(9, 2, 16, 30)
th_pwa_grid_sizer = wx.FlexGridSizer(3, 3, 5, 4)
th_sizer_l_v = wx.BoxSizer(wx.VERTICAL)
ts_button_grid_sizer = wx.FlexGridSizer(2, 2, 0, 0)
ts_sizer = wx.BoxSizer(wx.HORIZONTAL)
ts_historical_top_sizer = wx.BoxSizer(wx.VERTICAL)
tsh_pwa_grid_sizer = wx.FlexGridSizer(3, 3, 5, 4)
ts_results_top_sizer = wx.BoxSizer(wx.VERTICAL)
ts_results_grid_sizer = wx.GridSizer(5, 2, 16, 30)
ts_pwa_grid_sizer = wx.FlexGridSizer(3, 3, 5, 4)
ts_sizer_l_v = wx.BoxSizer(wx.VERTICAL)
wm_sizer = wx.BoxSizer(wx.HORIZONTAL)
wm_historical_top_sizer = wx.BoxSizer(wx.VERTICAL)
wmh_pwa_grid_sizer = wx.FlexGridSizer(3, 3, 5, 4)
wm_results_top_sizer = wx.BoxSizer(wx.VERTICAL)
wm_results_grid_sizer = wx.GridSizer(7, 2, 16, 30)
wm_pwa_grid_sizer = wx.FlexGridSizer(3, 3, 5, 4)
wm_sizer_l_v = wx.BoxSizer(wx.VERTICAL)
wm_sizer_lbh = wx.BoxSizer(wx.HORIZONTAL)
wm_sizer_lbmv = wx.BoxSizer(wx.VERTICAL)
ms_collection_sizer = wx.BoxSizer(wx.HORIZONTAL)
ms_r_sizer = wx.BoxSizer(wx.VERTICAL)
ms_grid_sizer = wx.FlexGridSizer(6, 3, 8, 20)
ms_l_sizer = wx.BoxSizer(wx.VERTICAL)
ms_lower_video_sizer = wx.FlexGridSizer(3, 3, 0, 0)
sizer_7_copy = wx.BoxSizer(wx.HORIZONTAL)
sizer_6_copy = wx.BoxSizer(wx.HORIZONTAL)
collection_sizer_1 = wx.BoxSizer(wx.HORIZONTAL)
cv_r_sizer = wx.BoxSizer(wx.VERTICAL)
sizer_8 = wx.BoxSizer(wx.HORIZONTAL)
grid_sizer_3 = wx.GridSizer(3, 3, 10, 10)
cv_grid_sizer = wx.FlexGridSizer(6, 3, 8, 20)
cv_l_sizer = wx.BoxSizer(wx.VERTICAL)
lower_video_sizer = wx.FlexGridSizer(2, 5, 0, 0)
sizer_7 = wx.BoxSizer(wx.HORIZONTAL)
sizer_6 = wx.BoxSizer(wx.HORIZONTAL)
chemical_collection_sizer_h = wx.BoxSizer(wx.HORIZONTAL)
chemical_collection_sizer_vr = wx.BoxSizer(wx.VERTICAL)
ch_grid_sizer = wx.FlexGridSizer(8, 3, 8, 20)
chemical_collection_sizer_vl = wx.BoxSizer(wx.VERTICAL)
chemical_collection_sizer_h.Add((20, 20), 0, 0, 0)
chemical_collection_sizer_vl.Add((20, 50), 0, 0, 0)
chemical_collection_sizer_vl.Add(self.chem_plot_panel, 1, wx.EXPAND, 0)
chemical_collection_sizer_vl.Add((20, 20), 0, 0, 0)
chemical_collection_sizer_vl.Add(self.ch_sampling_label, 0, wx.ALIGN_CENTER_HORIZONTAL, 0)
chemical_collection_sizer_h.Add(chemical_collection_sizer_vl, 1, 0, 0)
chemical_collection_sizer_h.Add((20, 20), 0, 0, 0)
chemical_collection_sizer_vr.Add((20, 20), 0, 0, 0)
ch_grid_sizer.Add((20, 20), 0, 0, 0)
ch_grid_sizer.Add((20, 20), 0, 0, 0)
ch_grid_sizer.Add((20, 20), 0, 0, 0)
ch_grid_sizer.Add(self.ch_patient_label, 0, 0, 0)
ch_grid_sizer.Add(self.ch_patient_name, 0, 0, 0)
ch_grid_sizer.Add(self.ch_patient_select_button, 0, 0, 3)
ch_grid_sizer.Add(self.ch_wound_dropdown_label_copy, 0, 0, 0)
ch_grid_sizer.Add(self.ch_wound_loc, 0, 0, 0)
ch_grid_sizer.Add(self.ch_wound_new_button, 0, 0, 0)
ch_grid_sizer.Add(self.ch_assessement_dropdown_label, 0, 0, 0)
ch_grid_sizer.Add(self.ch_assessment_id, 0, 0, 0)
ch_grid_sizer.Add(self.ch_assessment_new_button, 0, 0, 0)
chemical_collection_sizer_vr.Add(ch_grid_sizer, 0, wx.ALIGN_CENTER_HORIZONTAL, 0)
chemical_collection_sizer_vr.Add((20, 40), 0, 0, 0)
chemical_collection_sizer_vr.Add(self.baseline_label, 0, wx.ALIGN_CENTER_HORIZONTAL | wx.ALIGN_CENTER_VERTICAL, 0)
chemical_collection_sizer_vr.Add((20, 20), 0, 0, 0)
chemical_collection_sizer_vr.Add(self.ch_baseline_msg, 0, wx.ALIGN_CENTER_HORIZONTAL, 0)
chemical_collection_sizer_vr.Add((50, 20), 0, 0, 0)
chemical_collection_sizer_vr.Add(self.baseline_start_stop_button, 0, wx.ALIGN_CENTER_HORIZONTAL, 0)
chemical_collection_sizer_vr.Add((20, 20), 0, 0, 0)
chemical_collection_sizer_vr.Add(self.baseline_reset_button, 0, wx.ALIGN_CENTER_HORIZONTAL, 0)
chemical_collection_sizer_vr.Add((20, 100), 0, 0, 0)
chemical_collection_sizer_vr.Add(self.chemical_wound_label, 0, wx.ALIGN_CENTER_HORIZONTAL | wx.ALIGN_CENTER_VERTICAL, 0)
chemical_collection_sizer_vr.Add((20, 20), 0, 0, 0)
chemical_collection_sizer_vr.Add(self.ch_wound_msg, 0, wx.ALIGN_CENTER_HORIZONTAL, 0)
chemical_collection_sizer_vr.Add((20, 20), 0, 0, 0)
chemical_collection_sizer_vr.Add(self.wound_start_stop_button, 0, wx.ALIGN_CENTER_HORIZONTAL, 0)
chemical_collection_sizer_vr.Add((20, 20), 0, 0, 0)
chemical_collection_sizer_vr.Add(self.wound_reset_button, 0, wx.ALIGN_CENTER_HORIZONTAL, 0)
chemical_collection_sizer_h.Add(chemical_collection_sizer_vr, 1, wx.EXPAND, 0)
self.chemical_collection_pane.SetSizer(chemical_collection_sizer_h)
cv_l_sizer.Add((700, 20), 0, 0, 0)
sizer_6.Add((20, 360), 0, 0, 0)
sizer_6.Add(self.rgb_video_panel, 0, 0, 0)
sizer_6.Add((40, 360), 0, 0, 0)
cv_l_sizer.Add(sizer_6, 0, wx.EXPAND, 0)
sizer_7.Add((20, 20), 0, 0, 0)
sizer_7.Add(self.rgb_video_panel_label, 0, wx.ALIGN_CENTER_HORIZONTAL | wx.ALIGN_CENTER_VERTICAL, 0)
sizer_7.Add((40, 20), 0, 0, 0)
cv_l_sizer.Add(sizer_7, 0, 0, 0)
lower_video_sizer.Add((20, 240), 0, 0, 0)
lower_video_sizer.Add(self.depth_video_panel, 1, wx.EXPAND, 0)
lower_video_sizer.Add((20, 240), 0, 0, 0)
lower_video_sizer.Add(self.thermal_video_panel, 1, wx.EXPAND, 0)
lower_video_sizer.Add((20, 240), 0, 0, 0)
lower_video_sizer.Add((20, 20), 0, 0, 0)
lower_video_sizer.Add(self.visual_depth_label, 0, wx.ALIGN_CENTER_HORIZONTAL | wx.ALIGN_CENTER_VERTICAL, 0)
lower_video_sizer.Add((20, 20), 0, 0, 0)
lower_video_sizer.Add(self.visual_thermal_label, 0, wx.ALIGN_CENTER_HORIZONTAL | wx.ALIGN_CENTER_VERTICAL, 0)
lower_video_sizer.Add((20, 20), 0, 0, 0)
cv_l_sizer.Add(lower_video_sizer, 1, 0, 0)
collection_sizer_1.Add(cv_l_sizer, 0, wx.EXPAND, 0)
cv_grid_sizer.Add((20, 20), 0, 0, 0)
cv_grid_sizer.Add((20, 20), 0, 0, 0)
cv_grid_sizer.Add((20, 20), 0, 0, 0)
cv_grid_sizer.Add(self.cv_patient_label, 0, 0, 0)
cv_grid_sizer.Add(self.patient_name, 0, 0, 0)
cv_grid_sizer.Add(self.cv_patient_select_button, 0, 0, 3)
cv_grid_sizer.Add(self.cv_wound_dropdown_label, 0, 0, 0)
cv_grid_sizer.Add(self.wound_loc, 0, 0, 0)
cv_grid_sizer.Add(self.cv_wound_new_button, 0, 0, 0)
cv_grid_sizer.Add(self.cv_assessement_dropdown_label, 0, 0, 0)
cv_grid_sizer.Add(self.cv_assessment_id, 0, 0, 0)
cv_grid_sizer.Add(self.cv_assessment_new_button, 0, 0, 0)
cv_grid_sizer.Add((20, 40), 0, 0, 0)
cv_grid_sizer.Add((20, 40), 0, 0, 0)
cv_grid_sizer.Add((20, 40), 0, 0, 0)
cv_grid_sizer.Add(self.start_stop_button, 0, 0, 2)
cv_grid_sizer.Add(self.cv_reset_button, 0, 0, 0)
cv_grid_sizer.Add(self.preview_button, 0, 0, 0)
cv_r_sizer.Add(cv_grid_sizer, 0, wx.LEFT | wx.TOP | wx.EXPAND, 20)
cv_r_sizer.Add((20, 30), 0, 0, 0)
cv_r_sizer.Add(self.visual_msg, 0, wx.EXPAND, 0)
cv_r_sizer.Add((20, 30), 0, 0, 0)
sizer_8.Add((120, 20), 0, 0, 0)
grid_sizer_3.Add((75, 37), 0, 0, 0)
grid_sizer_3.Add(self.cv_grid_uc, 0, 0, 0)
grid_sizer_3.Add((75, 37), 0, 0, 0)
grid_sizer_3.Add(self.cv_grid_ml, 0, 0, 0)
grid_sizer_3.Add(self.cv_grid_mc, 0, 0, 0)
grid_sizer_3.Add(self.cv_grid_mr, 0, 0, 0)
grid_sizer_3.Add((75, 37), 0, 0, 0)
grid_sizer_3.Add(self.cv_grid_lc, 0, 0, 0)
grid_sizer_3.Add((75, 37), 0, 0, 0)
sizer_8.Add(grid_sizer_3, 1, 0, 0)
cv_r_sizer.Add(sizer_8, 1, 0, 0)
collection_sizer_1.Add(cv_r_sizer, 1, wx.LEFT | wx.TOP | wx.EXPAND, 10)
self.visual_collection_pane.SetSizer(collection_sizer_1)
ms_l_sizer.Add((700, 20), 0, 0, 0)
sizer_6_copy.Add((20, 360), 0, 0, 0)
sizer_6_copy.Add(self.rgb_video_panel_copy, 0, 0, 0)
sizer_6_copy.Add((40, 360), 0, 0, 0)
ms_l_sizer.Add(sizer_6_copy, 0, 0, 0)
sizer_7_copy.Add((20, 20), 0, 0, 0)
sizer_7_copy.Add(self.rgb_video_panel_label_copy, 1, wx.ALIGN_CENTER_HORIZONTAL | wx.ALIGN_CENTER_VERTICAL, 0)
sizer_7_copy.Add((40, 20), 0, 0, 0)
ms_l_sizer.Add(sizer_7_copy, 0, wx.EXPAND, 0)
ms_lower_video_sizer.Add((212, 256), 0, 0, 0)
ms_lower_video_sizer.Add(self.multi_video_panel, 0, 0, 0)
ms_lower_video_sizer.Add((232, 256), 0, 0, 0)
ms_lower_video_sizer.Add((212, 20), 0, 0, 0)
ms_lower_video_sizer.Add(self.multi_video_label, 0, wx.ALIGN_CENTER_HORIZONTAL | wx.ALIGN_CENTER_VERTICAL, 0)
ms_lower_video_sizer.Add((232, 20), 0, 0, 0)
ms_l_sizer.Add(ms_lower_video_sizer, 1, 0, 0)
ms_collection_sizer.Add(ms_l_sizer, 0, 0, 0)
ms_grid_sizer.Add((20, 20), 0, 0, 0)
ms_grid_sizer.Add((20, 20), 0, 0, 0)
ms_grid_sizer.Add((20, 20), 0, 0, 0)
ms_grid_sizer.Add(self.ms_patient_label, 0, 0, 0)
ms_grid_sizer.Add(self.ms_patient_name, 0, 0, 0)
ms_grid_sizer.Add(self.ms_patient_select_button, 0, 0, 3)
ms_grid_sizer.Add(self.ms_wound_dropdown_label, 0, 0, 0)
ms_grid_sizer.Add(self.ms_wound_loc, 0, 0, 0)
ms_grid_sizer.Add(self.ms_wound_new_button, 0, 0, 0)
ms_grid_sizer.Add(self.ms_assessement_dropdown_label, 0, 0, 0)
ms_grid_sizer.Add(self.ms_assessment_id, 0, 0, 0)
ms_grid_sizer.Add(self.ms_assessment_new_button, 0, 0, 0)
ms_grid_sizer.Add((20, 40), 0, 0, 0)
ms_grid_sizer.Add((20, 40), 0, 0, 0)
ms_grid_sizer.Add((20, 40), 0, 0, 0)
ms_grid_sizer.Add(self.ms_snapshot_button, 0, 0, 2)
ms_grid_sizer.Add(self.ms_reset_button, 0, 0, 2)
ms_grid_sizer.Add(self.ms_preview_button, 0, 0, 0)
ms_r_sizer.Add(ms_grid_sizer, 0, 0, 20)
ms_r_sizer.Add((20, 30), 0, 0, 0)
ms_r_sizer.Add(self.ms_msg, 0, wx.EXPAND, 0)
ms_collection_sizer.Add(ms_r_sizer, 1, 0, 0)
self.multi_spectral_collection_pane.SetSizer(ms_collection_sizer)
wm_sizer_l_v.Add((20, 50), 0, 0, 0)
wm_sizer_l_v.Add(self.wm_3d_panel, 1, wx.EXPAND, 0)
wm_sizer_l_v.Add(self.wm3d_label, 0, wx.ALIGN_BOTTOM | wx.ALIGN_CENTER_HORIZONTAL, 0)
wm_sizer_l_v.Add((20, 50), 0, 0, 0)
wm_sizer_lbh.Add((50, 20), 0, 0, 0)
wm_sizer_lbmv.Add(self.wm_no_overlay_radio_btn, 0, 0, 0)
wm_sizer_lbmv.Add(self.wm_rgb_radio_btn, 0, 0, 0)
wm_sizer_lbmv.Add(self.wm_thermal_radio_btn, 0, 0, 0)
wm_sizer_lbh.Add(wm_sizer_lbmv, 3, wx.EXPAND, 0)
wm_sizer_lbh.Add((50, 20), 0, 0, 0)
wm_sizer_l_v.Add(wm_sizer_lbh, 1, wx.EXPAND, 0)
wm_sizer.Add(wm_sizer_l_v, 1, wx.EXPAND, 0)
wm_results_top_sizer.Add((20, 20), 0, 0, 0)
wm_pwa_grid_sizer.Add(self.wm_patient_label, 0, 0, 0)
wm_pwa_grid_sizer.Add(self.wm_patient_name, 0, 0, 0)
wm_pwa_grid_sizer.Add(self.wm_patient_select_button, 0, 0, 3)
wm_pwa_grid_sizer.Add(self.wm_wound_dropdown_label, 0, 0, 0)
wm_pwa_grid_sizer.Add(self.wm_wound_loc, 0, 0, 0)
wm_pwa_grid_sizer.Add((20, 20), 0, 0, 0)
wm_pwa_grid_sizer.Add(self.wm_assessement_dropdown_label, 0, 0, 0)
wm_pwa_grid_sizer.Add(self.wm_assessment_id, 0, 0, 0)
wm_pwa_grid_sizer.Add((20, 20), 0, 0, 0)
wm_pwa_grid_sizer.Add((20, 40), 0, 0, 0)
wm_results_top_sizer.Add(wm_pwa_grid_sizer, 0, wx.ALIGN_CENTER_HORIZONTAL, 0)
wm_results_top_sizer.Add(self.wm_reslts_label, 0, wx.ALIGN_CENTER_HORIZONTAL, 0)
wm_results_top_sizer.Add((20, 20), 0, 0, 0)
wm_results_grid_sizer.Add(self.wm_l_label, 0, 0, 0)
wm_results_grid_sizer.Add(self.wm_length, 0, 0, 0)
wm_results_grid_sizer.Add(self.wm_w_label, 0, 0, 0)
wm_results_grid_sizer.Add(self.wm_width, 0, 0, 0)
wm_results_grid_sizer.Add(self.wm_d_label, 0, 0, 0)
wm_results_grid_sizer.Add(self.wm_depth, 0, 0, 0)
wm_results_grid_sizer.Add(self.wm_lw_label, 0, 0, 0)
wm_results_grid_sizer.Add(self.wm_lw, 0, 0, 0)
wm_results_grid_sizer.Add(self.wm_a_label, 0, 0, 0)
wm_results_grid_sizer.Add(self.wm_area, 0, 0, 0)
wm_results_grid_sizer.Add(self.wm_v_label, 0, 0, 0)
wm_results_grid_sizer.Add(self.wm_volume, 0, 0, 0)
wm_results_grid_sizer.Add(self.wm_push_label, 0, 0, 0)
wm_results_grid_sizer.Add(self.wm_push, 0, 0, 0)
wm_results_top_sizer.Add(wm_results_grid_sizer, 0, wx.ALIGN_CENTER_HORIZONTAL, 0)
self.wm_results_pane.SetSizer(wm_results_top_sizer)
wm_historical_top_sizer.Add((20, 20), 0, 0, 0)
wmh_pwa_grid_sizer.Add(self.wmh_patient_label, 0, 0, 0)
wmh_pwa_grid_sizer.Add(self.wmh_patient_name, 0, 0, 0)
wmh_pwa_grid_sizer.Add(self.wmh_patient_select_button, 0, 0, 3)
wmh_pwa_grid_sizer.Add(self.wmh_wound_dropdown_label, 0, 0, 0)
wmh_pwa_grid_sizer.Add(self.wmh_wound_loc, 0, 0, 0)
wmh_pwa_grid_sizer.Add((20, 20), 0, 0, 0)
wmh_pwa_grid_sizer.Add((20, 40), 0, 0, 0)
wm_historical_top_sizer.Add(wmh_pwa_grid_sizer, 0, wx.ALIGN_CENTER_HORIZONTAL, 0)
wm_historical_top_sizer.Add(self.wm_historical_label, 0, wx.ALIGN_CENTER_HORIZONTAL, 0)
wm_historical_top_sizer.Add((20, 20), 0, 0, 0)
wm_historical_top_sizer.Add(self.wm_historical_list, 1, wx.EXPAND, 0)
self.wm_historical_pane.SetSizer(wm_historical_top_sizer)
self.wound_measurement_notebook.AddPage(self.wm_config_pane, _("Analysis Configuration"))
self.wound_measurement_notebook.AddPage(self.wm_results_pane, _("Results"))
self.wound_measurement_notebook.AddPage(self.wm_historical_pane, _("Historical"))
wm_sizer.Add(self.wound_measurement_notebook, 1, wx.EXPAND, 0)
self.analysis_measurements_pane.SetSizer(wm_sizer)
ts_sizer_l_v.Add((20, 20), 0, 0, 0)
ts_sizer_l_v.Add(self.ts_2d_panel, 1, wx.EXPAND, 0)
ts_sizer_l_v.Add(self.ts_2d_label, 0, wx.ALIGN_CENTER_HORIZONTAL | wx.ALIGN_CENTER_VERTICAL, 0)
ts_sizer_l_v.Add((20, 20), 0, 0, 0)
ts_sizer_l_v.Add(self.ts_3d_panel, 1, wx.EXPAND, 0)
ts_sizer_l_v.Add(self.ts_3d_label, 0, wx.ALIGN_CENTER_HORIZONTAL | wx.ALIGN_CENTER_VERTICAL, 0)
ts_sizer.Add(ts_sizer_l_v, 1, wx.EXPAND, 0)
ts_results_top_sizer.Add((20, 20), 0, 0, 0)
ts_pwa_grid_sizer.Add(self.ts_patient_label, 0, 0, 0)
ts_pwa_grid_sizer.Add(self.ts_patient_name, 0, 0, 0)
ts_pwa_grid_sizer.Add(self.ts_patient_select_button, 0, 0, 3)
ts_pwa_grid_sizer.Add(self.ts_wound_dropdown_label, 0, 0, 0)
ts_pwa_grid_sizer.Add(self.ts_wound_loc, 0, 0, 0)
ts_pwa_grid_sizer.Add((20, 20), 0, 0, 0)
ts_pwa_grid_sizer.Add(self.ts_assessement_dropdown_label, 0, 0, 0)
ts_pwa_grid_sizer.Add(self.ts_assessment_id, 0, 0, 0)
ts_pwa_grid_sizer.Add((20, 20), 0, 0, 0)
ts_pwa_grid_sizer.Add((20, 40), 0, 0, 0)
ts_results_top_sizer.Add(ts_pwa_grid_sizer, 0, wx.ALIGN_CENTER_HORIZONTAL, 0)
ts_results_top_sizer.Add((20, 40), 0, 0, 0)
ts_results_top_sizer.Add(self.ts_results_label, 0, wx.ALIGN_CENTER_HORIZONTAL, 0)
ts_results_top_sizer.Add((20, 20), 0, 0, 0)
ts_results_grid_sizer.Add(self.ts_granulation_label, 0, 0, 0)
ts_results_grid_sizer.Add(self.ts_granulation_percentage, 0, 0, 0)
ts_results_grid_sizer.Add(self.ts_slough_label, 0, 0, 0)
ts_results_grid_sizer.Add(self.ts_slough_percentage, 0, 0, 0)
ts_results_grid_sizer.Add(self.ts_eschar_label, 0, 0, 0)
ts_results_grid_sizer.Add(self.ts_eschar_percentage, 0, 0, 0)
ts_results_grid_sizer.Add(self.ts_bone_label, 0, 0, 0)
ts_results_grid_sizer.Add(self.ts_bone_percentage, 0, 0, 0)
ts_results_grid_sizer.Add(self.ts_staging_label, 0, 0, 0)
ts_results_grid_sizer.Add(self.ts_staging, 0, 0, 0)
ts_results_top_sizer.Add(ts_results_grid_sizer, 0, wx.ALIGN_CENTER_HORIZONTAL, 0)
self.ts_results_pane.SetSizer(ts_results_top_sizer)
ts_historical_top_sizer.Add((20, 20), 0, 0, 0)
tsh_pwa_grid_sizer.Add(self.tsh_patient_label, 0, 0, 0)
tsh_pwa_grid_sizer.Add(self.tsh_patient_name, 0, 0, 0)
tsh_pwa_grid_sizer.Add(self.tsh_patient_select_button, 0, 0, 3)
tsh_pwa_grid_sizer.Add(self.tsh_wound_dropdown_label, 0, 0, 0)
tsh_pwa_grid_sizer.Add(self.tsh_wound_loc, 0, 0, 0)
tsh_pwa_grid_sizer.Add((20, 20), 0, 0, 0)
tsh_pwa_grid_sizer.Add((20, 40), 0, 0, 0)
ts_historical_top_sizer.Add(tsh_pwa_grid_sizer, 0, wx.ALIGN_CENTER_HORIZONTAL, 0)
ts_historical_top_sizer.Add(self.ts_historical_label, 0, wx.ALIGN_CENTER_HORIZONTAL, 0)
ts_historical_top_sizer.Add((20, 20), 0, 0, 0)
ts_historical_top_sizer.Add(self.ts_historical_list, 1, wx.EXPAND, 0)
self.ts_historical_pane.SetSizer(ts_historical_top_sizer)
self.tissue_segmentation_notebook.AddPage(self.ts_config_pane, _("Analysis Configuration"))
self.tissue_segmentation_notebook.AddPage(self.ts_results_pane, _("Results"))
self.tissue_segmentation_notebook.AddPage(self.ts_historical_pane, _("Historical"))
ts_sizer.Add(self.tissue_segmentation_notebook, 1, wx.EXPAND, 0)
self.analysis_segmentation_pane.SetSizer(ts_sizer)
th_sizer_l_v.Add((20, 10), 0, 0, 0)
th_sizer_l_v.Add(self.thermal_2d_pane, 1, wx.EXPAND, 0)
th_sizer_l_v.Add(self.th_2d_label, 0, wx.ALIGN_CENTER_HORIZONTAL | wx.ALIGN_CENTER_VERTICAL, 0)
th_sizer_l_v.Add((20, 10), 0, 0, 0)
th_sizer_l_v.Add(self.thermal_3d_panel, 1, wx.EXPAND, 0)
th_sizer_l_v.Add(self.th_3d_label, 0, wx.ALIGN_CENTER_HORIZONTAL | wx.ALIGN_CENTER_VERTICAL, 0)
th_sizer_l_v.Add((20, 10), 0, 0, 0)
ts_button_grid_sizer.Add(self.thermal_cont_radio, 0, wx.EXPAND, 0)
ts_button_grid_sizer.Add((20, 10), 0, 0, 0)
ts_button_grid_sizer.Add(self.thermal_segmented_radio, 0, wx.EXPAND, 0)
ts_button_grid_sizer.Add(self.thermal_segment_size, 0, 0, 0)
th_sizer_l_v.Add(ts_button_grid_sizer, 0, wx.EXPAND, 0)
th_sizer.Add(th_sizer_l_v, 0, 0, 0)
th_results_top_sizer.Add((20, 20), 0, 0, 0)
th_pwa_grid_sizer.Add(self.th_patient_label, 0, 0, 0)
th_pwa_grid_sizer.Add(self.th_patient_name, 0, 0, 0)
th_pwa_grid_sizer.Add(self.th_patient_select_button, 0, 0, 3)
th_pwa_grid_sizer.Add(self.th_wound_dropdown_label, 0, 0, 0)
th_pwa_grid_sizer.Add(self.th_wound_loc, 0, 0, 0)
th_pwa_grid_sizer.Add((20, 20), 0, 0, 0)
th_pwa_grid_sizer.Add(self.th_assessement_dropdown_label, 0, 0, 0)
th_pwa_grid_sizer.Add(self.th_assessment_id, 0, 0, 0)
th_pwa_grid_sizer.Add((20, 20), 0, 0, 0)
th_pwa_grid_sizer.Add((20, 40), 0, 0, 0)
th_results_top_sizer.Add(th_pwa_grid_sizer, 0, wx.ALIGN_CENTER_HORIZONTAL, 0)
th_results_top_sizer.Add((20, 40), 0, 0, 0)
th_results_top_sizer.Add(self.th_results_label, 0, wx.ALIGN_CENTER_HORIZONTAL, 0)
th_results_top_sizer.Add((20, 20), 0, 0, 0)
grid_sizer_1.Add(self.t_max_label, 0, 0, 0)
grid_sizer_1.Add(self.t_max, 0, 0, 0)
grid_sizer_1.Add(self.t_min_label, 0, 0, 0)
grid_sizer_1.Add(self.t_min, 0, 0, 0)
grid_sizer_1.Add(self.t_base_label, 0, 0, 0)
grid_sizer_1.Add(self.t_baseline, 0, 0, 0)
grid_sizer_1.Add(self.t_sigma_label, 0, 0, 0)
grid_sizer_1.Add(self.t_sigma, 0, 0, 0)
grid_sizer_1.Add(self.t_s1_label, 0, 0, 0)
grid_sizer_1.Add(self.t_segment_1, 0, 0, 0)
grid_sizer_1.Add(self.t_s2_label, 0, 0, 0)
grid_sizer_1.Add(self.t_segment_2, 0, 0, 0)
grid_sizer_1.Add(self.t_s3_label, 0, 0, 0)
grid_sizer_1.Add(self.t_segment_3, 0, 0, 0)
grid_sizer_1.Add(self.t_s4_label, 0, 0, 0)
grid_sizer_1.Add(self.t_segment_4, 0, 0, 0)
grid_sizer_1.Add(self.t_s5_label, 0, 0, 0)
grid_sizer_1.Add(self.t_segment_5, 0, 0, 0)
th_results_top_sizer.Add(grid_sizer_1, 0, wx.ALIGN_CENTER_HORIZONTAL, 0)
self.th_results_pane.SetSizer(th_results_top_sizer)
th_historical_top_sizer.Add((20, 20), 0, 0, 0)
thh_pwa_grid_sizer.Add(self.thh_patient_label, 0, 0, 0)
thh_pwa_grid_sizer.Add(self.thh_patient_name, 0, 0, 0)
thh_pwa_grid_sizer.Add(self.thh_patient_select_button, 0, 0, 3)
thh_pwa_grid_sizer.Add(self.thh_wound_dropdown_label, 0, 0, 0)
thh_pwa_grid_sizer.Add(self.thh_wound_loc, 0, 0, 0)
thh_pwa_grid_sizer.Add((20, 20), 0, 0, 0)
thh_pwa_grid_sizer.Add((20, 40), 0, 0, 0)
th_historical_top_sizer.Add(thh_pwa_grid_sizer, 0, wx.ALIGN_CENTER_HORIZONTAL, 0)
th_historical_top_sizer.Add(self.th_historical_label, 0, wx.ALIGN_CENTER_HORIZONTAL, 0)
th_historical_top_sizer.Add((20, 20), 0, 0, 0)
th_historical_top_sizer.Add(self.th_historical_list, 1, wx.EXPAND, 0)
self.th_historical_pane.SetSizer(th_historical_top_sizer)
self.thermal_profile_notebook.AddPage(self.th_config_pane, _("Analysis Configuration"))
self.thermal_profile_notebook.AddPage(self.th_results_pane, _("Results"))
self.thermal_profile_notebook.AddPage(self.th_historical_pane, _("Historical"))
th_sizer.Add(self.thermal_profile_notebook, 1, wx.EXPAND, 0)
self.analysis_thermal_pane.SetSizer(th_sizer)
cs_sizer_l_v.Add((20, 100), 0, 0, 0)
cs_sizer_l_v.Add(self.chemical_panel, 1, wx.EXPAND, 0)
cs_sizer_l_v.Add(self.cs_panel_label, 0, wx.ALIGN_CENTER_HORIZONTAL | wx.ALIGN_CENTER_VERTICAL, 0)
cs_sizer.Add(cs_sizer_l_v, 1, wx.EXPAND, 0)
cs_results_top_sizer.Add((20, 50), 0, 0, 0)
cs_pwa_grid_sizer.Add(self.cs_patient_label, 0, 0, 0)
cs_pwa_grid_sizer.Add(self.cs_patient_name, 0, 0, 0)
cs_pwa_grid_sizer.Add(self.cs_patient_select_button, 0, 0, 3)
cs_pwa_grid_sizer.Add(self.cs_wound_dropdown_label, 0, 0, 0)
cs_pwa_grid_sizer.Add(self.cs_wound_loc, 0, 0, 0)
cs_pwa_grid_sizer.Add((20, 20), 0, 0, 0)
cs_pwa_grid_sizer.Add(self.cs_assessement_dropdown_label, 0, 0, 0)
cs_pwa_grid_sizer.Add(self.cs_assessment_id, 0, 0, 0)
cs_pwa_grid_sizer.Add((20, 20), 0, 0, 0)
cs_results_top_sizer.Add(cs_pwa_grid_sizer, 0, wx.ALIGN_CENTER_HORIZONTAL, 0)
cs_results_top_sizer.Add((20, 40), 0, 0, 0)
cs_results_top_sizer.Add(self.cs_results_label, 0, wx.ALIGN_CENTER_HORIZONTAL, 0)
cs_results_top_sizer.Add((20, 50), 0, 0, 0)
self.cs_results_pane.SetSizer(cs_results_top_sizer)
cs_historical_top_sizer.Add((20, 20), 0, 0, 0)
csh_pwa_grid_sizer.Add(self.csh_patient_label, 0, 0, 0)
csh_pwa_grid_sizer.Add(self.csh_patient_name, 0, 0, 0)
csh_pwa_grid_sizer.Add(self.csh_patient_select_button, 0, 0, 3)
csh_pwa_grid_sizer.Add(self.csh_wound_dropdown_label, 0, 0, 0)
csh_pwa_grid_sizer.Add(self.csh_wound_loc, 0, 0, 0)
csh_pwa_grid_sizer.Add((20, 20), 0, 0, 0)
csh_pwa_grid_sizer.Add((20, 40), 0, 0, 0)
cs_historical_top_sizer.Add(csh_pwa_grid_sizer, 0, wx.ALIGN_CENTER_HORIZONTAL, 0)
cs_historical_top_sizer.Add(self.cs_historical_label, 0, wx.ALIGN_CENTER_HORIZONTAL, 0)
cs_historical_top_sizer.Add((20, 20), 0, 0, 0)
cs_historical_top_sizer.Add(self.cs_historical_list, 1, wx.EXPAND, 0)
self.cs_historical_pane.SetSizer(cs_historical_top_sizer)
self.chemical_sensing_notebook.AddPage(self.cs_config_pane, _("Analysis Configuration"))
self.chemical_sensing_notebook.AddPage(self.cs_results_pane, _("Results"))
self.chemical_sensing_notebook.AddPage(self.cs_historical_pane, _("Historical"))
cs_sizer.Add(self.chemical_sensing_notebook, 1, wx.EXPAND, 0)
self.analysis_chemical_pane.SetSizer(cs_sizer)
clinical_sizer.Add((20, 12), 0, 0, 0)
clinical_sizer.Add(self.cc_label, 0, wx.ALIGN_CENTER_HORIZONTAL, 0)
clinical_sizer.Add((20, 12), 0, 0, 0)
grid_sizer_2.Add(self.cc_patient_label, 0, 0, 0)
grid_sizer_2.Add(self.cc_patient_name, 0, 0, 0)
grid_sizer_2.Add(self.patient_select_button, 0, 0, 3)
grid_sizer_2.Add(self.cc_wound_label, 0, 0, 0)
grid_sizer_2.Add(self.cc_wound_loc, 0, 0, 0)
grid_sizer_2.Add(self.cc_wound_button, 0, 0, 3)
grid_sizer_2.Add(self.assessment_no_label, 0, 0, 0)
grid_sizer_2.Add(self.cc_assessment_no, 0, 0, 0)
grid_sizer_2.Add(self.cc_assessment_button, 0, 0, 3)
grid_sizer_2.Add(self.cc_datetime_label, 0, 0, 0)
sizer_4.Add(self.cc_datepicker, 0, 0, 0)
sizer_4.Add(self.cc_timepicker, 1, 0, 0)
grid_sizer_2.Add(sizer_4, 1, wx.EXPAND, 0)
grid_sizer_2.Add((20, 10), 0, 0, 0)
grid_sizer_2.Add(self.cc_length_label, 0, 0, 0)
grid_sizer_2.Add(self.cc_length, 0, 0, 0)
grid_sizer_2.Add((20, 10), 0, 0, 0)
grid_sizer_2.Add(self.cc_width_label, 0, 0, 0)
grid_sizer_2.Add(self.cc_width, 0, 0, 0)
grid_sizer_2.Add((20, 10), 0, 0, 0)
grid_sizer_2.Add(self.cc_depth_label, 0, 0, 0)
grid_sizer_2.Add(self.cc_depth, 0, 0, 0)
grid_sizer_2.Add((20, 10), 0, 0, 0)
grid_sizer_2.Add(self.cc_u_depth_label, 0, 0, 0)
grid_sizer_2.Add(self.cc_u_depth, 0, 0, 0)
grid_sizer_2.Add((20, 10), 0, 0, 0)
grid_sizer_2.Add(self.cc_u_desc_label, 0, 0, 0)
grid_sizer_2.Add(self.cc_u_desc, 0, 0, 0)
grid_sizer_2.Add((20, 10), 0, 0, 0)
grid_sizer_2.Add(self.cc_staging_label, 0, 0, 0)
grid_sizer_2.Add(self.cc_staging, 0, 0, 0)
grid_sizer_2.Add((20, 10), 0, 0, 0)
grid_sizer_2.Add(self.cc_bed_label, 0, 0, 0)
grid_sizer_2.Add(self.cc_bed_color, 0, 0, 0)
grid_sizer_2.Add((20, 10), 0, 0, 0)
grid_sizer_2.Add(self.cc_ex_amt_label, 0, 0, 0)
grid_sizer_2.Add(self.cc_ex_amt, 0, 0, 0)
grid_sizer_2.Add((20, 10), 0, 0, 0)
grid_sizer_2.Add(self.cc_ex_type_label, 0, 0, 0)
grid_sizer_2.Add(self.cc_ex_type, 0, 0, 0)
grid_sizer_2.Add((20, 10), 0, 0, 0)
grid_sizer_2.Add(self.cc_granulation_label, 0, 0, 0)
grid_sizer_2.Add(self.cc_granulation_percentage, 0, 0, 0)
grid_sizer_2.Add((20, 10), 0, 0, 0)
grid_sizer_2.Add(self.cc_slough_label, 0, 0, 0)
grid_sizer_2.Add(self.cc_slough_percentage, 0, 0, 0)
grid_sizer_2.Add((20, 10), 0, 0, 0)
grid_sizer_2.Add(self.cc_eschar_label, 0, 0, 0)
grid_sizer_2.Add(self.cc_eschar_percentage, 0, 0, 0)
grid_sizer_2.Add((20, 10), 0, 0, 0)
grid_sizer_2.Add(self.cc_bone_label, 0, 0, 0)
grid_sizer_2.Add(self.cc_bone_percentage, 0, 0, 0)
grid_sizer_2.Add((20, 10), 0, 0, 0)
grid_sizer_2.Add(self.cc_peri_desc_label, 0, 0, 0)
grid_sizer_2.Add(self.cc_peri_desc, 0, 0, 0)
grid_sizer_2.Add((20, 10), 0, 0, 0)
grid_sizer_2.Add(self.cc_blanching_label, 0, 0, 0)
grid_sizer_2.Add(self.cc_blanching_exists, 0, 0, 0)
grid_sizer_2.Add((20, 10), 0, 0, 0)
grid_sizer_2.Add(self.cc_infection_label, 0, 0, 0)
grid_sizer_2.Add(self.cc_infection, 0, wx.EXPAND, 0)
grid_sizer_2.Add((20, 10), 0, 0, 0)
grid_sizer_2.Add(self.cc_odor_intensity_label, 0, 0, 0)
grid_sizer_2.Add(self.cc_odor_intensity, 0, 0, 0)
grid_sizer_2.Add((20, 10), 0, 0, 0)
grid_sizer_2.Add(self.cc_odor_description_label, 0, 0, 0)
grid_sizer_2.Add(self.cc_odor_description, 0, wx.EXPAND, 0)
grid_sizer_2.Add((20, 10), 0, 0, 0)
clinical_sizer.Add(grid_sizer_2, 1, wx.ALIGN_CENTER_HORIZONTAL, 0)
clinical_sizer.Add((20, 2), 0, 0, 0)
clinical_button_sizer.Add(self.cc_save_button, 0, 0, 3)
clinical_button_sizer.Add((20, 20), 0, 0, 0)
clinical_button_sizer.Add(self.cc_reset_button, 0, 0, 0)
clinical_sizer.Add(clinical_button_sizer, 0, wx.ALIGN_CENTER_HORIZONTAL, 0)
self.clinical_collection_pane.SetSizer(clinical_sizer)
self.assessment_notebook.AddPage(self.chemical_collection_pane, _("Collection - Chemical"))
self.assessment_notebook.AddPage(self.visual_collection_pane, _("Collection - Visual"))
self.assessment_notebook.AddPage(self.multi_spectral_collection_pane, _("Collection - Multi-Spectral"))
self.assessment_notebook.AddPage(self.analysis_measurements_pane, _("Analysis - Wound Measurements"))
self.assessment_notebook.AddPage(self.analysis_segmentation_pane, _("Analysis - Tissue Type Segmentation"))
self.assessment_notebook.AddPage(self.analysis_thermal_pane, _("Analysis - Thermal Profile"))
self.assessment_notebook.AddPage(self.analysis_chemical_pane, _("Analysis - Chemical Sensing"))
self.assessment_notebook.AddPage(self.clinical_collection_pane, _("Clinical"))
sizer_2.Add(self.assessment_notebook, 1, wx.EXPAND, 0)
self.panel_1.SetSizer(sizer_2)
sizer_1.Add(self.panel_1, 1000, wx.EXPAND, 0)
self.SetSizer(sizer_1)
sizer_1.Fit(self)
sizer_1.SetSizeHints(self)
self.Layout()
# end wxGlade
def InitControllers(self):
"""
Method to initialize various controllers
"""
global db, measurementAlgorithm
# set up controllers
self.clinicalWoundController = ClinicalWoundController(db, ClinicalWoundAssessment, None, None)
self.experimentController = ExperimentController(db, Experiment, None, None)
experiment = self.experimentController.getDefaultExperiment(measurementAlgorithm)
self.assessment_measurements_experiment_id = experiment.id
self.assessment_segmentation_experiment_id = experiment.id
self.assessment_temperature_experiment_id = experiment.id
self.assessmentMeasurementsController = AssessmentMeasurementsController(db, AssessmentMeasurements, OlvAssessmentMeasurements, OlvAssessmentMeasurementsCols)
self.assessmentSegmentationController = AssessmentSegmentationController(db, AssessmentSegmentation, OlvAssessmentSegmentation, OlvAssessmentSegmentationCols)
self.assessmentTemperatureController = AssessmentTemperatureController(db, AssessmentTemperature, OlvAssessmentTemperature, OlvAssessmentTemperatureCols)
def InitMisc(self):
"""
Method to initialize miscellaneous items: file director, sampling parameters, camera server processes, and the touchscreen
"""
# Set up file director
self.director = PUFileDirector(self.system_config['BASE_FILE_DIRECTORY'])
self.director.SetSystemType("assessment")
# Set up biochemical sampling parameters
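# The configured sample times are in seconds; each Collector() scan below takes
# roughly 10 s (assumed from the collection loops), so dividing gives the
# number of scans per collection.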
self.chemicalBaselineScanValue = int(self.system_config['CHEMICAL_BASELINE_SAMPLE_TIME']) / 10
self.chemicalWoundScanValue = int(self.system_config['CHEMICAL_SAMPLE_TIME']) / 10
# Start up thermal camera server
# thermal_camera_server_path = "C:/Program Files (x86)/MICRO-EPSILON Messtechnik GmbH u Co KG/TIM Connect/Imager.exe"
thermal_camera_server_path = self.system_config['THERMAL_CAMERA_SERVER_PATH']
(junk,executable) = os.path.split(thermal_camera_server_path)
# kill old process
try:
os.system('taskkill /f /im '+executable)
except Exception:
pass
startupOptions = '/Invisible'
print "Starting thermal camera server"
self.thermal_server_process = subprocess.Popen([thermal_camera_server_path, startupOptions])
print "PID = %d" % (self.thermal_server_process.pid)
# Start up hyperspectral camera server
hyperspectral_camera_server_path = self.system_config['HYPERSPECTRAL_CAMERA_SERVER_PATH']
(junk,executable) = os.path.split(hyperspectral_camera_server_path)
# kill old process
try:
os.system('taskkill /f /im '+executable)
except Exception:
pass
print "Starting hyperspectral camera server"
self.hyperspectral_server_process = subprocess.Popen([hyperspectral_camera_server_path, startupOptions])
print "PID = %d" % (self.hyperspectral_server_process.pid)
# find touchscreen
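# The external touchscreen is identified by its native 800x480 geometry; if no
# display matches, the touch frame is never created and touch controls stay disabled.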
self.touchscreen = None
for i in range(wx.Display.GetCount()):
if wx.Display(i).GetGeometry().GetSize() == wx.Size(800, 480):
#if i == 1: # BRH
self.touchscreen = wx.Display(i)
print "touchscreen is display #%d" % (i)
print self.touchscreen.GetGeometry().GetPosition()
# self.touchframe = wx.Frame(self, wx.ID_ANY, "VA-PUPC Touchscreen Input", self.touchscreen.GetGeometry().GetPosition(), self.touchscreen.GetGeometry().GetSize())
if self.touchscreen:
self.touchframe = TouchFrame(self, wx.ID_ANY, "VA-PUPC Touchscreen Input", self.touchscreen.GetGeometry().GetPosition(), self.touchscreen.GetGeometry().GetSize())
self.touchframe.Show()
def DoNewPatient(self, event):
"""
Menu handler for adding new patients
"""
global db
self.panel_1.Disable()
olvDialog = OLVDialog(None, db, PatientIdentification, OlvPatientIdentification, OlvPatientIdentificationCols)
rc = olvDialog.ShowModal()
olvDialog.Destroy()
self.panel_1.Enable()
def DoSelectPatient(self, event): # wxGlade: MainFrame.<event_handler>
"""
Menu handler for selecting a patient to record
"""
global db
self.panel_1.Disable()
olvDialog = OLVDialog(None, db, PatientIdentification, OlvPatientIdentification, OlvPatientIdentificationCols,"Select-Only")
rc = olvDialog.ShowModal()
if rc == 0:
self.currentPatient = olvDialog.getSelectedObject()
self.patient_id = self.currentPatient.patient_id
self.patient_name.SetValue(self.currentPatient.patient_name)
self.director.SetPatientId(self.patient_id)
self.ms_patient_name.SetValue(self.currentPatient.patient_name)
self.ch_patient_name.SetValue(self.currentPatient.patient_name)
self.wm_patient_name.SetValue(self.currentPatient.patient_name)
self.wmh_patient_name.SetValue(self.currentPatient.patient_name)
self.ts_patient_name.SetValue(self.currentPatient.patient_name)
self.tsh_patient_name.SetValue(self.currentPatient.patient_name)
self.th_patient_name.SetValue(self.currentPatient.patient_name)
self.thh_patient_name.SetValue(self.currentPatient.patient_name)
self.cs_patient_name.SetValue(self.currentPatient.patient_name)
self.csh_patient_name.SetValue(self.currentPatient.patient_name)
self.cc_patient_name.SetValue(self.currentPatient.patient_name)
self.PopulateWoundDropDowns()
self.RefreshTabs()
olvDialog.Destroy()
self.panel_1.Enable()
def OnCloseWindow(self, event): # wxGlade: MainFrame.<event_handler>
"""
Close handler for main window
"""
print "...closing"
if hasattr(self,'thermal_client_process'):
print "Terminating thermal camera client process"
self.thermal_client_process.terminate()
if hasattr(self,'thermal_server_process'):
print "Terminating thermal camera server process"
self.thermal_server_process.terminate()
self.Destroy()
def DoWoundSelect(self, event): # wxGlade: MainFrame.<event_handler>
"""
Event handler for changing a wound drop-down
"""
cb = event.GetEventObject()
print "in DoWoundSelect event handler"
selNo = event.GetSelection()
self.woundId = int(cb.GetClientData(event.GetSelection()))
print "Wound select event hander....woundId = %d" % self.woundId
if self.woundId > 0:
# Set other dropdowns
if cb != self.wound_loc:
self.wound_loc.SetSelection(selNo)
if cb != self.ms_wound_loc:
self.ms_wound_loc.SetSelection(selNo)
if cb != self.ch_wound_loc:
self.ch_wound_loc.SetSelection(selNo)
if cb != self.wm_wound_loc:
self.wm_wound_loc.SetSelection(selNo)
if cb != self.wmh_wound_loc:
self.wmh_wound_loc.SetSelection(selNo)
if cb != self.ts_wound_loc:
self.ts_wound_loc.SetSelection(selNo)
if cb != self.tsh_wound_loc:
self.tsh_wound_loc.SetSelection(selNo)
if cb != self.th_wound_loc:
self.th_wound_loc.SetSelection(selNo)
if cb != self.thh_wound_loc:
self.thh_wound_loc.SetSelection(selNo)
if cb != self.cs_wound_loc:
self.cs_wound_loc.SetSelection(selNo)
if cb != self.csh_wound_loc:
self.csh_wound_loc.SetSelection(selNo)
if cb != self.cc_wound_loc:
self.cc_wound_loc.SetSelection(selNo)
# Set file director
self.director.SetWoundId(self.woundId)
self.PopulateAssessmentDropDowns()
self.assessmentMeasurements = self.assessmentMeasurementsController.getAllByPatientByWoundByExpForOLView(self.patient_id, self.woundId, self.assessment_measurements_experiment_id)
amcols = OlvAssessmentMeasurementsCols()
self.wm_historical_list.SetColumns(amcols.getColumnDefinitions())
self.wm_historical_list.SetObjects(self.assessmentMeasurements)
self.assessmentSegmentation = self.assessmentSegmentationController.getAllByPatientByWoundByExpForOLView(self.patient_id, self.woundId, self.assessment_segmentation_experiment_id)
ascols = OlvAssessmentSegmentationCols()
self.ts_historical_list.SetColumns(ascols.getColumnDefinitions())
self.ts_historical_list.SetObjects(self.assessmentSegmentation)
self.assessmentTemperature = self.assessmentTemperatureController.getAllByPatientByWoundByExpForOLView(self.patient_id, self.woundId, self.assessment_temperature_experiment_id)
# l = len(self.assessmentTemperature)
# print "Number of assessment temperatures = %d" % l
atcols = OlvAssessmentTemperatureCols()
# print atcols.getColumnDefinitions()
self.th_historical_list.SetColumns(atcols.getColumnDefinitions())
self.th_historical_list.SetObjects(self.assessmentTemperature)
self.RefreshTabs()
def RefreshTabs(self):
tabNo = self.assessment_notebook.GetSelection()
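# NOTE: the tab indices below assume the order Visual, Multi-Spectral, Chemical
# for the first three pages, which differs from the AddPage order used above.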
if tabNo == 0:
self.visual_collection_pane.Fit()
elif tabNo == 1:
self.multi_spectral_collection_pane.Fit()
elif tabNo == 2:
self.chemical_collection_pane.Fit()
elif tabNo == 3:
# self.analysis_measurements_pane.Fit()
self.wmh_wound_loc.GetContainingSizer().Layout()
elif tabNo == 4:
# self.analysis_segmentation_pane.Fit()
self.tsh_wound_loc.GetContainingSizer().Layout()
elif tabNo == 5:
# self.analysis_thermal_pane.Fit()
self.thh_wound_loc.GetContainingSizer().Layout()
elif tabNo == 6:
self.analysis_chemical_pane.Fit()
else:
self.PopulateClinicalCollectionPane()
# self.clinical_collection_pane.Fit()
def PopulateWoundDropDowns(self):
self.wound_loc.Clear()
self.ms_wound_loc.Clear()
self.ch_wound_loc.Clear()
self.wm_wound_loc.Clear()
self.wmh_wound_loc.Clear()
self.ts_wound_loc.Clear()
self.tsh_wound_loc.Clear()
self.th_wound_loc.Clear()
self.thh_wound_loc.Clear()
self.cs_wound_loc.Clear()
self.csh_wound_loc.Clear()
self.cc_wound_loc.Clear()
self.cv_assessment_id.Clear()
self.ms_assessment_id.Clear()
self.ch_assessment_id.Clear()
self.wm_assessment_id.Clear()
self.ts_assessment_id.Clear()
self.th_assessment_id.Clear()
self.cs_assessment_id.Clear()
self.cc_assessment_no.Clear()
woundController = WoundAssessmentController(db, WoundAssessment, None, None)
wounds = woundController.getAllByPatient(self.patient_id)
firstWound = True
if len(wounds) > 0:
for w in wounds:
if firstWound:
self.woundId = w.wound_id
firstWound = False
self.wound_loc.Append(w.wound_location_description, str(w.wound_id))
self.ms_wound_loc.Append(w.wound_location_description, str(w.wound_id))
self.ch_wound_loc.Append(w.wound_location_description, str(w.wound_id))
self.wm_wound_loc.Append(w.wound_location_description, str(w.wound_id))
self.wmh_wound_loc.Append(w.wound_location_description, str(w.wound_id))
self.ts_wound_loc.Append(w.wound_location_description, str(w.wound_id))
self.tsh_wound_loc.Append(w.wound_location_description, str(w.wound_id))
self.th_wound_loc.Append(w.wound_location_description, str(w.wound_id))
self.thh_wound_loc.Append(w.wound_location_description, str(w.wound_id))
self.cs_wound_loc.Append(w.wound_location_description, str(w.wound_id))
self.csh_wound_loc.Append(w.wound_location_description, str(w.wound_id))
self.cc_wound_loc.Append(w.wound_location_description, str(w.wound_id))
else:
self.woundId = -1
self.wound_loc.Append("No wounds for this patient", "-1")
self.ms_wound_loc.Append("No wounds for this patient", "-1")
self.ch_wound_loc.Append("No wounds for this patient", "-1")
self.wm_wound_loc.Append("No wounds for this patient", "-1")
self.wmh_wound_loc.Append("No wounds for this patient", "-1")
self.ts_wound_loc.Append("No wounds for this patient", "-1")
self.tsh_wound_loc.Append("No wounds for this patient", "-1")
self.th_wound_loc.Append("No wounds for this patient", "-1")
self.thh_wound_loc.Append("No wounds for this patient", "-1")
self.cs_wound_loc.Append("No wounds for this patient", "-1")
self.csh_wound_loc.Append("No wounds for this patient", "-1")
self.cc_wound_loc.Append("No wounds for this patient", "-1")
self.ch_wound_msg.SetValue('No assessment selected')
self.ch_baseline_msg.SetValue('No assessment selected')
self.wound_loc.Select(0)
self.ms_wound_loc.Select(0)
self.ch_wound_loc.Select(0)
self.wm_wound_loc.Select(0)
self.wmh_wound_loc.Select(0)
self.ts_wound_loc.Select(0)
self.tsh_wound_loc.Select(0)
self.th_wound_loc.Select(0)
self.thh_wound_loc.Select(0)
self.cs_wound_loc.Select(0)
self.csh_wound_loc.Select(0)
self.cc_wound_loc.Select(0)
#
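# Fire a synthetic combobox-selection event so DoWoundSelect runs for the
# default (first) wound without user interaction.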
wcEvent = wx.CommandEvent(wx.wxEVT_COMMAND_COMBOBOX_SELECTED,self.wound_loc.GetId())
wcEvent.SetEventObject(self.wound_loc)
wx.PostEvent(self.wound_loc, wcEvent)
# Recalculate size of wound dropdowns
self.wound_loc.GetContainingSizer().Layout()
self.ms_wound_loc.GetContainingSizer().Layout()
self.ch_wound_loc.GetContainingSizer().Layout()
self.wm_wound_loc.GetContainingSizer().Layout()
self.wmh_wound_loc.GetContainingSizer().Layout()
self.ts_wound_loc.GetContainingSizer().Layout()
self.tsh_wound_loc.GetContainingSizer().Layout()
self.th_wound_loc.GetContainingSizer().Layout()
self.thh_wound_loc.GetContainingSizer().Layout()
self.cs_wound_loc.GetContainingSizer().Layout()
self.csh_wound_loc.GetContainingSizer().Layout()
self.cc_wound_loc.GetContainingSizer().Layout()
def PopulateAssessmentDropDowns(self):
self.cv_assessment_id.Clear()
self.ms_assessment_id.Clear()
self.ch_assessment_id.Clear()
self.wm_assessment_id.Clear()
self.ts_assessment_id.Clear()
self.th_assessment_id.Clear()
self.cs_assessment_id.Clear()
self.cc_assessment_no.Clear()
asController = AssessmentSessionController(db, AssessmentSession, None, None)
assessments = asController.getAllByPatientByWound(self.patient_id, self.woundId)
if len(assessments) > 0:
for a in assessments:
a_str = "%d - %s" % (a.assessment_id, a.start_time.strftime('%m/%d/%Y'))
self.cv_assessment_id.Append(a_str, str(a.assessment_id))
self.ms_assessment_id.Append(a_str, str(a.assessment_id))
self.ch_assessment_id.Append(a_str, str(a.assessment_id))
self.wm_assessment_id.Append(a_str, str(a.assessment_id))
self.ts_assessment_id.Append(a_str, str(a.assessment_id))
self.th_assessment_id.Append(a_str, str(a.assessment_id))
self.cs_assessment_id.Append(a_str, str(a.assessment_id))
self.cc_assessment_no.Append(a_str, str(a.assessment_id))
else:
self.cv_assessment_id.Append("No assessments for this patient", "-1")
self.ms_assessment_id.Append("No assessments for this patient", "-1")
self.ch_assessment_id.Append("No assessments for this patient", "-1")
self.wm_assessment_id.Append("No assessments for this patient", "-1")
self.ts_assessment_id.Append("No assessments for this patient", "-1")
self.th_assessment_id.Append("No assessments for this patient", "-1")
self.cs_assessment_id.Append("No assessments for this patient", "-1")
self.cc_assessment_no.Append("No assessments for this patient", "-1")
self.ch_wound_msg.SetValue('No assessment selected')
self.ch_baseline_msg.SetValue('No assessment selected')
self.cv_assessment_id.Select(0)
self.ms_assessment_id.Select(0)
self.ch_assessment_id.Select(0)
self.wm_assessment_id.Select(0)
self.ts_assessment_id.Select(0)
self.th_assessment_id.Select(0)
self.cs_assessment_id.Select(0)
self.cc_assessment_no.Select(0)
acEvent = wx.CommandEvent(wx.wxEVT_COMMAND_COMBOBOX_SELECTED,self.cv_assessment_id.GetId())
acEvent.SetEventObject(self.cv_assessment_id)
wx.PostEvent(self.cv_assessment_id, acEvent)
def PopulateClinicalCollectionPane(self):
self.clinicalAssessment = None
if self.patient_id > 0 and self.woundId > 0 and self.assessmentId > 0:
self.clinicalAssessment = self.clinicalWoundController.getByPatientByWoundByNo(self.patient_id, self.woundId, self.assessmentId)
if self.clinicalAssessment is None:
self.clinicalAssessment = ClinicalWoundAssessment()
now = datetime.now()
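# Convert the current datetime to wx.DateTime via wx's (private) date helper,
# then copy the time of day, which _pydate2wxdate does not carry over.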
wxnow = wx.calendar._pydate2wxdate(now)
wxnow.SetHMS(now.hour,now.minute,now.second)
self.cc_datepicker.SetValue(wxnow)
self.cc_timepicker.SetValue(wxnow)
self.cc_length.SetValue("0.00")
self.cc_width.SetValue("0.00")
self.cc_depth.SetValue("0.00")
self.cc_u_depth.SetValue("0.00")
self.cc_u_desc.SetValue("")
self.cc_staging.Select(0)
self.cc_bed_color.SetValue("")
self.cc_ex_amt.Select(0)
self.cc_ex_type.Select(0)
self.cc_granulation_percentage.SetValue("0")
self.cc_slough_percentage.SetValue("0")
self.cc_eschar_percentage.SetValue("0")
self.cc_bone_percentage.SetValue("0")
self.cc_peri_desc.SetValue("")
self.cc_blanching_exists.SetValue(False)
self.cc_infection.SetValue("")
self.cc_odor_intensity.Select(0)
self.cc_odor_description.SetValue("")
else:
wxad = wx.calendar._pydate2wxdate(self.clinicalAssessment.assessment_date)
wxad.SetHMS(self.clinicalAssessment.assessment_date.hour,self.clinicalAssessment.assessment_date.minute,self.clinicalAssessment.assessment_date.second)
self.cc_datepicker.SetValue(wxad)
self.cc_timepicker.SetValue(wxad)
self.cc_length.SetValue(self.clinicalAssessment.length)
self.cc_width.SetValue(self.clinicalAssessment.width)
self.cc_depth.SetValue(self.clinicalAssessment.depth)
self.cc_u_depth.SetValue(self.clinicalAssessment.undermining_depth)
self.cc_u_desc.SetValue(self.clinicalAssessment.undermining_description)
val = self.clinicalAssessment.ulcer_stage - 1
self.cc_staging.Select(val)
self.cc_bed_color.SetValue(self.clinicalAssessment.bed_color)
self.cc_ex_amt.SetStringSelection(self.clinicalAssessment.exudate_amount)
self.cc_ex_type.SetStringSelection(self.clinicalAssessment.exudate_type)
self.cc_granulation_percentage.SetValue(self.clinicalAssessment.granulation_percentage)
self.cc_slough_percentage.SetValue(self.clinicalAssessment.slough_percentage)
self.cc_eschar_percentage.SetValue(self.clinicalAssessment.eschar_percentage)
self.cc_bone_percentage.SetValue(self.clinicalAssessment.bone_percentage)
self.cc_peri_desc.SetValue(self.clinicalAssessment.peri_ulcer_area_description)
self.cc_blanching_exists.SetValue(self.clinicalAssessment.blanching_exists)
self.cc_infection.SetValue(self.clinicalAssessment.infection_process)
self.cc_odor_intensity.SetStringSelection(self.clinicalAssessment.odor_intensity)
self.cc_odor_description.SetValue(self.clinicalAssessment.odor_description)
def PopulateMeasurementsResultsPane(self):
self.assessmentMeasurements = None
if self.patient_id > 0 and self.woundId > 0 and self.assessmentId > 0:
self.assessmentMeasurements = self.assessmentMeasurementsController.getByPatientByWoundByNoByExp(self.patient_id, self.woundId, self.assessmentId, self.assessment_measurements_experiment_id)
if self.assessmentMeasurements is None:
self.wm_length.SetValue("0.00")
self.wm_width.SetValue("0.00")
self.wm_depth.SetValue("0.00")
self.wm_lw.SetValue("0.00")
self.wm_area.SetValue("0.00")
self.wm_volume.SetValue("0.00")
self.wm_push.SetValue("0")
else:
self.wm_length.SetValue(self.assessmentMeasurements.length)
self.wm_width.SetValue(self.assessmentMeasurements.width)
self.wm_depth.SetValue(self.assessmentMeasurements.depth)
self.wm_lw.SetValue(self.assessmentMeasurements.length_x_width)
self.wm_area.SetValue(self.assessmentMeasurements.surface_area)
self.wm_volume.SetValue(self.assessmentMeasurements.wound_volume)
self.wm_push.SetValue(self.assessmentMeasurements.push_score)
def PopulateSegmentationResultsPane(self):
self.assessmentSegmentation = None
if self.patient_id > 0 and self.woundId > 0 and self.assessmentId > 0:
self.assessmentSegmentation = self.assessmentSegmentationController.getByPatientByWoundByNoByExp(self.patient_id, self.woundId, self.assessmentId, self.assessment_segmentation_experiment_id)
if self.assessmentSegmentation is None:
self.ts_granulation_percentage.SetValue("0.0")
self.ts_slough_percentage.SetValue("0.0")
self.ts_eschar_percentage.SetValue("0.0")
self.ts_bone_percentage.SetValue("0.0")
self.ts_staging.SetValue("0")
else:
self.ts_granulation_percentage.SetValue(self.assessmentSegmentation.granulation_percentage)
self.ts_slough_percentage.SetValue(self.assessmentSegmentation.slough_percentage)
self.ts_eschar_percentage.SetValue(self.assessmentSegmentation.eschar_percentage)
self.ts_bone_percentage.SetValue(self.assessmentSegmentation.bone_percentage)
self.ts_staging.SetValue(self.assessmentSegmentation.ulcer_stage)
def PopulateTemperatureResultsPane(self):
self.assessmentTemperature = None
if self.patient_id > 0 and self.woundId > 0 and self.assessmentId > 0:
self.assessmentTemperature = self.assessmentTemperatureController.getByPatientByWoundByNoByExp(self.patient_id, self.woundId, self.assessmentId, self.assessment_temperature_experiment_id)
if self.assessmentTemperature is None:
self.t_max.SetValue("0.0")
self.t_min.SetValue("0.0")
self.t_baseline.SetValue("0.0")
self.t_sigma.SetValue("0.000")
self.t_segment_1.SetValue("0.0")
self.t_segment_2.SetValue("0.0")
self.t_segment_3.SetValue("0.0")
self.t_segment_4.SetValue("0.0")
self.t_segment_5.SetValue("0.0")
else:
self.t_max.SetValue(self.assessmentTemperature.max_temperature)
self.t_min.SetValue(self.assessmentTemperature.min_temperature)
self.t_baseline.SetValue(self.assessmentTemperature.baseline_temperature)
self.t_sigma.SetValue(self.assessmentTemperature.temperature_variation_sigma)
self.t_segment_1.SetValue(self.assessmentTemperature.temperature_segment_1_percentage)
self.t_segment_2.SetValue(self.assessmentTemperature.temperature_segment_2_percentage)
self.t_segment_3.SetValue(self.assessmentTemperature.temperature_segment_3_percentage)
self.t_segment_4.SetValue(self.assessmentTemperature.temperature_segment_4_percentage)
self.t_segment_5.SetValue(self.assessmentTemperature.temperature_segment_5_percentage)
def DoSelectAssessment(self, event): # wxGlade: MainFrame.<event_handler>
"""
Event handler for changing an assessment drop-down
"""
idx = event.GetSelection()
print "Assessment select event hander....index = %d" % idx
cb = event.GetEventObject()
self.assessmentId = int(cb.GetClientData(event.GetSelection()))
print "Assessment select event hander....assessmentId = %d" % self.assessmentId
self.cv_assessment_id.Select(idx)
self.ms_assessment_id.Select(idx)
self.ch_assessment_id.Select(idx)
self.wm_assessment_id.Select(idx)
self.ts_assessment_id.Select(idx)
self.th_assessment_id.Select(idx)
self.cs_assessment_id.Select(idx)
self.cc_assessment_no.Select(idx)
self.start_stop_button.SetLabel("Record")
# Disable all buttons on multi-spectral tab
self.ms_reset_button.Disable()
self.ms_snapshot_button.Disable()
# Disable all buttons on biochemical tab
self.baseline_start_stop_button.Disable()
self.baseline_reset_button.Disable()
self.wound_start_stop_button.Disable()
self.wound_reset_button.Disable()
# Perform tests of prior sampling
if self.patient_id > 0 and self.woundId > 0 and self.assessmentId > 0:
self.director.SetAssessmentId(self.assessmentId)
self.sessionController = AssessmentSessionController(db, AssessmentSession, OlvAssessmentSession, OlvAssessmentSessionCols)
self.assessmentSession = self.sessionController.getByPatientByWoundByAssessment(self.patient_id,self.woundId,self.assessmentId)
if self.assessmentSession.collection_status_chemical and self.assessmentSession.collection_status_chemical != "Not collected":
self.ch_wound_msg.SetValue(self.assessmentSession.collection_status_chemical)
self.wound_reset_button.Enable()
else:
self.ch_wound_msg.SetValue("Not collected")
self.wound_start_stop_button.Enable()
if self.assessmentSession.collection_status_chemical_bl and self.assessmentSession.collection_status_chemical_bl != "Not collected":
self.ch_baseline_msg.SetValue(self.assessmentSession.collection_status_chemical_bl)
self.baseline_reset_button.Enable()
else:
self.ch_baseline_msg.SetValue("Not collected")
self.baseline_start_stop_button.Enable()
if self.assessmentSession.collection_status_visual == "Completed":
self.start_stop_button.SetLabel("Play")
if self.previewOn:
self.start_stop_button.Enable()
self.visual_msg.SetValue("Click Play to enable playback of previously recorded assessment")
else:
self.start_stop_button.Disable()
self.visual_msg.SetValue("Click Preview to enable play button")
self.VisualControl("play")
self.cv_reset_button.Enable()
if self.touchscreen:
self.touchframe.touch_record_button.Disable()
else:
self.start_stop_button.SetLabel("Record")
if self.previewOn:
self.start_stop_button.Enable()
if self.touchscreen:
self.touchframe.touch_record_button.Enable()
else:
self.start_stop_button.Disable()
if self.touchscreen:
self.touchframe.touch_record_button.Disable()
self.VisualControl("")
self.cv_reset_button.Disable()
if self.assessmentSession.collection_status_multi_spectral and self.assessmentSession.collection_status_multi_spectral != "Not collected":
self.ms_msg.SetValue(self.assessmentSession.collection_status_multi_spectral)
self.ms_snapshot_button.Disable()
if self.touchscreen:
self.touchframe.touch_snapshot_button.Disable()
self.ms_reset_button.Enable()
else:
self.ms_msg.SetValue("Not collected")
if self.assessmentSession.collection_status_visual == "Completed":
self.ms_snapshot_button.Enable()
if self.touchscreen:
self.touchframe.touch_snapshot_button.Enable()
else:
self.ms_msg.SetValue("Not collected - collect visual data first!")
self.ms_snapshot_button.Disable()
if self.touchscreen:
self.touchframe.touch_snapshot_button.Disable()
else:
self.ms_msg.SetValue('No assessment selected')
self.ch_wound_msg.SetValue('No assessment selected')
self.ch_baseline_msg.SetValue('No assessment selected')
self.start_stop_button.SetLabel("Record")
self.start_stop_button.Disable()
self.cv_reset_button.Disable()
self.ms_snapshot_button.Disable()
if self.touchscreen:
self.touchframe.touch_snapshot_button.Disable()
self.PopulateClinicalCollectionPane()
self.PopulateMeasurementsResultsPane()
self.PopulateSegmentationResultsPane()
self.PopulateTemperatureResultsPane()
def DoNewWound(self, event): # wxGlade: MainFrame.<event_handler>
"""
Menu handler for adding new wounds
"""
global db
self.panel_1.Disable()
# set up controllers
woundController = WoundAssessmentController(db, WoundAssessment, None, None)
woundDialog = AddModifyWoundDialog(woundController, WoundAssessment, db, row=None, title="Add", addRecord=True, patientId=self.patient_id)
rc = woundDialog.ShowModal()
woundDialog.Destroy()
self.PopulateWoundDropDowns()
self.RefreshTabs()
self.panel_1.Enable()
def DoWoundList(self, event): # wxGlade: MainFrame.<event_handler>
"""
Menu handler for adding, modifying, deleting wounds
"""
global db
self.panel_1.Disable()
olvDialog = WoundAssessmentDialog(None, db, self.patient_id)
rc = olvDialog.ShowModal()
olvDialog.Destroy()
self.PopulateWoundDropDowns()
self.RefreshTabs()
self.panel_1.Enable()
def DoNewAssessment(self, event): # wxGlade: MainFrame.<event_handler>
"""
Menu handler for creating a new assessment
"""
global db
self.panel_1.Disable()
sessionController = AssessmentSessionController(db, AssessmentSession, OlvAssessmentSession, OlvAssessmentSessionCols)
if self.patient_id < 0:
util.showMessageDialog("Need to select a patient!", "Failure!", wx.ICON_INFORMATION)
self.panel_1.Enable()
return
if self.woundId < 0:
util.showMessageDialog("Need to select a wound!", "Failure!", wx.ICON_INFORMATION)
self.panel_1.Enable()
return
(rc, msg) = sessionController.createNewAssessment(self.patient_id,self.woundId)
if rc == 0:
self.SetStatusText("Successfully created new assessment")
else:
util.showMessageDialog(msg, "Failure!", wx.ICON_INFORMATION)
self.PopulateAssessmentDropDowns()
self.RefreshTabs()
self.panel_1.Enable()
def DoAssessmentList(self, event): # wxGlade: MainFrame.<event_handler>
"""
Menu handler for adding, modifying, deleting assessments
"""
global db
self.panel_1.Disable()
olvDialog = OLVDialog(None, db, AssessmentSession, OlvAssessmentSession, OlvAssessmentSessionCols)
rc = olvDialog.ShowModal()
olvDialog.Destroy()
self.PopulateAssessmentDropDowns()
self.RefreshTabs()
self.panel_1.Enable()
def DoAlgorithmList(self, event): # wxGlade: MainFrame.<event_handler>
"""
Menu handler for adding, modifying, deleting algorithms and algorithm defaults
"""
global db
self.panel_1.Disable()
olvDialog = AlgorithmDialog(None, db)
rc = olvDialog.ShowModal()
olvDialog.Destroy()
self.PopulateAssessmentDropDowns()
self.RefreshTabs()
self.panel_1.Enable()
def DoExperimentList(self, event): # wxGlade: MainFrame.<event_handler>
"""
Menu handler for adding, modifying, deleting experiments and experiment configurations
"""
global db
self.panel_1.Disable()
olvDialog = ExperimentDialog(None, db)
rc = olvDialog.ShowModal()
olvDialog.Destroy()
self.panel_1.Enable()
def OnClinicalSave(self, event): # wxGlade: MainFrame.<event_handler>
"""
Button handler to save clinical assessment form to the database
"""
if self.patient_id < 0:
util.showMessageDialog("Need to select a patient!", "Failure!", wx.ICON_INFORMATION)
return
if self.woundId < 0:
util.showMessageDialog("Need to select a wound!", "Failure!", wx.ICON_INFORMATION)
return
if self.assessmentId < 0:
util.showMessageDialog("Need to select an assessment number or create a new one!", "Failure!", wx.ICON_INFORMATION)
return
self.clinicalAssessment = None
if self.patient_id > 0 and self.woundId > 0 and self.assessmentId > 0:
self.clinicalAssessment = self.clinicalWoundController.getByPatientByWoundByNo(self.patient_id, self.woundId, self.assessmentId)
if self.clinicalAssessment is None:
self.clinicalAssessment = ClinicalWoundAssessment()
self.clinicalAssessment.patient_id = self.patient_id
self.clinicalAssessment.wound_id = self.woundId
self.clinicalAssessment.assessment_id = self.assessmentId
adate_wx = self.cc_datepicker.GetValue()
atime_wx = self.cc_timepicker.GetValue(as_wxDateTime=True)
adate = wx.calendar._wxdate2pydate(adate_wx)
atime = time(atime_wx.GetHour(),atime_wx.GetMinute(),atime_wx.GetSecond())
assessmentDate = datetime.combine(adate,atime)
self.clinicalAssessment.assessment_date = assessmentDate
self.clinicalAssessment.length = float(self.cc_length.GetValue())
self.clinicalAssessment.width = float(self.cc_width.GetValue())
self.clinicalAssessment.depth = float(self.cc_depth.GetValue())
self.clinicalAssessment.undermining_depth = float(self.cc_u_depth.GetValue())
self.clinicalAssessment.undermining_description = self.cc_u_desc.GetValue()
self.clinicalAssessment.ulcer_stage = int(self.cc_staging.GetValue())
self.clinicalAssessment.bed_color = self.cc_bed_color.GetValue()
self.clinicalAssessment.exudate_amount = self.cc_ex_amt.GetValue()
self.clinicalAssessment.exudate_type = self.cc_ex_type.GetValue()
self.clinicalAssessment.granulation_percentage = float(self.cc_granulation_percentage.GetValue())
self.clinicalAssessment.slough_percentage = float(self.cc_slough_percentage.GetValue())
self.clinicalAssessment.eschar_percentage = float(self.cc_eschar_percentage.GetValue())
self.clinicalAssessment.bone_percentage = float(self.cc_bone_percentage.GetValue())
self.clinicalAssessment.peri_ulcer_area_description = self.cc_peri_desc.GetValue()
self.clinicalAssessment.blanching_exists = self.cc_blanching_exists.GetValue()
self.clinicalAssessment.infection_process = self.cc_infection.GetValue()
self.clinicalAssessment.odor_intensity = self.cc_odor_intensity.GetValue()
self.clinicalAssessment.odor_description = self.cc_odor_description.GetValue()
(o, rc, msg) = self.clinicalWoundController.save(self.clinicalAssessment)
if rc == 0:
self.main_frame_statusbar.SetStatusText("Clinical wound assessment saved successfully")
else:
util.showMessageDialog(msg, "Failure!", wx.ICON_INFORMATION)
def OnClinicalReset(self, event): # wxGlade: MainFrame.<event_handler>
"""
Button handler to reset clinical assessment form to original database values
"""
self.PopulateClinicalCollectionPane()
def DoConfigList(self, event): # wxGlade: MainFrame.<event_handler>
"""
Menu handler for adding, editing, deleting system configuration parameters
"""
global db
self.panel_1.Disable()
olvDialog = OLVDialog(None, db, SystemConfiguration, OlvSystemConfiguration, OlvSystemConfigurationCols)
rc = olvDialog.ShowModal()
olvDialog.Destroy()
self.panel_1.Enable()
def OnStartChemBaseline(self, event): # wxGlade: MainFrame.<event_handler>
"""
Button handler to start chemical baseline sampling
"""
global db
self.baseline_start_stop_button.Disable()
if hasattr(self,'collector'):
self.collector.Stop()
else:
self.collector = BiochemicalCollection(self.system_config['CHEMICAL_SENSOR_PORT'], db, self.director)
msg = self.collector.GetMessage()
if msg:
self.ch_baseline_msg.SetValue(msg)
self.baseline_start_stop_button.Enable()
return
print "Calling collector.Setup with baseline"
self.collector.Setup("baseline")
msg = self.collector.GetMessage()
if msg:
self.ch_baseline_msg.SetValue(msg)
self.baseline_start_stop_button.Enable()
return
print "entering collection loop"
for k in range(self.chemicalBaselineScanValue):
msg = "Collecting scan " + str(k+1) + " of " + str(self.chemicalBaselineScanValue) + "..."
print msg
self.ch_baseline_msg.SetValue(msg)
self.Update()
self.collector.Collector()
msg = self.collector.GetMessage()
if msg:
self.ch_baseline_msg.SetValue(msg)
self.collector.Stop()
self.baseline_reset_button.Enable()
return
f = self.collector.GetFrequencies()
impedances = self.collector.GetImpedances()
zr = []
zi = []
for z in impedances:
zr.append(z.real)
zi.append(z.imag)
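# Plot the collected sweep: real and imaginary impedance components vs. frequency.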
self.chem_axes.clear()
self.chem_axes.plot(f, zr, 'k', label='Z-real')
self.chem_axes.plot(f, zi, 'k--', label='Z-imag')
self.chem_axes.legend(loc='upper right')
self.chem_canvas.draw()
self.chem_canvas.flush_events()
self.collector.Stop()
self.assessmentSession.collection_status_chemical_bl = "Completed"
(self.assessmentSession, rc, msg) = self.sessionController.save(self.assessmentSession)
if rc == 0:
self.ch_baseline_msg.SetValue("Assessment chemical baseline data collected")
self.baseline_start_stop_button.Disable()
self.baseline_reset_button.Enable()
else:
self.ch_baseline_msg.SetValue(msg)
self.baseline_start_stop_button.Enable()
self.baseline_reset_button.Disable()
def OnResetChemBaseline(self, event): # wxGlade: MainFrame.<event_handler>
"""
Button handler to reset chemical baseline sampling
"""
# Use "Not collected" to match the status checks in DoSelectAssessment
self.assessmentSession.collection_status_chemical_bl = "Not collected"
(self.assessmentSession, rc, msg) = self.sessionController.save(self.assessmentSession)
if rc == 0:
self.ch_baseline_msg.SetValue("Not collected")
self.baseline_start_stop_button.Enable()
self.baseline_reset_button.Disable()
else:
self.ch_baseline_msg.SetValue("Error resetting sample: "+msg)
self.baseline_start_stop_button.Disable()
self.baseline_reset_button.Enable()
def OnStartChemWound(self, event): # wxGlade: MainFrame.<event_handler>
"""
Button handler to start chemical wound sampling
"""
global db
self.wound_start_stop_button.Disable()
if hasattr(self,'collector'):
self.collector.Stop()
else:
self.collector = BiochemicalCollection(self.system_config['CHEMICAL_SENSOR_PORT'], db, self.director)
msg = self.collector.GetMessage()
if msg:
self.ch_wound_msg.SetValue(msg)
self.wound_start_stop_button.Enable()
return
print "Calling collector.Setup with wound"
self.collector.Setup("wound")
msg = self.collector.GetMessage()
if msg:
self.ch_wound_msg.SetValue(msg)
self.wound_start_stop_button.Enable()
return
print "entering collection loop"
for k in range(self.chemicalWoundScanValue):
msg = "Collecting scan " + str(k+1) + " of " + str(self.chemicalWoundScanValue) + "..."
print msg
self.ch_wound_msg.SetValue(msg)
self.Update()
self.collector.Collector()
msg = self.collector.GetMessage()
if msg:
self.ch_wound_msg.SetValue(msg)
self.collector.Stop()
self.wound_reset_button.Enable()
return
f = self.collector.GetFrequencies()
impedances = self.collector.GetImpedances()
zr = []
zi = []
for z in impedances:
zr.append(z.real)
zi.append(z.imag)
self.chem_axes.clear()
self.chem_axes.plot(f, zr, 'k', label='Z-real')
self.chem_axes.plot(f, zi, 'k--', label='Z-imag')
self.chem_axes.legend(loc='upper right')
self.chem_canvas.draw()
self.chem_canvas.flush_events()
self.collector.Stop()
self.assessmentSession.collection_status_chemical = "Completed"
(self.assessmentSession, rc, msg) = self.sessionController.save(self.assessmentSession)
if rc == 0:
self.ch_wound_msg.SetValue("Assessment chemical wound data collected")
self.wound_start_stop_button.Disable()
self.wound_reset_button.Enable()
else:
self.ch_wound_msg.SetValue(msg)
self.wound_start_stop_button.Enable()
self.wound_reset_button.Disable()
def OnResetChemWound(self, event): # wxGlade: MainFrame.<event_handler>
"""
Button handler to reset chemical wound sampling
"""
# Use "Not collected" to match the status checks in DoSelectAssessment
self.assessmentSession.collection_status_chemical = "Not collected"
(self.assessmentSession, rc, msg) = self.sessionController.save(self.assessmentSession)
if rc == 0:
self.ch_wound_msg.SetValue("Not collected")
self.wound_start_stop_button.Enable()
self.wound_reset_button.Disable()
else:
self.ch_wound_msg.SetValue("Error resetting sample: "+msg)
self.wound_start_stop_button.Disable()
self.wound_reset_button.Enable()
def DoChangePassword(self, event): # wxGlade: MainFrame.<event_handler>
"""
Menu handler for changing the current user's password
"""
global db
self.panel_1.Disable()
changePasswordDialog = ChangePasswordDialog(None)
changePasswordDialog.SetDb(db)
rc = changePasswordDialog.ShowModal()
# print "rc=%d" % rc
if rc == 1:
self.SetStatusText("Password successfully changed")
changePasswordDialog.Destroy()
self.panel_1.Enable()
def InitCanvases(self):
global db, cameraAlgorithm
print "in InitCanvases"
self.experimentController = ExperimentController(db, Experiment, None, None)
experiment = self.experimentController.getDefaultExperiment(cameraAlgorithm)
self.experiment_id = experiment.id
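# Build the shared camera canvases; with a touchscreen present, three extra
# mirrored canvases are created for the touch frame.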
if self.touchscreen:
self.multi_canvases = AssessmentMultiCameraCanvases (None, 401, "PU3dCanvas",
self.rgb_video_panel, 402, "PURgbCanvas",
self.depth_video_panel, 403, "PUDepthCanvas",
self.thermal_video_panel, 404, "PUThermalCanvas",
self.multi_video_panel, 405, "PUMultispectralCanvas",
self.rgb_video_panel_copy, 406, "PURgbCopyCanvas",
self.touchframe.rgb_video_touch_panel, 502, "PURgbTouchCanvas",
self.touchframe.thermal_video_touch_panel, 504, "PUThermalTouchCanvas",
self.touchframe.multi_video_touch_panel, 505, "PUMultispectralTouchCanvas",
db, experiment.id)
else:
self.multi_canvases = AssessmentMultiCameraCanvases (None, 401, "PU3dCanvas",
self.rgb_video_panel, 402, "PURgbCanvas",
self.depth_video_panel, 403, "PUDepthCanvas",
self.thermal_video_panel, 404, "PUThermalCanvas",
self.multi_video_panel, 405, "PUMultispectralCanvas",
self.rgb_video_panel_copy, 406, "PURgbCopyCanvas",
None, None, None,
None, None, None,
None, None, None,
db, experiment.id)
#return # BRH
# w3 = self.multi_canvases.Get3dWindow()
wr = self.multi_canvases.GetRgbWindow()
wd = self.multi_canvases.GetDepthWindow()
wt = self.multi_canvases.GetThermalWindow()
wm = self.multi_canvases.GetMultispectralWindow()
wr2 = self.multi_canvases.GetRgbCopyWindow()
# Set up sizers and layout canvases into windows
# w3.SetMinSize(wx.Size(640, 480))
# reconPanelSizer = wx.BoxSizer(wx.VERTICAL)
# reconPanelSizer.Add(w3, 0, wx.EXPAND, 0, self.multi_canvases.canvas_3d)
# self.recon_3d_panel.SetSizer(reconPanelSizer)
# reconPanelSizer.Layout()
#wr.SetMinSize(wx.Size(640, 480))
rgbPanelSizer = wx.BoxSizer(wx.VERTICAL)
rgbPanelSizer.Add(wr, 0, wx.EXPAND, 0, self.multi_canvases.canvas_rgb)
self.rgb_video_panel.SetSizer(rgbPanelSizer)
rgbPanelSizer.Layout()
# wd.SetMinSize(wx.Size(640, 480))
depthPanelSizer = wx.BoxSizer(wx.VERTICAL)
depthPanelSizer.Add(wd, 0, wx.EXPAND, 0, self.multi_canvases.canvas_depth)
self.depth_video_panel.SetSizer(depthPanelSizer)
depthPanelSizer.Layout()
# wt.SetMinSize(wx.Size(640, 480))
thermalPanelSizer = wx.BoxSizer(wx.VERTICAL)
thermalPanelSizer.Add(wt, 0, wx.EXPAND, 0, self.multi_canvases.canvas_thermal)
self.thermal_video_panel.SetSizer(thermalPanelSizer)
thermalPanelSizer.Layout()
# wm.SetMinSize(wx.Size(640, 480))
multispectralPanelSizer = wx.BoxSizer(wx.VERTICAL)
multispectralPanelSizer.Add(wm, 0, wx.EXPAND, 0, self.multi_canvases.canvas_multispectral)
self.multi_video_panel.SetSizer(multispectralPanelSizer)
multispectralPanelSizer.Layout()
#wr2.SetMinSize(wx.Size(640, 360))
rgbCopyPanelSizer = wx.BoxSizer(wx.VERTICAL)
rgbCopyPanelSizer.Add(wr2, 0, wx.EXPAND, 0, self.multi_canvases.canvas_rgb_copy)
self.rgb_video_panel_copy.SetSizer(rgbCopyPanelSizer)
rgbCopyPanelSizer.Layout()
self.Layout()
windows = [wr, wd, wt, wm, wr2]
for window in windows:
w = window
while w:
w.Layout()
w.Refresh()
w = w.GetParent()
if self.touchscreen:
wr_touch = self.multi_canvases.GetRgbTouchWindow()
wt_touch = self.multi_canvases.GetThermalTouchWindow()
wm_touch = self.multi_canvases.GetMultispectralTouchWindow()
rgbTouchPanelSizer = wx.BoxSizer(wx.VERTICAL)
rgbTouchPanelSizer.Add(wr_touch, 0, wx.EXPAND, 0, self.multi_canvases.canvas_rgb_touch)
self.touchframe.rgb_video_touch_panel.SetSizer(rgbTouchPanelSizer)
rgbTouchPanelSizer.Layout()
thermalTouchPanelSizer = wx.BoxSizer(wx.VERTICAL)
thermalTouchPanelSizer.Add(wt_touch, 0, wx.EXPAND, 0, self.multi_canvases.canvas_thermal_touch)
self.touchframe.thermal_video_touch_panel.SetSizer(thermalTouchPanelSizer)
thermalTouchPanelSizer.Layout()
multispectralTouchPanelSizer = wx.BoxSizer(wx.VERTICAL)
multispectralTouchPanelSizer.Add(wm_touch, 0, wx.EXPAND, 0, self.multi_canvases.canvas_multispectral_touch)
self.touchframe.multi_video_touch_panel.SetSizer(multispectralTouchPanelSizer)
multispectralTouchPanelSizer.Layout()
self.Layout()
windows = [wr_touch, wt_touch, wm_touch]
for window in windows:
w = window
while w:
w.Layout()
w.Refresh()
w = w.GetParent()
wx.Sleep(3)
# Start up thermal camera client
thermal_camera_client_path = self.system_config['THERMAL_CAMERA_CLIENT_PATH']
(junk,executable) = os.path.split(thermal_camera_client_path)
# kill old process
try:
os.system('taskkill /f /im '+executable)
except Exception:
pass
si = subprocess.STARTUPINFO()
si.dwFlags = subprocess.STARTF_USESTDHANDLES | subprocess.STARTF_USESHOWWINDOW
si.wShowWindow = 15
print "Starting thermal camera client"
self.thermal_client_process = subprocess.Popen(str(thermal_camera_client_path),startupinfo=si)
print "PID = %d" % (self.thermal_client_process.pid)
def OnPreviewClick(self, event): # wxGlade: MainFrame.<event_handler>
"""
Button handler for the preview button
"""
if self.previewPaused:
self.multi_canvases.ctrl_.execute_continously()
self.previewPaused = False
elif self.previewOn:
self.multi_canvases.TimeToQuit(event)
self.previewOn = False
else:
self.InitCanvases()
self.previewOn = True
if self.patient_id > 0 and self.woundId > 0 and self.assessmentId > 0:
self.start_stop_button.Enable()
def OnRecordClick(self, event): # wxGlade: MainFrame.<event_handler>
"""
Button handler for the record/stop/erase button
"""
btn = event.GetEventObject()
self.start_stop_button.Disable()
if self.touchscreen:
self.touchframe.touch_record_button.Disable()
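# Three-phase handler: on first entry (no active view button) create the output
# folders and arm recording or playback; while the label reads "Record", capture
# a timed clip for the current view; otherwise the button acts as Play.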
if self.activeVisualBtn < 0:
rgb_folder = self.director.GetRgbFileDirectory()
depth_folder = self.director.GetDepthFileDirectory()
thermal_folder = self.director.GetThermalFileDirectory()
multispectral_folder = self.director.GetMultiSpectralFileDirectory()
try:
os.makedirs(rgb_folder)
except OSError as exc: # Python >2.5
if exc.errno == errno.EEXIST and os.path.isdir(rgb_folder):
pass
else:
raise
try:
os.makedirs(depth_folder)
except OSError as exc: # Python >2.5
if exc.errno == errno.EEXIST and os.path.isdir(depth_folder):
pass
else:
raise
try:
os.makedirs(thermal_folder)
except OSError as exc: # Python >2.5
if exc.errno == errno.EEXIST and os.path.isdir(thermal_folder):
pass
else:
raise
try:
os.makedirs(multispectral_folder)
except OSError as exc: # Python >2.5
if exc.errno == errno.EEXIST and os.path.isdir(multispectral_folder):
pass
else:
raise
if btn.GetLabelText() == "Record":
self.multi_canvases.SetupRecording(rgb_folder, depth_folder, thermal_folder, multispectral_folder)
self.VisualControl("reset")
msg = "Position unit over " + self.visualBtnLabels[self.activeVisualBtn] + " portion of wound and click Record"
self.visual_msg.SetValue(msg)
self.Update()
if self.touchscreen:
self.touchframe.SetMsg(msg)
else:
self.multi_canvases.SetupPlayback(rgb_folder, depth_folder, thermal_folder, multispectral_folder)
self.VisualControl("playing")
self.visual_msg.SetValue("Click one of the 5 view buttons to play that clip")
elif btn.GetLabelText() == "Record":
msg = "Recording for " + self.system_config['VISUAL_RECORD_TIME'] + " seconds"
self.visual_msg.SetValue(msg)
self.Update()
if self.touchscreen:
self.touchframe.SetMsg(msg)
self.multi_canvases.StartRecording(self.activeVisualBtn)
wx.Sleep(int(self.system_config['VISUAL_RECORD_TIME']))
self.multi_canvases.StopRecording()
wx.Bell()
wx.Bell()
wx.Bell()
activeVisualBtnId = self.activeVisualBtn + 800
self.FindWindowById(activeVisualBtnId).SetLabel(self.visualBtnLabels[self.activeVisualBtn] + "\n(Taken)")
if self.activeVisualBtn == 5:
self.assessmentSession.collection_status_visual = "Completed"
(self.assessmentSession, rc, msg) = self.sessionController.save(self.assessmentSession)
if rc == 0:
self.visual_msg.SetValue("Assessment visual data collected")
if self.touchscreen:
self.touchframe.SetMsg("Assessment visual data collected")
self.start_stop_button.SetLabel("Play")
self.start_stop_button.Enable()
self.cv_reset_button.Enable()
self.VisualControl("playing")
self.visual_msg.SetValue("Click one of the 5 view buttons to play that clip")
# Set state of multi-spectral pane
if self.assessmentSession.collection_status_multi_spectral and self.assessmentSession.collection_status_multi_spectral != "Not collected":
self.ms_msg.SetValue(self.assessmentSession.collection_status_multi_spectral)
self.ms_snapshot_button.Disable()
if self.touchscreen:
self.touchframe.touch_snapshot_button.Disable()
self.ms_reset_button.Enable()
else:
self.ms_msg.SetValue("Not collected")
self.ms_snapshot_button.Enable()
if self.touchscreen:
self.touchframe.touch_snapshot_button.Enable()
# End of set state of multi-spectral pane
else:
self.visual_msg.SetValue(msg)
if self.touchscreen:
self.touchframe.SetMsg(msg)
self.touchframe.touch_record_button.Disable()
self.start_stop_button.Disable()
self.cv_reset_button.Enable()
return
self.VisualControl("next")
msg = "Position unit over " + self.visualBtnLabels[self.activeVisualBtn] + " portion of wound and click Record"
self.visual_msg.SetValue(msg)
self.Update()
if self.touchscreen:
self.touchframe.SetMsg(msg)
else:
print "Play button clicked in play mode"
self.start_stop_button.Enable()
if self.touchscreen:
self.touchframe.touch_record_button.Enable()
def OnPlayClick(self, event): # wxGlade: MainFrame.<event_handler>
"""
Button handler for the play button
"""
if self.play_button.GetLabelText() == "Play":
self.playbackOn = True
self.play_button.SetLabel("Stop Play")
rgb_folder = self.director.GetRgbFileDirectory()
depth_folder = self.director.GetDepthFileDirectory()
thermal_folder = self.director.GetThermalFileDirectory()
multispectral_folder = self.director.GetMultiSpectralFileDirectory()
self.multi_canvases.SetupPlayback(rgb_folder, depth_folder, thermal_folder, multispectral_folder)
else:
self.playbackOn = False
self.multi_canvases.StopPlayback()
self.play_button.SetLabel("Play")
def DoVisualNext(self, event): # wxGlade: MainFrame.<event_handler>
"""
Button handler for the next button
"""
if self.previewPaused:
self.multi_canvases.ctrl_.execute_continously()
self.previewPaused = False
self.VisualControl('next')
def DoVisualBack(self, event): # wxGlade: MainFrame.<event_handler>
"""
Button handler for the back button
"""
if self.previewPaused:
self.multi_canvases.ctrl_.execute_continously()
self.previewPaused = False
self.VisualControl('back')
def DoVisualReset(self, event): # wxGlade: MainFrame.<event_handler>
"""
Button handler to reset visual sampling
"""
rc = util.showMessageDialog("Do you want to erase this assessment?", "Erase assessment?", flag=wx.YES_NO|wx.ICON_QUESTION)
if rc == wx.ID_NO:
return
self.assessmentSession.collection_status_visual = "Reset"
(self.assessmentSession, rc, msg) = self.sessionController.save(self.assessmentSession)
if rc == 0:
self.director.CleanRgbFileDirectory()
self.director.CleanDepthFileDirectory()
self.director.CleanThermalFileDirectory()
self.visual_msg.SetValue("Not Collected")
if self.touchscreen:
self.touchframe.SetMsg("Not Collected")
self.touchframe.touch_record_button.Enable()
self.start_stop_button.Enable()
self.start_stop_button.SetLabel("Record")
self.cv_reset_button.Disable()
self.VisualControl('reset')
else:
self.visual_msg.SetValue("Error resetting visual sample: "+msg)
if self.touchscreen:
self.touchframe.SetMsg("Error resetting visual sample: "+msg)
self.touchframe.touch_record_button.Disable()
self.start_stop_button.Disable()
self.cv_reset_button.Enable()
def OnMSSnapshotClick(self, event): # wxGlade: MainFrame.<event_handler>
"""
Button handler to take a multi-spectral sample
"""
self.multi_canvases.ctrl_.execute_one_step()
self.previewPaused = True
multispectral_folder = self.director.GetMultiSpectralFileDirectory()
try:
os.makedirs(multispectral_folder)
except OSError as exc: # Python >2.5
if exc.errno == errno.EEXIST and os.path.isdir(multispectral_folder):
pass
else: raise
self.multi_canvases.proc_.set_hyperspectral_file_directory(str(multispectral_folder))
self.multi_canvases.proc_.take_hyperspectral_snapshot()
self.multi_canvases.ctrl_.execute_continously()
self.assessmentSession.collection_status_multi_spectral = "Completed"
(self.assessmentSession, rc, msg) = self.sessionController.save(self.assessmentSession)
if rc == 0:
self.ms_msg.SetValue("Assessment multi-spectral data collected")
self.ms_snapshot_button.Disable()
if self.touchscreen:
self.touchframe.touch_snapshot_button.Disable()
self.ms_reset_button.Enable()
else:
self.ms_msg.SetValue(msg)
self.ms_snapshot_button.Enable()
if self.touchscreen:
self.touchframe.touch_snapshot_button.Enable()
self.ms_reset_button.Disable()
def OnMSResetClick(self, event): # wxGlade: MainFrame.<event_handler>
"""
Button handler to reset multi-spectral sampling
"""
rc = util.showMessageDialog("Do you want to erase this assessment?", "Erase assessment?", flag=wx.YES_NO|wx.ICON_QUESTION)
if rc == wx.ID_NO:
return
self.assessmentSession.collection_status_multi_spectral = "Reset"
(self.assessmentSession, rc, msg) = self.sessionController.save(self.assessmentSession)
if rc == 0:
self.director.CleanMultiSpectralFileDirectory()
self.ms_msg.SetValue("Not Collected")
self.ms_snapshot_button.Enable()
if self.touchscreen:
self.touchframe.touch_snapshot_button.Enable()
self.ms_reset_button.Disable()
else:
self.ms_msg.SetValue("Error resetting sample: "+msg)
self.ms_snapshot_button.Disable()
if self.touchscreen:
self.touchframe.touch_snapshot_button.Disable()
self.ms_reset_button.Enable()
def OnMSPreviewClick(self, event): # wxGlade: MainFrame.<event_handler>
"""
Button handler for the preview button
"""
if self.previewPaused:
self.multi_canvases.ctrl_.execute_continously()
self.previewPaused = False
elif self.previewOn:
self.multi_canvases.TimeToQuit(event)
self.previewOn = False
else:
self.InitCanvases()
self.previewOn = True
if self.patient_id > 0 and self.woundId > 0 and self.assessmentId > 0:
self.start_stop_button.Enable()
def OnTakeSnapshot(self, event): # wxGlade: MainFrame.<event_handler>
"""
Button handler for a snapshot button
"""
btnId = event.GetId() - 800
self.multi_canvases.ctrl_.execute_one_step()
self.previewPaused = True
self.multi_canvases.proc_.take_snapshot(btnId)
if btnId == 8:
self.assessmentSession.collection_status_visual = "Completed"
(self.assessmentSession, rc, msg) = self.sessionController.save(self.assessmentSession)
if rc == 0:
self.visual_msg.SetValue("Assessment visual data collected")
self.start_stop_button.Disable()
self.cv_next_button.Disable()
self.cv_back_button.Disable()
self.cv_reset_button.Enable()
else:
self.visual_msg.SetValue(msg)
self.start_stop_button.Enable()
self.cv_reset_button.Disable()
return
event.GetEventObject().Disable()
def VisualControl(self, directionParm):
"""
Function to manage all the snapshot buttons
"""
# visualBtnOrder = [4, 1, 7, 3, 5]
self.visualBtnLabels = ["", "Upper", "", "Left", "Center", "Right", "", "Lower", ""]
visualBtnNextO = [2, 7, 6, 5, 1, -1, 8, 3, 8]
visualBtnBackO = [5, 4, 0, 7, -1, 3, 2, 1, 6]
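# The five views (Upper=1, Left=3, Center=4, Right=5, Lower=7) occupy a 3x3
# grid of buttons with window IDs 800+index; visualBtnNextO/visualBtnBackO
# chain the capture order Center -> Upper -> Lower -> Left -> Right, with -1
# marking either end of the sequence.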
direction = directionParm
if not hasattr(self,'activeVisualBtn'):
direction = 'reset'
if direction == 'reset':
for btnId in (801,803,804,805,807):
self.FindWindowById(btnId).SetLabel(self.visualBtnLabels[btnId-800] + "\n(Not Taken)")
self.FindWindowById(btnId).Disable()
self.FindWindowById(804).Enable()
self.activeVisualBtn = 4
elif direction == 'next':
oldBtnId = self.activeVisualBtn + 800
self.FindWindowById(oldBtnId).Disable()
self.activeVisualBtn = visualBtnNextO[self.activeVisualBtn]
if self.activeVisualBtn >= 0:
newBtnId = self.activeVisualBtn + 800
self.FindWindowById(newBtnId).Enable()
elif direction == 'back':
oldBtnId = self.activeVisualBtn + 800
self.FindWindowById(oldBtnId).Disable()
self.activeVisualBtn = visualBtnBackO[self.activeVisualBtn]
if self.activeVisualBtn >= 0:
newBtnId = self.activeVisualBtn + 800
self.FindWindowById(newBtnId).Enable()
elif direction == 'play':
for btnId in (801,803,804,805,807):
self.FindWindowById(btnId).SetLabel(self.visualBtnLabels[btnId-800] + "\n(Taken)")
self.FindWindowById(btnId).Disable()
self.activeVisualBtn = -1
elif direction == 'playing':
for btnId in (801,803,804,805,807):
self.FindWindowById(btnId).SetLabel(self.visualBtnLabels[btnId-800] + "\n(Taken)")
self.FindWindowById(btnId).Enable()
self.activeVisualBtn = 4
else:
for btnId in (801,803,804,805,807):
self.FindWindowById(btnId).Disable()
self.activeVisualBtn = -1
def OnSelectSnapshot(self, event): # wxGlade: MainFrame.<event_handler>
"""
Event handler for the view-id buttons that control playback
"""
if self.start_stop_button.GetLabelText() == "Record":
return
btnId = event.GetId() - 800
print "OnSelectSnapshot button clicked, with btnId = "
print btnId
self.multi_canvases.StartPlayback(btnId)
# end of class MainFrame
class Assessment(wx.App, wx.lib.mixins.inspection.InspectionMixin):
"""
This class implements the Assessment System GUI.
It processes a configuration file, creates the main frame, and requires users
to log into the application.
Methods:
OnInit() - initializes the GUI
ReadConfiguration() - processes the main configuration file
"""
def OnInit(self):
"""
Initializes the GUI and displays a modal login panel.
"""
global db
self.Init(cmd=False) # initialize the inspection tool
self.ReadConfiguration()
wx.InitAllImageHandlers()
self.mainframe = MainFrame(None, -1, "Assessment", (0,0))
self.SetTopWindow(self.mainframe)
self.mainframe.Show(True)
self.mainframe.Maximize(True)
self.mainframe.panel_1.Disable()
# Check if system is not on battery power
if not util.GetPowerOnline():
rc = util.showMessageDialog("Computer is running on battery power and not AC power. Please insure unit is plugged into wall outlet and the power strip is turned on.", "Power not connected")
return 1
logon = LogonDialog(None)
logon.SetDb(db)
rc = logon.ShowModal()
# print "rc=%d" % rc
if rc == 1:
# get system configuration
systemConfigurationController = SystemConfigurationController(db, SystemConfiguration, None, None)
self.mainframe.system_config = {}
config = systemConfigurationController.getAll()
for conf in config:
self.mainframe.system_config[conf.parameter_name] = conf.parameter_value
pprint(self.mainframe.system_config)
self.mainframe.InitControllers()
self.mainframe.InitMisc()
self.mainframe.panel_1.Enable()
self.mainframe.Show()
self.mainframe.SetStatusText("Logon successful")
logon.Destroy()
return 1
logon.Destroy()
self.mainframe.Destroy()
wx.GetApp().ProcessIdle()
return 0
def ReadConfiguration(self):
"""
Reads main configuration file
"""
global db
config = ConfigParser.ConfigParser()
config.read('resources/prevention.cfg')
self.dbHost = config.get('MySql-localhost','dbHost')
print "dbHost = %s" % self.dbHost
self.dbSchema = config.get('MySql-localhost','dbSchema')
print "dbSchema = %s" % self.dbSchema
db = Database(self.dbHost,self.dbSchema)
# end of class assessment
if __name__ == "__main__":
gettext.install("assessment") # replace with the appropriate catalog name
print "creating app"
print os.path.abspath(".")
assessment = Assessment(0)
assessment.MainLoop()
print "exited MainLoop"
|
{
"content_hash": "f3d09edd85bbee5dc0572988468e9baf",
"timestamp": "",
"source": "github",
"line_count": 2775,
"max_line_length": 222,
"avg_line_length": 60.84576576576577,
"alnum_prop": 0.6383352976363217,
"repo_name": "VHAINNOVATIONS/GE-Pressure-Ulcer",
"id": "69a766bef34542e2c6ca9e52da89dc4de57f71d5",
"size": "168976",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python_gui_decision_support_webportal/python/assessment.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "10761"
},
{
"name": "C++",
"bytes": "1363462"
},
{
"name": "CSS",
"bytes": "172280"
},
{
"name": "JavaScript",
"bytes": "790100"
},
{
"name": "Python",
"bytes": "563688"
},
{
"name": "Shell",
"bytes": "3599"
},
{
"name": "XSLT",
"bytes": "68094"
}
],
"symlink_target": ""
}
|
import argparse
import logging as std_logging
import logging.config  # required for std_logging.config.dictConfig below
import os
import pprint
import sys
from st2common import log as logging
from st2reactor.rules.tester import RuleTester
__all__ = [
'main'
]
def _parse_args():
parser = argparse.ArgumentParser(description='Test the provided rule')
parser.add_argument('--rule', help='Path to the file containing rule definition',
required=True)
parser.add_argument('--trigger-instance',
help='Path to the file containing trigger instance definition',
required=True)
parser.add_argument('-v', '--verbose', help='increase output verbosity',
action='store_true')
return parser.parse_args()
def _setup_logging():
logging_config = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'default': {
'format': '%(asctime)s %(levelname)s %(name)s %(message)s'
},
},
'handlers': {
'console': {
'()': std_logging.StreamHandler,
'formatter': 'default'
}
},
'root': {
'handlers': ['console'],
'level': 'DEBUG',
},
}
std_logging.config.dictConfig(logging_config)
def main():
args = _parse_args()
if args.verbose:
_setup_logging()
output = logging.getLogger(__name__).info
else:
output = pprint.pprint
rule_file_path = os.path.realpath(args.rule)
trigger_instance_file_path = os.path.realpath(args.trigger_instance)
tester = RuleTester(rule_file_path=rule_file_path,
trigger_instance_file_path=trigger_instance_file_path)
matches = tester.evaluate()
if matches:
output('=== RULE MATCHES ===')
sys.exit(0)
else:
output('=== RULE DOES NOT MATCH ===')
sys.exit(1)
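# Convenience entry point for direct execution, a minimal sketch only: in st2
# proper, main() is assumed to be wired up as a packaged console script.
if __name__ == '__main__':
    main()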
|
{
"content_hash": "9d42a8aae6afa8a0da871f55d2c361f0",
"timestamp": "",
"source": "github",
"line_count": 70,
"max_line_length": 87,
"avg_line_length": 27.585714285714285,
"alnum_prop": 0.5572242361470741,
"repo_name": "Itxaka/st2",
"id": "b91ddf60a1dde4d39308157762f8fbb999a4fe07",
"size": "2711",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "st2reactor/st2reactor/cmd/rule_tester.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "198"
},
{
"name": "Makefile",
"bytes": "35769"
},
{
"name": "PowerShell",
"bytes": "299"
},
{
"name": "Python",
"bytes": "2673739"
},
{
"name": "Shell",
"bytes": "16008"
},
{
"name": "Slash",
"bytes": "677"
}
],
"symlink_target": ""
}
|
import logging
import os
import sys
ROOT_PATH = os.path.dirname(os.path.abspath(__file__))
sys.path.append(ROOT_PATH)
DEBUG = False
TEMPLATE_DEBUG = DEBUG
SITE_ID = 1
SITE_BRANDING = 'OpenStack'
SITE_NAME = 'openstack'
ENABLE_VNC = True
LOGIN_URL = '/auth/login'
LOGIN_REDIRECT_URL = '/'
MEDIA_ROOT = os.path.abspath(os.path.join(ROOT_PATH, '..', 'media'))
MEDIA_URL = '/media/'
STATIC_ROOT = os.path.abspath(os.path.join(ROOT_PATH, '..', 'static'))
STATIC_URL = '/static/'
ADMIN_MEDIA_PREFIX = '/static/admin/'
CREDENTIAL_AUTHORIZATION_DAYS = '5'
ROOT_URLCONF = 'dashboard.urls'
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django_openstack.middleware.keystone.AuthenticationMiddleware',
'django.middleware.doc.XViewMiddleware',
'django.middleware.locale.LocaleMiddleware',
'dashboard.middleware.DashboardLogUnhandledExceptionsMiddleware',
)
TEMPLATE_CONTEXT_PROCESSORS = (
'django.core.context_processors.debug',
'django.core.context_processors.i18n',
'django.core.context_processors.request',
'django.core.context_processors.media',
'django.core.context_processors.static',
'django.contrib.messages.context_processors.messages',
'django_openstack.context_processors.object_store',
'django_openstack.context_processors.tenants',
'django_openstack.context_processors.quantum',
)
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.load_template_source',
'django.template.loaders.app_directories.load_template_source',
)
TEMPLATE_DIRS = (
os.path.join(ROOT_PATH, 'templates'),
)
STATICFILES_DIRS = (
os.path.join(ROOT_PATH, 'static'),
)
INSTALLED_APPS = (
'dashboard',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django_openstack',
'django_openstack.templatetags',
'mailer',
)
TEST_RUNNER = 'django_nose.NoseTestSuiteRunner'
AUTHENTICATION_BACKENDS = ('django.contrib.auth.backends.ModelBackend',)
MESSAGE_STORAGE = 'django.contrib.messages.storage.cookie.CookieStorage'
SESSION_EXPIRE_AT_BROWSER_CLOSE = True
TIME_ZONE = None
gettext_noop = lambda s: s
LANGUAGES = (
('en', gettext_noop('English')),
('en-gb', gettext_noop('British English')),
('it', gettext_noop('Italiano')),
('es', gettext_noop('Spanish')),
('fr', gettext_noop('French')),
('ja', gettext_noop('Japanese')),
('pt', gettext_noop('Portuguese')),
('zh-cn', gettext_noop('Simplified Chinese')),
('zh-tw', gettext_noop('Traditional Chinese')),
)
LANGUAGE_CODE = 'en'
USE_I18N = True
ACCOUNT_ACTIVATION_DAYS = 7
TOTAL_CLOUD_RAM_GB = 10
try:
from local.local_settings import *
except Exception, e:
logging.exception(e)
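# Illustrative sketch of a local/local_settings.py override picked up by the
# try/import above; the values below are assumptions, not project defaults:
#
# DEBUG = True
# TOTAL_CLOUD_RAM_GB = 20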
if DEBUG:
logging.basicConfig(level=logging.DEBUG)
try:
import debug_toolbar
INSTALLED_APPS += ('debug_toolbar',)
MIDDLEWARE_CLASSES += (
'debug_toolbar.middleware.DebugToolbarMiddleware',)
except ImportError:
logging.info('Running in debug mode without debug_toolbar.')
OPENSTACK_KEYSTONE_DEFAULT_ROLE = 'Member'
|
{
"content_hash": "2a7883513aead3f9b16d607be7e39f70",
"timestamp": "",
"source": "github",
"line_count": 119,
"max_line_length": 72,
"avg_line_length": 27.705882352941178,
"alnum_prop": 0.7033666969972703,
"repo_name": "rnirmal/openstack-dashboard",
"id": "ef8919a34fd45c956ab24e5bd6e1f25e37dabd43",
"size": "4106",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "openstack-dashboard/dashboard/settings.py",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
}
|
"""Flow Resource functions."""
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from typing import Dict, List
from google.cloud.dialogflowcx_v3beta1 import services
from google.cloud.dialogflowcx_v3beta1 import types
from google.protobuf import field_mask_pb2
from dfcx_scrapi.core import scrapi_base
# logging config
logging.basicConfig(
level=logging.INFO,
format="%(asctime)s %(levelname)-8s %(message)s",
datefmt="%Y-%m-%d %H:%M:%S",
)
logger = logging.getLogger(__name__)
class Flows(scrapi_base.ScrapiBase):
"""Core Class for CX Flow Resource functions."""
def __init__(
self,
creds_path: str = None,
creds_dict: Dict = None,
creds=None,
scope=False,
flow_id: str = None,
agent_id: str = None,
):
super().__init__(
creds_path=creds_path,
creds_dict=creds_dict,
creds=creds,
scope=scope,
)
if flow_id:
self.flow_id = flow_id
self.agent_id = agent_id
# TODO: Migrate to Flow Builder class when ready
@staticmethod
def _build_nlu_settings(
model_type: str = "STANDARD",
classification_threshold: float = 0.3,
model_training_mode: str = "MANUAL",
):
"""Builds the NLU Settings object to be used with Flow objects.
Args:
model_type: ONEOF `STANDARD`, `ADVANCED`, `CUSTOM`. Defaults to
`STANDARD`.
classification_threshold: To filter out false positive results and
still get variety in matched natural language inputs for your
agent, you can tune the machine learning classification threshold.
If the returned score value is less than the threshold value, then
a no-match event will be triggered. The score values range from 0.0
(completely uncertain) to 1.0 (completely certain). If set to 0.0,
the default of 0.3 is used.
model_training_mode: ONEOF `AUTOMATIC`, `MANUAL`. Defaults to
`MANUAL`
"""
model_type_map = {"STANDARD": 1, "CUSTOM": 2, "ADVANCED": 3}
model_training_map = {"AUTOMATIC": 1, "MANUAL": 2}
nlu_settings = types.NluSettings()
nlu_settings.classification_threshold = classification_threshold
if model_type in model_type_map:
nlu_settings.model_type = model_type_map[model_type]
else:
raise KeyError(
f"`{model_type}` is invalid. `model_type` must be "
"one of `STANDARD`, `ADVANCED`, `CUSTOM`."
)
if model_training_mode in model_training_map:
nlu_settings.model_training_mode = model_training_map[
model_training_mode
]
else:
raise KeyError(
f"`{model_training_mode}` is invalid. "
"`model_training_mode` must be one of `AUTOMATIC`, `MANUAL`."
)
return nlu_settings
def get_flows_map(self, agent_id: str, reverse=False):
"""Exports Agent Flow Names and UUIDs into a user friendly dict.
Args:
agent_id: the formatted CX Agent ID to use
reverse: (Optional) Boolean flag to swap key:value -> value:key
Returns:
Dictionary containing flow UUIDs as keys and display names as values
"""
if reverse:
flows_dict = {
flow.display_name: flow.name
for flow in self.list_flows(agent_id=agent_id)
}
else:
flows_dict = {
flow.name: flow.display_name
for flow in self.list_flows(agent_id=agent_id)
}
return flows_dict
def train_flow(self, flow_id: str) -> str:
"""Trains the specified flow.
Args:
flow_id: CX flow ID string in the following format
projects/<PROJECT ID>/locations/<LOCATION ID>/agents/<AGENT ID>/
flows/<FLOW ID>
Returns:
A Long Running Operation (LRO) ID that can be used to
check the status of the export using
dfcx_scrapi.core.operations->get_lro()
"""
request = types.flow.TrainFlowRequest()
request.name = flow_id
client_options = self._set_region(flow_id)
client = services.flows.FlowsClient(
credentials=self.creds, client_options=client_options
)
response = client.train_flow(request)
return response
def list_flows(self, agent_id: str) -> List[types.Flow]:
"""Get a List of all Flows in the current Agent.
Args:
agent_id: CX Agent ID string in the proper format
projects/<PROJECT ID>/locations/<LOCATION ID>/agents/<AGENT ID>
Returns:
List of Flow objects
"""
request = types.flow.ListFlowsRequest()
request.parent = agent_id
client_options = self._set_region(agent_id)
client = services.flows.FlowsClient(
credentials=self.creds, client_options=client_options
)
response = client.list_flows(request)
flows = []
for page in response.pages:
for flow in page.flows:
flows.append(flow)
return flows
def get_flow_by_display_name(
self, display_name: str, agent_id: str
) -> types.Flow:
"""Get a single CX Flow object based on its display name.
Args:
display_name: The display name of the desired Flow.
agent_id: CX Agent ID in which the flow exists.
Returns:
A single CX Flow object
"""
flows_map = self.get_flows_map(agent_id=agent_id, reverse=True)
if display_name in flows_map:
flow_id = flows_map[display_name]
else:
raise ValueError(
f'Flow "{display_name}" '
f"does not exist in the specified agent."
)
flow = self.get_flow(flow_id=flow_id)
return flow
def get_flow(self, flow_id: str) -> types.Flow:
"""Get a single CX Flow object.
Args:
flow_id: CX Flow ID in the proper format
Returns:
A single CX Flow object
"""
client_options = self._set_region(flow_id)
client = services.flows.FlowsClient(
credentials=self.creds, client_options=client_options
)
response = client.get_flow(name=flow_id)
return response
def create_flow(
self,
agent_id: str,
display_name: str = None,
language_code: str = "en",
obj: types.Flow = None,
**kwargs,
):
"""Create a Dialogflow CX Flow with given display name.
If the user provides an existing Flow object, a new CX Flow will be
created based on this object and any other input/kwargs will be
discarded.
Args:
agent_id: DFCX Agent id where the Flow will be created
display_name: Human readable display name for the CX Flow
obj: (Optional) Flow object to create in proto format
Returns:
The newly created CX Flow resource object.
"""
request = types.flow.CreateFlowRequest()
request.parent = agent_id
request.language_code = language_code
if obj:
flow_obj = obj
request.flow = flow_obj
else:
flow_obj = types.Flow()
flow_obj.display_name = display_name
# set optional args as agent attributes
for key, value in kwargs.items():
setattr(flow_obj, key, value)
request.flow = flow_obj
client_options = self._set_region(agent_id)
client = services.flows.FlowsClient(
credentials=self.creds, client_options=client_options
)
response = client.create_flow(request)
return response
def update_flow(
self, flow_id: str, obj: types.Flow = None, **kwargs
) -> types.Flow:
"""Update a single specific CX Flow object.
Args:
flow_id: CX Flow ID in the proper format
obj: (Optional) a single CX Flow object of types.Flow
Returns:
A copy of the updated Flow object
"""
if obj:
flow = obj
flow.name = flow_id
else:
flow = self.get_flow(flow_id)
# set flow attributes to args
for key, value in kwargs.items():
setattr(flow, key, value)
paths = kwargs.keys()
mask = field_mask_pb2.FieldMask(paths=paths)
client_options = self._set_region(flow_id)
client = services.flows.FlowsClient(
credentials=self.creds, client_options=client_options
)
response = client.update_flow(flow=flow, update_mask=mask)
return response
def update_nlu_settings(self, flow_id: str, **kwargs):
"""Updates flow to new NLU setting.
Args:
flow_id: flow id to update nlu settings for.
          model_type: (Optional) [0:unspecified, 1:STANDARD,
            2:CUSTOM, 3:ADVANCED]
          classification_threshold: (Optional) threshold for the flow
          model_training_mode: (Optional) [0:unspecified, 1:AUTOMATIC,
            2:MANUAL]
"""
flow = self.get_flow(flow_id)
current_settings = flow.nlu_settings
for key, value in kwargs.items():
setattr(current_settings, key, value)
self.update_flow(flow_id=flow_id, nlu_settings=current_settings)
def export_flow(
self, flow_id: str, gcs_path: str, ref_flows: bool = True
) -> Dict[str, str]:
"""Exports DFCX Flow(s) into GCS bucket.
Args:
flow_id: the formatted CX Flow ID to export
gcs_path: The `Google Cloud Storage URI to export the flow to. The
format of this URI must be ``gs://<bucket-name>/<object-name>``. If
left unspecified, the serialized flow is returned inline.
ref_flows: Whether to export flows referenced by the specified flow.
Returns:
A Long Running Operation result. If successful the LRO result will
return the Google Cloud Storage URI from the Export Flow request.
Otherwise, it will return the corresponding error.
"""
request = types.flow.ExportFlowRequest()
request.name = flow_id
request.include_referenced_flows = ref_flows
request.flow_uri = gcs_path
client_options = self._set_region(flow_id)
client = services.flows.FlowsClient(
credentials=self.creds, client_options=client_options
)
response = client.export_flow(request)
return response.result()
def export_flow_inline(self, flow_id: str, ref_flows: bool = True) -> bytes:
"""Export a Flow, returning uncompressed raw byte content for flow.
Args:
flow_id: the formatted CX Flow ID to export
ref_flows: Whether to export flows referenced by the specified flow.
Returns:
Bytes representing the content of the flow.
"""
request = types.flow.ExportFlowRequest()
request.name = flow_id
request.include_referenced_flows = ref_flows
client_options = self._set_region(flow_id)
client = services.flows.FlowsClient(
credentials=self.creds, client_options=client_options
)
response = client.export_flow(request)
return (response.result()).flow_content
def import_flow(
self,
agent_id: str,
gcs_path: str = None,
flow_content: bytes = None,
import_option: str = "KEEP",
) -> Dict[str, str]:
"""Imports a DFCX Flow to CX Agent. Flow can be imported from a
GCS bucket or from raw bytes.
Args:
agent_id: the CX Agent ID to import the flow into.
gcs_path: the `Google Cloud Storage URI to import flow from.
the format of this URI must be ``gs://<bucket-name>/<object-name>``.
flow_content: uncompressed raw byte content for flow.
import_option: one of 'FALLBACK' or 'KEEP'. Defaults to 'KEEP'
Returns:
A Long Running Operation result. If successful the LRO result will
return the Flow ID of the newly imported Flow.
Otherwise, it will return the corresponding error.
"""
if gcs_path and flow_content:
raise ValueError(
"gcs_path or flow_content required (But not both!)."
)
if not gcs_path and not flow_content:
raise ValueError(
"gcs_path or flow_content required (But not both!)."
)
request = types.flow.ImportFlowRequest()
request.parent = agent_id
request.flow_uri = gcs_path
request.flow_content = flow_content
request.import_option = import_option
client_options = self._set_region(agent_id)
client = services.flows.FlowsClient(
credentials=self.creds, client_options=client_options
)
response = client.import_flow(request)
return response
def delete_flow(self, flow_id: str, force: bool = False):
"""Deletes a single CX Flow Object resource.
Args:
flow_id: flow to delete
force: False means a flow will not be deleted if a route to the flow
exists, True means the flow will be deleted as well as all the
transition routes leading to the flow.
"""
request = types.DeleteFlowRequest()
request.name = flow_id
request.force = force
client_options = self._set_region(flow_id)
client = services.flows.FlowsClient(
credentials=self.creds, client_options=client_options
)
client.delete_flow(request)
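# Minimal usage sketch (not part of the library): AGENT_ID and the GCS path
# below are placeholders, and Application Default Credentials are assumed.
#
#   flows = Flows()
#   flows_map = flows.get_flows_map(AGENT_ID, reverse=True)
#   lro = flows.export_flow(flows_map["Default Start Flow"],
#                           "gs://my-bucket/exported_flow.json")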
|
{
"content_hash": "8d886f955efdf160a397eee52f1bbf0a",
"timestamp": "",
"source": "github",
"line_count": 447,
"max_line_length": 80,
"avg_line_length": 32.56375838926174,
"alnum_prop": 0.5928826600714482,
"repo_name": "GoogleCloudPlatform/dfcx-scrapi",
"id": "206fd60c56ce7bfc3a71fb36da14dcec7861d63c",
"size": "14556",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "src/dfcx_scrapi/core/flows.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Makefile",
"bytes": "735"
},
{
"name": "Python",
"bytes": "499643"
},
{
"name": "Shell",
"bytes": "1030"
}
],
"symlink_target": ""
}
|
__author__ = "Mehdi Korjani"
__version__ = "1.0.0"
import pdb
import glob
import os
import numpy as np
from keras.models import load_model
from keras.models import model_from_json
from keras.callbacks import EarlyStopping
from keras.callbacks import ModelCheckpoint
import h5py
import argparse
from pydub import AudioSegment
import sys
sys.path.append(os.path.abspath('utils'))
import preprocessing as frame
import wave_manipulation as manipulate
import generate_model
MODEL_FILE = 'model/model_weights_11_21_16.h5'
SPEECH_FILE = '/home/mehdi/data/corpus/VCTK-Corpus/wav48/*/*.wav'
NOISE_FILE = '/home/mehdi/data/corpus/office_noise/*.wav'
RESULT_DIR = 'model/model_12_8_16.h5'
NB_EPOCH = 100000
BATCH_SIZE = 200
FRAME_STEP = 50
FRAME_LEN = 6500
MIN_SNR = 10
MAX_SNR = 12
X_train = np.array([])
Y_train = np.array([])
def get_arguments():
parser = argparse.ArgumentParser(description='train Deep Learning model')
parser.add_argument('--model_file', type=str, default=MODEL_FILE,
help='The directory containing the file model .')
parser.add_argument('--speech_file', type=str, default=SPEECH_FILE,
help='Speech files.')
parser.add_argument('--noise_file', type=str, default=NOISE_FILE,
help='Noise files.')
parser.add_argument('--nb_epoch', type=int, default=NB_EPOCH,
help='number of epoch')
parser.add_argument('--batch_size', type=int, default=BATCH_SIZE,
help='batch size')
parser.add_argument('--min_snr', type=int, default=MIN_SNR,
help='minimum of SNR signal to noise')
parser.add_argument('--max_snr', type=int, default=MAX_SNR,
help='maximum of SNR signal to noise')
parser.add_argument('--frame_step', type=int, default=FRAME_STEP,
help='frame step')
parser.add_argument('--frame_len', type=int, default=FRAME_LEN,
help='frame length')
parser.add_argument('--result_dir', type=str, default=RESULT_DIR,
help='directory to write result.')
return parser.parse_args()
def validate_arg(arg):
    # os.path.isfile and os.access return booleans rather than raising,
    # so test the return values directly instead of using try/except
    if not os.path.isfile(arg):
        print('file not found: %s' % arg)
        return
    print('file exists: %s' % arg)
    if os.access(arg, os.R_OK):
        print('file is readable: %s' % arg)
    else:
        print('file is not readable: %s' % arg)
def data_generator():
args = get_arguments()
frame_len =args.frame_len
frame_step = args.frame_step
while True:
for fullpath in glob.iglob(args.speech_file):
fs_signal, signal_sound_data = manipulate.wavread(fullpath)
signal_sound = AudioSegment.from_file(fullpath)
for fullpath_noise in glob.iglob(args.noise_file):
fs_noise, noise_sound_data = manipulate.wavread(fullpath_noise)
noise_sound = AudioSegment.from_file(fullpath_noise)
SNR = np.random.randint(args.min_snr,args.max_snr)
dB = signal_sound.dBFS - noise_sound.dBFS - SNR
                noise_sound += dB  # gain (in dB) that places the noise SNR dB below the signal
noise_sound_data = noise_sound.get_array_of_samples()
rand_start = np.random.randint(len(noise_sound_data)- len(signal_sound_data))
                # check the length of signal and noise; assume len(noise) > len(signal)
combined = signal_sound_data + noise_sound_data[rand_start: rand_start+ len(signal_sound_data)]
noisy_data = combined.astype(np.int16)
                # normalize data to [0, 1]
noisy_data_norm = manipulate.normalize(noisy_data)
signal_sound_data_norm = manipulate.normalize(signal_sound_data)
framed_noisy = frame.framesig(noisy_data_norm,frame_len,frame_step)
framed_clean = frame.framesig(signal_sound_data_norm,frame_len,frame_step)
#in_out =np.hstack((framed_noisy, framed_clean))
#np.random.shuffle(in_out)
#X_train = in_out[:,:frame_len]
#audio = in_out[:,frame_len + frame_len/2]
X_train = framed_noisy
audio = framed_clean[:,frame_len/2]
ulaw_audio = frame.ulaw(audio)
digit_audio = frame.float_to_uint8(ulaw_audio)
Y_train = frame.one_hot(digit_audio)
yield X_train , Y_train # yield
def main():
args = get_arguments()
## read model
try:
model = generate_model.generate()
model.summary()
    except:
        print('cannot read the model!')
        return
    ## load pre-trained weights if available
    try:
        model.load_weights(args.model_file)
    except:
        print('no pre-trained weights loaded')
## validate wave file
try:
validate_arg(args.speech_file)
except ValueError as e:
print("wave file is not available:")
print(str(e))
return
    data = data_generator()
## training
checkpoint = ModelCheckpoint(args.result_dir, monitor='val_acc', verbose=0, save_weights_only=False, save_best_only=False , mode='auto')
callbacks_list = [checkpoint]
    nb_files = 40000  # generator samples per epoch
print('Training model...')
model.fit_generator(data,
nb_files,
nb_epoch=args.nb_epoch,
callbacks=callbacks_list,
verbose = 1)
if __name__ == '__main__':
main()
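# Example invocation (paths are placeholders, not project data):
#   python train.py --speech_file '/data/speech/*.wav' \
#                   --noise_file '/data/noise/*.wav' --min_snr 10 --max_snr 12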
|
{
"content_hash": "b86ffa8282cf1a2deb60e7f0f814cfe1",
"timestamp": "",
"source": "github",
"line_count": 169,
"max_line_length": 141,
"avg_line_length": 34.75739644970414,
"alnum_prop": 0.5614572693224379,
"repo_name": "korjani/time_domain_speech_enhancement",
"id": "ba2d582a96fba34d2b0cbed6cff9487f407eea11",
"size": "5874",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "train.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "17800"
}
],
"symlink_target": ""
}
|
plot_docstring = """
Plot the target map.
Parameters:
path: `string`
The path where to put the plot.
whiteOnBlack: `boolean [default True]`
By default, whiteOnBlack=True, the detected signal
is white on a black background. You can invert this with
whiteOnBlack=False.
suffix: `string [default None]`
Suffix to add to the file name.
"""
display_docstring = """
Display the target map to a IPython Notebook.
Parameters:
whiteOnBlack: `boolean [default True]`
By default, whiteOnBlack=True, the detected signal
is white on a black background. You can invert this with
whiteOnBlack=False.
suffix: `string [default None]`
Suffix to add to the title.
"""
|
{
"content_hash": "caf2d289def0fa9cdb3716c143b26d7a",
"timestamp": "",
"source": "github",
"line_count": 30,
"max_line_length": 70,
"avg_line_length": 30.7,
"alnum_prop": 0.5374592833876222,
"repo_name": "ctherien/pysptools",
"id": "e613b7b036de9e246ff9dd4d658b8073159ded70",
"size": "1757",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pysptools/detection/docstring.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "7264"
},
{
"name": "HTML",
"bytes": "8140529"
},
{
"name": "Jupyter Notebook",
"bytes": "4526823"
},
{
"name": "Makefile",
"bytes": "7765"
},
{
"name": "Python",
"bytes": "490805"
}
],
"symlink_target": ""
}
|
"""
@author: KBK
"""
import numpy as np
import time
from sklearn.model_selection import cross_val_score
import pandas as pd
class EvolutionaryFeatureSubsetting(object):
def __init__(self,populationsize,composition=[.2,.2,.1,.1,.4]):
self.populationsize = populationsize
self.population = {}
self.nbestfits = int(populationsize*composition[0])
self.nbestcross = int(populationsize*composition[1])
self.nbestmutants = int(populationsize*composition[2])
self.nbestcrossmutant = int(populationsize*composition[3])
self.nnewborns = int(populationsize*composition[4])
self.manid = 0
self.generation = 0
self.bestfitofgen = {}
def initiateLife(self,n=None):
g = self.generation
if self.generation==0:
n = self.populationsize
self.population[self.generation] = {}
for man in range(g*n,g*n+n):
gene = [np.random.binomial(1,0.5) for i in range(self.genelength)]
self.population[self.generation][self.manid] = [gene,0]
self.manid+=1
print "___________Generation", g ,"____________"
print n, "random livings are born"
def nextGeneration(self,population):
tmp_dict = self.population[self.generation]
self.ranking = sorted([[m,tmp_dict[m]] for m in tmp_dict.keys()],key=lambda x:x[1][1],reverse=True)[0:self.nbestfits]
self.bestfitofgen[self.generation] = [self.ranking[0][0],self.ranking[0][1][1]]
print "Best score was", self.ranking[0][1][1],"of id:",self.ranking[0][0]
self.bestfits = [m[0] for m in self.ranking]
        self.generation = max(population.keys())+1
self.population[self.generation] = {}
self.initiateLife(n=self.nnewborns)
for bf in self.bestfits:
self.population[self.generation][bf] = self.population[self.generation-1][bf]
print "%i survived from previous generation" %self.nbestfits
self.bestcrosses = self.crossover(self.bestfits,returnids=True)
print "%i bestfit crossovers are born" %self.nbestcross
self.mutate(self.bestfits,0.05,bestfitmutants=True)
print "%i mutated bestfits are born" %self.nbestmutants
self.mutate(self.bestcrosses,0.05)
print "%i mutated bestfit crossovers are born" %self.nbestcrossmutant
def fitnessScore(self,X,y,estimator,search_estimator=False):
if search_estimator:
estimator.fit(X,y)
score = estimator.best_score_
else:
score = np.mean(cross_val_score(estimator,X, y, cv = 3, scoring = "r2"))
return score
def fit(self,X,y,estimator,ngenerations,search_estimator=False):
self.genelength = len(X.columns)
print "%i generations of evolution has started" %ngenerations
self.initiateLife()
n = ngenerations
for gen in range(n):
t = time.time()
g = self.generation
for man in self.population[g].keys():
chosen_columns = [i[0] for i in zip(X.columns,
self.population[g][man][0]) if i[1] == 1]
score = self.fitnessScore(X[chosen_columns],y,estimator,search_estimator)
self.population[g][man][1] = score
passed = time.time()-t
print "Generation %i took %.2f seconds to fit"%(g,passed)
self.nextGeneration(self.population)
print "Final population is created after %s generations" %n
    def mutate(self,men,rate=0.05,bestfitmutants=False):
        n = int(self.genelength*rate)
        g = self.generation
        if bestfitmutants:
            nmen = self.nbestmutants
            source_gen = g-1  # mutate copies of the previous generation's best fits
        else:
            nmen = self.nbestcrossmutant
            source_gen = g  # mutate copies of this generation's crossovers
        for man in np.random.choice(men,size=nmen,replace=False):
            # copy the parent gene so the bit flips do not alter the ancestor
            self.population[g][self.manid] = [list(self.population[source_gen][man][0]),0]
            mut = np.random.randint(0,self.genelength-1,n)
            for i in mut:
                self.population[g][self.manid][0][i] = abs(self.population[g][self.manid][0][i]-1)
            self.manid += 1
def crossover(self,men,returnids=False):
men2 = list(np.roll(men,1))
half = self.genelength/2
g = self.generation
for man, man2 in zip(men,men2):
self.population[g][self.manid]=[self.population[g-1][man][0][0:half],0]
self.population[g][self.manid][0].extend(self.population[g-1][man2][0][half:half*2])
self.manid+=1
if returnids:
return range(self.manid-len(men),self.manid)
def main():
from sklearn.linear_model import LinearRegression
df = pd.read_csv("data.csv")
y = df["y"]
X = df[list(set(df.columns)-set(["y"]))]
m_lr = LinearRegression()
evo = EvolutionaryFeatureSubsetting(populationsize=100)
evo.fit(X,y,estimator=m_lr,ngenerations=100)
grandlogbook = evo.population
bestperformers = evo.bestfitofgen
if __name__ == "__main__":
main()
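# Worked example of the population composition (illustrative numbers): with
# populationsize=100 and composition=[.2,.2,.1,.1,.4], each generation keeps
# 20 best fits, adds 20 crossovers of those best fits, 10 mutated best fits,
# 10 mutated crossovers, and 40 fresh random genes.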
|
{
"content_hash": "fdce5629ee67a6cd868aca8c28e3bf97",
"timestamp": "",
"source": "github",
"line_count": 129,
"max_line_length": 125,
"avg_line_length": 44.093023255813954,
"alnum_prop": 0.5726090014064698,
"repo_name": "kaplanbr/Evolutionary-Search",
"id": "7446cb1e8892440c2cdf11dd80159fc8a70ea32d",
"size": "5713",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "evo.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "5713"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
import collections
from collections import OrderedDict
from django.utils.encoding import force_text
from rest_framework.compat import unicode_to_repr
class ReturnDict(OrderedDict):
"""
    Return object from `serializer.data` for the `Serializer` class.
Includes a backlink to the serializer instance for renderers
to use if they need richer field information.
"""
def __init__(self, *args, **kwargs):
self.serializer = kwargs.pop('serializer')
super(ReturnDict, self).__init__(*args, **kwargs)
def copy(self):
return ReturnDict(self, serializer=self.serializer)
def __repr__(self):
return dict.__repr__(self)
def __reduce__(self):
# Pickling these objects will drop the .serializer backlink,
# but preserve the raw data.
return (dict, (dict(self),))
class ReturnList(list):
"""
    Return object from `serializer.data` for the `ListSerializer` class.
Includes a backlink to the serializer instance for renderers
to use if they need richer field information.
"""
def __init__(self, *args, **kwargs):
self.serializer = kwargs.pop('serializer')
super(ReturnList, self).__init__(*args, **kwargs)
def __repr__(self):
return list.__repr__(self)
def __reduce__(self):
# Pickling these objects will drop the .serializer backlink,
# but preserve the raw data.
return (list, (list(self),))
class BoundField(object):
"""
A field object that also includes `.value` and `.error` properties.
Returned when iterating over a serializer instance,
providing an API similar to Django forms and form fields.
"""
def __init__(self, field, value, errors, prefix=''):
self._field = field
self._prefix = prefix
self.value = value
self.errors = errors
self.name = prefix + self.field_name
def __getattr__(self, attr_name):
return getattr(self._field, attr_name)
@property
def _proxy_class(self):
return self._field.__class__
def __repr__(self):
return unicode_to_repr('<%s value=%s errors=%s>' % (
self.__class__.__name__, self.value, self.errors
))
def as_form_field(self):
value = '' if (self.value is None or self.value is False) else force_text(self.value)
return self.__class__(self._field, value, self.errors, self._prefix)
class NestedBoundField(BoundField):
"""
This `BoundField` additionally implements __iter__ and __getitem__
in order to support nested bound fields. This class is the type of
`BoundField` that is used for serializer fields.
"""
def __init__(self, field, value, errors, prefix=''):
if value is None:
value = {}
super(NestedBoundField, self).__init__(field, value, errors, prefix)
def __iter__(self):
for field in self.fields.values():
yield self[field.field_name]
def __getitem__(self, key):
field = self.fields[key]
value = self.value.get(key) if self.value else None
error = self.errors.get(key) if self.errors else None
if hasattr(field, 'fields'):
return NestedBoundField(field, value, error, prefix=self.name + '.')
return BoundField(field, value, error, prefix=self.name + '.')
def as_form_field(self):
values = {}
for key, value in self.value.items():
if isinstance(value, (list, dict)):
values[key] = value
else:
values[key] = '' if (value is None or value is False) else force_text(value)
return self.__class__(self._field, values, self.errors, self._prefix)
class BindingDict(collections.MutableMapping):
"""
This dict-like object is used to store fields on a serializer.
This ensures that whenever fields are added to the serializer we call
`field.bind()` so that the `field_name` and `parent` attributes
can be set correctly.
"""
def __init__(self, serializer):
self.serializer = serializer
self.fields = OrderedDict()
def __setitem__(self, key, field):
self.fields[key] = field
field.bind(field_name=key, parent=self.serializer)
def __getitem__(self, key):
return self.fields[key]
def __delitem__(self, key):
del self.fields[key]
def __iter__(self):
return iter(self.fields)
def __len__(self):
return len(self.fields)
def __repr__(self):
return dict.__repr__(self.fields)
|
{
"content_hash": "7add2653307184a749a6101511edb818",
"timestamp": "",
"source": "github",
"line_count": 149,
"max_line_length": 93,
"avg_line_length": 30.953020134228186,
"alnum_prop": 0.6175195143104943,
"repo_name": "hunter007/django-rest-framework",
"id": "b11d3fd0713e1d989703f3fc9cd73469f4902e70",
"size": "4612",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "rest_framework/utils/serializer_helpers.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "10024"
},
{
"name": "HTML",
"bytes": "59707"
},
{
"name": "JavaScript",
"bytes": "7694"
},
{
"name": "Python",
"bytes": "889788"
}
],
"symlink_target": ""
}
|
"""This application demonstrates how to delete a reservation using
the Pub/Sub Lite API. For more information, see the root level README.md
and the documentation at https://cloud.google.com/pubsub/lite/docs/topics.
"""
import argparse
def delete_lite_reservation(project_number, cloud_region, reservation_id):
# [START pubsublite_delete_reservation]
from google.api_core.exceptions import NotFound
from google.cloud.pubsublite import AdminClient
from google.cloud.pubsublite.types import CloudRegion, ReservationPath
# TODO(developer):
# project_number = 1122334455
# cloud_region = "us-central1"
# reservation_id = "your-reservation-id"
cloud_region = CloudRegion(cloud_region)
reservation_path = ReservationPath(project_number, cloud_region, reservation_id)
client = AdminClient(cloud_region)
try:
client.delete_reservation(reservation_path)
print(f"{reservation_path} deleted successfully.")
except NotFound:
print(f"{reservation_path} not found.")
# [END pubsublite_delete_reservation]
if __name__ == "__main__":
parser = argparse.ArgumentParser(
description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter,
)
parser.add_argument("project_number", help="Your Google Cloud Project Number")
parser.add_argument("cloud_region", help="Your Cloud Region, e.g. 'us-central1'")
parser.add_argument("reservation_id", help="Your reservation ID")
args = parser.parse_args()
    delete_lite_reservation(
        args.project_number,
        args.cloud_region,
        args.reservation_id,
    )
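# Example invocation (all values are placeholders):
#   python delete_lite_reservation_example.py 1122334455 us-central1 my-reservation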
|
{
"content_hash": "b5e67ecc4e75901e3846d53d2d2f65ea",
"timestamp": "",
"source": "github",
"line_count": 48,
"max_line_length": 85,
"avg_line_length": 34.645833333333336,
"alnum_prop": 0.7041491280817799,
"repo_name": "googleapis/python-pubsublite",
"id": "373ccf1502804add4cb36d210ad04acf25265378",
"size": "2283",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "samples/snippets/delete_lite_reservation_example.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2050"
},
{
"name": "Python",
"bytes": "1689513"
},
{
"name": "Shell",
"bytes": "30672"
}
],
"symlink_target": ""
}
|
import sys
import posixpath
output_dir = sys.argv[1]
for file in sys.argv[2:]:
(base, suffix) = posixpath.splitext(file)
if suffix == ".idl":
print posixpath.normpath(posixpath.join(output_dir, "%s_glue.h" % base))
print posixpath.normpath(posixpath.join(output_dir, "%s_glue.cc" % base))
else:
print posixpath.normpath(posixpath.join(output_dir, file))
sys.exit(0)
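# Illustrative mapping (hypothetical inputs): with output_dir "gen" and
# inputs "foo.idl bar.cc", the script prints:
#   gen/foo_glue.h
#   gen/foo_glue.cc
#   gen/bar.cc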
|
{
"content_hash": "4245adfa1975d52f5eeaadbd7fea9d62",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 77,
"avg_line_length": 27.642857142857142,
"alnum_prop": 0.6950904392764858,
"repo_name": "rwatson/chromium-capsicum",
"id": "ceb15754add05f5e6b308278a99acfd550dd0220",
"size": "812",
"binary": false,
"copies": "1",
"ref": "refs/heads/chromium-capsicum",
"path": "o3d/plugin/idl/idl_filenames.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
}
|
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Comment.remix_of'
db.add_column('canvas_comment', 'remix_of', self.gf('django.db.models.fields.related.ForeignKey')(related_name='remixes', null=True, to=orm['canvas.Content']), keep_default=False)
def backwards(self, orm):
# Deleting field 'Comment.remix_of'
db.delete_column('canvas_comment', 'remix_of_id')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'canvas.comment': {
'Meta': {'object_name': 'Comment'},
'anonymous': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip': ('django.db.models.fields.IPAddressField', [], {'default': "'0.0.0.0'", 'max_length': '15'}),
'parent_content': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'comments'", 'to': "orm['canvas.Content']"}),
'remix_of': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'remixes'", 'null': 'True', 'to': "orm['canvas.Content']"}),
'reply_content': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'used_in_comments'", 'null': 'True', 'to': "orm['canvas.Content']"}),
'reply_text': ('django.db.models.fields.CharField', [], {'max_length': '2000', 'blank': 'True'}),
'timestamp': ('django.db.models.fields.FloatField', [], {})
},
'canvas.content': {
'Meta': {'object_name': 'Content'},
'id': ('django.db.models.fields.CharField', [], {'max_length': '32', 'primary_key': 'True'}),
'ip': ('django.db.models.fields.IPAddressField', [], {'default': "'0.0.0.0'", 'max_length': '15'}),
'timestamp': ('django.db.models.fields.FloatField', [], {}),
'url_mapping': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['canvas.ContentUrlMapping']", 'null': 'True', 'blank': 'True'})
},
'canvas.contentsticker': {
'Meta': {'object_name': 'ContentSticker'},
'content': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'stickers'", 'to': "orm['canvas.Content']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip': ('django.db.models.fields.IPAddressField', [], {'max_length': '15'}),
'timestamp': ('django.db.models.fields.FloatField', [], {}),
'type_id': ('django.db.models.fields.IntegerField', [], {})
},
'canvas.contenturlmapping': {
'Meta': {'object_name': 'ContentUrlMapping'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'canvas.hashtag': {
'Meta': {'object_name': 'Hashtag'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50', 'db_index': 'True'})
},
'canvas.post': {
'Meta': {'object_name': 'Post'},
'anonymous': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'blacklisted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'content_id': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'}),
'hidden': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip': ('django.db.models.fields.IPAddressField', [], {'default': "'0.0.0.0'", 'max_length': '15'}),
'post_id': ('django.db.models.fields.IntegerField', [], {}),
'thread': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'posts'", 'to': "orm['canvas.Thread']"}),
'thumb_down': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'thumb_up': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'timestamp': ('django.db.models.fields.FloatField', [], {})
},
'canvas.stashcontent': {
'Meta': {'object_name': 'StashContent'},
'content_id': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'post': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'thread': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['canvas.Thread']", 'null': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'canvas.thread': {
'Meta': {'object_name': 'Thread'},
'hashtags': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'threads'", 'symmetrical': 'False', 'to': "orm['canvas.Hashtag']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'locked': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
}
}
complete_apps = ['canvas']
|
{
"content_hash": "8cc2f775a074ed20c57915ada281de2a",
"timestamp": "",
"source": "github",
"line_count": 124,
"max_line_length": 187,
"avg_line_length": 69.62096774193549,
"alnum_prop": 0.5459284142244875,
"repo_name": "canvasnetworks/canvas",
"id": "9fc45da712f050bf11d5e8ebb82b3e7a4c27e3de",
"size": "8651",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "website/canvas/migrations/0024_auto__add_field_comment_remix_of.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "AppleScript",
"bytes": "57"
},
{
"name": "C",
"bytes": "547"
},
{
"name": "CSS",
"bytes": "537625"
},
{
"name": "HTML",
"bytes": "689709"
},
{
"name": "JavaScript",
"bytes": "1313262"
},
{
"name": "Makefile",
"bytes": "258"
},
{
"name": "PHP",
"bytes": "1983"
},
{
"name": "Python",
"bytes": "6659685"
},
{
"name": "Ruby",
"bytes": "876"
},
{
"name": "Shell",
"bytes": "5326"
}
],
"symlink_target": ""
}
|
import WebIDL
def WebIDLTest(parser, harness):
parser.parse("""
interface TestArrayBuffer {
attribute ArrayBuffer bufferAttr;
void bufferMethod(ArrayBuffer arg1, ArrayBuffer? arg2, ArrayBuffer[] arg3, sequence<ArrayBuffer> arg4);
attribute ArrayBufferView viewAttr;
void viewMethod(ArrayBufferView arg1, ArrayBufferView? arg2, ArrayBufferView[] arg3, sequence<ArrayBufferView> arg4);
attribute Int8Array int8ArrayAttr;
void int8ArrayMethod(Int8Array arg1, Int8Array? arg2, Int8Array[] arg3, sequence<Int8Array> arg4);
attribute Uint8Array uint8ArrayAttr;
void uint8ArrayMethod(Uint8Array arg1, Uint8Array? arg2, Uint8Array[] arg3, sequence<Uint8Array> arg4);
attribute Uint8ClampedArray uint8ClampedArrayAttr;
void uint8ClampedArrayMethod(Uint8ClampedArray arg1, Uint8ClampedArray? arg2, Uint8ClampedArray[] arg3, sequence<Uint8ClampedArray> arg4);
attribute Int16Array int16ArrayAttr;
void int16ArrayMethod(Int16Array arg1, Int16Array? arg2, Int16Array[] arg3, sequence<Int16Array> arg4);
attribute Uint16Array uint16ArrayAttr;
void uint16ArrayMethod(Uint16Array arg1, Uint16Array? arg2, Uint16Array[] arg3, sequence<Uint16Array> arg4);
attribute Int32Array int32ArrayAttr;
void int32ArrayMethod(Int32Array arg1, Int32Array? arg2, Int32Array[] arg3, sequence<Int32Array> arg4);
attribute Uint32Array uint32ArrayAttr;
void uint32ArrayMethod(Uint32Array arg1, Uint32Array? arg2, Uint32Array[] arg3, sequence<Uint32Array> arg4);
attribute Float32Array float32ArrayAttr;
void float32ArrayMethod(Float32Array arg1, Float32Array? arg2, Float32Array[] arg3, sequence<Float32Array> arg4);
attribute Float64Array float64ArrayAttr;
void float64ArrayMethod(Float64Array arg1, Float64Array? arg2, Float64Array[] arg3, sequence<Float64Array> arg4);
};
""")
results = parser.finish()
iface = results[0]
harness.ok(True, "TestArrayBuffer interface parsed without error")
harness.check(len(iface.members), 22, "Interface should have twenty two members")
members = iface.members
def checkStuff(attr, method, t):
harness.ok(isinstance(attr, WebIDL.IDLAttribute), "Expect an IDLAttribute")
harness.ok(isinstance(method, WebIDL.IDLMethod), "Expect an IDLMethod")
harness.check(str(attr.type), t, "Expect an ArrayBuffer type")
harness.ok(attr.type.isSpiderMonkeyInterface(), "Should test as a js interface")
(retType, arguments) = method.signatures()[0]
harness.ok(retType.isVoid(), "Should have a void return type")
harness.check(len(arguments), 4, "Expect 4 arguments")
harness.check(str(arguments[0].type), t, "Expect an ArrayBuffer type")
harness.ok(arguments[0].type.isSpiderMonkeyInterface(), "Should test as a js interface")
harness.check(str(arguments[1].type), t + "OrNull", "Expect an ArrayBuffer type")
harness.ok(arguments[1].type.inner.isSpiderMonkeyInterface(), "Should test as a js interface")
harness.check(str(arguments[2].type), t + "Array", "Expect an ArrayBuffer type")
harness.ok(arguments[2].type.inner.isSpiderMonkeyInterface(), "Should test as a js interface")
harness.check(str(arguments[3].type), t + "Sequence", "Expect an ArrayBuffer type")
harness.ok(arguments[3].type.inner.isSpiderMonkeyInterface(), "Should test as a js interface")
checkStuff(members[0], members[1], "ArrayBuffer")
checkStuff(members[2], members[3], "ArrayBufferView")
checkStuff(members[4], members[5], "Int8Array")
checkStuff(members[6], members[7], "Uint8Array")
checkStuff(members[8], members[9], "Uint8ClampedArray")
checkStuff(members[10], members[11], "Int16Array")
checkStuff(members[12], members[13], "Uint16Array")
checkStuff(members[14], members[15], "Int32Array")
checkStuff(members[16], members[17], "Uint32Array")
checkStuff(members[18], members[19], "Float32Array")
checkStuff(members[20], members[21], "Float64Array")
|
{
"content_hash": "ab8a7dc74473e0ad578d93a568bc6757",
"timestamp": "",
"source": "github",
"line_count": 84,
"max_line_length": 148,
"avg_line_length": 49.79761904761905,
"alnum_prop": 0.7052354769304328,
"repo_name": "sergecodd/FireFox-OS",
"id": "5b8e56f86ca1daf8b6dfc445989a3c9c7f6195c2",
"size": "4183",
"binary": false,
"copies": "158",
"ref": "refs/heads/master",
"path": "B2G/gecko/dom/bindings/parser/tests/test_arraybuffer.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Ada",
"bytes": "443"
},
{
"name": "ApacheConf",
"bytes": "85"
},
{
"name": "Assembly",
"bytes": "5123438"
},
{
"name": "Awk",
"bytes": "46481"
},
{
"name": "Batchfile",
"bytes": "56250"
},
{
"name": "C",
"bytes": "101720951"
},
{
"name": "C#",
"bytes": "38531"
},
{
"name": "C++",
"bytes": "148896543"
},
{
"name": "CMake",
"bytes": "23541"
},
{
"name": "CSS",
"bytes": "2758664"
},
{
"name": "DIGITAL Command Language",
"bytes": "56757"
},
{
"name": "Emacs Lisp",
"bytes": "12694"
},
{
"name": "Erlang",
"bytes": "889"
},
{
"name": "FLUX",
"bytes": "34449"
},
{
"name": "GLSL",
"bytes": "26344"
},
{
"name": "Gnuplot",
"bytes": "710"
},
{
"name": "Groff",
"bytes": "447012"
},
{
"name": "HTML",
"bytes": "43343468"
},
{
"name": "IDL",
"bytes": "1455122"
},
{
"name": "Java",
"bytes": "43261012"
},
{
"name": "JavaScript",
"bytes": "46646658"
},
{
"name": "Lex",
"bytes": "38358"
},
{
"name": "Logos",
"bytes": "21054"
},
{
"name": "Makefile",
"bytes": "2733844"
},
{
"name": "Matlab",
"bytes": "67316"
},
{
"name": "Max",
"bytes": "3698"
},
{
"name": "NSIS",
"bytes": "421625"
},
{
"name": "Objective-C",
"bytes": "877657"
},
{
"name": "Objective-C++",
"bytes": "737713"
},
{
"name": "PHP",
"bytes": "17415"
},
{
"name": "Pascal",
"bytes": "6780"
},
{
"name": "Perl",
"bytes": "1153180"
},
{
"name": "Perl6",
"bytes": "1255"
},
{
"name": "PostScript",
"bytes": "1139"
},
{
"name": "PowerShell",
"bytes": "8252"
},
{
"name": "Protocol Buffer",
"bytes": "26553"
},
{
"name": "Python",
"bytes": "8453201"
},
{
"name": "Ragel in Ruby Host",
"bytes": "3481"
},
{
"name": "Ruby",
"bytes": "5116"
},
{
"name": "Scilab",
"bytes": "7"
},
{
"name": "Shell",
"bytes": "3383832"
},
{
"name": "SourcePawn",
"bytes": "23661"
},
{
"name": "TeX",
"bytes": "879606"
},
{
"name": "WebIDL",
"bytes": "1902"
},
{
"name": "XSLT",
"bytes": "13134"
},
{
"name": "Yacc",
"bytes": "112744"
}
],
"symlink_target": ""
}
|
from datetime import date
from os.path import join, split
from uuid import uuid4
import django
from django import forms
from django.forms.extras import SelectDateWidget
from django.core.files.storage import FileSystemStorage
from django.core.urlresolvers import reverse
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext_lazy as _
from mezzanine.conf import settings
from mezzanine.forms import fields
from mezzanine.forms.models import FormEntry, FieldEntry
from mezzanine.utils.timezone import now
fs = FileSystemStorage(location=settings.FORMS_UPLOAD_ROOT)
FILTER_CHOICE_CONTAINS = "1"
FILTER_CHOICE_DOESNT_CONTAIN = "2"
FILTER_CHOICE_EQUALS = "3"
FILTER_CHOICE_DOESNT_EQUAL = "4"
FILTER_CHOICE_BETWEEN = "5"
TEXT_FILTER_CHOICES = (
("", _("Nothing")),
(FILTER_CHOICE_CONTAINS, _("Contains")),
(FILTER_CHOICE_DOESNT_CONTAIN, _("Doesn't contain")),
(FILTER_CHOICE_EQUALS, _("Equals")),
(FILTER_CHOICE_DOESNT_EQUAL, _("Doesn't equal")),
)
CHOICE_FILTER_CHOICES = (
("", _("Nothing")),
(FILTER_CHOICE_EQUALS, _("Equals")),
(FILTER_CHOICE_DOESNT_EQUAL, _("Doesn't equal")),
)
DATE_FILTER_CHOICES = (
("", _("Nothing")),
(FILTER_CHOICE_BETWEEN, _("Is between")),
)
FILTER_FUNCS = {
FILTER_CHOICE_CONTAINS:
lambda val, field: val.lower() in field.lower(),
FILTER_CHOICE_DOESNT_CONTAIN:
lambda val, field: val.lower() not in field.lower(),
FILTER_CHOICE_EQUALS:
lambda val, field: val.lower() == field.lower(),
FILTER_CHOICE_DOESNT_EQUAL:
lambda val, field: val.lower() != field.lower(),
FILTER_CHOICE_BETWEEN:
lambda val_from, val_to, field: val_from <= field <= val_to
}
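# Worked example of the filter predicates above (illustrative values only):
#   FILTER_FUNCS[FILTER_CHOICE_CONTAINS]("foo", "Foobar") -> True
#   FILTER_FUNCS[FILTER_CHOICE_BETWEEN](date(2013, 1, 1), date(2013, 12, 31),
#                                       date(2013, 6, 1)) -> True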
text_filter_field = forms.ChoiceField(label=" ", required=False,
choices=TEXT_FILTER_CHOICES)
choice_filter_field = forms.ChoiceField(label=" ", required=False,
choices=CHOICE_FILTER_CHOICES)
date_filter_field = forms.ChoiceField(label=" ", required=False,
choices=DATE_FILTER_CHOICES)
class FormForForm(forms.ModelForm):
"""
Form with a set of fields dynamically assigned, directly based on the
given ``forms.models.Form`` instance.
"""
class Meta:
model = FormEntry
exclude = ("form", "entry_time")
def __init__(self, form, *args, **kwargs):
"""
Dynamically add each of the form fields for the given form model
instance and its related field model instances.
"""
self.form = form
self.form_fields = form.fields.visible()
initial = kwargs.pop("initial", {})
# If a FormEntry instance is given to edit, populate initial
# with its field values.
field_entries = {}
if "instance" in kwargs:
for field_entry in kwargs["instance"].fields.all():
field_entries[field_entry.field_id] = field_entry.value
super(FormForForm, self).__init__(*args, **kwargs)
# Create the form fields.
for field in self.form_fields:
field_key = "field_%s" % field.id
field_class = fields.CLASSES[field.field_type]
field_widget = fields.WIDGETS.get(field.field_type)
field_args = {"label": field.label, "required": field.required,
"help_text": field.help_text}
if field.required and not field.help_text:
field_args["help_text"] = _("required")
arg_names = field_class.__init__.im_func.func_code.co_varnames
if "max_length" in arg_names:
field_args["max_length"] = settings.FORMS_FIELD_MAX_LENGTH
if "choices" in arg_names:
field_args["choices"] = field.get_choices()
if field_widget is not None:
field_args["widget"] = field_widget
#
# Initial value for field, in order of preference:
#
# - If a form model instance is given (eg we're editing a
# form response), then use the instance's value for the
# field.
# - If the developer has provided an explicit "initial"
# dict, use it.
# - The default value for the field instance as given in
# the admin.
#
try:
initial_val = field_entries[field.id]
except KeyError:
try:
self.initial[field_key] = initial[field_key]
except KeyError:
self.initial[field_key] = field.default
else:
if field.is_a(*fields.MULTIPLE):
initial_val = [x.strip() for x in initial_val.split(",")]
self.initial[field_key] = initial_val
self.fields[field_key] = field_class(**field_args)
# Add identifying type attr to the field for styling.
setattr(self.fields[field_key], "type",
field_class.__name__.lower())
if (field.required and settings.FORMS_USE_HTML5 and
field.field_type != fields.CHECKBOX_MULTIPLE):
self.fields[field_key].widget.attrs["required"] = ""
if field.placeholder_text and not field.default:
text = field.placeholder_text
self.fields[field_key].widget.attrs["placeholder"] = text
def save(self, **kwargs):
"""
Create a ``FormEntry`` instance and related ``FieldEntry``
instances for each form field.
"""
entry = super(FormForForm, self).save(commit=False)
entry.form = self.form
entry.entry_time = now()
entry.save()
entry_fields = entry.fields.values_list("field_id", flat=True)
new_entry_fields = []
for field in self.form_fields:
field_key = "field_%s" % field.id
value = self.cleaned_data[field_key]
if value and self.fields[field_key].widget.needs_multipart_form:
value = fs.save(join("forms", str(uuid4()), value.name), value)
if isinstance(value, list):
value = ", ".join([v.strip() for v in value])
if field.id in entry_fields:
field_entry = entry.fields.get(field_id=field.id)
field_entry.value = value
field_entry.save()
else:
new = {"entry": entry, "field_id": field.id, "value": value}
new_entry_fields.append(FieldEntry(**new))
if new_entry_fields:
if django.VERSION >= (1, 4, 0):
FieldEntry.objects.bulk_create(new_entry_fields)
else:
for field_entry in new_entry_fields:
field_entry.save()
return entry
def email_to(self):
"""
Return the value entered for the first field of type
``forms.fields.EMAIL``.
"""
for field in self.form_fields:
if field.is_a(fields.EMAIL):
return self.cleaned_data["field_%s" % field.id]
return None
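    # Illustrative use of ``email_to`` (hypothetical data): for a form
    # whose first EMAIL-typed field captured "user@example.com", the
    # method returns that address, which a calling view might use as the
    # reply-to for a notification email; forms with no email field get
    # None back.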
class EntriesForm(forms.Form):
"""
Form with a set of fields dynamically assigned that can be used to
filter entries for the given ``forms.models.Form`` instance.
"""
def __init__(self, form, request, *args, **kwargs):
"""
        Iterate through the fields of the ``forms.models.Form`` instance and
        create the form fields required to control including the field in
        the export (with a checkbox) or filtering the field, which differs
        across field types. Use a list of checkboxes when a fixed set of
        choices can be chosen from, a pair of date fields for date ranges,
        and a text box for text search on all other types.
"""
self.form = form
self.request = request
self.form_fields = form.fields.all()
self.entry_time_name = unicode(FormEntry._meta.get_field(
"entry_time").verbose_name).encode("utf-8")
super(EntriesForm, self).__init__(*args, **kwargs)
for field in self.form_fields:
field_key = "field_%s" % field.id
# Checkbox for including in export.
self.fields["%s_export" % field_key] = forms.BooleanField(
label=field.label, initial=True, required=False)
if field.is_a(*fields.CHOICES):
# A fixed set of choices to filter by.
if field.is_a(fields.CHECKBOX):
choices = ((True, _("Checked")), (False, _("Not checked")))
else:
choices = field.get_choices()
contains_field = forms.MultipleChoiceField(label=" ",
choices=choices, widget=forms.CheckboxSelectMultiple(),
required=False)
self.fields["%s_filter" % field_key] = choice_filter_field
self.fields["%s_contains" % field_key] = contains_field
elif field.is_a(*fields.DATES):
# A date range to filter by.
self.fields["%s_filter" % field_key] = date_filter_field
self.fields["%s_from" % field_key] = forms.DateField(
label=" ", widget=SelectDateWidget(), required=False)
self.fields["%s_to" % field_key] = forms.DateField(
label=_("and"), widget=SelectDateWidget(), required=False)
else:
# Text box for search term to filter by.
contains_field = forms.CharField(label=" ", required=False)
self.fields["%s_filter" % field_key] = text_filter_field
self.fields["%s_contains" % field_key] = contains_field
# Add ``FormEntry.entry_time`` as a field.
field_key = "field_0"
self.fields["%s_export" % field_key] = forms.BooleanField(initial=True,
label=FormEntry._meta.get_field("entry_time").verbose_name,
required=False)
self.fields["%s_filter" % field_key] = date_filter_field
self.fields["%s_from" % field_key] = forms.DateField(
label=" ", widget=SelectDateWidget(), required=False)
self.fields["%s_to" % field_key] = forms.DateField(
label=_("and"), widget=SelectDateWidget(), required=False)
def __iter__(self):
"""
Yield pairs of include checkbox / filters for each field.
"""
for field_id in [f.id for f in self.form_fields] + [0]:
prefix = "field_%s_" % field_id
fields = [f for f in super(EntriesForm, self).__iter__()
if f.name.startswith(prefix)]
yield fields[0], fields[1], fields[2:]
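    # Sketch of what one iteration step yields (assumed field id 7 of a
    # text type):
    #
    #   export_checkbox, filter_choice, rest = next(iter(entries_form))
    #   # export_checkbox -> bound "field_7_export" BooleanField
    #   # filter_choice   -> bound "field_7_filter" ChoiceField
    #   # rest            -> ["field_7_contains"] (or from/to date fields)
    #
    # The final iteration covers the "field_0_*" entry_time pseudo-field.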
def columns(self):
"""
Returns the list of selected column names.
"""
fields = [f.label.encode("utf-8") for f in self.form_fields
if self.cleaned_data["field_%s_export" % f.id]]
if self.cleaned_data["field_0_export"]:
fields.append(self.entry_time_name)
return fields
def rows(self, csv=False):
"""
Returns each row based on the selected criteria.
"""
# Store the index of each field against its ID for building each
# entry row with columns in the correct order. Also store the IDs of
# fields with a type of FileField or Date-like for special handling of
# their values.
field_indexes = {}
file_field_ids = []
date_field_ids = []
for field in self.form_fields:
if self.cleaned_data["field_%s_export" % field.id]:
field_indexes[field.id] = len(field_indexes)
if field.is_a(fields.FILE):
file_field_ids.append(field.id)
elif field.is_a(*fields.DATES):
date_field_ids.append(field.id)
num_columns = len(field_indexes)
include_entry_time = self.cleaned_data["field_0_export"]
if include_entry_time:
num_columns += 1
# Get the field entries for the given form and filter by entry_time
# if specified.
field_entries = FieldEntry.objects.filter(entry__form=self.form
).order_by("-entry__id").select_related(depth=1)
if self.cleaned_data["field_0_filter"] == FILTER_CHOICE_BETWEEN:
time_from = self.cleaned_data["field_0_from"]
time_to = self.cleaned_data["field_0_to"]
if time_from and time_to:
field_entries = field_entries.filter(
entry__entry_time__range=(time_from, time_to))
# Loop through each field value ordered by entry, building up each
# entry as a row. Use the ``valid_row`` flag for marking a row as
# invalid if it fails one of the filtering criteria specified.
current_entry = None
current_row = None
valid_row = True
for field_entry in field_entries:
if field_entry.entry_id != current_entry:
# New entry, write out the current row and start a new one.
if valid_row and current_row is not None:
if not csv:
current_row.insert(0, current_entry)
yield current_row
current_entry = field_entry.entry_id
current_row = [""] * num_columns
valid_row = True
if include_entry_time:
current_row[-1] = field_entry.entry.entry_time
field_value = field_entry.value or ""
# Check for filter.
field_id = field_entry.field_id
filter_type = self.cleaned_data.get("field_%s_filter" % field_id)
filter_args = None
if filter_type:
if filter_type == FILTER_CHOICE_BETWEEN:
f, t = "field_%s_from" % field_id, "field_%s_to" % field_id
filter_args = [self.cleaned_data[f], self.cleaned_data[t]]
if filter_args[0] is None or filter_args[1] is None:
filter_args = None
else:
field_name = "field_%s_contains" % field_id
filter_args = self.cleaned_data[field_name]
if filter_args:
filter_args = [filter_args]
if filter_args:
filter_func = FILTER_FUNCS[filter_type]
if isinstance(filter_args[0], list):
# Criteria is from a range of checkboxes.
for arg in filter_args[0]:
if filter_func(arg, field_value):
break
else:
valid_row = False
else:
# Convert dates before checking filter.
if field_id in date_field_ids:
y, m, d = field_value.split(" ")[0].split("-")
dte = date(int(y), int(m), int(d))
filter_args.append(dte)
else:
filter_args.append(field_value)
if not filter_func(*filter_args):
valid_row = False
# Create download URL for file fields.
if field_entry.value and field_id in file_field_ids:
url = reverse("admin:form_file", args=(field_entry.id,))
field_value = self.request.build_absolute_uri(url)
if not csv:
parts = (field_value, split(field_entry.value)[1])
field_value = mark_safe("<a href=\"%s\">%s</a>" % parts)
# Only use values for fields that were selected.
try:
field_value = field_value.encode("utf-8")
current_row[field_indexes[field_id]] = field_value
except KeyError:
pass
# Output the final row.
if valid_row and current_row is not None:
if not csv:
current_row.insert(0, current_entry)
yield current_row
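# Minimal usage sketch for the export flow (illustrative only; ``my_form``,
# ``request`` and ``writer`` are assumed to be provided by the surrounding
# admin view):
#
#   entries_form = EntriesForm(my_form, request, data=request.POST)
#   if entries_form.is_valid():
#       writer.writerow(entries_form.columns())
#       for row in entries_form.rows(csv=True):
#           writer.writerow(row)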
|
{
"content_hash": "8b39ce28e5e57f208d453dc11c18340d",
"timestamp": "",
"source": "github",
"line_count": 370,
"max_line_length": 79,
"avg_line_length": 43.52972972972973,
"alnum_prop": 0.5583633428535949,
"repo_name": "westinedu/similarinterest",
"id": "1bfb12df542746e907da70bfe42fb1330ced1cb5",
"size": "16106",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "mezzanine/forms/forms.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "JavaScript",
"bytes": "446844"
},
{
"name": "PHP",
"bytes": "1052"
},
{
"name": "Python",
"bytes": "6029908"
},
{
"name": "Ruby",
"bytes": "249"
},
{
"name": "Shell",
"bytes": "1355"
}
],
"symlink_target": ""
}
|
import json
import re
import cfgm_common
import netaddr
import uuid
import vnc_quota
from gen.resource_xsd import *
from gen.resource_common import *
from gen.resource_server import *
from pprint import pformat
from pysandesh.gen_py.sandesh.ttypes import SandeshLevel
class GlobalSystemConfigServer(GlobalSystemConfigServerGen):
@classmethod
def _check_asn(cls, obj_dict, db_conn):
global_asn = obj_dict.get('autonomous_system')
if not global_asn:
return (True, '')
(ok, result) = db_conn.dbe_list('virtual-network')
if not ok:
return (ok, result)
for vn_name, vn_uuid in result:
ok, result = db_conn.dbe_read('virtual-network', {'uuid': vn_uuid})
if not ok:
return ok, result
rt_dict = result.get('route_target_list', {})
for rt in rt_dict.get('route_target', []):
(_, asn, target) = rt.split(':')
if (int(asn) == global_asn and
int(target) >= cfgm_common.BGP_RTGT_MIN_ID):
return (False, (400, "Virtual network %s is configured "
"with a route target with this ASN and route "
"target value in the same range as used by "
"automatically allocated route targets" % vn_name))
return (True, '')
# end _check_asn
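    # Illustrative note (assumed values): a route target string such as
    # "target:64512:8000005" splits into ('target', '64512', '8000005');
    # the check above rejects the network when 64512 equals the global ASN
    # and 8000005 is at or above cfgm_common.BGP_RTGT_MIN_ID, the range
    # reserved for automatically allocated route targets.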
@classmethod
def http_post_collection(cls, tenant_name, obj_dict, db_conn):
ok, result = cls._check_asn(obj_dict, db_conn)
if not ok:
return ok, result
return True, ''
# end http_post_collection
@classmethod
def http_put(cls, id, fq_name, obj_dict, db_conn):
ok, result = cls._check_asn(obj_dict, db_conn)
if not ok:
return ok, result
return True, ''
# end http_put
# end class GlobalSystemConfigServer
class FloatingIpServer(FloatingIpServerGen):
generate_default_instance = False
@classmethod
def http_post_collection(cls, tenant_name, obj_dict, db_conn):
proj_dict = obj_dict['project_refs'][0]
if 'uuid' in proj_dict:
proj_uuid = proj_dict['uuid']
else:
proj_uuid = db_conn.fq_name_to_uuid('project', proj_dict['to'])
user_visibility = obj_dict['id_perms'].get('user_visible', True)
verify_quota_kwargs = {'db_conn': db_conn,
'fq_name': obj_dict['fq_name'],
'resource': 'floating_ip_back_refs',
'obj_type': 'floating-ip',
'user_visibility': user_visibility,
'proj_uuid': proj_uuid}
(ok, response) = vnc_quota.QuotaHelper.verify_quota_for_resource(
**verify_quota_kwargs)
if not ok:
return (ok, response)
vn_fq_name = obj_dict['fq_name'][:-2]
req_ip = obj_dict.get("floating_ip_address")
if req_ip and cls.addr_mgmt.is_ip_allocated(req_ip, vn_fq_name):
return (False, (403, 'Ip address already in use'))
try:
fip_addr = cls.addr_mgmt.ip_alloc_req(vn_fq_name,
asked_ip_addr=req_ip)
except Exception as e:
return (False, (500, str(e)))
obj_dict['floating_ip_address'] = fip_addr
db_conn.config_log('AddrMgmt: alloc %s FIP for vn=%s, tenant=%s, askip=%s' \
% (obj_dict['floating_ip_address'], vn_fq_name, tenant_name,
req_ip), level=SandeshLevel.SYS_DEBUG)
return True, ""
# end http_post_collection
@classmethod
def http_post_collection_fail(cls, tenant_name, obj_dict, db_conn):
vn_fq_name = obj_dict['fq_name'][:-2]
fip_addr = obj_dict['floating_ip_address']
db_conn.config_log('AddrMgmt: free FIP %s for vn=%s tenant=%s, on post fail'
% (fip_addr, vn_fq_name, tenant_name),
level=SandeshLevel.SYS_DEBUG)
cls.addr_mgmt.ip_free_req(fip_addr, vn_fq_name)
return True, ""
# end http_post_collection_fail
@classmethod
def http_delete(cls, id, obj_dict, db_conn):
vn_fq_name = obj_dict['fq_name'][:-2]
fip_addr = obj_dict['floating_ip_address']
db_conn.config_log('AddrMgmt: free FIP %s for vn=%s'
% (fip_addr, vn_fq_name),
level=SandeshLevel.SYS_DEBUG)
cls.addr_mgmt.ip_free_req(fip_addr, vn_fq_name)
return True, ""
# end http_delete
@classmethod
def http_delete_fail(cls, id, obj_dict, db_conn):
vn_fq_name = obj_dict['fq_name'][:-2]
req_ip = obj_dict.get("floating_ip_address", None)
        if req_ip is None:
return True, ""
try:
cls.addr_mgmt.ip_alloc_req(vn_fq_name, asked_ip_addr=req_ip)
except Exception as e:
return (False, (500, str(e)))
db_conn.config_log('AddrMgmt: alloc %s FIP for vn=%s to recover DELETE failure'
% (obj_dict['floating_ip_address'], vn_fq_name),
level=SandeshLevel.SYS_DEBUG)
return True, ""
# end http_delete_fail
@classmethod
def dbe_create_notification(cls, obj_ids, obj_dict):
fip_addr = obj_dict['floating_ip_address']
vn_fq_name = obj_dict['fq_name'][:-2]
cls.addr_mgmt.ip_alloc_notify(fip_addr, vn_fq_name)
# end dbe_create_notification
@classmethod
def dbe_delete_notification(cls, obj_ids, obj_dict):
fip_addr = obj_dict['floating_ip_address']
vn_fq_name = obj_dict['fq_name'][:-2]
cls.addr_mgmt.ip_free_notify(fip_addr, vn_fq_name)
# end dbe_delete_notification
# end class FloatingIpServer
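# Note on the pattern above: every http_* hook has a matching *_fail hook
# that reverses its side effect, e.g. the address allocated in
# http_post_collection is released again in http_post_collection_fail, and
# an address freed by http_delete is re-requested in http_delete_fail so a
# failed DELETE leaves the original allocation intact.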
class InstanceIpServer(InstanceIpServerGen):
generate_default_instance = False
@classmethod
def _get_subnet_name(cls, vn_dict, subnet_uuid):
ipam_refs = vn_dict.get('network_ipam_refs', [])
subnet_name = None
for ipam in ipam_refs:
ipam_subnets = ipam['attr'].get('ipam_subnets', [])
for subnet in ipam_subnets:
if subnet['subnet_uuid'] == subnet_uuid:
subnet_dict = subnet['subnet']
subnet_name = subnet_dict['ip_prefix'] + '/' + str(
subnet_dict['ip_prefix_len'])
return subnet_name
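    # For reference, the structure walked above looks roughly like this
    # (values are illustrative, not taken from a real database):
    #
    #   vn_dict['network_ipam_refs'] = [{
    #       'attr': {'ipam_subnets': [{
    #           'subnet_uuid': <uuid>,
    #           'subnet': {'ip_prefix': '10.0.0.0', 'ip_prefix_len': 24},
    #           'default_gateway': '10.0.0.1',
    #       }]},
    #   }]
    #
    # so a matching subnet_uuid yields the name "10.0.0.0/24".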
@classmethod
def _is_gateway_ip(cls, vn_dict, ip_addr):
ipam_refs = vn_dict.get('network_ipam_refs', [])
for ipam in ipam_refs:
ipam_subnets = ipam['attr'].get('ipam_subnets', [])
for subnet in ipam_subnets:
if subnet['default_gateway'] == ip_addr:
return True
return False
@classmethod
def http_post_collection(cls, tenant_name, obj_dict, db_conn):
vn_fq_name = obj_dict['virtual_network_refs'][0]['to']
if ((vn_fq_name == cfgm_common.IP_FABRIC_VN_FQ_NAME) or
(vn_fq_name == cfgm_common.LINK_LOCAL_VN_FQ_NAME)):
# Ignore ip-fabric and link-local address allocations
return True, ""
req_ip = obj_dict.get("instance_ip_address", None)
req_ip_family = obj_dict.get("instance_ip_family", None)
        req_ip_version = 4  # default to IPv4
        if req_ip_family == "v6":
            req_ip_version = 6
vn_id = {'uuid': db_conn.fq_name_to_uuid('virtual-network', vn_fq_name)}
(read_ok, vn_dict) = db_conn.dbe_read('virtual-network', vn_id)
if not read_ok:
return (False, (500, 'Internal error : ' + pformat(vn_dict)))
subnet_uuid = obj_dict.get('subnet_uuid', None)
sub = cls._get_subnet_name(vn_dict, subnet_uuid) if subnet_uuid else None
if subnet_uuid and not sub:
return (False, (404, "Subnet id " + subnet_uuid + " not found"))
# If its an external network, check whether floating IP equivalent to
# requested fixed-IP is already reserved.
router_external = vn_dict.get('router_external', None)
if req_ip and router_external and \
not cls._is_gateway_ip(vn_dict, req_ip) and \
cls.addr_mgmt.is_ip_allocated(req_ip, vn_fq_name):
return (False, (403, 'Ip address already in use'))
try:
ip_addr = cls.addr_mgmt.ip_alloc_req(
vn_fq_name, sub=sub, asked_ip_addr=req_ip,
asked_ip_version=req_ip_version)
except Exception as e:
return (False, (500, str(e)))
obj_dict['instance_ip_address'] = ip_addr
db_conn.config_log('AddrMgmt: alloc %s for vn=%s, tenant=%s, askip=%s'
% (obj_dict['instance_ip_address'],
vn_fq_name, tenant_name, req_ip),
level=SandeshLevel.SYS_DEBUG)
return True, ""
# end http_post_collection
@classmethod
def http_post_collection_fail(cls, tenant_name, obj_dict, db_conn):
vn_fq_name = obj_dict['virtual_network_refs'][0]['to']
if ((vn_fq_name == cfgm_common.IP_FABRIC_VN_FQ_NAME) or
(vn_fq_name == cfgm_common.LINK_LOCAL_VN_FQ_NAME)):
# Ignore ip-fabric and link-local address allocations
return True, ""
ip_addr = obj_dict['instance_ip_address']
db_conn.config_log('AddrMgmt: free IP %s, vn=%s tenant=%s on post fail'
% (ip_addr, vn_fq_name, tenant_name),
level=SandeshLevel.SYS_DEBUG)
cls.addr_mgmt.ip_free_req(ip_addr, vn_fq_name)
return True, ""
# end http_post_collection_fail
@classmethod
def http_delete(cls, id, obj_dict, db_conn):
vn_fq_name = obj_dict['virtual_network_refs'][0]['to']
if ((vn_fq_name == cfgm_common.IP_FABRIC_VN_FQ_NAME) or
(vn_fq_name == cfgm_common.LINK_LOCAL_VN_FQ_NAME)):
# Ignore ip-fabric and link-local address allocations
return True, ""
ip_addr = obj_dict['instance_ip_address']
db_conn.config_log('AddrMgmt: free IP %s, vn=%s'
% (ip_addr, vn_fq_name),
level=SandeshLevel.SYS_DEBUG)
cls.addr_mgmt.ip_free_req(ip_addr, vn_fq_name)
return True, ""
# end http_delete
@classmethod
def http_delete_fail(cls, id, obj_dict, db_conn):
vn_fq_name = obj_dict['virtual_network_refs'][0]['to']
if ((vn_fq_name == cfgm_common.IP_FABRIC_VN_FQ_NAME) or
(vn_fq_name == cfgm_common.LINK_LOCAL_VN_FQ_NAME)):
# Ignore ip-fabric and link-local address allocations
return True, ""
req_ip = obj_dict.get("instance_ip_address", None)
        if req_ip is None:
return True, ""
try:
cls.addr_mgmt.ip_alloc_req(vn_fq_name, asked_ip_addr=req_ip)
except Exception as e:
return (False, (500, str(e)))
db_conn.config_log('AddrMgmt: alloc %s for vn=%s to recover DELETE failure'
% (obj_dict['instance_ip_address'], vn_fq_name),
level=SandeshLevel.SYS_DEBUG)
return True, ""
# end http_delete_fail
@classmethod
def dbe_create_notification(cls, obj_ids, obj_dict):
ip_addr = obj_dict['instance_ip_address']
vn_fq_name = obj_dict['virtual_network_refs'][0]['to']
cls.addr_mgmt.ip_alloc_notify(ip_addr, vn_fq_name)
# end dbe_create_notification
@classmethod
def dbe_delete_notification(cls, obj_ids, obj_dict):
ip_addr = obj_dict['instance_ip_address']
vn_fq_name = obj_dict['virtual_network_refs'][0]['to']
cls.addr_mgmt.ip_free_notify(ip_addr, vn_fq_name)
# end dbe_delete_notification
# end class InstanceIpServer
class LogicalRouterServer(LogicalRouterServerGen):
generate_default_instance = False
@classmethod
def http_post_collection(cls, tenant_name, obj_dict, db_conn):
user_visibility = obj_dict['id_perms'].get('user_visible', True)
verify_quota_kwargs = {'db_conn': db_conn,
'fq_name': obj_dict['fq_name'],
'resource': 'logical_routers',
'obj_type': 'logical-router',
'user_visibility': user_visibility}
return vnc_quota.QuotaHelper.verify_quota_for_resource(
**verify_quota_kwargs)
# end http_post_collection
# end class LogicalRouterServer
class VirtualMachineInterfaceServer(VirtualMachineInterfaceServerGen):
generate_default_instance = False
@classmethod
def http_post_collection(cls, tenant_name, obj_dict, db_conn):
vn_dict = obj_dict['virtual_network_refs'][0]
vn_uuid = vn_dict.get('uuid')
if not vn_uuid:
vn_fq_name = vn_dict.get('to')
if not vn_fq_name:
return (False, (500, 'Internal error : ' + pformat(vn_dict)))
vn_uuid = db_conn.fq_name_to_uuid('virtual-network', vn_fq_name)
(ok, vn_dict) = db_conn.dbe_read('virtual-network', {'uuid':vn_uuid})
if not ok:
return (False, (500, 'Internal error : ' + pformat(vn_dict)))
proj_uuid = vn_dict['parent_uuid']
user_visibility = obj_dict['id_perms'].get('user_visible', True)
verify_quota_kwargs = {'db_conn': db_conn,
'fq_name': obj_dict['fq_name'],
'resource': 'virtual_machine_interfaces',
'obj_type': 'virtual-machine-interface',
'user_visibility': user_visibility,
'proj_uuid': proj_uuid}
(ok, response) = vnc_quota.QuotaHelper.verify_quota_for_resource(
**verify_quota_kwargs)
if not ok:
return (ok, response)
inmac = None
if 'virtual_machine_interface_mac_addresses' in obj_dict:
mc = obj_dict['virtual_machine_interface_mac_addresses']
if 'mac_address' in mc:
                if len(mc['mac_address']) == 1:
                    inmac = mc['mac_address']
        if inmac is not None:
mac_addrs_obj = MacAddressesType(inmac)
else:
mac_addr = cls.addr_mgmt.mac_alloc(obj_dict)
mac_addrs_obj = MacAddressesType([mac_addr])
mac_addrs_json = json.dumps(
mac_addrs_obj,
default=lambda o: dict((k, v)
for k, v in o.__dict__.iteritems()))
mac_addrs_dict = json.loads(mac_addrs_json)
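        # The dumps/loads round trip above is a compact way of turning the
        # MacAddressesType object into the plain dict form stored by the
        # API layer; e.g. MacAddressesType(['02:ab:cd:00:00:01']) becomes
        # {'mac_address': ['02:ab:cd:00:00:01']} (illustrative value),
        # assuming the object's state lives entirely in its __dict__.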
obj_dict['virtual_machine_interface_mac_addresses'] = mac_addrs_dict
if 'virtual_machine_interface_allowed_address_pairs' in obj_dict:
aap_config = obj_dict['virtual_machine_interface_allowed_address_pairs']
if 'allowed_address_pair' in aap_config:
aaps = aap_config['allowed_address_pair']
for aap in aaps or []:
if aap['mac'] == "":
aap['mac'] = obj_dict['virtual_machine_interface_mac_addresses']['mac_address']
return True, ""
# end http_post_collection
@classmethod
def http_put(cls, id, fq_name, obj_dict, db_conn):
if 'virtual_machine_interface_allowed_address_pairs' in obj_dict:
vmi_id = {'uuid': id}
(read_ok, read_result) = db_conn.dbe_read('virtual-machine-interface', vmi_id)
if not read_ok:
return (False, (500, read_result))
aap_config = obj_dict['virtual_machine_interface_allowed_address_pairs']
if 'allowed_address_pair' in aap_config:
aaps = aap_config['allowed_address_pair']
for aap in aaps or []:
if aap['mac'] == "":
aap['mac'] = read_result['virtual_machine_interface_mac_addresses']['mac_address']
return True, ""
# end http_put
# end class VirtualMachineInterfaceServer
class VirtualNetworkServer(VirtualNetworkServerGen):
@classmethod
def _check_route_targets(cls, obj_dict, db_conn):
if 'route_target_list' not in obj_dict:
return (True, '')
config_uuid = db_conn.fq_name_to_uuid('global_system_config', ['default-global-system-config'])
config = db_conn.uuid_to_obj_dict(config_uuid)
global_asn = config.get('prop:autonomous_system')
if not global_asn:
return (True, '')
global_asn = json.loads(global_asn)
rt_dict = obj_dict.get('route_target_list')
if not rt_dict:
return (True, '')
for rt in rt_dict.get('route_target', []):
try:
(prefix, asn, target) = rt.split(':')
if prefix != 'target':
raise ValueError()
target = int(target)
if not asn.isdigit():
netaddr.IPAddress(asn)
except (ValueError, netaddr.core.AddrFormatError) as e:
return (False, "Route target must be of the format "
"'target:<asn>:<number>' or 'target:<ip>:number'")
if asn == global_asn and target >= cfgm_common.BGP_RTGT_MIN_ID:
return (False, "Configured route target must use ASN that is "
"different from global ASN or route target value must"
" be less than %d" % cfgm_common.BGP_RTGT_MIN_ID)
return (True, '')
# end _check_route_targets
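    # Sketch of inputs the validator above accepts or rejects (assuming a
    # hypothetical global ASN of 64512 and BGP_RTGT_MIN_ID of 8000000):
    #
    #   "target:64513:100"      -> accepted (ASN differs from global)
    #   "target:64512:100"      -> accepted (value below the reserved min)
    #   "target:64512:8000001"  -> rejected (collides with the auto range)
    #   "target:10.1.1.1:100"   -> accepted (IP-administered target)
    #   "rt:64512:100"          -> rejected (prefix must be 'target')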
@classmethod
def http_post_collection(cls, tenant_name, obj_dict, db_conn):
user_visibility = obj_dict['id_perms'].get('user_visible', True)
verify_quota_kwargs = {'db_conn': db_conn,
'fq_name': obj_dict['fq_name'],
'resource': 'virtual_networks',
'obj_type': 'virtual-network',
'user_visibility': user_visibility}
(ok, response) = vnc_quota.QuotaHelper.verify_quota_for_resource(
**verify_quota_kwargs)
if not ok:
return (ok, response)
db_conn.update_subnet_uuid(obj_dict)
(ok, error) = cls._check_route_targets(obj_dict, db_conn)
if not ok:
return (False, (400, error))
try:
cls.addr_mgmt.net_create_req(obj_dict)
except Exception as e:
return (False, (500, str(e)))
return True, ""
# end http_post_collection
@classmethod
def http_post_collection_fail(cls, tenant_name, obj_dict, db_conn):
cls.addr_mgmt.net_delete_req(obj_dict)
return True, ""
# end post_collection_fail
@classmethod
def http_put(cls, id, fq_name, obj_dict, db_conn):
if ((fq_name == cfgm_common.IP_FABRIC_VN_FQ_NAME) or
(fq_name == cfgm_common.LINK_LOCAL_VN_FQ_NAME)):
# Ignore ip-fabric subnet updates
return True, ""
if 'network_ipam_refs' not in obj_dict:
# NOP for addr-mgmt module
return True, ""
vn_id = {'uuid': id}
(read_ok, read_result) = db_conn.dbe_read('virtual-network', vn_id)
if not read_ok:
return (False, (500, read_result))
(ok, result) = cls.addr_mgmt.net_check_subnet(read_result, obj_dict)
if not ok:
return (ok, (409, result))
(ok, result) = cls.addr_mgmt.net_check_subnet_quota(read_result,
obj_dict, db_conn)
if not ok:
return (ok, (403, result))
(ok, result) = cls.addr_mgmt.net_check_subnet_overlap(read_result,
obj_dict)
if not ok:
return (ok, (409, result))
(ok, result) = cls.addr_mgmt.net_check_subnet_delete(read_result,
obj_dict)
if not ok:
return (ok, (409, result))
try:
cls.addr_mgmt.net_update_req(fq_name, read_result, obj_dict, id)
except Exception as e:
return (False, (500, str(e)))
db_conn.update_subnet_uuid(obj_dict)
(ok, error) = cls._check_route_targets(obj_dict, db_conn)
if not ok:
return (False, (400, error))
return True, ""
# end http_put
@classmethod
def http_put_fail(cls, id, fq_name, obj_dict, db_conn):
if ((fq_name == cfgm_common.IP_FABRIC_VN_FQ_NAME) or
(fq_name == cfgm_common.LINK_LOCAL_VN_FQ_NAME)):
# Ignore ip-fabric subnet updates
return True, ""
ipam_refs = obj_dict.get('network_ipam_refs', None)
if not ipam_refs:
# NOP for addr-mgmt module
return True, ""
vn_id = {'uuid': id}
(read_ok, read_result) = db_conn.dbe_read('virtual-network', vn_id)
if not read_ok:
return (False, (500, read_result))
        cls.addr_mgmt.net_update_req(fq_name, obj_dict, read_result, id)
        return True, ""
    # end http_put_fail
@classmethod
def http_delete(cls, id, obj_dict, db_conn):
cls.addr_mgmt.net_delete_req(obj_dict)
return True, ""
# end http_delete
@classmethod
def http_delete_fail(cls, id, obj_dict, db_conn):
cls.addr_mgmt.net_create_req(obj_dict)
return True, ""
# end http_delete_fail
@classmethod
def ip_alloc(cls, vn_fq_name, subnet_name, count):
ip_list = [cls.addr_mgmt.ip_alloc_req(vn_fq_name, subnet_name)
for i in range(count)]
msg = 'AddrMgmt: reserve %d IP for vn=%s, subnet=%s - %s' \
% (count, vn_fq_name, subnet_name if subnet_name else '', ip_list)
cls.addr_mgmt.config_log(msg, level=SandeshLevel.SYS_DEBUG)
return {'ip_addr': ip_list}
# end ip_alloc
@classmethod
def ip_free(cls, vn_fq_name, subnet_name, ip_list):
msg = 'AddrMgmt: release IP %s for vn=%s, subnet=%s' \
% (ip_list, vn_fq_name, subnet_name if subnet_name else '')
cls.addr_mgmt.config_log(msg, level=SandeshLevel.SYS_DEBUG)
for ip_addr in ip_list:
cls.addr_mgmt.ip_free_req(ip_addr, vn_fq_name, subnet_name)
# end ip_free
@classmethod
def subnet_ip_count(cls, obj_dict, subnet_list):
ip_count_list = []
for item in subnet_list:
ip_count_list.append(cls.addr_mgmt.ip_count(obj_dict, item))
return {'ip_count_list': ip_count_list}
# end subnet_ip_count
@classmethod
def dbe_create_notification(cls, obj_ids, obj_dict):
cls.addr_mgmt.net_create_notify(obj_ids, obj_dict)
# end dbe_create_notification
@classmethod
def dbe_update_notification(cls, obj_ids):
cls.addr_mgmt.net_update_notify(obj_ids)
# end dbe_update_notification
@classmethod
def dbe_delete_notification(cls, obj_ids, obj_dict):
cls.addr_mgmt.net_delete_notify(obj_ids, obj_dict)
    # end dbe_delete_notification
# end class VirtualNetworkServer
class NetworkIpamServer(NetworkIpamServerGen):
@classmethod
def http_put(cls, id, fq_name, obj_dict, db_conn):
ipam_uuid = obj_dict['uuid']
ipam_id = {'uuid': ipam_uuid}
(read_ok, read_result) = db_conn.dbe_read('network-ipam', ipam_id)
if not read_ok:
return (False, (500, "Internal error : IPAM is not valid"))
old_ipam_mgmt = read_result.get('network_ipam_mgmt')
new_ipam_mgmt = obj_dict.get('network_ipam_mgmt')
if not old_ipam_mgmt or not new_ipam_mgmt:
return True, ""
old_dns_method = old_ipam_mgmt.get('ipam_dns_method')
new_dns_method = new_ipam_mgmt.get('ipam_dns_method')
if not cls.is_change_allowed(old_dns_method, new_dns_method, obj_dict,
db_conn):
return (False, (409, "Cannot change DNS Method from " +
old_dns_method + " to " + new_dns_method +
" with active VMs referring to the IPAM"))
return True, ""
# end http_put
@classmethod
def http_put_fail(cls, id, fq_name, obj_dict, db_conn):
# undo any state change done by http_put function
return True, ""
# end http_put_fail
@classmethod
def is_change_allowed(cls, old, new, obj_dict, db_conn):
if (old == "default-dns-server" or old == "virtual-dns-server"):
if ((new == "tenant-dns-server" or new == "none") and
cls.is_active_vm_present(obj_dict, db_conn)):
return False
if (old == "tenant-dns-server" and new != old and
cls.is_active_vm_present(obj_dict, db_conn)):
return False
if (old == "none" and new != old and
cls.is_active_vm_present(obj_dict, db_conn)):
return False
return True
# end is_change_allowed
@classmethod
def is_active_vm_present(cls, obj_dict, db_conn):
if 'virtual_network_back_refs' in obj_dict:
vn_backrefs = obj_dict['virtual_network_back_refs']
for vn in vn_backrefs:
vn_uuid = vn['uuid']
vn_id = {'uuid': vn_uuid}
(read_ok, read_result) = db_conn.dbe_read('virtual-network',
vn_id)
if not read_ok:
continue
if 'virtual_machine_interface_back_refs' in read_result:
return True
return False
# end is_active_vm_present
# end class NetworkIpamServer
class VirtualDnsServer(VirtualDnsServerGen):
generate_default_instance = False
@classmethod
def http_post_collection(cls, tenant_name, obj_dict, db_conn):
return cls.validate_dns_server(obj_dict, db_conn)
# end http_post_collection
@classmethod
def http_put(cls, id, fq_name, obj_dict, db_conn):
return cls.validate_dns_server(obj_dict, db_conn)
# end http_put
@classmethod
def http_put_fail(cls, id, fq_name, obj_dict, db_conn):
# undo any state change done by http_put function
return True, ""
# end http_put_fail
@classmethod
def http_delete(cls, id, obj_dict, db_conn):
vdns_name = ":".join(obj_dict['fq_name'])
if 'parent_uuid' in obj_dict:
domain_uuid = obj_dict['parent_uuid']
domain_id = {'uuid': domain_uuid}
(read_ok, read_result) = db_conn.dbe_read('domain', domain_id)
if not read_ok:
return (
False,
(500, "Internal error : Virtual DNS is not in a domain"))
virtual_DNSs = read_result.get('virtual_DNSs', [])
for vdns in virtual_DNSs:
vdns_uuid = vdns['uuid']
vdns_id = {'uuid': vdns_uuid}
(read_ok, read_result) = db_conn.dbe_read('virtual-DNS',
vdns_id)
if not read_ok:
return (
False,
(500,
"Internal error : Unable to read Virtual DNS data"))
vdns_data = read_result['virtual_DNS_data']
if 'next_virtual_DNS' in vdns_data:
if vdns_data['next_virtual_DNS'] == vdns_name:
return (
False,
(403,
"Virtual DNS server is referred"
" by other virtual DNS servers"))
return True, ""
# end http_delete
@classmethod
def http_delete_fail(cls, id, obj_dict, db_conn):
# undo any state change done by http_delete function
return True, ""
# end http_delete_fail
@classmethod
def is_valid_dns_name(cls, name):
if len(name) > 255:
return False
if name.endswith("."): # A single trailing dot is legal
# strip exactly one dot from the right, if present
name = name[:-1]
        disallowed = re.compile(r"[^A-Z\d-]", re.IGNORECASE)
return all( # Split by labels and verify individually
(label and len(label) <= 63 # length is within proper range
# no bordering hyphens
and not label.startswith("-") and not label.endswith("-")
and not disallowed.search(label)) # contains only legal char
for label in name.split("."))
# end is_valid_dns_name
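    # Doctest-style illustration of the rules enforced above (hypothetical
    # inputs):
    #
    #   >>> VirtualDnsServer.is_valid_dns_name("example.juniper.net.")
    #   True    # a single trailing dot is stripped, all labels are legal
    #   >>> VirtualDnsServer.is_valid_dns_name("-bad.example.net")
    #   False   # labels may not begin or end with a hyphen
    #   >>> VirtualDnsServer.is_valid_dns_name("a" * 64 + ".net")
    #   False   # each label is limited to 63 characters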
@classmethod
def is_valid_ipv4_address(cls, address):
parts = address.split(".")
if len(parts) != 4:
return False
for item in parts:
try:
if not 0 <= int(item) <= 255:
return False
except ValueError:
return False
return True
# end is_valid_ipv4_address
@classmethod
def validate_dns_server(cls, obj_dict, db_conn):
virtual_dns = obj_dict['fq_name'][1]
        disallowed = re.compile(r"[^A-Z\d-]", re.IGNORECASE)
if disallowed.search(virtual_dns) or virtual_dns.startswith("-"):
return (False, (403,
"Special characters are not allowed in " +
"Virtual DNS server name"))
vdns_data = obj_dict['virtual_DNS_data']
if not cls.is_valid_dns_name(vdns_data['domain_name']):
return (
False,
(403, "Domain name does not adhere to DNS name requirements"))
record_order = ["fixed", "random", "round-robin"]
        if str(vdns_data['record_order']).lower() not in record_order:
return (False, (403, "Invalid value for record order"))
ttl = vdns_data['default_ttl_seconds']
if ttl < 0 or ttl > 2147483647:
return (False, (403, "Invalid value for TTL"))
if 'next_virtual_DNS' in vdns_data:
vdns_next = vdns_data['next_virtual_DNS']
            if not vdns_next:
return True, ""
next_vdns = vdns_data['next_virtual_DNS'].split(":")
# check that next vdns exists
try:
next_vdns_uuid = db_conn.fq_name_to_uuid(
'virtual_DNS', next_vdns)
except Exception as e:
if not cls.is_valid_ipv4_address(
vdns_data['next_virtual_DNS']):
return (
False,
(403,
"Invalid Virtual Forwarder(next virtual dns server)"))
else:
return True, ""
            # check that next virtual dns servers aren't referring to each
            # other; the check above doesn't allow it during create, but an
            # entry could be modified later
next_vdns_id = {'uuid': next_vdns_uuid}
(read_ok, read_result) = db_conn.dbe_read(
'virtual-DNS', next_vdns_id)
if read_ok:
next_vdns_data = read_result['virtual_DNS_data']
if 'next_virtual_DNS' in next_vdns_data:
vdns_name = ":".join(obj_dict['fq_name'])
if next_vdns_data['next_virtual_DNS'] == vdns_name:
return (
False,
(403,
"Cannot have Virtual DNS Servers "
"referring to each other"))
return True, ""
# end validate_dns_server
# end class VirtualDnsServer
class VirtualDnsRecordServer(VirtualDnsRecordServerGen):
generate_default_instance = False
@classmethod
def http_post_collection(cls, tenant_name, obj_dict, db_conn):
return cls.validate_dns_record(obj_dict, db_conn)
# end http_post_collection
@classmethod
def http_put(cls, id, fq_name, obj_dict, db_conn):
return cls.validate_dns_record(obj_dict, db_conn)
# end http_put
@classmethod
def http_put_fail(cls, id, fq_name, obj_dict, db_conn):
# undo any state change done by http_put function
return True, ""
# end http_put_fail
@classmethod
def http_delete(cls, id, obj_dict, db_conn):
return True, ""
# end http_delete
@classmethod
def http_delete_fail(cls, id, obj_dict, db_conn):
# undo any state change done by http_delete function
return True, ""
# end http_delete_fail
@classmethod
def validate_dns_record(cls, obj_dict, db_conn):
rec_data = obj_dict['virtual_DNS_record_data']
        rec_types = ["a", "cname", "ptr", "ns", "mx"]
        rec_type = str(rec_data['record_type']).lower()
        if rec_type not in rec_types:
return (False, (403, "Invalid record type"))
if str(rec_data['record_class']).lower() != "in":
return (False, (403, "Invalid record class"))
rec_name = rec_data['record_name']
rec_value = rec_data['record_data']
# check rec_name validity
if rec_type == "ptr":
            if (not VirtualDnsServer.is_valid_ipv4_address(rec_name) and
                    "in-addr.arpa" not in rec_name.lower()):
return (
False,
(403,
"PTR Record name has to be IP address"
" or reverse.ip.in-addr.arpa"))
elif not VirtualDnsServer.is_valid_dns_name(rec_name):
return (
False,
(403, "Record name does not adhere to DNS name requirements"))
# check rec_data validity
if rec_type == "a":
if not VirtualDnsServer.is_valid_ipv4_address(rec_value):
return (False, (403, "Invalid IP address"))
elif rec_type == "cname" or rec_type == "ptr" or rec_type == "mx":
if not VirtualDnsServer.is_valid_dns_name(rec_value):
return (
False,
(403,
"Record data does not adhere to DNS name requirements"))
elif rec_type == "ns":
try:
vdns_name = rec_value.split(":")
vdns_uuid = db_conn.fq_name_to_uuid('virtual_DNS', vdns_name)
except Exception as e:
if (not VirtualDnsServer.is_valid_ipv4_address(rec_value) and
not VirtualDnsServer.is_valid_dns_name(rec_value)):
return (
False,
(403, "Invalid virtual dns server in record data"))
ttl = rec_data['record_ttl_seconds']
if ttl < 0 or ttl > 2147483647:
return (False, (403, "Invalid value for TTL"))
if rec_type == "mx":
preference = rec_data['record_mx_preference']
if preference < 0 or preference > 65535:
return (False, (403, "Invalid value for MX record preference"))
return True, ""
# end validate_dns_record
# end class VirtualDnsRecordServer
def _check_policy_rule_uuid(entries):
if not entries:
return
for rule in entries.get('policy_rule') or []:
if not rule.get('rule_uuid'):
rule['rule_uuid'] = str(uuid.uuid4())
# end _check_policy_rule_uuid
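# Illustrative behaviour (hypothetical input): given
#   entries = {'policy_rule': [{'direction': '>'}]}
# the helper mutates the dict in place so the rule gains a 'rule_uuid'
# key holding a freshly generated UUID4 string; rules that already carry
# a 'rule_uuid' are left untouched.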
class SecurityGroupServer(SecurityGroupServerGen):
generate_default_instance = False
@classmethod
def http_post_collection(cls, tenant_name, obj_dict, db_conn):
user_visibility = obj_dict['id_perms'].get('user_visible', True)
verify_quota_kwargs = {'db_conn': db_conn,
'fq_name': obj_dict['fq_name'],
'resource': 'security_groups',
'obj_type': 'security-group',
'user_visibility': user_visibility}
(ok, response) = vnc_quota.QuotaHelper.verify_quota_for_resource(
**verify_quota_kwargs)
if not ok:
return (ok, response)
_check_policy_rule_uuid(obj_dict.get('security_group_entries'))
return True, ""
# end http_post_collection
@classmethod
def http_put(cls, id, fq_name, obj_dict, db_conn):
(ok, sec_dict) = db_conn.dbe_read('security-group', {'uuid': id})
if not ok:
return (False, (500, 'Bad Security Group error : ' + pformat(sec_dict)))
(ok, proj_dict) = vnc_quota.QuotaHelper.get_project_dict(
sec_dict['parent_uuid'], db_conn)
if not ok:
return (False, (500, 'Bad Project error : ' + pformat(proj_dict)))
if 'security_group_entries' in obj_dict:
rule_count = len(obj_dict['security_group_entries']['policy_rule'])
obj_type = 'security-group-rule'
for sg in proj_dict.get('security_groups', []):
if sg['uuid'] == sec_dict['uuid']:
continue
ok, sg_dict = db_conn.dbe_read('security-group', sg)
if not ok:
continue
sge = sg_dict.get('security_group_entries', {})
rule_count += len(sge.get('policy_rule', []))
if sec_dict['id_perms'].get('user_visible', True) is not False:
(ok, quota_limit) = vnc_quota.QuotaHelper.check_quota_limit(
                proj_dict, obj_type, rule_count - 1)
if not ok:
return (False, (403, pformat(fq_name) + ' : ' + quota_limit))
_check_policy_rule_uuid(obj_dict.get('security_group_entries'))
return True, ""
# end http_put
# end class SecurityGroupServer
class NetworkPolicyServer(NetworkPolicyServerGen):
@classmethod
def http_post_collection(cls, tenant_name, obj_dict, db_conn):
user_visibility = obj_dict['id_perms'].get('user_visible', True)
verify_quota_kwargs = {'db_conn': db_conn,
'fq_name': obj_dict['fq_name'],
'resource': 'network_policys',
'obj_type': 'network-policy',
'user_visibility': user_visibility}
(ok, response) = vnc_quota.QuotaHelper.verify_quota_for_resource(
**verify_quota_kwargs)
if not ok:
return (ok, response)
_check_policy_rule_uuid(obj_dict.get('network_policy_entries'))
try:
cls._check_policy(obj_dict)
except Exception as e:
return (False, (500, str(e)))
return True, ""
# end http_post_collection
@classmethod
def http_put(cls, id, fq_name, obj_dict, db_conn):
p_id = {'uuid': id}
(read_ok, read_result) = db_conn.dbe_read('network-policy', p_id)
if not read_ok:
return (False, (500, read_result))
_check_policy_rule_uuid(obj_dict.get('network_policy_entries'))
return True, ""
# end http_put
@classmethod
def _check_policy(cls, obj_dict):
entries = obj_dict.get('network_policy_entries')
if not entries:
return
# end _check_policy
# end class NetworkPolicyServer
class LogicalInterfaceServer(LogicalInterfaceServerGen):
@classmethod
def http_post_collection(cls, tenant_name, obj_dict, db_conn):
(ok, msg) = cls._check_vlan(obj_dict, db_conn)
        if not ok:
            return (False, msg)
vlan = 0
if 'logical_interface_vlan_tag' in obj_dict:
vlan = obj_dict['logical_interface_vlan_tag']
return PhysicalInterfaceServer._check_interface_name(obj_dict, db_conn, vlan)
# end http_post_collection
@classmethod
def http_put(cls, id, fq_name, obj_dict, db_conn):
interface = {'uuid': id}
(read_ok, read_result) = db_conn.dbe_read('logical-interface', interface)
if not read_ok:
return (False, (500, read_result))
# do not allow change in display name
if 'display_name' in obj_dict:
if obj_dict['display_name'] != read_result.get('display_name'):
return (False, (403, "Cannot change display name !"))
vlan = None
if 'logical_interface_vlan_tag' in obj_dict:
vlan = obj_dict['logical_interface_vlan_tag']
if 'logical_interface_vlan_tag' in read_result:
if int(vlan) != int(read_result.get('logical_interface_vlan_tag')):
return (False, (403, "Cannot change Vlan id"))
return True, ""
# end http_put
@classmethod
def _check_vlan(cls, obj_dict, db_conn):
if 'logical_interface_vlan_tag' in obj_dict:
vlan = obj_dict['logical_interface_vlan_tag']
if vlan < 0 or vlan > 4094:
return (False, (403, "Invalid Vlan id"))
return True, ""
# end _check_vlan
# end class LogicalInterfaceServer
class PhysicalInterfaceServer(PhysicalInterfaceServerGen):
@classmethod
def http_post_collection(cls, tenant_name, obj_dict, db_conn):
return cls._check_interface_name(obj_dict, db_conn, None)
# end http_post_collection
@classmethod
def http_put(cls, id, fq_name, obj_dict, db_conn):
# do not allow change in display name
if 'display_name' in obj_dict:
interface = {'uuid': id}
(read_ok, read_result) = db_conn.dbe_read('physical-interface', interface)
if not read_ok:
return (False, (500, read_result))
if obj_dict['display_name'] != read_result.get('display_name'):
return (False, (403, "Cannot change display name !"))
return True, ""
# end http_put
@classmethod
def _check_interface_name(cls, obj_dict, db_conn, vlan_tag):
interface_name = obj_dict['display_name']
router = obj_dict['fq_name'][:2]
try:
router_uuid = db_conn.fq_name_to_uuid('physical-router', router)
except cfgm_common.exceptions.NoIdError:
return (False, (500, 'Internal error : Physical router ' +
":".join(router) + ' not found'))
physical_interface_uuid = ""
if obj_dict['parent_type'] == 'physical-interface':
try:
physical_interface_name = obj_dict['fq_name'][:3]
physical_interface_uuid = db_conn.fq_name_to_uuid('physical-interface', physical_interface_name)
except cfgm_common.exceptions.NoIdError:
return (False, (500, 'Internal error : Physical interface ' +
":".join(physical_interface_name) + ' not found'))
(ok, physical_router) = db_conn.dbe_read('physical-router', {'uuid':router_uuid})
if not ok:
return (False, (500, 'Internal error : Physical router ' +
":".join(router) + ' not found'))
for physical_interface in physical_router.get('physical_interfaces', []):
(ok, interface_object) = db_conn.dbe_read('physical-interface',
{'uuid':physical_interface['uuid']})
if not ok:
return (False, (500, 'Internal error : physical interface ' +
physical_interface['uuid'] + ' not found'))
if 'display_name' in interface_object:
if interface_name == interface_object['display_name']:
return (False, (403, "Display name already used in another interface :" +
physical_interface['uuid']))
for logical_interface in interface_object.get('logical_interfaces', []):
(ok, li_object) = db_conn.dbe_read('logical-interface',
{'uuid':logical_interface['uuid']})
if not ok:
return (False, (500, 'Internal error : logical interface ' +
logical_interface['uuid'] + ' not found'))
if 'display_name' in li_object:
if interface_name == li_object['display_name']:
return (False, (403, "Display name already used in another interface : " +
logical_interface['uuid']))
                if vlan_tag is not None:
# check vlan tags on the same physical interface
if obj_dict['parent_type'] == 'physical-interface' and \
physical_interface['uuid'] != physical_interface_uuid:
continue
if 'logical_interface_vlan_tag' in li_object:
if vlan_tag == int(li_object['logical_interface_vlan_tag']):
return (False, (403, "Vlan tag already used in " +
"another interface : " +
logical_interface['uuid']))
for logical_interface in physical_router.get('logical_interfaces', []):
(ok, li_object) = db_conn.dbe_read('logical-interface',
{'uuid':logical_interface['uuid']})
if not ok:
return (False, (500, 'Internal error : logical interface ' +
logical_interface['uuid'] + ' not found'))
if 'display_name' in li_object:
if interface_name == li_object['display_name']:
return (False, (403, "Display name already used in another interface : " +
logical_interface['uuid']))
return True, ""
# end _check_interface_name
# end class PhysicalInterfaceServer
class LoadbalancerMemberServer(LoadbalancerMemberServerGen):
@classmethod
def http_post_collection(cls, tenant_name, obj_dict, db_conn):
user_visibility = obj_dict['id_perms'].get('user_visible', True)
try:
fq_name = obj_dict['fq_name']
proj_uuid = db_conn.fq_name_to_uuid('project', fq_name[0:2])
except cfgm_common.exceptions.NoIdError:
            return (False, (500, 'No Project ID error : ' +
                            pformat(fq_name[0:2])))
ok, proj_dict = db_conn.dbe_read('project', {'uuid': proj_uuid})
if not ok:
return (False, (500, 'Internal error : ' + pformat(proj_dict)))
if not user_visibility:
return True, ""
lb_pools = proj_dict.get('loadbalancer_pools', [])
quota_count = 0
for pool in lb_pools:
ok, lb_pool_dict = db_conn.dbe_read('loadbalancer-pool',
{'uuid': pool['uuid']})
if not ok:
return (False, (500, 'Internal error : ' +
pformat(lb_pool_dict)))
quota_count += len(lb_pool_dict.get('loadbalancer_members', []))
(ok, quota_limit) = vnc_quota.QuotaHelper.check_quota_limit(
proj_dict, 'loadbalancer-member', quota_count)
if not ok:
return (False, (403, pformat(fq_name) + ' : ' + quota_limit))
return True, ""
#end class LoadbalancerMemberServer
class LoadbalancerPoolServer(LoadbalancerPoolServerGen):
@classmethod
def http_post_collection(cls, tenant_name, obj_dict, db_conn):
user_visibility = obj_dict['id_perms'].get('user_visible', True)
verify_quota_kwargs = {'db_conn': db_conn,
'fq_name': obj_dict['fq_name'],
'resource': 'loadbalancer_pools',
'obj_type': 'loadbalancer-pool',
'user_visibility': user_visibility}
return vnc_quota.QuotaHelper.verify_quota_for_resource(
**verify_quota_kwargs)
# end class LoadbalancerPoolServer
class LoadbalancerHealthmonitorServer(LoadbalancerHealthmonitorServerGen):
@classmethod
def http_post_collection(cls, tenant_name, obj_dict, db_conn):
user_visibility = obj_dict['id_perms'].get('user_visible', True)
verify_quota_kwargs = {'db_conn': db_conn,
'fq_name': obj_dict['fq_name'],
'resource': 'loadbalancer_healthmonitors',
'obj_type': 'loadbalancer-healthmonitor',
'user_visibility': user_visibility}
return vnc_quota.QuotaHelper.verify_quota_for_resource(
**verify_quota_kwargs)
# end class LoadbalancerHealthmonitorServer
class VirtualIpServer(VirtualIpServerGen):
@classmethod
def http_post_collection(cls, tenant_name, obj_dict, db_conn):
user_visibility = obj_dict['id_perms'].get('user_visible', True)
verify_quota_kwargs = {'db_conn': db_conn,
'fq_name': obj_dict['fq_name'],
'resource': 'virtual_ips',
'obj_type': 'virtual-ip',
'user_visibility': user_visibility}
return vnc_quota.QuotaHelper.verify_quota_for_resource(
**verify_quota_kwargs)
# end class VirtualIpServer
|
{
"content_hash": "b249a6a8ece4d50b2d06d8749b4ce1d8",
"timestamp": "",
"source": "github",
"line_count": 1223,
"max_line_length": 112,
"avg_line_length": 40.39084219133279,
"alnum_prop": 0.5440301226770314,
"repo_name": "cloudwatt/contrail-controller",
"id": "6c3550ea4d25ea30728fd784baee499a3ba18d08",
"size": "49718",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/config/api-server/vnc_cfg_types.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ApacheConf",
"bytes": "80579"
},
{
"name": "C",
"bytes": "44989"
},
{
"name": "C++",
"bytes": "14908777"
},
{
"name": "CSS",
"bytes": "531"
},
{
"name": "Java",
"bytes": "171966"
},
{
"name": "Lua",
"bytes": "8164"
},
{
"name": "Makefile",
"bytes": "12449"
},
{
"name": "Objective-C",
"bytes": "720"
},
{
"name": "Protocol Buffer",
"bytes": "1120"
},
{
"name": "Python",
"bytes": "3057429"
},
{
"name": "Shell",
"bytes": "54611"
},
{
"name": "Thrift",
"bytes": "40763"
}
],
"symlink_target": ""
}
|
"""
Mock unit tests for the NetApp block storage C-mode library
"""
import ddt
import mock
from cinder import exception
from cinder import test
import cinder.tests.unit.volume.drivers.netapp.dataontap.fakes as fake
from cinder.tests.unit.volume.drivers.netapp.dataontap.utils import fakes as \
    fake_utils
import cinder.tests.unit.volume.drivers.netapp.fakes as na_fakes
from cinder.volume.drivers.netapp.dataontap import block_base
from cinder.volume.drivers.netapp.dataontap import block_cmode
from cinder.volume.drivers.netapp.dataontap.client import api as netapp_api
from cinder.volume.drivers.netapp.dataontap.client import client_base
from cinder.volume.drivers.netapp.dataontap.performance import perf_cmode
from cinder.volume.drivers.netapp.dataontap.utils import data_motion
from cinder.volume.drivers.netapp.dataontap.utils import loopingcalls
from cinder.volume.drivers.netapp.dataontap.utils import utils as config_utils
from cinder.volume.drivers.netapp import utils as na_utils
@ddt.ddt
class NetAppBlockStorageCmodeLibraryTestCase(test.TestCase):
"""Test case for NetApp's C-Mode iSCSI library."""
def setUp(self):
super(NetAppBlockStorageCmodeLibraryTestCase, self).setUp()
kwargs = {
'configuration': self.get_config_cmode(),
'host': 'openstack@cdotblock',
}
self.library = block_cmode.NetAppBlockStorageCmodeLibrary(
'driver', 'protocol', **kwargs)
self.library.zapi_client = mock.Mock()
self.zapi_client = self.library.zapi_client
self.library.perf_library = mock.Mock()
self.library.ssc_library = mock.Mock()
self.library.vserver = mock.Mock()
self.fake_lun = block_base.NetAppLun(fake.LUN_HANDLE, fake.LUN_NAME,
fake.SIZE, None)
self.fake_snapshot_lun = block_base.NetAppLun(
fake.SNAPSHOT_LUN_HANDLE, fake.SNAPSHOT_NAME, fake.SIZE, None)
self.mock_object(self.library, 'lun_table')
self.library.lun_table = {
fake.LUN_NAME: self.fake_lun,
fake.SNAPSHOT_NAME: self.fake_snapshot_lun,
}
self.mock_object(block_base.NetAppBlockStorageLibrary, 'delete_volume')
def get_config_cmode(self):
config = na_fakes.create_configuration_cmode()
config.netapp_storage_protocol = 'iscsi'
config.netapp_login = 'admin'
config.netapp_password = 'pass'
config.netapp_server_hostname = '127.0.0.1'
config.netapp_transport_type = 'https'
config.netapp_server_port = '443'
config.netapp_vserver = 'openstack'
return config
@mock.patch.object(perf_cmode, 'PerformanceCmodeLibrary', mock.Mock())
@mock.patch.object(client_base.Client, 'get_ontapi_version',
mock.MagicMock(return_value=(1, 20)))
@mock.patch.object(na_utils, 'check_flags')
@mock.patch.object(block_base.NetAppBlockStorageLibrary, 'do_setup')
def test_do_setup(self, super_do_setup, mock_check_flags):
self.mock_object(client_base.Client, '_init_ssh_client')
self.mock_object(
config_utils, 'get_backend_configuration',
mock.Mock(return_value=self.get_config_cmode()))
context = mock.Mock()
self.library.do_setup(context)
super_do_setup.assert_called_once_with(context)
self.assertEqual(1, mock_check_flags.call_count)
def test_check_for_setup_error(self):
super_check_for_setup_error = self.mock_object(
block_base.NetAppBlockStorageLibrary, 'check_for_setup_error')
mock_check_api_permissions = self.mock_object(
self.library.ssc_library, 'check_api_permissions')
mock_add_looping_tasks = self.mock_object(
self.library, '_add_looping_tasks')
mock_get_pool_map = self.mock_object(
self.library, '_get_flexvol_to_pool_map',
mock.Mock(return_value={'fake_map': None}))
self.library.check_for_setup_error()
self.assertEqual(1, super_check_for_setup_error.call_count)
mock_check_api_permissions.assert_called_once_with()
self.assertEqual(1, mock_add_looping_tasks.call_count)
mock_get_pool_map.assert_called_once_with()
mock_add_looping_tasks.assert_called_once_with()
def test_check_for_setup_error_no_filtered_pools(self):
self.mock_object(block_base.NetAppBlockStorageLibrary,
'check_for_setup_error')
mock_check_api_permissions = self.mock_object(
self.library.ssc_library, 'check_api_permissions')
self.mock_object(self.library, '_add_looping_tasks')
self.mock_object(
self.library, '_get_flexvol_to_pool_map',
mock.Mock(return_value={}))
self.assertRaises(exception.NetAppDriverException,
self.library.check_for_setup_error)
mock_check_api_permissions.assert_called_once_with()
@ddt.data({'replication_enabled': True, 'failed_over': False},
{'replication_enabled': True, 'failed_over': True},
{'replication_enabled': False, 'failed_over': False})
@ddt.unpack
def test_handle_housekeeping_tasks(self, replication_enabled, failed_over):
ensure_mirrors = self.mock_object(data_motion.DataMotionMixin,
'ensure_snapmirrors')
self.mock_object(self.library.ssc_library, 'get_ssc_flexvol_names',
mock.Mock(return_value=fake_utils.SSC.keys()))
self.library.replication_enabled = replication_enabled
self.library.failed_over = failed_over
self.library._handle_housekeeping_tasks()
(self.zapi_client.remove_unused_qos_policy_groups.
assert_called_once_with())
if replication_enabled and not failed_over:
ensure_mirrors.assert_called_once_with(
self.library.configuration, self.library.backend_name,
fake_utils.SSC.keys())
else:
self.assertFalse(ensure_mirrors.called)
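    # Note on the ddt pattern used above: each dict passed to @ddt.data is
    # expanded by @ddt.unpack into keyword arguments, so the single test
    # body runs once per scenario, e.g. once with replication_enabled=True
    # and failed_over=False, once with both True, and once with both False.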
def test_find_mapped_lun_igroup(self):
igroups = [fake.IGROUP1]
self.zapi_client.get_igroup_by_initiators.return_value = igroups
lun_maps = [{'initiator-group': fake.IGROUP1_NAME,
'lun-id': '1',
'vserver': fake.VSERVER_NAME}]
self.zapi_client.get_lun_map.return_value = lun_maps
(igroup, lun_id) = self.library._find_mapped_lun_igroup(
fake.LUN_PATH, fake.FC_FORMATTED_INITIATORS)
self.assertEqual(fake.IGROUP1_NAME, igroup)
self.assertEqual('1', lun_id)
def test_find_mapped_lun_igroup_initiator_mismatch(self):
self.zapi_client.get_igroup_by_initiators.return_value = []
lun_maps = [{'initiator-group': fake.IGROUP1_NAME,
'lun-id': '1',
'vserver': fake.VSERVER_NAME}]
self.zapi_client.get_lun_map.return_value = lun_maps
(igroup, lun_id) = self.library._find_mapped_lun_igroup(
fake.LUN_PATH, fake.FC_FORMATTED_INITIATORS)
self.assertIsNone(igroup)
self.assertIsNone(lun_id)
def test_find_mapped_lun_igroup_name_mismatch(self):
igroups = [{'initiator-group-os-type': 'linux',
'initiator-group-type': 'fcp',
'initiator-group-name': 'igroup2'}]
self.zapi_client.get_igroup_by_initiators.return_value = igroups
lun_maps = [{'initiator-group': fake.IGROUP1_NAME,
'lun-id': '1',
'vserver': fake.VSERVER_NAME}]
self.zapi_client.get_lun_map.return_value = lun_maps
(igroup, lun_id) = self.library._find_mapped_lun_igroup(
fake.LUN_PATH, fake.FC_FORMATTED_INITIATORS)
self.assertIsNone(igroup)
self.assertIsNone(lun_id)
def test_find_mapped_lun_igroup_no_igroup_prefix(self):
igroups = [{'initiator-group-os-type': 'linux',
'initiator-group-type': 'fcp',
'initiator-group-name': 'igroup2'}]
self.zapi_client.get_igroup_by_initiators.return_value = igroups
lun_maps = [{'initiator-group': 'igroup2',
'lun-id': '1',
'vserver': fake.VSERVER_NAME}]
self.zapi_client.get_lun_map.return_value = lun_maps
(igroup, lun_id) = self.library._find_mapped_lun_igroup(
fake.LUN_PATH, fake.FC_FORMATTED_INITIATORS)
self.assertIsNone(igroup)
self.assertIsNone(lun_id)
def test_clone_lun_zero_block_count(self):
"""Test for when clone lun is not passed a block count."""
self.library._get_lun_attr = mock.Mock(return_value={'Volume':
'fakeLUN'})
self.library.zapi_client = mock.Mock()
self.library.zapi_client.get_lun_by_args.return_value = [
mock.Mock(spec=netapp_api.NaElement)]
lun = fake.FAKE_LUN
self.library._get_lun_by_args = mock.Mock(return_value=[lun])
self.library._add_lun_to_table = mock.Mock()
self.library._update_stale_vols = mock.Mock()
self.library._clone_lun('fakeLUN', 'newFakeLUN', 'false')
self.library.zapi_client.clone_lun.assert_called_once_with(
'fakeLUN', 'fakeLUN', 'newFakeLUN', 'false', block_count=0,
dest_block=0, src_block=0, qos_policy_group_name=None,
source_snapshot=None, is_snapshot=False)
def test_clone_lun_blocks(self):
"""Test for when clone lun is passed block information."""
block_count = 10
src_block = 10
dest_block = 30
self.library._get_lun_attr = mock.Mock(return_value={'Volume':
'fakeLUN'})
self.library.zapi_client = mock.Mock()
self.library.zapi_client.get_lun_by_args.return_value = [
mock.Mock(spec=netapp_api.NaElement)]
lun = fake.FAKE_LUN
self.library._get_lun_by_args = mock.Mock(return_value=[lun])
self.library._add_lun_to_table = mock.Mock()
self.library._update_stale_vols = mock.Mock()
self.library._clone_lun('fakeLUN', 'newFakeLUN', 'false',
block_count=block_count, src_block=src_block,
dest_block=dest_block)
self.library.zapi_client.clone_lun.assert_called_once_with(
'fakeLUN', 'fakeLUN', 'newFakeLUN', 'false',
block_count=block_count, dest_block=dest_block,
src_block=src_block, qos_policy_group_name=None,
source_snapshot=None, is_snapshot=False)
def test_clone_lun_no_space_reservation(self):
"""Test for when space_reservation is not passed."""
self.library._get_lun_attr = mock.Mock(return_value={'Volume':
'fakeLUN'})
self.library.zapi_client = mock.Mock()
self.library.lun_space_reservation = 'false'
self.library.zapi_client.get_lun_by_args.return_value = [
mock.Mock(spec=netapp_api.NaElement)]
lun = fake.FAKE_LUN
self.library._get_lun_by_args = mock.Mock(return_value=[lun])
self.library._add_lun_to_table = mock.Mock()
self.library._update_stale_vols = mock.Mock()
self.library._clone_lun('fakeLUN', 'newFakeLUN', is_snapshot=True)
self.library.zapi_client.clone_lun.assert_called_once_with(
'fakeLUN', 'fakeLUN', 'newFakeLUN', 'false', block_count=0,
dest_block=0, src_block=0, qos_policy_group_name=None,
source_snapshot=None, is_snapshot=True)
def test_get_fc_target_wwpns(self):
ports = [fake.FC_FORMATTED_TARGET_WWPNS[0],
fake.FC_FORMATTED_TARGET_WWPNS[1]]
self.zapi_client.get_fc_target_wwpns.return_value = ports
result = self.library._get_fc_target_wwpns()
self.assertSetEqual(set(ports), set(result))
@mock.patch.object(block_cmode.NetAppBlockStorageCmodeLibrary,
'_get_pool_stats', mock.Mock())
def test_vol_stats_calls_provide_ems(self):
self.library.zapi_client.provide_ems = mock.Mock()
self.library.get_volume_stats(refresh=True)
self.assertEqual(1, self.library.zapi_client.provide_ems.call_count)
def test_create_lun(self):
self.library._create_lun(
fake.VOLUME_ID, fake.LUN_ID, fake.LUN_SIZE, fake.LUN_METADATA)
self.library.zapi_client.create_lun.assert_called_once_with(
fake.VOLUME_ID, fake.LUN_ID, fake.LUN_SIZE, fake.LUN_METADATA,
None)
def test_get_preferred_target_from_list(self):
target_details_list = fake.ISCSI_TARGET_DETAILS_LIST
operational_addresses = [
target['address']
for target in target_details_list[2:]]
self.zapi_client.get_operational_lif_addresses = (
mock.Mock(return_value=operational_addresses))
result = self.library._get_preferred_target_from_list(
target_details_list)
self.assertEqual(target_details_list[2], result)
@ddt.data([], ['target_1', 'target_2'])
def test_get_pool_stats(self, replication_backends):
ssc = {
'vola': {
'pool_name': 'vola',
'thick_provisioning_support': True,
'thin_provisioning_support': False,
'netapp_thin_provisioned': 'false',
'netapp_compression': 'false',
'netapp_mirrored': 'false',
'netapp_dedup': 'true',
'netapp_aggregate': 'aggr1',
'netapp_raid_type': 'raid_dp',
'netapp_disk_type': 'SSD',
},
}
mock_get_ssc = self.mock_object(self.library.ssc_library,
'get_ssc',
mock.Mock(return_value=ssc))
mock_get_aggrs = self.mock_object(self.library.ssc_library,
'get_ssc_aggregates',
mock.Mock(return_value=['aggr1']))
self.mock_object(self.library, 'get_replication_backend_names',
mock.Mock(return_value=replication_backends))
self.library.reserved_percentage = 5
self.library.max_over_subscription_ratio = 10
self.library.perf_library.get_node_utilization_for_pool = (
mock.Mock(return_value=30.0))
mock_capacities = {
'size-total': 10737418240.0,
'size-available': 2147483648.0,
}
self.mock_object(
self.zapi_client, 'get_flexvol_capacity',
mock.Mock(return_value=mock_capacities))
aggr_capacities = {
'aggr1': {
'percent-used': 45,
'size-available': 59055800320.0,
'size-total': 107374182400.0,
},
}
mock_get_aggr_capacities = self.mock_object(
self.zapi_client, 'get_aggregate_capacities',
mock.Mock(return_value=aggr_capacities))
result = self.library._get_pool_stats(filter_function='filter',
goodness_function='goodness')
expected = [{
'pool_name': 'vola',
'QoS_support': True,
'consistencygroup_support': True,
'reserved_percentage': 5,
'max_over_subscription_ratio': 10.0,
'multiattach': True,
'total_capacity_gb': 10.0,
'free_capacity_gb': 2.0,
'provisioned_capacity_gb': 8.0,
'netapp_aggregate_used_percent': 45,
'utilization': 30.0,
'filter_function': 'filter',
'goodness_function': 'goodness',
'thick_provisioning_support': True,
'thin_provisioning_support': False,
'netapp_thin_provisioned': 'false',
'netapp_compression': 'false',
'netapp_mirrored': 'false',
'netapp_dedup': 'true',
'netapp_aggregate': 'aggr1',
'netapp_raid_type': 'raid_dp',
'netapp_disk_type': 'SSD',
'replication_enabled': False,
}]
if replication_backends:
expected[0].update({
'replication_enabled': True,
'replication_count': len(replication_backends),
'replication_targets': replication_backends,
'replication_type': 'async',
})
self.assertEqual(expected, result)
mock_get_ssc.assert_called_once_with()
mock_get_aggrs.assert_called_once_with()
mock_get_aggr_capacities.assert_called_once_with(['aggr1'])
@ddt.data({}, None)
def test_get_pool_stats_no_ssc_vols(self, ssc):
mock_get_ssc = self.mock_object(self.library.ssc_library,
'get_ssc',
mock.Mock(return_value=ssc))
pools = self.library._get_pool_stats()
self.assertListEqual([], pools)
mock_get_ssc.assert_called_once_with()
@ddt.data('open+|demix+', 'open.+', '.+\d', '^((?!mix+).)*$',
'open123, open321')
def test_get_pool_map_match_selected_pools(self, patterns):
self.library.configuration.netapp_pool_name_search_pattern = patterns
mock_list_flexvols = self.mock_object(
self.zapi_client, 'list_flexvols',
mock.Mock(return_value=fake.FAKE_CMODE_VOLUMES))
result = self.library._get_flexvol_to_pool_map()
expected = {
'open123': {
'pool_name': 'open123',
},
'open321': {
'pool_name': 'open321',
},
}
self.assertEqual(expected, result)
mock_list_flexvols.assert_called_once_with()
@ddt.data('', 'mix.+|open.+', '.+', 'open123, mixed, open321',
'.*?')
def test_get_pool_map_match_all_pools(self, patterns):
self.library.configuration.netapp_pool_name_search_pattern = patterns
mock_list_flexvols = self.mock_object(
self.zapi_client, 'list_flexvols',
mock.Mock(return_value=fake.FAKE_CMODE_VOLUMES))
result = self.library._get_flexvol_to_pool_map()
self.assertEqual(fake.FAKE_CMODE_POOL_MAP, result)
mock_list_flexvols.assert_called_once_with()
def test_get_pool_map_invalid_conf(self):
"""Verify an exception is raised if the regex pattern is invalid"""
self.library.configuration.netapp_pool_name_search_pattern = '(.+'
self.assertRaises(exception.InvalidConfigurationValue,
self.library._get_flexvol_to_pool_map)
@ddt.data('abc|stackopen|openstack|abc*', 'abc', 'stackopen', 'openstack',
'abc*', '^$')
def test_get_pool_map_non_matching_patterns(self, patterns):
self.library.configuration.netapp_pool_name_search_pattern = patterns
mock_list_flexvols = self.mock_object(
self.zapi_client, 'list_flexvols',
mock.Mock(return_value=fake.FAKE_CMODE_VOLUMES))
result = self.library._get_flexvol_to_pool_map()
self.assertEqual({}, result)
mock_list_flexvols.assert_called_once_with()
def test_update_ssc(self):
mock_get_pool_map = self.mock_object(
self.library, '_get_flexvol_to_pool_map',
mock.Mock(return_value=fake.FAKE_CMODE_VOLUMES))
result = self.library._update_ssc()
self.assertIsNone(result)
mock_get_pool_map.assert_called_once_with()
self.library.ssc_library.update_ssc.assert_called_once_with(
fake.FAKE_CMODE_VOLUMES)
def test_delete_volume(self):
self.mock_object(na_utils, 'get_valid_qos_policy_group_info',
mock.Mock(
return_value=fake.QOS_POLICY_GROUP_INFO))
self.mock_object(self.library, '_mark_qos_policy_group_for_deletion')
self.library.delete_volume(fake.VOLUME)
(block_base.NetAppBlockStorageLibrary.delete_volume.
assert_called_once_with(fake.VOLUME))
na_utils.get_valid_qos_policy_group_info.assert_called_once_with(
fake.VOLUME)
(self.library._mark_qos_policy_group_for_deletion.
assert_called_once_with(fake.QOS_POLICY_GROUP_INFO))
def test_delete_volume_get_valid_qos_policy_group_info_exception(self):
self.mock_object(na_utils, 'get_valid_qos_policy_group_info',
mock.Mock(side_effect=exception.Invalid))
self.mock_object(self.library, '_mark_qos_policy_group_for_deletion')
self.library.delete_volume(fake.VOLUME)
(block_base.NetAppBlockStorageLibrary.delete_volume.
assert_called_once_with(fake.VOLUME))
(self.library._mark_qos_policy_group_for_deletion.
assert_called_once_with(None))
def test_setup_qos_for_volume(self):
self.mock_object(na_utils, 'get_valid_qos_policy_group_info',
mock.Mock(
return_value=fake.QOS_POLICY_GROUP_INFO))
self.mock_object(self.zapi_client, 'provision_qos_policy_group')
result = self.library._setup_qos_for_volume(fake.VOLUME,
fake.EXTRA_SPECS)
self.assertEqual(fake.QOS_POLICY_GROUP_INFO, result)
self.zapi_client.provision_qos_policy_group.\
assert_called_once_with(fake.QOS_POLICY_GROUP_INFO)
def test_setup_qos_for_volume_exception_path(self):
self.mock_object(na_utils, 'get_valid_qos_policy_group_info',
mock.Mock(
side_effect=exception.Invalid))
self.mock_object(self.zapi_client, 'provision_qos_policy_group')
self.assertRaises(exception.VolumeBackendAPIException,
self.library._setup_qos_for_volume, fake.VOLUME,
fake.EXTRA_SPECS)
self.assertEqual(0,
self.zapi_client.
provision_qos_policy_group.call_count)
def test_mark_qos_policy_group_for_deletion(self):
self.mock_object(self.zapi_client,
'mark_qos_policy_group_for_deletion')
self.library._mark_qos_policy_group_for_deletion(
fake.QOS_POLICY_GROUP_INFO)
self.zapi_client.mark_qos_policy_group_for_deletion\
.assert_called_once_with(fake.QOS_POLICY_GROUP_INFO)
def test_unmanage(self):
self.mock_object(na_utils, 'get_valid_qos_policy_group_info',
mock.Mock(return_value=fake.QOS_POLICY_GROUP_INFO))
self.mock_object(self.library, '_mark_qos_policy_group_for_deletion')
self.mock_object(block_base.NetAppBlockStorageLibrary, 'unmanage')
self.library.unmanage(fake.VOLUME)
na_utils.get_valid_qos_policy_group_info.assert_called_once_with(
fake.VOLUME)
self.library._mark_qos_policy_group_for_deletion\
.assert_called_once_with(fake.QOS_POLICY_GROUP_INFO)
block_base.NetAppBlockStorageLibrary.unmanage.assert_called_once_with(
fake.VOLUME)
def test_unmanage_w_invalid_qos_policy(self):
self.mock_object(na_utils, 'get_valid_qos_policy_group_info',
mock.Mock(side_effect=exception.Invalid))
self.mock_object(self.library, '_mark_qos_policy_group_for_deletion')
self.mock_object(block_base.NetAppBlockStorageLibrary, 'unmanage')
self.library.unmanage(fake.VOLUME)
na_utils.get_valid_qos_policy_group_info.assert_called_once_with(
fake.VOLUME)
self.library._mark_qos_policy_group_for_deletion\
.assert_called_once_with(None)
block_base.NetAppBlockStorageLibrary.unmanage.assert_called_once_with(
fake.VOLUME)
def test_manage_existing_lun_same_name(self):
mock_lun = block_base.NetAppLun('handle', 'name', '1',
{'Path': '/vol/FAKE_CMODE_VOL1/name'})
self.library._get_existing_vol_with_manage_ref = mock.Mock(
return_value=mock_lun)
self.mock_object(na_utils, 'get_volume_extra_specs')
self.mock_object(na_utils, 'log_extra_spec_warnings')
self.library._check_volume_type_for_lun = mock.Mock()
self.library._setup_qos_for_volume = mock.Mock()
self.mock_object(na_utils, 'get_qos_policy_group_name_from_info',
mock.Mock(return_value=fake.QOS_POLICY_GROUP_NAME))
self.library._add_lun_to_table = mock.Mock()
self.zapi_client.move_lun = mock.Mock()
mock_set_lun_qos_policy_group = self.mock_object(
self.zapi_client, 'set_lun_qos_policy_group')
self.library.manage_existing({'name': 'name'}, {'ref': 'ref'})
self.library._get_existing_vol_with_manage_ref.assert_called_once_with(
{'ref': 'ref'})
self.assertEqual(1, self.library._check_volume_type_for_lun.call_count)
self.assertEqual(1, self.library._add_lun_to_table.call_count)
self.assertEqual(0, self.zapi_client.move_lun.call_count)
self.assertEqual(1, mock_set_lun_qos_policy_group.call_count)
def test_manage_existing_lun_new_path(self):
mock_lun = block_base.NetAppLun(
'handle', 'name', '1', {'Path': '/vol/FAKE_CMODE_VOL1/name'})
self.library._get_existing_vol_with_manage_ref = mock.Mock(
return_value=mock_lun)
self.mock_object(na_utils, 'get_volume_extra_specs')
self.mock_object(na_utils, 'log_extra_spec_warnings')
self.library._check_volume_type_for_lun = mock.Mock()
self.library._add_lun_to_table = mock.Mock()
self.zapi_client.move_lun = mock.Mock()
self.library.manage_existing({'name': 'volume'}, {'ref': 'ref'})
self.assertEqual(
2, self.library._get_existing_vol_with_manage_ref.call_count)
self.assertEqual(1, self.library._check_volume_type_for_lun.call_count)
self.assertEqual(1, self.library._add_lun_to_table.call_count)
self.zapi_client.move_lun.assert_called_once_with(
'/vol/FAKE_CMODE_VOL1/name', '/vol/FAKE_CMODE_VOL1/volume')
@ddt.data({'secondary_id': 'dev0', 'configured_targets': ['dev1']},
{'secondary_id': 'dev3', 'configured_targets': ['dev1', 'dev2']},
{'secondary_id': 'dev1', 'configured_targets': []},
{'secondary_id': None, 'configured_targets': []})
@ddt.unpack
def test_failover_host_invalid_replication_target(self, secondary_id,
configured_targets):
"""This tests executes a method in the DataMotionMixin."""
self.library.backend_name = 'dev0'
self.mock_object(data_motion.DataMotionMixin,
'get_replication_backend_names',
mock.Mock(return_value=configured_targets))
complete_failover_call = self.mock_object(
data_motion.DataMotionMixin, '_complete_failover')
self.assertRaises(exception.InvalidReplicationTarget,
self.library.failover_host, 'fake_context', [],
secondary_id=secondary_id)
self.assertFalse(complete_failover_call.called)
def test_failover_host_unable_to_failover(self):
"""This tests executes a method in the DataMotionMixin."""
self.library.backend_name = 'dev0'
self.mock_object(
data_motion.DataMotionMixin, '_complete_failover',
mock.Mock(side_effect=exception.NetAppDriverException))
self.mock_object(data_motion.DataMotionMixin,
'get_replication_backend_names',
mock.Mock(return_value=['dev1', 'dev2']))
self.mock_object(self.library.ssc_library, 'get_ssc_flexvol_names',
mock.Mock(return_value=fake_utils.SSC.keys()))
self.mock_object(self.library, '_update_zapi_client')
self.assertRaises(exception.UnableToFailOver,
self.library.failover_host, 'fake_context', [],
secondary_id='dev1')
data_motion.DataMotionMixin._complete_failover.assert_called_once_with(
'dev0', ['dev1', 'dev2'], fake_utils.SSC.keys(), [],
failover_target='dev1')
self.assertFalse(self.library._update_zapi_client.called)
def test_failover_host(self):
"""This tests executes a method in the DataMotionMixin."""
self.library.backend_name = 'dev0'
self.mock_object(data_motion.DataMotionMixin, '_complete_failover',
mock.Mock(return_value=('dev1', [])))
self.mock_object(data_motion.DataMotionMixin,
'get_replication_backend_names',
mock.Mock(return_value=['dev1', 'dev2']))
self.mock_object(self.library.ssc_library, 'get_ssc_flexvol_names',
mock.Mock(return_value=fake_utils.SSC.keys()))
self.mock_object(self.library, '_update_zapi_client')
actual_active, vol_updates = self.library.failover_host(
'fake_context', [], secondary_id='dev1')
data_motion.DataMotionMixin._complete_failover.assert_called_once_with(
'dev0', ['dev1', 'dev2'], fake_utils.SSC.keys(), [],
failover_target='dev1')
self.library._update_zapi_client.assert_called_once_with('dev1')
self.assertTrue(self.library.failed_over)
self.assertEqual('dev1', self.library.failed_over_backend_name)
self.assertEqual('dev1', actual_active)
self.assertEqual([], vol_updates)
def test_add_looping_tasks(self):
mock_update_ssc = self.mock_object(self.library, '_update_ssc')
mock_remove_unused_qos_policy_groups = self.mock_object(
self.zapi_client, 'remove_unused_qos_policy_groups')
mock_add_task = self.mock_object(self.library.loopingcalls, 'add_task')
mock_super_add_looping_tasks = self.mock_object(
block_base.NetAppBlockStorageLibrary, '_add_looping_tasks')
self.library._add_looping_tasks()
mock_update_ssc.assert_called_once_with()
mock_add_task.assert_has_calls([
mock.call(mock_update_ssc,
loopingcalls.ONE_HOUR,
loopingcalls.ONE_HOUR),
mock.call(mock_remove_unused_qos_policy_groups,
loopingcalls.ONE_MINUTE,
loopingcalls.ONE_MINUTE)])
mock_super_add_looping_tasks.assert_called_once_with()
def test_get_backing_flexvol_names(self):
mock_ssc_library = self.mock_object(
self.library.ssc_library, 'get_ssc')
self.library._get_backing_flexvol_names()
mock_ssc_library.assert_called_once_with()
|
{
"content_hash": "4790599aee8379f70bbaab109ec0a7a5",
"timestamp": "",
"source": "github",
"line_count": 713,
"max_line_length": 79,
"avg_line_length": 43.86115007012623,
"alnum_prop": 0.6015412656284974,
"repo_name": "cloudbase/cinder",
"id": "74dcbc65b658c9360f482e51006fbd69e60ba8e6",
"size": "32071",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "cinder/tests/unit/volume/drivers/netapp/dataontap/test_block_cmode.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "17586629"
},
{
"name": "Shell",
"bytes": "8187"
}
],
"symlink_target": ""
}
|
"""
This example demonstrates jitclasses and deferred types for writing a
singly linked list.
"""
from __future__ import print_function, absolute_import
from collections import OrderedDict
import numpy as np
from numba import njit
from numba import jitclass
from numba import int32, deferred_type, optional
from numba.runtime import rtsys
node_type = deferred_type()
spec = OrderedDict()
spec['data'] = int32
spec['next'] = optional(node_type)
@jitclass(spec)
class LinkedNode(object):
def __init__(self, data, next):
self.data = data
self.next = next
def prepend(self, data):
return LinkedNode(data, self)
@njit
def make_linked_node(data):
return LinkedNode(data, None)
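# Resolve the deferred type now that LinkedNode is defined, so the optional 'next'
# field declared in the spec can hold references to other LinkedNode instances.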
node_type.define(LinkedNode.class_type.instance_type)
@njit
def fill_array(arr):
"""
    Fills the array with n - 1, n - 2, ... down to 0.
    First we populate a linked list with the values 0 ... n - 1, prepending so that
    the head holds the largest value. Then we traverse the linked list from the head
    and write each value into the array in order.
"""
head = make_linked_node(0)
for i in range(1, arr.size):
head = head.prepend(i)
c = 0
while head is not None:
arr[c] = head.data
head = head.next
c += 1
def runme():
arr = np.zeros(10, dtype=np.int32)
fill_array(arr)
print("== Result ==")
print(arr)
# Check answer
np.testing.assert_equal(arr, np.arange(arr.size, dtype=arr.dtype)[::-1])
if __name__ == '__main__':
runme()
print("== Print memory allocation information == ")
print(rtsys.get_allocation_stats())
|
{
"content_hash": "68aac25437ba7b91ab8bc00658f9806b",
"timestamp": "",
"source": "github",
"line_count": 69,
"max_line_length": 76,
"avg_line_length": 22.855072463768117,
"alnum_prop": 0.6518706404565631,
"repo_name": "jriehl/numba",
"id": "68ec017a5aa05cd5a2db11b22873f884052dae80",
"size": "1624",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/linkedlist.py",
"mode": "33261",
"license": "bsd-2-clause",
"language": [
{
"name": "Batchfile",
"bytes": "7023"
},
{
"name": "C",
"bytes": "657637"
},
{
"name": "C++",
"bytes": "49158"
},
{
"name": "Cuda",
"bytes": "214"
},
{
"name": "GDB",
"bytes": "101"
},
{
"name": "HTML",
"bytes": "3464"
},
{
"name": "Jupyter Notebook",
"bytes": "110326"
},
{
"name": "Python",
"bytes": "6611899"
},
{
"name": "Shell",
"bytes": "7290"
}
],
"symlink_target": ""
}
|
import json
from colors import black, blue, cyan, green, magenta, red, white
from packaging.version import Version
from pants.option.options_fingerprinter import CoercingOptionEncoder
from pants.option.ranked_value import Rank
from pants.option.scope import GLOBAL_SCOPE
from pants.task.console_task import ConsoleTask
from pants.version import PANTS_SEMVER
class ExplainOptionsTask(ConsoleTask):
"""Display meta-information about options.
This "meta-information" includes what values options have, and what values they *used* to have
before they were overridden by a higher-rank value (eg, a HARDCODED value overridden by a CONFIG
value and then a cli FLAG value).
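    For example, text-format output looks roughly like the following (the scope, values
    and details shown are purely illustrative):
      compile.jobs = 4 (from CONFIG in pants.ini)
        overrode 2 (from HARDCODED)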
"""
_register_console_transitivity_option = False
@classmethod
def register_options(cls, register):
super().register_options(register)
register("--scope", help="Only show options in this scope. Use GLOBAL for global scope.")
register("--name", help="Only show options with this name.")
register(
"--rank", type=Rank, help="Only show options with at least this importance.",
)
register(
"--show-history",
type=bool,
help="Show the previous values options had before being overridden.",
)
register("--only-overridden", type=bool, help="Only show values that overrode defaults.")
register(
"--skip-inherited",
type=bool,
default=True,
help="Do not show inherited options, unless their values differ from their parents.",
)
register(
"--output-format",
choices=["text", "json"],
default="text",
help="Specify the format options will be printed.",
)
def _scope_filter(self, scope):
pattern = self.get_options().scope
return (
not pattern
or scope.startswith(pattern)
or (pattern == "GLOBAL" and scope == GLOBAL_SCOPE)
)
def _option_filter(self, option):
pattern = self.get_options().name
if not pattern:
return True
pattern = pattern.replace("-", "_")
return option == pattern
def _rank_filter(self, rank):
minimum_rank = self.get_options().rank
if not minimum_rank:
return True
return rank >= minimum_rank
def _rank_color(self, rank):
if not self.get_options().colors:
return lambda x: x
if rank == Rank.NONE:
return white
if rank == Rank.HARDCODED:
return white
if rank == Rank.ENVIRONMENT:
return red
if rank == Rank.CONFIG:
return blue
if rank == Rank.FLAG:
return magenta
return black
def _format_scope(self, scope, option, no_color=False):
if no_color:
return "{scope}{option}".format(
scope="{}.".format(scope) if scope else "", option=option,
)
scope_color = cyan if self.get_options().colors else lambda x: x
option_color = blue if self.get_options().colors else lambda x: x
return "{scope}{option}".format(
scope=scope_color("{}.".format(scope) if scope else ""), option=option_color(option),
)
def _format_record(self, record):
simple_rank = record.rank.name
if self.is_json():
return record.value, simple_rank
elif self.is_text():
simple_value = str(record.value)
value_color = green if self.get_options().colors else lambda x: x
formatted_value = value_color(simple_value)
rank_color = self._rank_color(record.rank)
formatted_rank = "(from {rank}{details})".format(
rank=simple_rank,
details=rank_color(" {}".format(record.details)) if record.details else "",
)
return "{value} {rank}".format(value=formatted_value, rank=formatted_rank,)
def _show_history(self, history):
for record in reversed(list(history)[:-1]):
if record.rank > Rank.NONE:
yield " overrode {}".format(self._format_record(record))
def _force_option_parsing(self):
scopes = filter(self._scope_filter, list(self.context.options.known_scope_to_info.keys()))
for scope in scopes:
self.context.options.for_scope(scope)
def _get_parent_scope_option(self, scope, name):
if not scope:
return None, None
parent_scope = ""
if "." in scope:
parent_scope, _ = scope.rsplit(".", 1)
options = self.context.options.for_scope(parent_scope)
try:
return parent_scope, options[name]
except AttributeError:
return None, None
def is_json(self):
return self.get_options().output_format == "json"
def is_text(self):
return self.get_options().output_format == "text"
def console_output(self, targets):
self._force_option_parsing()
if self.is_json():
output_map = {}
for scope, options in sorted(self.context.options.tracker.option_history_by_scope.items()):
if not self._scope_filter(scope):
continue
for option, history in sorted(options.items()):
if not self._option_filter(option):
continue
if not self._rank_filter(history.latest.rank):
continue
if self.get_options().only_overridden and not history.was_overridden:
continue
# Skip the option if it has already passed the deprecation period.
if history.latest.deprecation_version and PANTS_SEMVER >= Version(
history.latest.deprecation_version
):
continue
if self.get_options().skip_inherited:
parent_scope, parent_value = self._get_parent_scope_option(scope, option)
if parent_scope is not None and parent_value == history.latest.value:
continue
if self.is_json():
value, rank_name = self._format_record(history.latest)
scope_key = self._format_scope(scope, option, True)
# We rely on the fact that option values are restricted to a set of types compatible with
# json. In particular, we expect dict, list, str, bool, int and float, and so do no
# processing here.
# TODO(John Sirois): The option parsing system currently lets options of unexpected types
# slide by, which can lead to un-overridable values and which would also blow up below in
# json encoding, fix options to restrict the allowed `type`s:
# https://github.com/pantsbuild/pants/issues/4695
inner_map = dict(value=value, source=rank_name)
output_map[scope_key] = inner_map
elif self.is_text():
yield "{} = {}".format(
self._format_scope(scope, option), self._format_record(history.latest)
)
if self.get_options().show_history:
history_list = []
for line in self._show_history(history):
if self.is_text():
yield line
elif self.is_json():
history_list.append(line.strip())
if self.is_json():
inner_map["history"] = history_list
if self.is_json():
yield json.dumps(output_map, indent=2, sort_keys=True, cls=CoercingOptionEncoder)
|
{
"content_hash": "6ca9900e8e31899ee1f365e42f0074be",
"timestamp": "",
"source": "github",
"line_count": 189,
"max_line_length": 109,
"avg_line_length": 41.74603174603175,
"alnum_prop": 0.5670468948035488,
"repo_name": "tdyas/pants",
"id": "382b6c39b0edf04825f7f20ba16ca74a92b4a709",
"size": "8022",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/python/pants/core_tasks/explain_options_task.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "655"
},
{
"name": "C++",
"bytes": "2010"
},
{
"name": "CSS",
"bytes": "9444"
},
{
"name": "Dockerfile",
"bytes": "5596"
},
{
"name": "GAP",
"bytes": "1283"
},
{
"name": "Gherkin",
"bytes": "919"
},
{
"name": "Go",
"bytes": "2765"
},
{
"name": "HTML",
"bytes": "44381"
},
{
"name": "Java",
"bytes": "518180"
},
{
"name": "JavaScript",
"bytes": "22906"
},
{
"name": "Python",
"bytes": "7955590"
},
{
"name": "Rust",
"bytes": "1031208"
},
{
"name": "Scala",
"bytes": "106520"
},
{
"name": "Shell",
"bytes": "109904"
},
{
"name": "Starlark",
"bytes": "502255"
},
{
"name": "Thrift",
"bytes": "2953"
}
],
"symlink_target": ""
}
|
"""@file evaluate.py
Module containing functions for the evaluation of the constant and variable shear-type branches
of the GREAT3 Challenge.
For the functions used to directly evaluate the GREAT3 metrics, see q_constant() and q_variable().
This code requires the GREAT3 metric evaluation truth data to have been unpacked into the directory
specified by the path in TRUTH_DIR, set below. Please update TRUTH_DIR to match the location of the
truth data on your system.
For information about getting this data, please see the following page on the great3-public wiki:
https://github.com/barnabytprowe/great3-public/wiki/Metric-evaluation-scripts-and-truth-data
Constant shear branches
-----------------------
Each submission file (one per branch) should take the format of a 3-column ASCII catalog, e.g.:
# SUBFIELD_INDEX G1 G2
0 -.26664 0.11230
1 -.13004 0.48103
...
or similar. The hashed header/comment can be omitted, and almost all formats for the numbers are
fine. The main criterion to be satisfied is that after
>>> data = np.loadtxt(submission_file)
the `data` object must be a NumPy array with shape `(NSUBFIELDS, 3)`, where `NSUBFIELDS` is the
total number of subfields in the branch (currently fixed at 200).
In addition, the array slice `data[:, 0]` must be the subfield number in ascending order from `0` to
`NSUBFIELDS - 1`. The array slices `data[:, 1]` and `data[:, 2]` must be the corresponding
estimates of mean shear g1 and g2 in each subfield.
In these details the submission should match the output of the helper script `presubmission.py`
available at https://github.com/barnabytprowe/great3-public .
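As a quick pre-submission sanity check, the following sketch (where `submission_file` is a
hypothetical path to your candidate catalog) verifies the required shape and ordering:
>>> data = np.loadtxt(submission_file)
>>> assert data.shape == (200, 3) # i.e. (NSUBFIELDS, 3)
>>> assert (data[:, 0] == np.arange(200)).all() # subfield indices in ascending order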
Variable shear branches
-----------------------
Each submission file (one per branch) should take the format of an ASCII catalog with a minimum of
3 columns as in the following example:
# FIELD_INDEX THETA [degrees] MAP_E
0 0.0246 2.5650e-06
0 0.0372 4.1300e-06
...
The `FIELD_INDEX` will be an integer between 0 and 9. `THETA` should be a sequence of floats giving
the annular bin centres in degrees; these are logarithmically spaced between the minimum separation
considered (0.01 degrees) and the maximum (10.0 degrees). `MAP_E` is the E-mode aperture mass
dispersion. `FIELD`, `THETA` (and thus the corresponding `MAP_E` entries) must be ordered as in
the output of `presubmission.py`.
The hashed header/comment can be omitted. Additional columns can be present provided that the
location and order of the three described above are preserved. An example of this is the output
of `presubmission.py` for variable shear branches, which also appends columns for the B-mode
aperture mass dispersion and a (shot noise only) error estimate.
After
>>> data = np.loadtxt(submission_file)
the `data` object must be a NumPy array with shape `(NFIELDS * NBINS_THETA, n)`, where `NFIELDS` is
the total number of fields in the branch (currently fixed at 10), `NBINS_THETA` is the number of
annular bins in angle used to estimate Map_E in each field (currently fixed at 15), and `n >= 3`.
As mentioned, in these details the submission should match the output of the helper script
`presubmission.py` available at https://github.com/barnabytprowe/great3-public .
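A corresponding sanity check for a variable shear submission (again a sketch, with
`submission_file` a hypothetical path) is:
>>> data = np.loadtxt(submission_file)
>>> assert data.shape[0] == 10 * 15 and data.shape[1] >= 3 # (NFIELDS * NBINS_THETA, n >= 3)
>>> assert set(data[:, 0].astype(int)) <= set(range(10)) # FIELD_INDEX values between 0 and 9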
"""
import os
import sys
import logging
import numpy as np
try:
import great3sims
import great3sims.mapper
except ImportError:
path, module = os.path.split(__file__)
sys.path.append(os.path.join(path, "..")) # Appends the folder great3-public/ to sys.path
import great3sims
import great3sims.mapper
try:
import g3metrics
except ImportError:
path, module = os.path.split(__file__)
sys.path.append(os.path.join(path, "..", "metrics")) # Appends the great3-private/metrics
# folder to path
import g3metrics
TRUTH_DIR = "/great3/beta/truth" # Root folder in which the truth values are unpacked
NFIELDS = 10 # Total number of fields
NSUBFIELDS = 200 # Total number of subfields, not necessarily equal to the number of subfields made
# in mass_produce as that script also generates the deep fields
NSUBFIELDS_PER_FIELD = NSUBFIELDS / NFIELDS
NGALS_PER_SUBFIELD = 10000 # 100x100 galaxies per subfield
CFID = 2.e-4
MFID = 2.e-3
XMAX_GRID_DEG = 10.0 # Maximum image spatial extent in degrees
DX_GRID_DEG = 0.1 # Grid spacing in degrees
THETA_MIN_DEG = 0.02 # Minimum and maximum angular scales for logarithmic bins used to calculate the
THETA_MAX_DEG = 10.0 # aperture mass disp. - MUST match specs given to participants - in degrees
NBINS_THETA = 15 # Number of logarithmic bins theta for the aperture mass dispersion
EXPECTED_THETA = np.array([ # Array of theta values expected in submissions, good to 3 d.p.
0.0246, 0.0372, 0.0563, 0.0853, 0.1290, 0.1953, 0.2955, 0.4472, 0.6768, 1.0242, 1.5499,
2.3455, 3.5495, 5.3716, 8.1289] * NFIELDS)
USEBINS = np.array([ # Which of the theta bins above to actually use in calculating the metric?
False, False, False, True, True, True, True, True, True, True, True,
True, True, False, False] * NFIELDS) # Note the *NFIELDS to match per-field theta layout
STORAGE_DIR = "./metric_calculation_products" # Folder into which to store useful intermediate
# outputs of metric calculations (e.g. rotation files,
# dicts, mapE tables) which need be calculated only
# once
SUBFIELD_DICT_FILE_PREFIX = "subfield_dict_"
GTRUTH_FILE_PREFIX = "gtruth_"
ROTATIONS_FILE_PREFIX = "rotations_"
OFFSETS_FILE_PREFIX = "offsets_"
MAPESHEAR_FILE_PREFIX = "mapEshear_"
MAPEINT_FILE_PREFIX = "mapEint_"
MAPEOBS_FILE_PREFIX = "mapEobs_"
# These constant normalization factors come from a run of ~1000 sims done on 6 Jan 2014, modified on
# 30 Jan 2014 to bring space and ground into agreement at high bias
NORMALIZATION_CONSTANT_SPACE = 1.232
NORMALIZATION_CONSTANT_GROUND = NORMALIZATION_CONSTANT_SPACE
NORMALIZATION_VARIABLE_SPACE = 0.0001837 # Factor comes from tests with
# tabulate_variable_shear_metric_rev1.py on 1000 runs and
# NOISE_SIGMA = 0.10, 6 Jan 2015, with sigma2_min = 4.e-8
NORMALIZATION_VARIABLE_GROUND = NORMALIZATION_VARIABLE_SPACE # Bring space=ground at high bias
# Values of sigma2_min to adopt as the defaults for the Q_c and Q_v metrics, as of 30 Dec 2013.
# These parameters add a damping term to the denominator of the Q_c and Q_v metrics.
SIGMA2_MIN_CONSTANT_GROUND = 4. # 2**2
SIGMA2_MIN_CONSTANT_SPACE = 1. # 1**2
SIGMA2_MIN_VARIABLE_GROUND = 9.e-8 # [2 * 1.e-3]**2
SIGMA2_MIN_VARIABLE_SPACE = 4.e-8 # [3 * 1.e-3]**2
def get_generate_const_truth(experiment, obs_type, truth_dir=TRUTH_DIR, storage_dir=STORAGE_DIR,
logger=None):
"""Get or generate arrays of subfield_index, g1true, g2true, each of length `NSUBFIELDS`.
If the gtruth file has already been built for this constant shear branch, loads and returns the
saved copies.
If the array of truth values has not been built, or is older than the first entry in the set of
shear_params files, the arrays are built first, saved to file, then returned.
@param experiment Experiment for this branch, one of 'control', 'real_galaxy',
'variable_psf', 'multiepoch', 'full'
@param obs_type Observation type for this branch, one of 'ground' or 'space'
@param storage_dir Directory from/into which to load/store rotation files
@param truth_dir Root directory in which the truth information for the challenge is stored
@param logger Python logging.Logger instance, for message logging
@return subfield_index, g1true, g2true
"""
gtruefile = os.path.join(storage_dir, GTRUTH_FILE_PREFIX+experiment[0]+obs_type[0]+"c.asc")
mapper = great3sims.mapper.Mapper(truth_dir, experiment, obs_type, "constant")
use_stored = True
if not os.path.isfile(gtruefile):
use_stored = False
if logger is not None:
logger.info(
"First build of shear truth tables using values from "+
os.path.join(mapper.full_dir, "shear_params-*.yaml"))
else:
# Compare timestamps for the gtruefile and the first shear_params file
# (subfield = 000) for this branch. If the former is older than the latter, or this file,
# force rebuild...
gtruemtime = os.path.getmtime(gtruefile)
shear_params_file = os.path.join(mapper.full_dir, "shear_params-000.yaml")
shear_params_mtime = os.path.getmtime(shear_params_file)
if gtruemtime < shear_params_mtime or gtruemtime < os.path.getmtime(__file__):
use_stored = False
if logger is not None:
logger.info(
"Updating out-of-date shear truth tables using newer values from "+
os.path.join(mapper.full_dir, "shear_params-*.yaml"))
# Then load or build (and save) the array of truth values per subfield
if use_stored:
if logger is not None:
logger.info("Loading shear truth tables from "+gtruefile)
gtruedata = np.loadtxt(gtruefile)
else:
params_prefix = os.path.join(mapper.full_dir, "shear_params-")
import yaml
# Check to see if this is a variable_psf or full branch, in which case we only need the
# first entry from each set of subfields
if experiment in ("variable_psf", "full"):
gtruedata = np.empty((NFIELDS, 3))
gtruedata[:, 0] = np.arange(NFIELDS)
subfield_index_targets = range(0, NSUBFIELDS, NSUBFIELDS_PER_FIELD)
else:
gtruedata = np.empty((NSUBFIELDS, 3))
gtruedata[:, 0] = np.arange(NSUBFIELDS)
subfield_index_targets = range(NSUBFIELDS)
# Then loop over the required subfields reading in the shears
for i, subfield_index in enumerate(subfield_index_targets):
params_file = params_prefix+("%03d" % subfield_index)+".yaml"
with open(params_file, "rb") as funit:
gdict = yaml.load(funit)
gtruedata[i, 1] = gdict["g1"]
gtruedata[i, 2] = gdict["g2"]
if logger is not None:
logger.info("Saving shear truth table to "+gtruefile)
if not os.path.isdir(storage_dir):
os.mkdir(storage_dir)
with open(gtruefile, "wb") as fout:
fout.write("# True shears for "+experiment+"-"+obs_type+"-constant\n")
fout.write("# subfield_index g1true g2true\n")
np.savetxt(fout, gtruedata, fmt=" %4d %+.18e %+.18e")
return (gtruedata[:, 0]).astype(int), gtruedata[:, 1], gtruedata[:, 2]
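# Example usage of the get-or-generate pattern above (a sketch; assumes the truth data for
# the control-ground branch have been unpacked under TRUTH_DIR):
#
#     subfield_index, g1true, g2true = get_generate_const_truth("control", "ground")
#     assert len(subfield_index) == NSUBFIELDS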
def get_generate_const_rotations(experiment, obs_type, storage_dir=STORAGE_DIR, truth_dir=TRUTH_DIR,
logger=None):
"""Get or generate an array of rotation angles for Q_const calculation.
If the rotation file has already been built for this constant shear branch, loads and returns an
array of rotation angles to align with the PSF. This array is of shape `(NSUBFIELDS,)`, having
averaged over the `n_epochs` epochs in the case of multi-epoch branches.
If the rotation file has not been built, or is older than the first entry in the set of
starshape_parameters files, the array of rotations is built, saved to file, then returned.
@param experiment Experiment for this branch, one of 'control', 'real_galaxy',
'variable_psf', 'multiepoch', 'full'
@param obs_type Observation type for this branch, one of 'ground' or 'space'
@param storage_dir Directory from/into which to load/store rotation files
@param truth_dir Root directory in which the truth information for the challenge is stored
@param logger Python logging.Logger instance, for message logging
@return rotations An array containing all the rotation angles, in radians
"""
import great3sims
rotfile = os.path.join(storage_dir, ROTATIONS_FILE_PREFIX+experiment[0]+obs_type[0]+"c.asc")
mapper = great3sims.mapper.Mapper(truth_dir, experiment, obs_type, "constant")
use_stored = True
if not os.path.isfile(rotfile):
use_stored = False
if logger is not None:
logger.info(
"First build of rotations file using starshape_parameters from "+
mapper.full_dir)
else:
# Then compare timestamps for the rotation file and the first starshape_parameters file
# (subfield = 000, epoch =0) for this branch. If the former is older than the latter, or
# this file, force rebuild...
rotmtime = os.path.getmtime(rotfile)
        starshape_file_template, _, _ = mapper.mappings['starshape_parameters']
starshape_file = os.path.join(
mapper.full_dir, starshape_file_template % {"epoch_index": 0, "subfield_index": 0})
starshapemtime = os.path.getmtime(starshape_file+".yaml")
if rotmtime < starshapemtime or rotmtime < os.path.getmtime(__file__):
use_stored = False
if logger is not None:
logger.info(
"Updating out-of-date rotations file using newer starshape_parameters from "+
mapper.full_dir)
# Then load / build as required
if use_stored is True:
if logger is not None:
logger.info("Loading rotations from "+rotfile)
rotations = np.loadtxt(rotfile)[:, 1] # First column is just subfield indices
else:
# To build we must loop over all the subfields and epochs
# First work out if the experiment is multi-exposure and has multiple epochs
if experiment in ("multiepoch", "full"):
import great3sims.constants
n_epochs = great3sims.constants.n_epochs
else:
n_epochs = 1
# Setup the array for storing the PSF values from which rotations are calculated
psf_g1 = np.empty((NSUBFIELDS, n_epochs))
psf_g2 = np.empty((NSUBFIELDS, n_epochs))
mean_psf_g1 = np.empty(NSUBFIELDS)
mean_psf_g2 = np.empty(NSUBFIELDS)
for subfield_index in range(NSUBFIELDS):
n_ignore = 0 # Counter for how many epochs had flagged, bad PSF g1/g2 values
for epoch_index in range(n_epochs):
starshape_parameters = mapper.read(
"starshape_parameters",
data_id={"epoch_index": epoch_index, "subfield_index": subfield_index})
star_g1 = starshape_parameters["psf_g1"]
star_g2 = starshape_parameters["psf_g2"]
# Test for flagged failures (these do happen rarely and are given the value
# psf_g1=psf_g2=-10.0, see writeStarParameters in great3sims/builders.py)
# If the psf ellipticities are failed, we just ignore these for the (m, c) calcs
if star_g1 > -9.9 and star_g2 > -9.9:
psf_g1[subfield_index, epoch_index] = star_g1
psf_g2[subfield_index, epoch_index] = star_g2
else:
n_ignore += 1
psf_g1[subfield_index, epoch_index] = 0.
psf_g2[subfield_index, epoch_index] = 0.
# Calculate the mean across the epochs in this subfield taking any flagged values into
# account
n_eff = n_epochs - n_ignore
if n_eff > 0:
mean_psf_g1[subfield_index] = (psf_g1[subfield_index, :]).sum() / float(n_eff)
mean_psf_g2[subfield_index] = (psf_g2[subfield_index, :]).sum() / float(n_eff)
else:
mean_psf_g1[subfield_index] = 0. # This is safe in np.arctan2() -> 0.
mean_psf_g2[subfield_index] = 0.
if experiment in ("variable_psf", "full"):
# Average over all subfields per field
final_psf_g1 = np.empty(NFIELDS)
final_psf_g2 = np.empty(NFIELDS)
for i in range(NFIELDS):
final_psf_g1[i] = np.mean(
mean_psf_g1[i * NSUBFIELDS_PER_FIELD: (i + 1) * NSUBFIELDS_PER_FIELD])
final_psf_g2[i] = np.mean(
mean_psf_g2[i * NSUBFIELDS_PER_FIELD: (i + 1) * NSUBFIELDS_PER_FIELD])
else:
final_psf_g1 = mean_psf_g1
final_psf_g2 = mean_psf_g2
rotations = .5 * np.arctan2(final_psf_g2, final_psf_g1)
# We have built rotations, but then save this file as ascii for use next time
if logger is not None:
logger.info("Saving rotations to "+rotfile)
if not os.path.isdir(storage_dir):
os.mkdir(storage_dir)
with open(rotfile, "wb") as fout:
fout.write("# Rotations for "+experiment+"-"+obs_type+"-constant\n")
fout.write("# subfield_index rotation [radians]\n")
np.savetxt(fout, np.array((np.arange(len(rotations)), rotations)).T, fmt=" %4d %+.18f")
return rotations
def get_generate_variable_offsets(experiment, obs_type, storage_dir=STORAGE_DIR,
truth_dir=TRUTH_DIR, logger=None):
"""Get or generate arrays of subfield_index, offset_deg_x, offset_deg_y, each of length
`NSUBFIELDS`.
If the offsets file has already been built for this variable shear branch, loads and returns the
saved arrays.
If the arrays of offset values have not been built, or are older than the first entry in the set
of subfield_offset files, the arrays are built first, saved to file, then returned.
@param experiment Experiment for this branch, one of 'control', 'real_galaxy',
'variable_psf', 'multiepoch', 'full'
@param obs_type Observation type for this branch, one of 'ground' or 'space'
@param storage_dir Directory from/into which to load/store rotation files
@param truth_dir Root directory in which the truth information for the challenge is stored
@param logger Python logging.Logger instance, for message logging
@return subfield_index, offset_deg_x, offset_deg_y
"""
offsetfile = os.path.join(storage_dir, OFFSETS_FILE_PREFIX+experiment[0]+obs_type[0]+"v.asc")
mapper = great3sims.mapper.Mapper(truth_dir, experiment, obs_type, "variable")
use_stored = True
if not os.path.isfile(offsetfile):
use_stored = False
if logger is not None:
logger.info(
"First build of offsets file using subfield_offset files from "+
mapper.full_dir)
else:
# Then compare timestamps for the offsets file and the first file
# (subfield = 000) for this branch. If the former is older than the latter, or
# this file, force rebuild...
offsetmtime = os.path.getmtime(offsetfile)
subfield_offset_file = os.path.join(mapper.full_dir, "subfield_offset-000.yaml")
subfield_offset_mtime = os.path.getmtime(subfield_offset_file)
if offsetmtime < subfield_offset_mtime or offsetmtime < os.path.getmtime(__file__):
use_stored = False
if logger is not None:
logger.info(
"Updating out-of-date offset file using newer values from "+
os.path.join(mapper.full_dir, "subfield_offset-*.yaml"))
# Then load / build as required
if use_stored is True:
if logger is not None:
logger.info("Loading offsets from "+offsetfile)
offsets = np.loadtxt(offsetfile)
else:
offsets_prefix = os.path.join(mapper.full_dir, "subfield_offset-")
offsets = np.empty((NSUBFIELDS, 3))
import yaml
offsets[:, 0] = np.arange(NSUBFIELDS)
for i in range(NSUBFIELDS):
offsets_file = offsets_prefix+("%03d" % i)+".yaml"
with open(offsets_file, "rb") as funit:
offsetdict = yaml.load(funit)
offsets[i, 1] = offsetdict["offset_deg_x"]
offsets[i, 2] = offsetdict["offset_deg_y"]
if logger is not None:
logger.info("Saving offset file to "+offsetfile)
if not os.path.isdir(storage_dir):
os.mkdir(storage_dir)
with open(offsetfile, "wb") as fout:
fout.write("# Subfield offsets for "+experiment+"-"+obs_type+"-variable\n")
fout.write("# subfield_index offset_deg_x offset_deg_y\n")
np.savetxt(fout, offsets, fmt=" %4d %.18e %.18e")
return (offsets[:, 0]).astype(int), offsets[:, 1], offsets[:, 2]
def run_corr2(x, y, e1, e2, w, min_sep=THETA_MIN_DEG, max_sep=THETA_MAX_DEG, nbins=NBINS_THETA,
cat_file_suffix='_temp.fits', params_file_suffix='_corr2.params',
m2_file_suffix='_temp.m2', xy_units='degrees', sep_units='degrees',
corr2_executable='corr2'):
"""Copied from presubmission.py
"""
import pyfits
import subprocess
import tempfile
# Create temporary, unique files for I/O
catfile = tempfile.mktemp(suffix=cat_file_suffix)
paramsfile = tempfile.mktemp(suffix=params_file_suffix)
m2file = tempfile.mktemp(suffix=m2_file_suffix)
# Write the basic corr2.params to temp location
print_basic_corr2_params(paramsfile, min_sep=min_sep, max_sep=max_sep, nbins=nbins,
xy_units=xy_units, sep_units=sep_units, fits_columns=True)
# Use fits binary table for faster I/O. (Converting to/from strings is slow.)
# First, make the data into np arrays
x_array = np.asarray(x).flatten()
y_array = np.asarray(y).flatten()
g1_array = np.asarray(e1).flatten()
g2_array = np.asarray(e2).flatten()
w_array = np.asarray(w).flatten()
# Then, mask out the >= 10 values
use_mask = np.logical_and.reduce([g1_array<10.,g2_array<10.])
# And finally make the FITS file
x_col = pyfits.Column(name='x', format='1D', array=x_array[use_mask])
y_col = pyfits.Column(name='y', format='1D', array=y_array[use_mask])
g1_col = pyfits.Column(name='g1', format='1D', array=g1_array[use_mask])
g2_col = pyfits.Column(name='g2', format='1D', array=g2_array[use_mask])
w_col = pyfits.Column(name='w', format='1D', array=w_array[use_mask])
cols = pyfits.ColDefs([x_col, y_col, g1_col, g2_col, w_col])
table = pyfits.new_table(cols)
phdu = pyfits.PrimaryHDU()
hdus = pyfits.HDUList([phdu, table])
hdus.writeto(catfile, clobber=True)
subprocess.Popen([
corr2_executable, str(paramsfile), 'file_name='+str(catfile), 'm2_file_name='+str(m2file)
]).wait()
results = np.loadtxt(m2file)
os.remove(paramsfile)
os.remove(catfile)
os.remove(m2file)
return results
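# For reference, the corr2 .m2 output columns consumed downstream in this module (see
# get_generate_variable_truth) are: results[:, 0] the theta bin centre, results[:, 1] the
# E-mode aperture mass dispersion, results[:, 2] the B-mode dispersion, and results[:, 5]
# the shot noise error estimate.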
def print_basic_corr2_params(outfile, min_sep=THETA_MIN_DEG, max_sep=THETA_MAX_DEG,
nbins=NBINS_THETA, xy_units='degrees', sep_units='degrees',
fits_columns=False):
"""Write a bare-bones corr2.params file (used by corr2) to the file named outfile.
"""
with open(outfile, 'wb') as fout:
if fits_columns:
fout.write("# Column description\n")
fout.write("x_col = x\n")
fout.write("y_col = y\n")
fout.write("g1_col = g1\n")
fout.write("g2_col = g2\n")
fout.write("w_col = w\n")
fout.write("\n")
fout.write("# File info\n")
fout.write("file_type=FITS")
else:
fout.write("# Column description\n")
fout.write("x_col = 1\n")
fout.write("y_col = 2\n")
fout.write("g1_col = 3\n")
fout.write("g2_col = 4\n")
fout.write("w_col = 5\n")
fout.write("\n")
fout.write(
"# Assume sign conventions for gamma were correct in the catalog passed to "+
"presubmission.py\n")
fout.write("flip_g1 = false\n")
fout.write("flip_g2 = false\n")
fout.write("\n")
fout.write("# Describe the parameters of the requested correlation function\n")
fout.write('min_sep=%f\n'%min_sep)
fout.write('max_sep=%f\n'%max_sep)
fout.write('nbins=%f\n'%nbins)
fout.write('x_units='+str(xy_units)+'\n')
fout.write('y_units='+str(xy_units)+'\n')
fout.write('sep_units='+str(sep_units)+'\n')
fout.write('\n')
fout.write("# verbose specifies how much progress output the code should emit.\n")
fout.write("verbose = 0\n")
fout.write("\n")
def get_generate_variable_truth(experiment, obs_type, storage_dir=STORAGE_DIR, truth_dir=TRUTH_DIR,
logger=None, corr2_exec="corr2", make_plots=False,
file_prefixes=("galaxy_catalog",), suffixes=("",),
mape_file_prefix=MAPESHEAR_FILE_PREFIX, output_xy_prefix=None):
"""Get or generate an array of truth map_E vectors for all the fields in this branch.
If the map_E truth file has already been built for this variable shear branch, loads and returns
the saved copies.
If the array of truth values has not been built, or is older than the first entry in the set of
galaxy_catalog files, the arrays are built first, saved to file, then returned.
@param experiment Experiment for this branch, one of 'control', 'real_galaxy',
'variable_psf', 'multiepoch', 'full'
@param obs_type Observation type for this branch, one of 'ground' or 'space'
@param storage_dir Directory from/into which to load/store rotation files
@param truth_dir Root directory in which the truth info for the challenge is stored
@param logger Python logging.Logger instance, for message logging
    @param corr2_exec      Path to Mike Jarvis' corr2 executable
@param make_plots Generate plotting output
@param file_prefixes Tuple containing one or more prefixes for file type in which to load
up shears, summing shears when `len(file_prefixes) >= 2`
[default = `("galaxy_catalog",)`]
@param suffixes Load up shear from entries "g1"+suffixes[0] and "g2"+suffixes[0] in the
`file_prefixes[0]`-type files, then add "g1"+suffixes[1] from
`file_prefixes[1]`-type files, etc. Must be same length as
`file_prefixes` tuple [default = `("",)`]
@param mape_file_prefix Prefix for output filename
@param output_xy_prefix Filename prefix (and switch if not None) for x-y position debug output
@return field, theta, map_E, map_B, maperr
"""
# Sanity check on suffixes & prefixes
if len(suffixes) != len(file_prefixes):
raise ValueError("Input file_prefixes and suffixes kwargs must be same length.")
# Build basic x and y grids to use for coord positions: note we do this here rather than as
# needed later so as to check the dimensions (meshgrid is very quick anyway)
xgrid_deg, ygrid_deg = np.meshgrid(
np.arange(0., XMAX_GRID_DEG, DX_GRID_DEG), np.arange(0., XMAX_GRID_DEG, DX_GRID_DEG))
xgrid_deg = xgrid_deg.flatten() # Flatten these - the default C ordering corresponds to the way
ygrid_deg = ygrid_deg.flatten() # the true shears are ordered too, which is handy
if len(xgrid_deg) != NGALS_PER_SUBFIELD:
raise ValueError(
"Dimensions of xgrid_deg and ygrid_deg do not match NGALS_PER_SUBFIELD. Please check "+
"the values of XMAX_GRID_DEG and DX_GRID_DEG in evaluate.py.")
# Define storage file and check for its existence and/or age
mapEtruefile = os.path.join(
storage_dir, mape_file_prefix+experiment[0]+obs_type[0]+"v.asc")
mapper = great3sims.mapper.Mapper(truth_dir, experiment, obs_type, "variable")
use_stored = True
if not os.path.isfile(mapEtruefile):
use_stored = False
if logger is not None:
logger.info(
"First build of map_E truth file using "+str(file_prefixes)+" files from "+
mapper.full_dir)
else:
# Then compare timestamps for the mapE file and the newest file_prefixes[:]-000.fits file
# (subfield = 000) for this branch. If the former is older than the latter, or
# this file, force rebuild...
mapEmtime = os.path.getmtime(mapEtruefile)
        catalogmtime = 0 # Set earliest possible timestamp
for prefix in file_prefixes:
catalog_file = os.path.join(mapper.full_dir, prefix+"-000.fits")
tmpmtime = os.path.getmtime(catalog_file)
if tmpmtime > catalogmtime: catalogmtime = tmpmtime
if mapEmtime < catalogmtime or mapEmtime < os.path.getmtime(__file__):
use_stored = False
if logger is not None:
logger.info(
"Updating out-of-date map_E file using newer "+str(file_prefixes)+" files "+
"from "+mapper.full_dir)
# Then load / build as required
if use_stored is True:
if logger is not None:
logger.info("Loading truth map_E from "+mapEtruefile)
data = np.loadtxt(mapEtruefile)
field, theta, map_E, map_B, maperr = (
data[:, 0].astype(int), data[:, 1], data[:, 2], data[:, 3], data[:, 4])
else:
# Define the field array, then theta and map arrays in which we'll store the results
field = np.arange(NBINS_THETA * NFIELDS) / NBINS_THETA
theta = np.empty(NBINS_THETA * NFIELDS)
map_E = np.empty(NBINS_THETA * NFIELDS)
map_B = np.empty(NBINS_THETA * NFIELDS)
maperr = np.empty(NBINS_THETA * NFIELDS)
# Load the offsets
subfield_indices, offset_deg_x, offset_deg_y = get_generate_variable_offsets(
experiment, obs_type, storage_dir=storage_dir, truth_dir=truth_dir, logger=logger)
# Setup some storage arrays into which we'll write
xfield = np.empty((NGALS_PER_SUBFIELD, NSUBFIELDS_PER_FIELD))
yfield = np.empty((NGALS_PER_SUBFIELD, NSUBFIELDS_PER_FIELD))
# Loop over fields
import pyfits
for ifield in range(NFIELDS):
# Read in all the shears in this field and store
g1 = np.zeros((NGALS_PER_SUBFIELD, NSUBFIELDS_PER_FIELD))
g2 = np.zeros((NGALS_PER_SUBFIELD, NSUBFIELDS_PER_FIELD))
for jsub in range(NSUBFIELDS_PER_FIELD):
# Build the x,y grid using the subfield offsets
isubfield_index = jsub + ifield * NSUBFIELDS_PER_FIELD
xfield[:, jsub] = xgrid_deg + offset_deg_x[isubfield_index]
yfield[:, jsub] = ygrid_deg + offset_deg_y[isubfield_index]
# If requested (by setting output_xy_prefix) then write these xy out for diagnostic
if output_xy_prefix is not None:
output_xy_filename = output_xy_prefix+("-sub%03d" % isubfield_index)+".asc"
print "Writing "+output_xy_filename+" as requested..."
with open(output_xy_filename, 'wb') as fout:
fout.write("# x y\n")
np.savetxt(fout, np.array((xfield[:, jsub], yfield[:, jsub])).T)
# Then loop over the supplied file_prefixes and g1/g2 suffixes, summing shears
for prefix, suffix in zip(file_prefixes, suffixes):
galcatfile = os.path.join(
mapper.full_dir, (prefix+"-%03d.fits" % isubfield_index))
truedata = pyfits.getdata(galcatfile)
if len(truedata) != NGALS_PER_SUBFIELD:
raise ValueError(
"Number of records in "+galcatfile+" (="+str(len(truedata))+") is not "+
"equal to NGALS_PER_SUBFIELD (="+str(NGALS_PER_SUBFIELD)+")")
# Use the correct rule for shear addition, best (most safely) evaluated using
# arrays of complex numbers, see Schneider 2006 eq 12
gtoaddc = truedata["g1"+suffix] + truedata["g2"+suffix]*1j
gpriorc = g1[:, jsub] + g2[:, jsub]*1j
gfinalc = (gpriorc + gtoaddc) / (1. + gtoaddc.conj() * gpriorc)
g1[:, jsub] = gfinalc.real
g2[:, jsub] = gfinalc.imag
# If requested (by setting output_xy_prefix) then write these xy out for diagnostic
if output_xy_prefix is not None:
output_xy_filename = output_xy_prefix+("-%03d" % ifield)+".asc"
with open(output_xy_filename, 'wb') as fout:
fout.write("# x y\n")
np.savetxt(fout, np.array((xfield.flatten(), yfield.flatten())).T)
# Having got the x,y and g1, g2 for all the subfields in this field, flatten and use
# to calculate the map_E
map_results = run_corr2(
xfield.flatten(), yfield.flatten(), g1.flatten(), g2.flatten(),
np.ones(NGALS_PER_SUBFIELD * NSUBFIELDS_PER_FIELD), min_sep=THETA_MIN_DEG,
max_sep=THETA_MAX_DEG, nbins=NBINS_THETA, corr2_executable=corr2_exec,
xy_units="degrees", sep_units="degrees")
theta[ifield * NBINS_THETA: (ifield + 1) * NBINS_THETA] = map_results[:, 0]
map_E[ifield * NBINS_THETA: (ifield + 1) * NBINS_THETA] = map_results[:, 1]
map_B[ifield * NBINS_THETA: (ifield + 1) * NBINS_THETA] = map_results[:, 2]
maperr[ifield * NBINS_THETA: (ifield + 1) * NBINS_THETA] = map_results[:, 5]
# Save these in ASCII format
if logger is not None:
logger.info("Saving truth map_E file to "+mapEtruefile)
with open(mapEtruefile, "wb") as fout:
fout.write("# True aperture mass statistics for "+experiment+"-"+obs_type+"-variable\n")
fout.write("# field_index theta [deg] map_E map_B maperr\n")
np.savetxt(
fout, np.array((field, theta, map_E, map_B, maperr)).T,
fmt=" %2d %.18e %.18e %.18e %.18e")
if make_plots and not use_stored: # No point plotting if already built!
import matplotlib.pyplot as plt
plt.figure(figsize=(10, 8))
plt.subplot(211)
for ifield in range(NFIELDS):
plt.semilogx(
theta[ifield * NBINS_THETA: (ifield + 1) * NBINS_THETA],
map_E[ifield * NBINS_THETA: (ifield + 1) * NBINS_THETA], label="Field "+str(ifield))
plt.ylim(-2.e-5, 2.e-5)
plt.title(mapEtruefile+" E-mode")
plt.ylabel("Ap. Mass Dispersion")
plt.axhline(ls="--", color="k")
plt.legend()
plt.subplot(212)
for ifield in range(NFIELDS):
plt.semilogx(
theta[ifield * NBINS_THETA: (ifield + 1) * NBINS_THETA],
map_B[ifield * NBINS_THETA: (ifield + 1) * NBINS_THETA], label="Field "+str(ifield))
plt.ylim(-2.e-5, 2.e-5)
plt.title(mapEtruefile+" B-mode")
plt.xlabel("Theta [degrees]")
plt.ylabel("Ap. Mass Dispersion")
plt.axhline(ls="--", color="k")
plt.legend()
plotfile = mapEtruefile.rstrip("asc")+"png"
if logger is not None:
logger.info("Saving plot output to "+plotfile)
plt.savefig(plotfile)
# Then return
return field, theta, map_E, map_B, maperr
def q_constant(submission_file, experiment, obs_type, storage_dir=STORAGE_DIR, truth_dir=TRUTH_DIR,
logger=None, normalization=None, sigma2_min=None, just_q=False, cfid=CFID, mfid=MFID,
pretty_print=False, flip_g1=False, flip_g2=False, plot=False, ignore_fields=None):
"""Calculate the Q_c for a constant shear branch submission.
@param submission_file File containing the user submission.
@param experiment Experiment for this branch, one of 'control', 'real_galaxy',
'variable_psf', 'multiepoch', 'full'
@param obs_type Observation type for this branch, one of 'ground' or 'space'
@param storage_dir Directory from/into which to load/store rotation files
@param truth_dir Root directory in which the truth information for the challenge is
stored
@param logger Python logging.Logger instance, for message logging
@param normalization Normalization factor for the metric (default `None` uses either
`NORMALIZATION_CONSTANT_GROUND` or `NORMALIZATION_CONSTANT_SPACE`
depending on `obs_type`)
@param sigma2_min Damping term to put into the denominator of QZ1 metric (default `None`
uses either `SIGMA2_MIN_CONSTANT_GROUND` or `SIGMA2_MIN_CONSTANT_SPACE`
depending on `obs_type`)
@param just_q Set `just_q = True` (default is `False`) to only return Q_c rather than
the default behaviour of returning a tuple including best fitting c+,
m+, cx, mx, etc.
@param cfid Fiducial, target c value
@param mfid Fiducial, target m value
@param ignore_fields List or tuple of fields to ignore. If None, use all fields.
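    @param pretty_print    Print a human-readable summary of the results to stdout
    @param flip_g1         Flip the sign of the submitted g1 values before comparison
    @param flip_g2         Flip the sign of the submitted g2 values before comparison
    @param plot            If set, plot the fitted residuals; if a string, also save the
                           plot to that filename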
    @return The metric Q_c, & optionally best fitting c+, m+, cx, mx, sigc+, sigm+, sigcx, sigmx.
"""
if not os.path.isfile(submission_file):
raise ValueError("Supplied submission_file '"+submission_file+"' does not exist.")
# If the sigma2_min is not changed from None, set using defaults based on obs_type
if sigma2_min is None:
if obs_type == "ground":
sigma2_min = SIGMA2_MIN_CONSTANT_GROUND
elif obs_type == "space":
sigma2_min = SIGMA2_MIN_CONSTANT_SPACE
else:
raise ValueError("Default sigma2_min cannot be set as obs_type not recognised")
# If the normalization is not changed from None, set using defaults based on obs_type
if normalization is None:
if obs_type == "ground":
normalization = NORMALIZATION_CONSTANT_GROUND
elif obs_type == "space":
normalization = NORMALIZATION_CONSTANT_SPACE
else:
raise ValueError("Default sigma2_min cannot be set as obs_type not recognised")
# Load the submission and label the slices we're interested in
if logger is not None:
logger.info("Calculating Q_c metric for "+submission_file)
data = np.loadtxt(submission_file)
subfield = data[:, 0]
g1sub = data[:, 1]
g2sub = data[:, 2]
if flip_g1: g1sub = -g1sub
if flip_g2: g2sub = -g2sub
# Load up the rotations, then rotate g1 & g2 in the correct sense.
# NOTE THE MINUS SIGNS! This is because we need to rotate the coordinates *back* into a frame
# in which the primary direction of the PSF is g1, and the orthogonal is g2
try: # Put this in a try except block to handle funky submissions better
rotations = get_generate_const_rotations(
experiment, obs_type, truth_dir=truth_dir, storage_dir=storage_dir, logger=logger)
g1srot = g1sub * np.cos(-2. * rotations) - g2sub * np.sin(-2. * rotations)
g2srot = g1sub * np.sin(-2. * rotations) + g2sub * np.cos(-2. * rotations)
# Load the truth
_, g1truth, g2truth = get_generate_const_truth(
experiment, obs_type, truth_dir=truth_dir, storage_dir=storage_dir, logger=logger)
# Rotate the truth in the same sense, then use the g3metrics.fitline routine to
# perform simple linear regression
g1trot = g1truth * np.cos(-2. * rotations) - g2truth * np.sin(-2. * rotations)
g2trot = g1truth * np.sin(-2. * rotations) + g2truth * np.cos(-2. * rotations)
# Decide which subfields to use / ignore
use = np.ones_like(g1srot, dtype=bool)
if ignore_fields is not None:
for field_index in ignore_fields:
use[field_index] = False
Q_c, c1, m1, c2, m2, sigc1, sigm1, sigc2, sigm2 = g3metrics.metricQZ1_const_shear(
g1srot[use], g2srot[use], g1trot[use], g2trot[use],
cfid=cfid, mfid=mfid, sigma2_min=sigma2_min)
Q_c *= normalization
if plot:
import matplotlib.pyplot as plt
plt.subplot(211)
plt.plot(g1trot, g1srot - g1trot, 'k+')
plt.plot(
[min(g1trot), max(g1trot)], [m1 * min(g1trot) + c1, m1 * max(g1trot) + c1],
'b-', label="c+ = %+.5f +/- %.5f \nm+ = %+.5f +/- %.5f" % (c1, sigc1, m1, sigm1))
plt.xlim()
plt.xlabel("gtrue_+")
plt.ylabel("(gsub - gtrue)_+")
plt.ylim(-0.015, 0.015)
plt.title(os.path.split(submission_file)[-1])
plt.axhline(ls='--', color='k')
plt.legend()
plt.subplot(212)
plt.plot(g2trot, g2srot - g2trot, 'k+')
plt.plot(
[min(g2trot), max(g2trot)], [m2 * min(g2trot) + c2, m2 * max(g2trot) + c2],
'r-', label="cx = %+.5f +/- %.5f \nmx = %+.5f +/- %.5f" % (c2, sigc2, m2, sigm2))
plt.xlabel("gtrue_x")
plt.ylabel("(gsub - gtrue)_x")
plt.ylim(-0.015, 0.015)
plt.axhline(ls='--', color='k')
plt.legend()
if type(plot) == str:
print "Saving plot to "+plot
plt.savefig(plot)
plt.show()
except Exception as err:
        # Something went wrong... Handle this silently, setting all outputs to zero, but warn
        # the user via any supplied logger; if no logger was given, raise
Q_c, c1, m1, c2, m2, sigc1, sigm1, sigc2, sigm2 = 0, 0, 0, 0, 0, 0, 0, 0, 0
print err
if logger is not None:
logger.warn(err.message)
else:
raise err # ...Raise exception if there is no logger
# Then return
if just_q:
ret = Q_c
else:
if pretty_print:
print
print "Evaluated results for submission "+str(submission_file)
print "Using sigma2_min = "+str(sigma2_min)
print
print "Q_c = %.4f" % Q_c
print "c+ = %+.5f +/- %.5f" % (c1, sigc1)
print "cx = %+.5f +/- %.5f" % (c2, sigc2)
print "m+ = %+.5f +/- %.5f" % (m1, sigm1)
print "mx = %+.5f +/- %.5f" % (m2, sigm2)
print
ret = (Q_c, c1, m1, c2, m2, sigc1, sigm1, sigc2, sigm2)
return ret
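# Standalone sketch (not part of the original module) of the spin-2 counter-rotation used in
# q_constant() above: shears transform with angle 2*phi, so rotating by -2*rotations aligns the
# PSF direction with the g1 axis while preserving the shear modulus.
#
#     import numpy as np
#     phi = 0.3  # hypothetical rotation angle in radians
#     g1, g2 = 0.02, -0.01
#     g1r = g1 * np.cos(-2. * phi) - g2 * np.sin(-2. * phi)
#     g2r = g1 * np.sin(-2. * phi) + g2 * np.cos(-2. * phi)
#     assert np.isclose(np.hypot(g1r, g2r), np.hypot(g1, g2))  # modulus preserved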
def q_variable(submission_file, experiment, obs_type, normalization=None, truth_dir=TRUTH_DIR,
storage_dir=STORAGE_DIR, logger=None, corr2_exec="corr2", poisson_weight=False,
usebins=USEBINS, fractional_diff=False, squared_diff=False, sigma2_min=None):
"""Calculate the Q_v for a variable shear branch submission.
@param submission_file File containing the user submission.
@param experiment Experiment for this branch, one of 'control', 'real_galaxy',
'variable_psf', 'multiepoch', 'full'
@param obs_type Observation type for this branch, one of 'ground' or 'space'
@param normalization Normalization factor for the metric, default will be set differently for
obs_type='space' and obs_type='ground'
@param storage_dir Directory from/into which to load/store rotation files
@param truth_dir Root directory in which the truth information for the challenge is
stored
@param logger Python logging.Logger instance, for message logging
    @param corr2_exec        Path to Mike Jarvis' corr2 executable
@param poisson_weight If `True`, use the relative Poisson errors in each bin of map_E
to form an inverse variance weight for the difference metric
[default = `False`]
@param usebins An array the same shape as EXPECTED_THETA specifying which bins to
use in the calculation of Q_v [default = `USEBINS`]. If set to `None`,
uses all bins
@param fractional_diff Use a |fractional|, rather than absolute difference in metric
@param squared_diff Use the squared, rather than the absolute difference in metric
@param sigma2_min Damping term to put into the denominator of metric (default `None`
uses either `SIGMA2_MIN_VARIABLE_GROUND` or `SIGMA2_MIN_VARIABLE_SPACE`
depending on `obs_type`)
@return The metric Q_v
"""
if not os.path.isfile(submission_file):
raise ValueError("Supplied submission_file '"+submission_file+"' does not exist.")
# Set the default normalization based on whether ground or space data
if normalization is None:
if obs_type == "ground":
normalization = NORMALIZATION_VARIABLE_GROUND
elif obs_type == "space":
normalization = NORMALIZATION_VARIABLE_SPACE
else:
raise ValueError("Default normalization cannot be set as obs_type not recognised")
# If the sigma2_min is not changed from `None`, set using defaults based on `obs_type`
if sigma2_min is None:
if obs_type == "ground":
sigma2_min = SIGMA2_MIN_VARIABLE_GROUND
elif obs_type == "space":
sigma2_min = SIGMA2_MIN_VARIABLE_SPACE
else:
raise ValueError("Default sigma2_min cannot be set as obs_type not recognised")
# Load the submission and label the slices we're interested in
if logger is not None:
logger.info("Calculating Q_v metric for "+submission_file)
data = np.loadtxt(submission_file)
    # The submission should have 4 or 5 columns, so check for this
if data.shape not in ((NBINS_THETA * NFIELDS, 4), (NBINS_THETA * NFIELDS, 5)):
raise ValueError("Submission "+str(submission_file)+" is not the correct shape!")
# Extract the salient parts of the submission from data
field_sub = data[:, 0].astype(int)
theta_sub = data[:, 1]
map_E_sub = data[:, 2]
# Load/generate the truth shear signal
field_shear, theta_shear, map_E_shear, _, maperr_shear = get_generate_variable_truth(
experiment, obs_type, truth_dir=truth_dir, storage_dir=storage_dir, logger=logger,
corr2_exec=corr2_exec, mape_file_prefix=MAPESHEAR_FILE_PREFIX, suffixes=("",),
make_plots=False)
    # Then generate the intrinsic only map_E, useful for examining plots, including the maperr
# (a good estimate of the relative Poisson errors per bin) which we will use to provide a weight
field_int, theta_int, map_E_int, _, maperr_int = get_generate_variable_truth(
experiment, obs_type, truth_dir=truth_dir, storage_dir=storage_dir, logger=logger,
corr2_exec=corr2_exec, mape_file_prefix=MAPEINT_FILE_PREFIX, suffixes=("_intrinsic",),
make_plots=False)
# Then generate the theory observed = int + shear combined map signals - these are our reference
# Note this uses the new functionality of get_generate_variable_truth for adding shears
field_ref, theta_ref, map_E_ref, _, maperr_ref = get_generate_variable_truth(
experiment, obs_type, truth_dir=truth_dir, storage_dir=storage_dir, logger=logger,
corr2_exec=corr2_exec, mape_file_prefix=MAPEOBS_FILE_PREFIX,
file_prefixes=("galaxy_catalog", "galaxy_catalog"), suffixes=("_intrinsic", ""),
make_plots=False)
# Set up the weight
if poisson_weight:
        weight = np.max(maperr_int**2) / maperr_int**2  # Inverse variance weight
else:
weight = np.ones_like(map_E_ref)
# Set up the usebins to use if `usebins == None` (use all bins)
if usebins is None:
usebins = np.repeat(True, NBINS_THETA * NFIELDS)
# Get the total number of active bins per field
nactive = sum(usebins) / NFIELDS
try: # Put this in a try except block to handle funky submissions better
        np.testing.assert_array_almost_equal(  # Sanity check our truth / expected theta bins
theta_shear, EXPECTED_THETA, decimal=3,
err_msg="BIG SNAFU! Truth theta does not match the EXPECTED_THETA, failing...")
np.testing.assert_array_equal(
field_sub, field_ref, err_msg="User field array does not match truth.")
np.testing.assert_array_almost_equal(
theta_sub, theta_ref, decimal=3, err_msg="User theta array does not match truth.")
# The definition of Q_v is so simple there is no need to use the g3metrics version
# NOTE WE ARE TRYING A NEW DEFINITION OF Q_v THAT IS NOT SO SIMPLE
Q_v_fields = np.zeros(nactive) # To store diffs averaged over fields, per bin
if not fractional_diff:
for i in range(nactive): # Sum over all fields for each bin, nactive being the stride
Q_v_fields[i] = np.sum(((weight * (map_E_sub - map_E_ref))[usebins])[i::nactive])
else:
for i in range(nactive): # Sum over all fields for each bin, nactive being the stride
Q_v_fields[i] = np.sum(
((weight * (map_E_sub - map_E_ref) / map_E_ref)[usebins])[i::nactive])
# Then take the weighted average abs(Q_v_fields)
if not squared_diff:
Q_v = normalization / (
sigma2_min + (np.sum(np.abs(Q_v_fields)) / np.sum(weight[usebins])))
else:
Q_v = normalization / (
sigma2_min + (np.sum(Q_v_fields**2) / np.sum(weight[usebins])))
except Exception as err:
Q_v = 0. # If the theta or field do not match, let's be strict and force Q_v...
if logger is not None:
logger.warn(err.message) # ...But let's warn if there's a logger!
else: # ...And raise the exception if not
raise err
# Then return Q_v
return Q_v
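# Illustrative sketch (standalone, hypothetical numbers) of the Q_v form computed above: an
# inverse weighted mean absolute residual, damped by sigma2_min so that near-perfect submissions
# cannot produce divergent scores.
#
#     import numpy as np
#     residuals = np.array([1.e-5, -2.e-5, 5.e-6])  # map_E_sub - map_E_ref, one value per bin
#     weight = np.ones_like(residuals)
#     q_v = 1000. / (4.e-8 + np.sum(np.abs(residuals)) / np.sum(weight))
#
# Larger residuals shrink Q_v; sigma2_min caps the score as the residuals approach zero.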
def map_diff_func(cm_array, mapEsub, maperrsub, mapEref, mapEunitc):
"""Difference between an m,c model of a biased aperture mass statistic submission and the
submission itself, as a vector corresponding to the theta vector.
The model of the biased submission is simply:
mapEmodel = mapEunitc * c^2 + mapEref * (1 + 2 * m + m^2)
where c, m = cm_array[0], cm_array[1]. This code returns
(mapEmodel - mapEsub) / maperrsub
for the use of scipy.optimize.leastsq() within q_variable_by_mc().
"""
ret = (
mapEunitc * cm_array[0]**2 + mapEref * (1. + 2. * cm_array[1] + cm_array[1]**2)
- mapEsub) / maperrsub
return ret
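# Minimal fitting sketch (synthetic arrays, illustrative only) showing how map_diff_func plugs
# into scipy.optimize.leastsq to recover c & m from a submission biased relative to a reference:
#
#     import numpy as np
#     import scipy.optimize
#     mapEref = np.linspace(1.e-6, 1.e-5, 50)
#     mapEunitc = np.ones(50) * 2.e-6
#     mapEsub = mapEunitc * 0.01**2 + mapEref * (1. + 0.02)**2  # c = 0.01, m = 0.02
#     maperrsub = np.ones(50) * 1.e-7
#     (c, m), _ = scipy.optimize.leastsq(
#         map_diff_func, np.array([0., 0.]),
#         args=(mapEsub, maperrsub, mapEref, mapEunitc))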
def q_variable_by_mc(submission_file, experiment, obs_type, map_E_unitc, normalization=None,
truth_dir=TRUTH_DIR, storage_dir=STORAGE_DIR, logger=None, usebins=None,
corr2_exec="corr2", sigma2_min=None, cfid=CFID, mfid=MFID, just_q=False,
pretty_print=False):
"""Calculate the Q_v for a variable shear branch submission, using a best-fitting m and c model
of submission biases to evaluate the score. Experimental metric, not used in the GREAT3
challenge due to the difficulty of reliably modelling m & c in simulation tests.
@param submission_file File containing the user submission.
@param experiment Experiment for this branch, one of 'control', 'real_galaxy',
'variable_psf', 'multiepoch', 'full'
@param obs_type Observation type for this branch, one of 'ground' or 'space'
@param normalization Normalization factor for the metric, default will be set differently for
obs_type='space' and obs_type='ground'
@param storage_dir Directory from/into which to load/store rotation files
@param truth_dir Root directory in which the truth information for the challenge is
stored
@param logger Python logging.Logger instance, for message logging
@param usebins An array the same shape as EXPECTED_THETA specifying which bins to
use in the calculation of Q_v [default = `USEBINS`]. If set to `None`,
uses all bins
    @param corr2_exec        Path to Mike Jarvis' corr2 executable
@param sigma2_min Damping term to put into the denominator of metric (default `None`
uses either `SIGMA2_MIN_VARIABLE_GROUND` or `SIGMA2_MIN_VARIABLE_SPACE`
depending on `obs_type`)
@param cfid Fiducial, target c value
@param mfid Fiducial, target m value
@param just_q Set `just_q = True` (default is `False`) to only return Q_v rather than
the default behaviour of returning a tuple including best fitting |c|,
m, uncertainties etc.
    @return The metric Q_v, & optionally best fitting |c|, m, their uncertainties & covariance.
"""
if not os.path.isfile(submission_file):
raise ValueError("Supplied submission_file '"+submission_file+"' does not exist.")
# Set the default normalization based on whether ground or space data
if normalization is None:
if obs_type == "ground":
normalization = NORMALIZATION_CONSTANT_GROUND
elif obs_type == "space":
normalization = NORMALIZATION_CONSTANT_SPACE
else:
raise ValueError("Default normalization cannot be set as obs_type not recognised")
# If the sigma2_min is not changed from `None`, set using defaults based on `obs_type`
if sigma2_min is None:
if obs_type == "ground":
sigma2_min = SIGMA2_MIN_VARIABLE_GROUND
elif obs_type == "space":
sigma2_min = SIGMA2_MIN_VARIABLE_SPACE
else:
raise ValueError("Default sigma2_min cannot be set as obs_type not recognised")
# Load the submission and label the slices we're interested in
if logger is not None:
logger.info("Calculating Q_v metric (by m & c) for "+submission_file)
data = np.loadtxt(submission_file)
    # The submission should have 4 or 5 columns, so check for this
if data.shape not in ((NBINS_THETA * NFIELDS, 4), (NBINS_THETA * NFIELDS, 5)):
raise ValueError("Submission "+str(submission_file)+" is not the correct shape!")
# Extract the salient parts of the submission from data
field_sub = data[:, 0].astype(int)
theta_sub = data[:, 1]
map_E_sub = data[:, 2]
# Load/generate the truth shear signal
field_shear, theta_shear, map_E_shear, _, maperr_shear = get_generate_variable_truth(
experiment, obs_type, truth_dir=truth_dir, storage_dir=storage_dir, logger=logger,
corr2_exec=corr2_exec, mape_file_prefix=MAPESHEAR_FILE_PREFIX, suffixes=("",),
make_plots=False)
    # Then generate the intrinsic only map_E, useful for examining plots, including the maperr
# (a good estimate of the relative Poisson errors per bin) which we will use to provide a weight
field_int, theta_int, map_E_int, _, maperr_int = get_generate_variable_truth(
experiment, obs_type, truth_dir=truth_dir, storage_dir=storage_dir, logger=logger,
corr2_exec=corr2_exec, mape_file_prefix=MAPEINT_FILE_PREFIX, suffixes=("_intrinsic",),
make_plots=False)
# Then generate the theory observed = int + shear combined map signals - these are our reference
# Note this uses the new functionality of get_generate_variable_truth for adding shears
field_ref, theta_ref, map_E_ref, _, maperr_ref = get_generate_variable_truth(
experiment, obs_type, truth_dir=truth_dir, storage_dir=storage_dir, logger=logger,
corr2_exec=corr2_exec, mape_file_prefix=MAPEOBS_FILE_PREFIX,
file_prefixes=("galaxy_catalog", "galaxy_catalog"), suffixes=("_intrinsic", ""),
make_plots=False)
# Set up the usebins to use if `usebins == None` (use all bins)
if usebins is None:
usebins = np.repeat(True, NBINS_THETA * NFIELDS)
# Get the total number of active bins per field
nactive = sum(usebins) / NFIELDS
    try:  # Put this in a try except block to handle funky submissions better
        np.testing.assert_array_almost_equal(  # Sanity check our truth / expected theta bins
theta_shear, EXPECTED_THETA, decimal=3,
err_msg="BIG SNAFU! Truth theta does not match the EXPECTED_THETA, failing...")
np.testing.assert_array_equal(
field_sub, field_ref, err_msg="User field array does not match truth.")
np.testing.assert_array_almost_equal(
theta_sub, theta_ref, decimal=3, err_msg="User theta array does not match truth.")
# Use optimize.leastsq to find the best fitting linear bias model params, and covariances
import scipy.optimize
optimize_results = scipy.optimize.leastsq(
map_diff_func, np.array([0., 0.]),
            args=(
                map_E_sub[usebins],
                maperr_ref[usebins],   # Note use of ref errors: these appropriately
                                       # weight different bins and are not themselves noisy
                map_E_ref[usebins],
                map_E_unitc[usebins]), full_output=True)
csub = optimize_results[0][0]
msub = optimize_results[0][1]
map_E_model = map_E_unitc * csub**2 + map_E_ref * (1. + 2. * msub + msub**2)
residual_variance = np.var(
((map_E_sub - map_E_model) / maperr_ref)[usebins], ddof=1)
if optimize_results[1] is not None:
covcm = optimize_results[1] * residual_variance
sigcsub = np.sqrt(covcm[0, 0])
sigmsub = np.sqrt(covcm[1, 1])
covcm = covcm[0, 1]
else:
sigcsub = 0.
sigmsub = 0.
covcm = 0.
# Then we define the Q_v
Q_v = 2449. * normalization / np.sqrt(
(csub / cfid)**2 + (msub / mfid)**2 + sigma2_min)
    except Exception as err:
        # If the theta or field do not match, let's be strict and zero Q_v & the fit params...
        Q_v, csub, msub, sigcsub, sigmsub, covcm = 0., 0., 0., 0., 0., 0.
        if logger is not None:
            logger.warn(err.message)  # ...But let's warn if there's a logger!
        else:  # ...And raise the exception if not
            raise err
    # Then return
if just_q:
ret = Q_v
else:
if pretty_print:
print "Evaluated results for submission "+str(submission_file)
print "Using sigma2_min = "+str(sigma2_min)
print "Q_v = %.4f" % Q_v
print "|c| = %+.5f +/- %.5f" % (csub, sigcsub)
print " m = %+.5f +/- %.5f" % (msub, sigmsub)
print "Cov(0, 0) = %+.4e" % sigcsub**2
print "Cov(0, 1) = %+.4e" % covcm
print "Cov(1, 1) = %+.4e" % sigmsub**2
ret = (Q_v, csub, msub, sigcsub, sigmsub, covcm)
return ret
|
{
"content_hash": "dcdb9f2844f22c7fe749f248dc59df70",
"timestamp": "",
"source": "github",
"line_count": 1114,
"max_line_length": 100,
"avg_line_length": 53.35188509874327,
"alnum_prop": 0.6139246895716257,
"repo_name": "barnabytprowe/great3-public",
"id": "71ac5deb2673d925e05e1b5abbc22bce83a52096",
"size": "61024",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "metrics/evaluate.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Prolog",
"bytes": "23397"
},
{
"name": "Python",
"bytes": "1002269"
},
{
"name": "Zimpl",
"bytes": "417"
}
],
"symlink_target": ""
}
|
#!/usr/bin/env python
"""
This script is used to run tests, create a coverage report and output the
statistics at the end of the tox run.
To run this script just execute ``tox``
"""
import re
from fabric.api import local, warn
from fabric.colors import green, red
if __name__ == '__main__':
local('flake8 --ignore=E126 --ignore=W391 --statistics'
' --exclude=submodules,migrations,south_migrations,build .')
local('coverage run --source="document_library" manage.py test -v 2'
' --traceback --failfast --settings=document_library.tests.settings'
' --pattern="*_tests.py"')
local('coverage html -d coverage --omit="*__init__*,*/settings/*,'
'*/south_migrations/*,*/migrations/*,*/tests/*,*admin*"')
total_line = local('grep -n pc_cov coverage/index.html', capture=True)
percentage = float(re.findall(r'(\d+)%', total_line)[-1])
if percentage < 100:
warn(red('Coverage is {0}%'.format(percentage)))
print(green('Coverage is {0}%'.format(percentage)))
|
{
"content_hash": "dd4f6ab49bdfcbff5f05c355b7627532",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 78,
"avg_line_length": 41.12,
"alnum_prop": 0.646887159533074,
"repo_name": "bitmazk/django-document-library",
"id": "65b73dd219f2f5d4494a495c13424f585a804900",
"size": "1028",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "runtests.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "2508"
},
{
"name": "Python",
"bytes": "369101"
}
],
"symlink_target": ""
}
|
from enum import Enum
class Mode(Enum):
copy = 0
move = 1
replace = 2
def __str__(self):
return self.name
class Encode(Enum):
no = 0
yes = 1
later = 2
def __str__(self):
return self.name
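# Minimal usage sketch (not in the original module): the __str__ overrides make members print
# as bare names, which is convenient for building CLI messages.
#
#     >>> str(Mode.move)
#     'move'
#     >>> print(Encode.later)
#     later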
|
{
"content_hash": "56c973257ad5413c1d7bff10af9f5104",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 24,
"avg_line_length": 12.68421052631579,
"alnum_prop": 0.5186721991701245,
"repo_name": "gotling/photo-sort",
"id": "7c90dd0f4145d4e0386f9a2ce86962616c13eef8",
"size": "241",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "photo_sort/enums.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "39040"
}
],
"symlink_target": ""
}
|
import _plotly_utils.basevalidators
class ImaginaryaxisValidator(_plotly_utils.basevalidators.CompoundValidator):
def __init__(
self, plotly_name="imaginaryaxis", parent_name="layout.smith", **kwargs
):
super(ImaginaryaxisValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
data_class_str=kwargs.pop("data_class_str", "Imaginaryaxis"),
data_docs=kwargs.pop(
"data_docs",
"""
color
Sets default for all colors associated with
this axis all at once: line, font, tick, and
grid colors. Grid color is lightened by
blending this with the plot background
Individual pieces can override this.
gridcolor
Sets the color of the grid lines.
griddash
Sets the dash style of lines. Set to a dash
type string ("solid", "dot", "dash",
"longdash", "dashdot", or "longdashdot") or a
dash length list in px (eg "5px,10px,2px,2px").
gridwidth
Sets the width (in px) of the grid lines.
hoverformat
Sets the hover text formatting rule using d3
formatting mini-languages which are very
similar to those in Python. For numbers, see: h
ttps://github.com/d3/d3-format/tree/v1.4.5#d3-f
ormat. And for dates see:
https://github.com/d3/d3-time-
format/tree/v2.2.3#locale_format. We add two
items to d3's date formatter: "%h" for half of
the year as a decimal number as well as "%{n}f"
for fractional seconds with n digits. For
example, *2016-10-13 09:15:23.456* with
tickformat "%H~%M~%S.%2f" would display
"09~15~23.46"
layer
Sets the layer on which this axis is displayed.
If *above traces*, this axis is displayed above
all the subplot's traces If *below traces*,
this axis is displayed below all the subplot's
traces, but above the grid lines. Useful when
used together with scatter-like traces with
`cliponaxis` set to False to show markers
and/or text nodes above this axis.
linecolor
Sets the axis line color.
linewidth
Sets the width (in px) of the axis line.
showgrid
Determines whether or not grid lines are drawn.
If True, the grid lines are drawn at every tick
mark.
showline
Determines whether or not a line bounding this
axis is drawn.
showticklabels
Determines whether or not the tick labels are
drawn.
showtickprefix
If "all", all tick labels are displayed with a
prefix. If "first", only the first tick is
displayed with a prefix. If "last", only the
last tick is displayed with a suffix. If
"none", tick prefixes are hidden.
showticksuffix
Same as `showtickprefix` but for tick suffixes.
tickcolor
Sets the tick color.
tickfont
Sets the tick font.
tickformat
Sets the tick label formatting rule using d3
formatting mini-languages which are very
similar to those in Python. For numbers, see: h
ttps://github.com/d3/d3-format/tree/v1.4.5#d3-f
ormat. And for dates see:
https://github.com/d3/d3-time-
format/tree/v2.2.3#locale_format. We add two
items to d3's date formatter: "%h" for half of
the year as a decimal number as well as "%{n}f"
for fractional seconds with n digits. For
example, *2016-10-13 09:15:23.456* with
tickformat "%H~%M~%S.%2f" would display
"09~15~23.46"
ticklen
Sets the tick length (in px).
tickprefix
Sets a tick label prefix.
ticks
Determines whether ticks are drawn or not. If
"", this axis' ticks are not drawn. If
"outside" ("inside"), this axis' are drawn
outside (inside) the axis lines.
ticksuffix
Sets a tick label suffix.
tickvals
Sets the values at which ticks on this axis
appear. Defaults to `realaxis.tickvals` plus
the same as negatives and zero.
tickvalssrc
Sets the source reference on Chart Studio Cloud
for `tickvals`.
tickwidth
Sets the tick width (in px).
visible
A single toggle to hide the axis while
preserving interaction like dragging. Default
is true when a cheater plot is present on the
axis, otherwise false
""",
),
**kwargs,
)
|
{
"content_hash": "163d21f0fab33cde50510b96b920ca89",
"timestamp": "",
"source": "github",
"line_count": 121,
"max_line_length": 79,
"avg_line_length": 44.49586776859504,
"alnum_prop": 0.524888558692422,
"repo_name": "plotly/plotly.py",
"id": "ff2661dd8bda455d437e1de4383e3c24eaf12bde",
"size": "5384",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "packages/python/plotly/plotly/validators/layout/smith/_imaginaryaxis.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "545"
},
{
"name": "JavaScript",
"bytes": "2074"
},
{
"name": "PostScript",
"bytes": "565328"
},
{
"name": "Python",
"bytes": "31506317"
},
{
"name": "TypeScript",
"bytes": "71337"
}
],
"symlink_target": ""
}
|
"""
This module solves (certain types of) cryptarithmetic problems.
"""
from csp import *
import itertools
import re
class Cryptarithmetic(ConstraintSatisfactionProblem):
"""
The cryptarithmetic solver.
This one only solves decimal domain puzzles, and only puzzles with two
addends and a sum.
"""
def __init__(self, puzzle):
"""
Constructor.
Args:
puzzle (str): a cryptarithmetic with two addends and a sum, on one
line
Raises:
InvalidPuzzleException: The puzzle is invalid.
"""
ConstraintSatisfactionProblem.__init__(self)
# Extract the words from the puzzle.
self.puzzle = puzzle
        match = re.match(r'(\w+)\s*\+\s*(\w+)\s*=\s*(\w+)', puzzle)
if match:
(addend1, addend2, the_sum) = match.groups()
else:
raise InvalidPuzzleException()
m = max(len(the_sum), len(addend2) + 2)
self.puzzle = ''.join([addend1.rjust(m), "\n", ("+ " + addend2).rjust(m),
"\n", '-' * m, "\n", the_sum.rjust(m)])
# Create a map: letter -> variable. Then create auxiliary
# variables and add them to the map.
self.variables = {char: CryptarithmeticVariable(self, char)
for char in puzzle if str.isalpha(char)}
if len(self.variables) > 10:
raise InvalidPuzzleException()
for i in (addend1, addend2, the_sum):
self.variables[i[0]].domain.discard(0)
for i in range(1, len(the_sum)):
name = 'aux' + str(i)
self.variables[name] = CryptarithmeticVariable(self, name, aux=True)
# Create the constraints.
for i in range(1, len(the_sum) + 1):
left = list()
if 'aux' + str(i - 1) in self.variables:
left.append(self.variables['aux' + str(i - 1)])
if i <= len(addend1):
left.append(self.variables[addend1[-i]])
if i <= len(addend2):
left.append(self.variables[addend2[-i]])
right = [self.variables[the_sum[-i]]]
if 'aux' + str(i) in self.variables:
right.append(self.variables['aux' + str(i)])
self.constraints.add(SumConstraint(left, right))
self.constraints.add(AllDifferentConstraint(
[var for var in self.variables.values() if not var.aux]))
def __str__(self):
p = self.puzzle[:]
for letter in self.variables:
p = p.replace(letter, str(self.variables[letter].value))
return self.puzzle + "\n\n" + p
class InvalidPuzzleException(Exception):
pass
class CryptarithmeticVariable(BaseVariable):
def __init__(self, csp, name, aux=False):
BaseVariable.__init__(self, csp, name, aux)
self.domain = set(range(10)) if not self.aux else set(range(2))
class SumConstraint(BaseConstraint):
"""
    A constraint of the form aux_(n-1) + A + B = C + 10 * aux_n
"""
def __init__(self, left_vars, right_vars):
"""
Constructor.
Args:
left_vars (list of Variable): the variables on the left side
of the constraint equation
right_vars (list of Variable): the variable on the right side
of the constraint equation
"""
BaseConstraint.__init__(self, left_vars + right_vars)
self.left_vars = left_vars
self.right_vars = right_vars
def is_satisfiable(self, variable, assignment):
result = False
old_domain = variable.domain
variable.domain = {assignment}
all_assignments = itertools.product(
itertools.product(*(var.domain for var in self.left_vars)),
itertools.product(*(var.domain for var in self.right_vars)))
try:
for combination in all_assignments:
left_sum = sum(x for x in combination[0])
right_sum = 0
for i in range(len(combination[1])):
right_sum += combination[1][i] * (10 ** self.right_vars[i].aux)
if left_sum == right_sum:
raise SumConstraint.FoundException
except SumConstraint.FoundException:
result = True
finally:
variable.domain = old_domain
return result
class FoundException(Exception): pass
def __repr__(self):
        left = ' + '.join(var.name for var in self.left_vars)
        right = self.right_vars[0].name
        if len(self.right_vars) == 2:
            right += ' + 10 * ' + self.right_vars[1].name
        return '[Constraint %s = %s]' % (left, right)
def main(puzzle):
c = Cryptarithmetic(puzzle)
if c.solve():
print(c)
else:
print("No solution.")
if __name__ == '__main__':
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("puzzle", help="Enter the cryptarithmetic in double quotes, on one line")
args = parser.parse_args()
main(args.puzzle)
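# Example invocation (illustrative):
#
#     python cryptarithmetic.py "SEND + MORE = MONEY"
#
# A correct run prints the puzzle twice, letters first and then digits, e.g.
# 9567 + 1085 = 10652 for the classic SEND + MORE = MONEY puzzle.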
|
{
"content_hash": "7831504e0fc3d8b7f3ebd957c2dde8ff",
"timestamp": "",
"source": "github",
"line_count": 153,
"max_line_length": 97,
"avg_line_length": 33.28104575163399,
"alnum_prop": 0.5581304006284368,
"repo_name": "jharris119/ConstraintSatisfactionProblem",
"id": "6a9dd4e2ed3fb5d89ed193aa27e4c28f12ec3cb8",
"size": "5092",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/cryptarithmetic.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "23978"
}
],
"symlink_target": ""
}
|
"""
sentry.db.models.manager
~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010-2014 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import, print_function
import logging
import six
import threading
import weakref
from django.conf import settings
from django.db import router
from django.db.models import Manager, Model
from django.db.models.signals import (
post_save, post_delete, post_init, class_prepared)
from django.utils.encoding import smart_text
from sentry import nodestore
from sentry.utils.cache import cache
from sentry.utils.hashlib import md5_text
from .query import create_or_update
__all__ = ('BaseManager',)
logger = logging.getLogger('sentry')
def __prep_value(model, key, value):
if isinstance(value, Model):
value = value.pk
else:
value = six.text_type(value)
return value
def __prep_key(model, key):
if key == 'pk':
return model._meta.pk.name
return key
def make_key(model, prefix, kwargs):
kwargs_bits = []
for k, v in sorted(six.iteritems(kwargs)):
k = __prep_key(model, k)
v = smart_text(__prep_value(model, k, v))
kwargs_bits.append('%s=%s' % (k, v))
kwargs_bits = ':'.join(kwargs_bits)
return '%s:%s:%s' % (
prefix,
model.__name__,
md5_text(kwargs_bits).hexdigest()
)
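# Illustrative sketch (hypothetical model name) of the key layout produced above,
# "<prefix>:<ModelName>:<md5 of sorted kwargs>":
#
#     make_key(Project, 'modelcache', {'slug': 'internal'})
#     # -> 'modelcache:Project:<md5 hexdigest>'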
class BaseManager(Manager):
lookup_handlers = {
'iexact': lambda x: x.upper(),
}
use_for_related_fields = True
def __init__(self, *args, **kwargs):
self.cache_fields = kwargs.pop('cache_fields', [])
self.cache_ttl = kwargs.pop('cache_ttl', 60 * 5)
self.cache_version = kwargs.pop('cache_version', None)
self.__local_cache = threading.local()
super(BaseManager, self).__init__(*args, **kwargs)
def _get_cache(self):
if not hasattr(self.__local_cache, 'value'):
self.__local_cache.value = weakref.WeakKeyDictionary()
return self.__local_cache.value
def _set_cache(self, value):
self.__local_cache.value = value
def _generate_cache_version(self):
return md5_text(
'&'.join(sorted(f.attname for f in self.model._meta.fields))
).hexdigest()[:3]
__cache = property(_get_cache, _set_cache)
def __getstate__(self):
d = self.__dict__.copy()
        # we can't serialize weakrefs
d.pop('_BaseManager__cache', None)
d.pop('_BaseManager__local_cache', None)
return d
def __setstate__(self, state):
self.__dict__.update(state)
self.__local_cache = weakref.WeakKeyDictionary()
def __class_prepared(self, sender, **kwargs):
"""
Given the cache is configured, connects the required signals for invalidation.
"""
post_save.connect(self.post_save, sender=sender, weak=False)
post_delete.connect(self.post_delete, sender=sender, weak=False)
if not self.cache_fields:
return
if not self.cache_version:
self.cache_version = self._generate_cache_version()
post_init.connect(self.__post_init, sender=sender, weak=False)
post_save.connect(self.__post_save, sender=sender, weak=False)
post_delete.connect(self.__post_delete, sender=sender, weak=False)
def __cache_state(self, instance):
"""
Updates the tracked state of an instance.
"""
if instance.pk:
self.__cache[instance] = {
f: self.__value_for_field(instance, f)
for f in self.cache_fields
}
def __post_init(self, instance, **kwargs):
"""
Stores the initial state of an instance.
"""
self.__cache_state(instance)
def __post_save(self, instance, **kwargs):
"""
Pushes changes to an instance into the cache, and removes invalid (changed)
lookup values.
"""
pk_name = instance._meta.pk.name
pk_names = ('pk', pk_name)
pk_val = instance.pk
for key in self.cache_fields:
if key in pk_names:
continue
# store pointers
value = self.__value_for_field(instance, key)
cache.set(
key=self.__get_lookup_cache_key(**{key: value}),
value=pk_val,
timeout=self.cache_ttl,
version=self.cache_version,
)
# Ensure we don't serialize the database into the cache
db = instance._state.db
instance._state.db = None
# store actual object
try:
cache.set(
key=self.__get_lookup_cache_key(**{pk_name: pk_val}),
value=instance,
timeout=self.cache_ttl,
version=self.cache_version,
)
except Exception as e:
logger.error(e, exc_info=True)
instance._state.db = db
# Kill off any keys which are no longer valid
if instance in self.__cache:
for key in self.cache_fields:
if key not in self.__cache[instance]:
continue
value = self.__cache[instance][key]
current_value = self.__value_for_field(instance, key)
if value != current_value:
cache.delete(
key=self.__get_lookup_cache_key(**{key: value}),
version=self.cache_version,
)
self.__cache_state(instance)
def __post_delete(self, instance, **kwargs):
"""
Drops instance from all cache storages.
"""
pk_name = instance._meta.pk.name
for key in self.cache_fields:
if key in ('pk', pk_name):
continue
# remove pointers
value = self.__value_for_field(instance, key)
cache.delete(
key=self.__get_lookup_cache_key(**{key: value}),
version=self.cache_version,
)
# remove actual object
cache.delete(
key=self.__get_lookup_cache_key(**{pk_name: instance.pk}),
version=self.cache_version,
)
def __get_lookup_cache_key(self, **kwargs):
return make_key(self.model, 'modelcache', kwargs)
def __value_for_field(self, instance, key):
"""
Return the cacheable value for a field.
ForeignKey's will cache via the primary key rather than using an
instance ref. This is needed due to the way lifecycle of models works
as otherwise we end up doing wasteful queries.
"""
if key == 'pk':
return instance.pk
field = instance._meta.get_field(key)
return getattr(instance, field.attname)
def contribute_to_class(self, model, name):
super(BaseManager, self).contribute_to_class(model, name)
class_prepared.connect(self.__class_prepared, sender=model)
def get_from_cache(self, **kwargs):
"""
Wrapper around QuerySet.get which supports caching of the
intermediate value. Callee is responsible for making sure
the cache key is cleared on save.
"""
if not self.cache_fields or len(kwargs) > 1:
return self.get(**kwargs)
key, value = next(six.iteritems(kwargs))
pk_name = self.model._meta.pk.name
if key == 'pk':
key = pk_name
# We store everything by key references (vs instances)
if isinstance(value, Model):
value = value.pk
# Kill __exact since it's the default behavior
if key.endswith('__exact'):
key = key.split('__exact', 1)[0]
if key in self.cache_fields or key == pk_name:
cache_key = self.__get_lookup_cache_key(**{key: value})
retval = cache.get(cache_key, version=self.cache_version)
if retval is None:
result = self.get(**kwargs)
# Ensure we're pushing it into the cache
self.__post_save(instance=result)
return result
            # If we didn't look up by pk we need to hit the referenced
            # key
if key != pk_name:
return self.get_from_cache(**{pk_name: retval})
if type(retval) != self.model:
if settings.DEBUG:
raise ValueError('Unexpected value type returned from cache')
logger.error('Cache response returned invalid value %r', retval)
return self.get(**kwargs)
if key == pk_name and int(value) != retval.pk:
if settings.DEBUG:
raise ValueError('Unexpected value returned from cache')
logger.error('Cache response returned invalid value %r', retval)
return self.get(**kwargs)
retval._state.db = router.db_for_read(self.model, **kwargs)
return retval
else:
return self.get(**kwargs)
def create_or_update(self, **kwargs):
return create_or_update(self.model, **kwargs)
def bind_nodes(self, object_list, *node_names):
object_node_list = []
for name in node_names:
object_node_list.extend((
(i, getattr(i, name))
for i in object_list
if getattr(i, name).id
))
node_ids = [n.id for _, n in object_node_list]
if not node_ids:
return
node_results = nodestore.get_multi(node_ids)
for item, node in object_node_list:
data = node_results.get(node.id) or {}
node.bind_data(data, ref=node.get_ref(item))
def uncache_object(self, instance_id):
pk_name = self.model._meta.pk.name
cache_key = self.__get_lookup_cache_key(**{pk_name: instance_id})
cache.delete(cache_key, version=self.cache_version)
def post_save(self, instance, **kwargs):
"""
Triggered when a model bound to this manager is saved.
"""
def post_delete(self, instance, **kwargs):
"""
Triggered when a model bound to this manager is deleted.
"""
|
{
"content_hash": "8aa7b226303d2286388be35d8541c778",
"timestamp": "",
"source": "github",
"line_count": 315,
"max_line_length": 86,
"avg_line_length": 32.58412698412698,
"alnum_prop": 0.5641075604053001,
"repo_name": "JackDanger/sentry",
"id": "527909a16f1b33ca2f078490a0c2a2e2672ee577",
"size": "10264",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/sentry/db/models/manager.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "583430"
},
{
"name": "HTML",
"bytes": "319622"
},
{
"name": "JavaScript",
"bytes": "624672"
},
{
"name": "Makefile",
"bytes": "2660"
},
{
"name": "Python",
"bytes": "6279717"
}
],
"symlink_target": ""
}
|
def log(msg):
    print msg
|
{
"content_hash": "e58019628758801dd96710d111bee231",
"timestamp": "",
"source": "github",
"line_count": 2,
"max_line_length": 13,
"avg_line_length": 14,
"alnum_prop": 0.6071428571428571,
"repo_name": "kevinb7/js2py",
"id": "f1120b00ee30370d25cf9e68c19cfcfd34db89ff",
"size": "46",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "test/console.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "185974"
},
{
"name": "Python",
"bytes": "2221"
}
],
"symlink_target": ""
}
|
import os
import shutil
from plenum.common.util import randomString
keepFilesInClientReset = [
'pool_transactions_sandbox',
'sovrin_config.py',
'sample',
'pool_transactions_local',
'pool_transactions_live'
]
def performSovrinBaseDirCleanup(baseDir):
backupDir = None
while True:
backupDir = baseDir + "-" + randomString(6)
if not os.path.exists(backupDir):
shutil.copytree(baseDir, backupDir)
print("\nSovrin base directory {} backed up at: {}".
format(baseDir, backupDir))
break
for filename in os.listdir(baseDir):
filepath = os.path.join(baseDir, filename)
if filename not in keepFilesInClientReset:
if os.path.isdir(filepath):
shutil.rmtree(filepath)
else:
os.remove(filepath)
return backupDir
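# Usage sketch (hypothetical path): back up the base directory under a random suffix, then
# remove everything except the whitelisted files above.
#
#     backup_dir = performSovrinBaseDirCleanup(os.path.expanduser("~/.sovrin"))
#     print("Backup kept at", backup_dir)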
|
{
"content_hash": "1aa9b92330293af19dc892fa2954e1a8",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 64,
"avg_line_length": 27.53125,
"alnum_prop": 0.6186152099886493,
"repo_name": "keenondrums/sovrin-node",
"id": "e646b0f4325b78e81a1d14b296a825f769cad1a5",
"size": "881",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sovrin_client/script_helper.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "3195"
},
{
"name": "Python",
"bytes": "1088655"
},
{
"name": "Rust",
"bytes": "25532"
},
{
"name": "Shell",
"bytes": "15720"
}
],
"symlink_target": ""
}
|
from sklearn2sql_heroku.tests.classification import generic as class_gen
class_gen.test_model("SGDClassifier" , "FourClass_100" , "duckdb")
|
{
"content_hash": "3cf22a963171223e923d61e1845bd84a",
"timestamp": "",
"source": "github",
"line_count": 4,
"max_line_length": 72,
"avg_line_length": 35.5,
"alnum_prop": 0.7816901408450704,
"repo_name": "antoinecarme/sklearn2sql_heroku",
"id": "3b322a07ba9f201c4d97343bee2999b7fd229e40",
"size": "142",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/classification/FourClass_100/ws_FourClass_100_SGDClassifier_duckdb_code_gen.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "507043"
},
{
"name": "Procfile",
"bytes": "37"
},
{
"name": "Python",
"bytes": "1021137"
},
{
"name": "R",
"bytes": "2521"
}
],
"symlink_target": ""
}
|
from sympy.core.basic import Basic, S, cache_it, cache_it_immutable
from sympy.core import oo, Rational, Pow
from sympy.core.methods import ArithMeths, RelMeths
class Order(Basic, ArithMeths, RelMeths):
"""
Represents O(f(x)) at the point x = 0.
Definition
==========
g(x) = O(f(x)) as x->0 if and only if
|g(x)|<=M|f(x)| near x=0 (1)
for some positive but finite M. An equivalent way of saying (1) is:
lim_{x->0} |g(x)/f(x)| < oo
Let's illustrate it on the following example:
sin x = x - x**3/3! + O(x**5)
where in this case O(x**5) = x**5/5! - x**7/7! + .... and the definition
of O means:
|x**5/5! - x**7/7! + ....| <= M|x**5| near x=0
or equivalently:
lim_{x->0} | (x**5/5! - x**7/7! + ....) / x**5| < oo
which surely is true, because
lim_{x->0} | (x**5/5! - x**7/7! + ....) / x**5| = 1/5!
So intuitively O(x**3) means: all terms x**3, x**4 and
higher. But not x**2, x or 1.
Examples:
=========
>>> from sympy import *
>>> x = Symbol("x")
>>> O(x)
O(x)
>>> O(x)*x
O(x**2)
>>> O(x)-O(x)
O(x)
External links
--------------
U{Big O notation<http://en.wikipedia.org/wiki/Big_O_notation>}
Properties:
===========
g(x) = O(f(x)) as x->0 <-> |g(x)|<=M|f(x)| near x=0 <-> lim_{x->0} |g(x)/f(x)| < oo
g(x,y) = O(f(x,y)) <-> lim_{x,y->0} |g(x,y)/f(x,y)| < oo, we'll assume that limits commute.
Notes:
======
In O(f(x),x) the expression f(x) is assumed to have a leading term.
O(f(x),x) is automatically transformed to O(f(x).as_leading_term(x),x).
O(expr*f(x),x) is O(f(x),x)
O(expr,x) is O(1)
O(0, x) is 0.
Multivariate O is also supported:
O(f(x,y),x,y) is transformed to O(f(x,y).as_leading_term(x,y).as_leading_term(y), x, y)
If O is used with only expression argument then the symbols are
all symbols in the expression.
"""
precedence = Basic.Apply_precedence
_cache = {}
@cache_it_immutable
def __new__(cls, expr, *symbols, **assumptions):
expr = Basic.sympify(expr).expand(trig=True)
if isinstance(expr, Basic.NaN):
return S.NaN
if symbols:
symbols = map(Basic.sympify, symbols)
else:
symbols = list(expr.atoms(Basic.Symbol))
symbols.sort(Basic.compare)
if isinstance(expr, Order):
new_symbols = list(expr.symbols)
for s in symbols:
if s not in new_symbols:
new_symbols.append(s)
if len(new_symbols)==len(expr.symbols):
return expr
symbols = new_symbols
elif symbols:
symbol_map = {}
new_symbols = []
for s in symbols:
if isinstance(s, Basic.Symbol):
new_symbols.append(s)
continue
z = Basic.Symbol('z',dummy=True)
x1,s1 = s.solve4linearsymbol(z)
expr = expr.subs(x1,s1)
symbol_map[z] = s
new_symbols.append(z)
if symbol_map:
r = Order(expr, *new_symbols, **assumptions)
expr = r.expr.subs_dict(symbol_map)
symbols = []
for s in r.symbols:
if symbol_map.has_key(s):
symbols.append(symbol_map[s])
else:
symbols.append(s)
else:
if isinstance(expr, Basic.Add):
lst = expr.extract_leading_order(*symbols)
expr = Basic.Add(*[f.expr for (e,f) in lst])
else:
expr = expr.as_leading_term(*symbols)
coeff, terms = expr.as_coeff_terms()
if isinstance(coeff, Basic.Zero):
return coeff
expr = Basic.Mul(*[t for t in terms if t.has(*symbols)])
elif not isinstance(expr, Basic.Zero):
expr = Basic.One()
if isinstance(expr, Basic.Zero):
return expr
# remove unused symbols
#symbols = tuple([s for s in symbols if expr.has(s)])
symbols = tuple(symbols)
# look Order symbols from cache, TODO: make cache a dictionary
cache = Order._cache.get(symbols,[])
for o in cache:
if o.expr==expr:
return o
# Order symbols are assumed to be close to 0 from right:
for s in symbols:
assume_dict = {}
#if not s.is_infinitesimal:
assume_dict['infinitesimal'] = True
#XXX This causes problems, that it changes the assumption in the
# symbol, outside the scope of Order and breaks code. Don't know
# why
# But sometimes it's necessary for simplifications...
# well, how to solve that? I don't know...
# ok - so the problem is in caching - in core/function.py:63
# see the issue 369
#if s.is_positive is None:
# assume_dict['positive'] = True
assume_dict['positive'] = True
#
if assume_dict:
s.assume(**assume_dict)
# create Order instance:
obj = Basic.__new__(cls, expr, *symbols, **assumptions)
# cache univariate Order symbols:
if len(symbols)>1:
for s in symbols:
Order(expr, s)._get_cache_index(s)
elif symbols:
obj._get_cache_index(symbols[0])
# cache multivariate Order symbols:
cache.append(obj)
Order._cache[symbols] = cache
return obj
def _get_cache_index(obj, symbol):
if len(obj.symbols)>1:
obj = Order(obj.expr, symbol)
elif not obj.symbols:
obj = Order(obj.expr, symbol)
cache = Order._cache.get(symbol,[])
try: return cache.index(obj)
except ValueError: pass
i = -1
for o in cache:
i += 1
l = Order.find_limit(obj.expr/o.expr, symbol)
if l.is_unbounded:
cache.insert(i,obj)
break
if l.is_bounded:
continue
# If l.is_number is True, then l.is_bounded above should also be
# True, imho. It's probably a bug in assumptions.
if l.is_number:
continue
#try the new limit algorithm:
#from sympy import limit
#l = limit(obj.expr/o.expr, symbol, 0)
#if l.is_unbounded:
# cache.insert(i,obj)
# break
#if l.is_number:
# continue
print obj.expr/o.expr,l
raise NotImplementedError("failed to determine the inclusion relation between %s and %s (got lim=%s)" % (o, obj, l))
else:
cache.append(obj)
Order._cache[symbol] = cache
return cache.index(obj)
@classmethod
def find_limit(cls, f, x):
"""Basically identical to:
return limit(f, x, 0, dir="+")
but first trying some easy cases (like x**2) using heuristics, to avoid
infinite recursion. This is only needed in the Order class and series
expansion (that shouldn't rely on the Gruntz algorithm too much),
that's why find_limit() is defined here.
"""
if isinstance(f, Pow):
if f[0] == x:
if isinstance(f[1], Rational):
if f[1] > 0:
return Rational(0)
else:
return oo
if f[1].is_number:
if f[1].evalf() > 0:
return Rational(0)
else:
return oo
# you can use both limits here - the first is a lot faster, the second
# one is a lot slower, but more correct. We need to speed it up, before
# we can switch to the second one.
return f.limit(x, 0, direction='<')
#from sympy import limit
#return limit(f, x, 0, dir="+")
@property
def expr(self):
return self._args[0]
@property
def symbols(self):
return self._args[1:]
def tostr(self, level = 0):
if len(self.symbols) <= 1:
r = 'O(%s)' % self.expr.tostr()
else:
r = 'O(%s)' % (', '.join([s.tostr() for s in self]))
if self.precedence <= level:
r = '(%s)' % (r)
return r
def _eval_power(b, e):
if isinstance(e, Basic.Number):
return Order(b.expr ** e, *b.symbols)
return
def as_expr_symbols(self, order_symbols):
if order_symbols is None:
order_symbols = self.symbols
else:
for s in self.symbols:
if s not in order_symbols:
order_symbols = order_symbols + (s,)
return self.expr, order_symbols
@cache_it_immutable
def contains(self, expr):
"""
Return True if expr belongs to Order(self.expr, *self.symbols).
Return False if self belongs to expr.
Return None if the inclusion relation cannot be determined (e.g. when self and
expr have different symbols).
"""
if isinstance(expr, Basic.Zero):
return True
if isinstance(expr, Order):
if self.symbols and expr.symbols:
common_symbols = tuple([s for s in self.symbols if s in expr.symbols])
elif self.symbols:
common_symbols = self.symbols
else:
common_symbols = expr.symbols
if not common_symbols:
if not (self.symbols or expr.symbols): # O(1),O(1)
return True
return None
r = None
for s in common_symbols:
i1 = self._get_cache_index(s)
i2 = expr._get_cache_index(s)
if r is None:
r = (i1<=i2)
else:
if r != (i1<=i2):
return None
return r
obj = Order(expr, *self.symbols)
return self.contains(obj)
def _eval_subs(self, old, new):
if self==old:
return new
if isinstance(old, Basic.Symbol) and old in self.symbols:
i = list(self.symbols).index(old)
if isinstance(new, Basic.Symbol):
return Order(self.expr.subs(old, new), *(self.symbols[:i]+(new,)+self.symbols[i+1:]))
return Order(self.expr.subs(old, new), *(self.symbols[:i]+self.symbols[i+1:]))
return Order(self.expr.subs(old, new), *self.symbols)
def _calc_splitter(self, d):
return Basic.Zero()
Basic.singleton['O'] = lambda : Order
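# Usage sketch (illustrative, assuming this legacy API): contains() implements the inclusion
# relation from the class docstring, i.e. g belongs to O(f) iff lim_{x->0} |g/f| < oo.
#
#     >>> x = Symbol('x')
#     >>> Order(x**2).contains(x**3)   # lim x**3/x**2 = 0 < oo
#     True
#     >>> Order(x**2).contains(x)      # lim x/x**2 -> oo
#     False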
|
{
"content_hash": "03c4d16cafa591f6ce33355a6e22b64b",
"timestamp": "",
"source": "github",
"line_count": 337,
"max_line_length": 128,
"avg_line_length": 32.712166172106826,
"alnum_prop": 0.49909288824383163,
"repo_name": "certik/sympy-oldcore",
"id": "250a4c31830a9b46c14910275c2c1ee671b338d2",
"size": "11024",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sympy/series/order.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
}
|
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "readthedocs.settings.dev")
sys.path.append(os.getcwd())
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
{
"content_hash": "7eced286e2ed2cee4a4fb68ffd5970ab",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 79,
"avg_line_length": 26.9,
"alnum_prop": 0.7063197026022305,
"repo_name": "safwanrahman/readthedocs.org",
"id": "966dad77845753de3dffeb9c3c18e759ceaafc83",
"size": "291",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "manage.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "4515"
},
{
"name": "CSS",
"bytes": "63656"
},
{
"name": "HTML",
"bytes": "192701"
},
{
"name": "JavaScript",
"bytes": "425566"
},
{
"name": "Makefile",
"bytes": "4594"
},
{
"name": "Python",
"bytes": "1337480"
},
{
"name": "Shell",
"bytes": "358"
}
],
"symlink_target": ""
}
|
import django_filters
from waldur_core.structure import filters as structure_filters
from . import models
class ImageFilter(structure_filters.BaseServicePropertyFilter):
class Meta:
model = models.Image
fields = structure_filters.BaseServicePropertyFilter.Meta.fields + ('region',)
region = django_filters.UUIDFilter(name='region__uuid')
class SizeFilter(structure_filters.BaseServicePropertyFilter):
class Meta:
model = models.Size
fields = structure_filters.BaseServicePropertyFilter.Meta.fields + ('region',)
region = django_filters.UUIDFilter(name='regions__uuid')
class RegionFilter(structure_filters.BaseServicePropertyFilter):
class Meta(structure_filters.BaseServicePropertyFilter.Meta):
model = models.Region
|
{
"content_hash": "9569fe3a7fbd9f3c74a566ce187d9333",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 86,
"avg_line_length": 27.275862068965516,
"alnum_prop": 0.7496839443742098,
"repo_name": "opennode/nodeconductor-aws",
"id": "5c9352636d592968608e754e2844c076b9e7227e",
"size": "791",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "src/waldur_aws/filters.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "96648"
}
],
"symlink_target": ""
}
|
from tests import unittest
import itertools
from nose.plugins.attrib import attr
import botocore.session
from botocore.exceptions import ClientError
class TestEC2(unittest.TestCase):
def setUp(self):
self.session = botocore.session.get_session()
self.client = self.session.create_client(
'ec2', region_name='us-west-2')
def test_can_make_request(self):
# Basic smoke test to ensure we can talk to ec2.
result = self.client.describe_availability_zones()
zones = list(
sorted(a['ZoneName'] for a in result['AvailabilityZones']))
self.assertTrue(
set(['us-west-2a', 'us-west-2b', 'us-west-2c']).issubset(zones))
def test_get_console_output_handles_error(self):
        # Want to ensure the underlying ClientError is propagated
# on error.
with self.assertRaises(ClientError):
self.client.get_console_output(InstanceId='i-12345')
class TestEC2Pagination(unittest.TestCase):
def setUp(self):
self.session = botocore.session.get_session()
self.client = self.session.create_client(
'ec2', region_name='us-west-2')
def test_can_paginate(self):
# Using an operation that we know will paginate.
paginator = self.client.get_paginator(
'describe_reserved_instances_offerings')
pages = paginator.paginate()
results = list(itertools.islice(pages, 0, 3))
self.assertEqual(len(results), 3)
self.assertTrue(results[0]['NextToken'] != results[1]['NextToken'])
def test_can_paginate_with_page_size(self):
# Using an operation that we know will paginate.
paginator = self.client.get_paginator(
'describe_reserved_instances_offerings')
pages = paginator.paginate(PaginationConfig={'PageSize': 1})
results = list(itertools.islice(pages, 0, 3))
self.assertEqual(len(results), 3)
for parsed in results:
reserved_inst_offer = parsed['ReservedInstancesOfferings']
# There should be no more than one reserved instance
# offering on each page.
self.assertLessEqual(len(reserved_inst_offer), 1)
def test_can_fall_back_to_old_starting_token(self):
# Using an operation that we know will paginate.
paginator = self.client.get_paginator(
'describe_reserved_instances_offerings')
pages = paginator.paginate(PaginationConfig={'NextToken': 'None___1'})
try:
results = list(itertools.islice(pages, 0, 3))
self.assertEqual(len(results), 3)
self.assertTrue(results[0]['NextToken'] != results[1]['NextToken'])
except ValueError:
self.fail("Old style paginator failed.")
if __name__ == '__main__':
unittest.main()
|
{
"content_hash": "1dbdcab3001a7d76362ad8b396145fcc",
"timestamp": "",
"source": "github",
"line_count": 74,
"max_line_length": 79,
"avg_line_length": 38.229729729729726,
"alnum_prop": 0.6401555319901026,
"repo_name": "pplu/botocore",
"id": "cc8dcf0908f8addae3bac8425dbcdaa5fc272ef9",
"size": "3395",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "tests/integration/test_ec2.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Gherkin",
"bytes": "23824"
},
{
"name": "Python",
"bytes": "2691062"
}
],
"symlink_target": ""
}
|
from datetime import timedelta
from django.core.files.storage import default_storage
from django.utils import timezone
from freezegun import freeze_time
from ...webhook.event_types import WebhookEventAsyncType
from ..models import EventDelivery, EventDeliveryAttempt, EventPayload
from ..tasks import (
delete_event_payloads_task,
delete_files_from_storage_task,
delete_from_storage_task,
)
def test_delete_from_storage_task(product_with_image, media_root):
# given
path = product_with_image.media.first().image.name
assert default_storage.exists(path)
# when
delete_from_storage_task(path)
# then
assert not default_storage.exists(path)
def test_delete_from_storage_task_file_that_not_exists(media_root):
"""Ensure method not fail when trying to remove not existing file."""
# given
path = "random/test-path"
assert not default_storage.exists(path)
# when
delete_from_storage_task(path)
def test_delete_event_payloads_task(webhook, settings):
delete_period = settings.EVENT_PAYLOAD_DELETE_PERIOD
start_time = timezone.now()
before_delete_period = start_time - delete_period - timedelta(seconds=1)
after_delete_period = start_time - delete_period + timedelta(seconds=1)
for creation_time in [before_delete_period, after_delete_period]:
with freeze_time(creation_time):
payload = EventPayload.objects.create(payload='{"key": "data"}')
delivery = EventDelivery.objects.create(
event_type=WebhookEventAsyncType.ANY,
payload=payload,
webhook=webhook,
)
with freeze_time(creation_time + timedelta(seconds=2)):
EventDeliveryAttempt.objects.create(delivery=delivery)
with freeze_time(start_time):
delete_event_payloads_task()
assert EventPayload.objects.count() == 1
assert EventDelivery.objects.count() == 1
assert EventDeliveryAttempt.objects.count() == 1
def test_delete_files_from_storage_task(
product_with_image, variant_with_image, media_root
):
# given
path_1 = product_with_image.media.first().image.path
path_2 = variant_with_image.media.first().image.path
assert default_storage.exists(path_1)
assert default_storage.exists(path_2)
# when
delete_files_from_storage_task([path_1, path_2])
# then
assert not default_storage.exists(path_1)
assert not default_storage.exists(path_2)
def test_delete_files_from_storage_task_files_not_existing_files(media_root):
"""Ensure method not fail when trying to remove not existing file."""
# given
path = "random/test-path"
path_2 = "random/test-path-2"
assert not default_storage.exists(path)
assert not default_storage.exists(path_2)
# when
delete_files_from_storage_task([path, path_2])
|
{
"content_hash": "4d3693d8ed359e654bf578f42db03528",
"timestamp": "",
"source": "github",
"line_count": 88,
"max_line_length": 77,
"avg_line_length": 32.35227272727273,
"alnum_prop": 0.6972251492799438,
"repo_name": "mociepka/saleor",
"id": "9bc8b1fd4c1d3a77d18f1e1b927772f08b31b85d",
"size": "2847",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "saleor/core/tests/test_tasks.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Dockerfile",
"bytes": "2228"
},
{
"name": "HTML",
"bytes": "249248"
},
{
"name": "Procfile",
"bytes": "290"
},
{
"name": "Python",
"bytes": "12686831"
},
{
"name": "Shell",
"bytes": "439"
}
],
"symlink_target": ""
}
|
from enable.component_editor import ComponentEditor
from pyface.tasks.traits_dock_pane import TraitsDockPane
from pyface.tasks.traits_task_pane import TraitsTaskPane
from traits.api import Button, Bool, Int, Float
from traitsui.api import View, Item, UItem, VGroup, HGroup, spring, Tabbed
from pychron.core.ui.lcd_editor import LCDEditor
from pychron.envisage.icon_button_editor import icon_button_editor
class ControlPane(TraitsDockPane):
name = "Controls"
id = "pychron.ldeofurnace.controls"
extract_value = Float
extract_button = Button("Extract")
dump_sample_number = Int
dump_sample_button = Button("Dump")
# jitter_button = Button
# jitter_label = Str('Start')
# jittering = Bool
# configure_jitter_button = Button
# refresh_states_button = Button('Refresh')
set_home_button = Button("Set Home")
toggle_advanced_view_button = Button
_advanced_view_state = Bool(False)
disable_button = Button
motor_stop_button = Button
clear_sample_states_button = Button("Clear Dumped Samples")
def _motor_stop_button_fired(self):
self.model.stop_motors()
def _set_home_button_fired(self):
self.model.furnace_maset_home()
def _disable_button_fired(self):
self.model.stop_motors() # just refers to motor stop function for now
def _dump_sample_button_fired(self):
self.model.drop_sample(self.dump_sample_number)
def _extract_button_fired(self):
self.model.extract(self.extract_value, "percent")
# def _jitter_button_fired(self):
# if not self.jittering:
# self.model.start_jitter_feeder()
# self.jitter_label = 'Stop'
# else:
# self.model.stop_jitter_feeder()
# self.jitter_label = 'Start'
# self.jittering = not self.jittering
#
# def _configure_jitter_button_fired(self):
# self.model.configure_jitter_feeder()
def _toggle_advanced_view_button_fired(self):
self._advanced_view_state = not self._advanced_view_state
def _refresh_states_button_fired(self):
self.model.refresh_states()
def _clear_sample_states_button_fired(self):
self.model.clear_sample_states()
def trait_context(self):
return {"object": self.model, "pane": self, "manager": self.model}
def traits_view(self):
c_grp = VGroup(
# HGroup(Item('setpoint'),
# UItem('water_flow_state', editor=LEDEditor(label='H2O Flow')),
# spring, icon_button_editor('pane.disable_button', 'cancel')),
VGroup(UItem("output_percent_readback", editor=LCDEditor())),
icon_button_editor(
"start_record_button",
"media-record",
tooltip="Start recording",
enabled_when="not _recording",
),
icon_button_editor(
"stop_record_button",
"media-playback-stop",
tooltip="Stop recording",
enabled_when="_recording",
),
label="Controller",
show_border=True,
)
power_grp = HGroup(
UItem(
"pane.extract_value",
width=50,
enabled_when="furnace_enabled",
tooltip="Power setting for furnace (0-100%)",
),
UItem(
"pane.extract_button",
enabled_when="furnace_enabled",
tooltip="Send the value to the furnace",
),
show_border=True,
label="Furnace Power",
)
# jitter_grp = HGroup(UItem('pane.jitter_button', editor=ButtonEditor(label_value='pane.jitter_label')),
# icon_button_editor('pane.configure_jitter_button', 'cog', tooltip='Configure Jitter'),
# show_border=True, label='Jitter')
dump_grp = HGroup(
UItem(
"pane.dump_sample_number",
width=50,
enabled_when="dump_sample_enabled",
tooltip="Sample number to dump",
),
UItem(
"pane.dump_sample_button",
enabled_when="dump_sample_enabled",
tooltip="Execute the complete sample loading procedure",
),
UItem("pane.clear_sample_states_button"),
show_border=True,
label="Dump",
)
# status_grp = HGroup(CustomLabel('status_txt', size=14))
d1 = VGroup(power_grp, dump_grp)
d2 = VGroup(
# UItem('pane.refresh_states_button'),
UItem("dumper_canvas", editor=ComponentEditor())
)
d_grp = HGroup(d1, d2, label="Dumper", show_border=True)
# v_grp = VGroup(UItem('video_canvas', editor=VideoComponentEditor()),
# visible_when='video_enabled',
# label='Camera')
g_grp = VGroup(
Item("graph_scan_width", label="Scan Width (mins)"),
HGroup(
Item("graph_scale", label="Scale"),
Item("graph_y_auto", label="Autoscale Y"),
Item(
"graph_ymax",
label="Max",
format_str="%0.3f",
enabled_when="not graph_y_auto",
),
Item(
"graph_ymin",
label="Min",
format_str="%0.3f",
enabled_when="not graph_y_auto",
),
),
HGroup(
icon_button_editor(
"clear_button", "clear", tooltip="Clear and reset graph"
),
spring,
),
HGroup(
icon_button_editor(
"start_record_button",
"media-record",
tooltip="Start recording",
enabled_when="not _recording",
),
icon_button_editor(
"stop_record_button",
"media-playback-stop",
tooltip="Stop recording",
enabled_when="_recording",
),
icon_button_editor(
"add_marker_button", "flag", enabled_when="_recording"
),
show_border=True,
label="Record Scan",
),
label="Graph",
)
v = View(VGroup(c_grp, HGroup(Tabbed(d_grp, g_grp))))
return v
class FurnacePane(TraitsTaskPane):
def trait_context(self):
return {"object": self.model, "pane": self, "manager": self.model}
def traits_view(self):
canvas_grp = VGroup(
# HGroup(UItem('stage_manager.stage_map_name', editor=EnumEditor(name='stage_manager.stage_map_names')),
# spring),
UItem("canvas", style="custom", editor=ComponentEditor())
)
v = View(VGroup(UItem("graph", style="custom"), canvas_grp))
return v
# ============= EOF =============================================
|
{
"content_hash": "1f7ec5d61e0a1fc110a96ce342fb9a4a",
"timestamp": "",
"source": "github",
"line_count": 210,
"max_line_length": 116,
"avg_line_length": 34.32857142857143,
"alnum_prop": 0.521986405881537,
"repo_name": "NMGRL/pychron",
"id": "fa2f8c2220acd5b7e6b24fbad960ebfc8af914de",
"size": "7943",
"binary": false,
"copies": "2",
"ref": "refs/heads/main",
"path": "pychron/furnace/tasks/ldeo/panes.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "128"
},
{
"name": "C++",
"bytes": "3706"
},
{
"name": "CSS",
"bytes": "263"
},
{
"name": "Cython",
"bytes": "1692"
},
{
"name": "Fortran",
"bytes": "455875"
},
{
"name": "HTML",
"bytes": "46796"
},
{
"name": "Mako",
"bytes": "412"
},
{
"name": "Processing",
"bytes": "11421"
},
{
"name": "Python",
"bytes": "10773692"
},
{
"name": "Shell",
"bytes": "1003"
}
],
"symlink_target": ""
}
|
from matplotlib import pyplot as plt
from matplotlib import animation, colors
class Display(object):
def __init__(self, mapa, values, paints, update):
self.__make_cmap(paints, values)
self.update = update
self.__show(mapa)
def __make_cmap(self, paints, values):
self.cmap = colors.ListedColormap(paints)
bounds = []
        for i in range(len(values) - 1):
a = values[i]
b = values[i+1]
x = (a+b)/2.
bounds += [a, x]
bounds.append(b)
self.norm = colors.BoundaryNorm(bounds, self.cmap.N)
def __show(self, mapa):
self.fig = plt.figure()
self.im = plt.imshow(mapa, interpolation='nearest', cmap=self.cmap, norm=self.norm)
self.ani = animation.FuncAnimation(self.fig, self.__updateAnimation, frames=None, interval=20, blit=True)
plt.axis('off')
#self.__record()
plt.show()
    def __updateAnimation(self, *args):
        # Fetch the next frame from the caller-supplied update callback
        # (renamed from `map` to avoid shadowing the built-in).
        frame = self.update()
        self.im.set_array(frame)
        return self.im,
def __record(self):
self.ani.save('movie.mp4', fps=15, extra_args=['-vcodec', 'libx264'])
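# --- Illustrative usage sketch (not part of the original module) ---
# Display expects `values` (the distinct cell states), `paints` (one
# color per state) and an `update` callback returning the next map.
# Everything below is a hypothetical demo, not project code:
#
# import numpy as np
#
# demo_map = np.random.randint(0, 2, size=(50, 50))
#
# def next_map():
#     # Flip ten random cells each frame.
#     rows = np.random.randint(0, 50, size=10)
#     cols = np.random.randint(0, 50, size=10)
#     demo_map[rows, cols] ^= 1
#     return demo_map
#
# Display(demo_map, values=[0, 1], paints=['white', 'black'], update=next_map)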
|
{
"content_hash": "754ec9e62d3a318713aff912da2932fa",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 113,
"avg_line_length": 32.361111111111114,
"alnum_prop": 0.5768240343347639,
"repo_name": "kotwgarnku/predatory-prey-simulation",
"id": "828810f240e4a0af957caba9dc046f8574cd49a1",
"size": "1200",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "display.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "5253"
}
],
"symlink_target": ""
}
|
from swgpy.object import *
def create(kernel):
result = Tangible()
result.template = "object/tangible/deed/event_perk/shared_rsf_2x10_honorguard_deed.iff"
result.attribute_template_id = 2
result.stfName("event_perk","rsf_2x10_honorguard_deed_name")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result
|
{
"content_hash": "f58fbac3da4ed07a10d7278e47b617b6",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 88,
"avg_line_length": 26,
"alnum_prop": 0.7100591715976331,
"repo_name": "obi-two/Rebelion",
"id": "bd521ea93f04a4bcad2639cd2a2523245e70794d",
"size": "483",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "data/scripts/templates/object/tangible/deed/event_perk/shared_rsf_2x10_honorguard_deed.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "11818"
},
{
"name": "C",
"bytes": "7699"
},
{
"name": "C++",
"bytes": "2293610"
},
{
"name": "CMake",
"bytes": "39727"
},
{
"name": "PLSQL",
"bytes": "42065"
},
{
"name": "Python",
"bytes": "7499185"
},
{
"name": "SQLPL",
"bytes": "41864"
}
],
"symlink_target": ""
}
|
from networking_vsphere.tests import base
from networking_vsphere.utils import error_util
class TestVimException(base.TestCase):
def test_str(self):
exc = Exception("Test Exception")
exception_summary = "Test Summary "
vim_excep = error_util.VimException(exception_summary, exc)
self.assertEqual(str(exception_summary) + str(exc), str(vim_excep))
class TestVimFaultException(base.TestCase):
def test_str(self):
exc = Exception("Test Exception")
fault_list = ["NotAuthenticated"]
vim_fault = error_util.VimFaultException(fault_list, exc)
self.assertEqual(str(exc), str(vim_fault))
|
{
"content_hash": "cb8cf2d592a26aea472e9f9b44f1405a",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 75,
"avg_line_length": 32.9,
"alnum_prop": 0.6914893617021277,
"repo_name": "Mirantis/vmware-dvs",
"id": "55d1756029e2e8a56c633a5b27cd78c847313f1e",
"size": "1319",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "networking_vsphere/tests/unit/utils/test_error_util.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "1189969"
},
{
"name": "Shell",
"bytes": "11512"
}
],
"symlink_target": ""
}
|
__author__ = "Martin Felder"
__version__ = '$Id$'
from pylab import figure, ioff, clf, contourf, ion, draw, show
from pybrain.utilities import percentError
from pybrain.tools.shortcuts import buildNetwork
from pybrain.supervised.trainers import BackpropTrainer
from pybrain.structure.modules import SoftmaxLayer
from datasets import generateGridData, generateClassificationData, plotData
# load the training data set
trndata = generateClassificationData(250)
# neural networks work better if classes are encoded using
# one output neuron per class
trndata._convertToOneOfMany( bounds=[0,1] )
# same for the independent test data set
tstdata = generateClassificationData(100)
tstdata._convertToOneOfMany( bounds=[0,1] )
# build a feed-forward network with 5 hidden units, plus
# a corresponding trainer
fnn = buildNetwork( trndata.indim, 5, trndata.outdim, outclass=SoftmaxLayer )
trainer = BackpropTrainer( fnn, dataset=trndata, momentum=0.1, verbose=True, weightdecay=0.01)
# generate a grid of data points for visualization
griddata, X, Y = generateGridData([-3.,6.,0.2],[-3.,6.,0.2])
# repeat 20 times
for i in range(20):
# train the network for 1 epoch
trainer.trainEpochs( 1 )
# evaluate the result on the training and test data
trnresult = percentError( trainer.testOnClassData(),
trndata['class'] )
tstresult = percentError( trainer.testOnClassData(
dataset=tstdata ), tstdata['class'] )
# print the result
print "epoch: %4d" % trainer.totalepochs, \
" train error: %5.2f%%" % trnresult, \
" test error: %5.2f%%" % tstresult
# run our grid data through the FNN, get the most likely class
# and shape it into an array
out = fnn.activateOnDataset(griddata)
out = out.argmax(axis=1)
out = out.reshape(X.shape)
# plot the test data and the underlying grid as a filled contour
figure(1)
ioff() # interactive graphics off
clf()
# plot the datapoints
plotData(tstdata)
# overlay a contour plot of the functional margin
if out.max()!=out.min():
CS = contourf(X, Y, out)
ion() # interactive graphics on
draw() # update the plot
# show the plot until user kills it
ioff()
show()
|
{
"content_hash": "124ccd10031897bc7e18ee4b3f79f28a",
"timestamp": "",
"source": "github",
"line_count": 68,
"max_line_length": 94,
"avg_line_length": 33.38235294117647,
"alnum_prop": 0.6951541850220264,
"repo_name": "fxsjy/pybrain",
"id": "1ecaf50ecdcbbbe91117b22e033fd90de960d443",
"size": "2352",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "examples/supervised/neuralnets+svm/example_fnn.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "411"
},
{
"name": "C++",
"bytes": "9048"
},
{
"name": "JavaScript",
"bytes": "19227"
},
{
"name": "Python",
"bytes": "1326594"
},
{
"name": "Shell",
"bytes": "148"
}
],
"symlink_target": ""
}
|
import paho.mqtt.client as mqttc
import time
import random
# The callback for when the client receives a CONNACK response from the server.
def on_connect(client, userdata, flags, rc):
print("Connected with result code "+str(rc))
# Subscribing in on_connect() means that if we lose the connection and
# reconnect then subscriptions will be renewed.
client.user_data_set("ы" * random.randint(1, 5))
client.subscribe("$SYS/#")
# The callback for when a PUBLISH message is received from the server.
def on_message(client, userdata, msg):
print(msg.topic+" "+str(msg.payload))
# Module-level so the publish callback can read the last value sent from
# main(); the callback below would otherwise hit a NameError, since the
# value is assigned inside main()'s loop.
temperature = 0
def on_publish_custom(client, userdata, mid):
    print("--- M E S S A G E ---\n\ttopic: {}\n\t  msg: {}\n\t  ыыы: {}".
          format("paho/temperature", temperature, userdata))
def main():
client = mqttc.Client(userdata="ОЙ")
client.on_connect = on_connect
client.on_message = on_message
client.on_publish = on_publish_custom
client.connect("localhost", 1883, 60)
# Blocking call that processes network traffic, dispatches callbacks and
# handles reconnecting.
# Other loop*() functions are available that give a threaded interface and a
# manual interface.
    client.loop_start()
    global temperature
    while True:
        time.sleep(5)
        temperature = random.randint(0, 273)
        client.publish("paho/temperature", payload=temperature)
if __name__=='__main__':
main()
|
{
"content_hash": "a0ea0b0ccf811e696b97ad7019230672",
"timestamp": "",
"source": "github",
"line_count": 41,
"max_line_length": 80,
"avg_line_length": 34.26829268292683,
"alnum_prop": 0.6804270462633452,
"repo_name": "Samsung-IoT-Academy/mockquitto",
"id": "8b5c26f5a780f07b0f589f453cabecac632d92f5",
"size": "1411",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mockquitto/scripts/mqtt_generator.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "1182"
},
{
"name": "Python",
"bytes": "39907"
}
],
"symlink_target": ""
}
|
class HeapSort:
@classmethod
def heapsort_support(cls, heap):
# create support array
ordered_array = []
# extract min until heap is empty
while not heap.is_empty():
min_node = heap.delete_min()
if min_node is None:
break
else:
ordered_array.append(min_node)
return ordered_array
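# --- Illustrative usage sketch (not part of the original module) ---
# heapsort_support() only needs a heap exposing is_empty() and
# delete_min(); MinimalHeap below is a hypothetical stand-in for the
# project's own Heap class:
#
# import heapq
#
# class MinimalHeap(object):
#     def __init__(self, items):
#         self._data = list(items)
#         heapq.heapify(self._data)
#     def is_empty(self):
#         return not self._data
#     def delete_min(self):
#         return heapq.heappop(self._data) if self._data else None
#
# print(HeapSort.heapsort_support(MinimalHeap([5, 1, 4, 2, 3])))
# # -> [1, 2, 3, 4, 5]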
|
{
"content_hash": "814bb127d194e6261aa105b0dbf45b94",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 46,
"avg_line_length": 28.285714285714285,
"alnum_prop": 0.5328282828282829,
"repo_name": "andreweb/HeapHeap",
"id": "31d5558e501ce9b866dc650496dc34f9deeb3144",
"size": "398",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "Heap/HeapSort.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "25426"
}
],
"symlink_target": ""
}
|
import json
import logging
import os
from pwd import getpwnam
import shutil
import sys
# TODO(rhallisey): add docstring.
logging.basicConfig()
LOG = logging.getLogger(__name__)
LOG.setLevel(logging.INFO)
def validate_config(config):
required_keys = {'source', 'dest', 'owner', 'perm'}
if 'command' not in config:
LOG.error('Config is missing required "command" key')
sys.exit(1)
# Validate config sections
for data in config.get('config_files', list()):
# Verify required keys exist. Only 'source' and 'dest' are
# required. 'owner' and 'perm' should user system defaults if not
# specified
if not data.viewkeys() >= required_keys:
LOG.error('Config is missing required keys: {}'.format(data))
sys.exit(1)
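# An illustrative config.json accepted by validate_config(); the paths,
# owner and mode below are made-up examples, not Kolla defaults:
#
# {
#     "command": "/usr/bin/some-service",
#     "config_files": [
#         {
#             "source": "/opt/kolla/config_files/service.conf",
#             "dest": "/etc/service/service.conf",
#             "owner": "service",
#             "perm": "0600"
#         }
#     ]
# }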
def validate_source(data):
source = data.get('source')
if not os.path.exists(source):
if data.get('optional'):
LOG.warn('{} does not exist, but is not required'.format(source))
return False
else:
LOG.error('The source to copy does not exist: {}'.format(source))
sys.exit(1)
return True
def copy_files(data):
dest = data.get('dest')
source = data.get('source')
if os.path.exists(dest):
LOG.info('Removing existing destination: {}'.format(dest))
if os.path.isdir(dest):
shutil.rmtree(dest)
else:
os.remove(dest)
if os.path.isdir(source):
source_path = source
dest_path = dest
else:
source_path = os.path.dirname(source)
dest_path = os.path.dirname(dest)
if not os.path.exists(dest_path):
LOG.info('Creating dest parent directory: {}'.format(dest_path))
os.makedirs(dest_path)
if source != source_path:
# Source is file
LOG.info('Copying {} to {}'.format(source, dest))
shutil.copy(source, dest)
else:
# Source is a directory
for src in os.listdir(source_path):
LOG.info('Copying {} to {}'.format(
os.path.join(source_path, src), dest_path))
if os.path.isdir(src):
shutil.copytree(os.path.join(source_path, src), dest_path)
else:
shutil.copy(os.path.join(source_path, src), dest_path)
def set_permissions(data):
    def set_perms(file_, uid, gid, perm):
LOG.info('Setting permissions for {}'.format(file_))
# Give config file proper perms.
try:
os.chown(file_, uid, gid)
except OSError as e:
LOG.error('While trying to chown {} received error: {}'.format(
file_, e))
sys.exit(1)
try:
os.chmod(file_, perm)
except OSError as e:
LOG.error('While trying to chmod {} received error: {}'.format(
file_, e))
sys.exit(1)
dest = data.get('dest')
owner = data.get('owner')
perm = int(data.get('perm'), 0)
# Check for user and group id in the environment.
try:
uid = getpwnam(owner).pw_uid
except KeyError:
LOG.error('The specified user does not exist: {}'.format(owner))
sys.exit(1)
try:
gid = getpwnam(owner).pw_gid
except KeyError:
LOG.error('The specified group does not exist: {}'.format(owner))
sys.exit(1)
# Set permissions on the top level dir or file
set_perms(dest, uid, gid, perm)
if os.path.isdir(dest):
# Recursively set permissions
for root, dirs, files in os.walk(dest):
for dir_ in dirs:
set_perms(os.path.join(root, dir_), uid, gid, perm)
for file_ in files:
set_perms(os.path.join(root, file_), uid, gid, perm)
def load_config():
config_file = '/opt/kolla/config_files/config.json'
LOG.info('Loading config file at {}'.format(config_file))
# Attempt to read config file
with open(config_file) as f:
try:
config = json.load(f)
except ValueError:
LOG.error('Invalid json file found at {}'.format(config_file))
sys.exit(1)
except IOError as e:
LOG.error('Could not read file {}. Failed with error {}'.format(
config_file, e))
sys.exit(1)
LOG.info('Validating config file')
validate_config(config)
if 'config_files' in config:
LOG.info('Copying service configuration files')
for data in config['config_files']:
if validate_source(data):
copy_files(data)
set_permissions(data)
else:
LOG.debug('No files to copy found in config')
LOG.info('Writing out command to execute')
LOG.debug('Command is: {}'.format(config['command']))
# The value from the 'command' key will be written to '/run_command'
with open('/run_command', 'w+') as f:
f.write(config['command'])
def execute_config_strategy():
try:
config_strategy = os.environ.get("KOLLA_CONFIG_STRATEGY")
LOG.info('Kolla config strategy set to: {}'.format(config_strategy))
except KeyError:
LOG.error("KOLLA_CONFIG_STRATEGY is not set properly.")
sys.exit(1)
if config_strategy == "COPY_ALWAYS":
load_config()
elif config_strategy == "COPY_ONCE":
if os.path.exists('/configured'):
LOG.info("The config strategy prevents copying new configs")
sys.exit(0)
else:
load_config()
f = open('/configured', 'w+')
f.close()
else:
LOG.error('KOLLA_CONFIG_STRATEGY is not set properly')
sys.exit(1)
def main():
execute_config_strategy()
return 0
if __name__ == "__main__":
sys.exit(main())
|
{
"content_hash": "1d77e4a260000bd880e76c18031a97e7",
"timestamp": "",
"source": "github",
"line_count": 192,
"max_line_length": 77,
"avg_line_length": 30.151041666666668,
"alnum_prop": 0.5767835550181378,
"repo_name": "fdumpling/kolla",
"id": "941e74ca230b508bc1786027149d6d39633a21e7",
"size": "6357",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "docker/base/set_configs.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "60219"
},
{
"name": "Ruby",
"bytes": "6555"
},
{
"name": "Shell",
"bytes": "39717"
}
],
"symlink_target": ""
}
|
import os
import sys
import numpy as np
import argparse
import struct
def process_dlrm_data(embedding_rows_bound, data_file, output_dir, num_samples=0):
    dest_dir = os.path.join(output_dir, "build", "criteo")
if not os.path.exists(dest_dir):
os.makedirs(dest_dir)
# No of lines in the file
if num_samples == 0:
# 40 int_32 values per line
n_lines = os.path.getsize(data_file) // 40 // 4
else:
n_lines = num_samples
with open(os.path.join(dest_dir, "val_map.txt"), "w") as f:
for i in range(n_lines):
print("{:08d}".format(i), file=f)
ground_truth_list = []
int_features_list = []
int_features_int8_list = []
cat_features_list = []
with open(str(data_file), "rb") as f:
for n in range(n_lines):
if n % 1000 == 0:
print("Processing No.{:d}/{:d}...".format(n, n_lines))
# Save one line into list
nums = struct.unpack_from("40i", f.read(40 * 4))
ground_truth_list.append(nums[0])
int_features = nums[1:14]
# In reference implementation, we do log(max(0, feature) + 1).
# TODO: should this be in timed path?
int_features = [np.log(max(0.0, i) + 1.0) for i in int_features]
int_features_list.append(int_features)
# Using [-14.2313, 14.2313] as the range for the numerical input according to calibration cache.
int8_factor = 127.0 / 14.2313
int_features_int8 = [min(max(i * int8_factor, -128.0), 127.0) for i in int_features]
int_features_int8_list.append(int_features_int8)
cat_features = np.array(nums[14:40], dtype=np.int32)
cat_features = [x % embedding_rows_bound for x in cat_features]
cat_features_list.append(cat_features)
    np.save(os.path.join(dest_dir, "ground_truth.npy"), np.array(ground_truth_list, dtype=np.int32))
    np.save(os.path.join(dest_dir, "numeric_fp32.npy"), np.array(int_features_list, dtype=np.float32))
    np.save(os.path.join(dest_dir, "numeric_fp16.npy"), np.array(int_features_list, dtype=np.float16))
    np.save(os.path.join(dest_dir, "numeric_int8_linear.npy"), np.array(int_features_int8_list, dtype=np.int8))
    np.save(os.path.join(dest_dir, "numeric_int8_chw4.npy"), np.array([i + [0 for j in range(16 - 13)] for i in int_features_int8_list], dtype=np.int8))
    np.save(os.path.join(dest_dir, "numeric_int8_chw32.npy"), np.array([i + [0 for j in range(32 - 13)] for i in int_features_int8_list], dtype=np.int8))
    np.save(os.path.join(dest_dir, "categorical_int32.npy"), np.array(cat_features_list, dtype=np.int32))
def main():
parser = argparse.ArgumentParser()
parser.add_argument("--embedding_rows_bound", "-b", help="Specifies the upper bound on the number of embedding rows", default=40000000)
parser.add_argument("--data_file", "-d", help="Specifies the input data file test_data.bin")
parser.add_argument("--output_dir", "-o", help="Specifies the output directory for the npy files")
parser.add_argument("--num_samples", "-n", help="Specifies the number of samples to be processed. Default: all", type=int, default=0)
args = parser.parse_args()
data_file = args.data_file
output_dir = args.output_dir
embedding_rows_bound = args.embedding_rows_bound
num_samples = args.num_samples
process_dlrm_data(embedding_rows_bound, data_file, output_dir, num_samples)
if __name__ == "__main__":
main()
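# Illustrative invocation (paths are hypothetical):
#   python convert_dlrm_data.py \
#       --data_file /data/criteo/test_data.bin \
#       --output_dir /data/preprocessed \
#       --num_samples 1000
# The input file is expected to hold 40 int32 values per sample:
# 1 label, 13 numeric features and 26 categorical features.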
|
{
"content_hash": "ac23ba333bca3031cced99da977d9793",
"timestamp": "",
"source": "github",
"line_count": 72,
"max_line_length": 161,
"avg_line_length": 49.416666666666664,
"alnum_prop": 0.6329398538504778,
"repo_name": "mlperf/inference_results_v0.7",
"id": "5788a8252f64a9d0c0d0f574a174dbcd0b2624e5",
"size": "4174",
"binary": false,
"copies": "18",
"ref": "refs/heads/master",
"path": "closed/QCT/code/dlrm-99/tensorrt/scripts/convert_dlrm_data.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "148628"
},
{
"name": "C++",
"bytes": "14551146"
},
{
"name": "CMake",
"bytes": "380597"
},
{
"name": "Cuda",
"bytes": "3604332"
},
{
"name": "Dockerfile",
"bytes": "32985"
},
{
"name": "Makefile",
"bytes": "103953"
},
{
"name": "Objective-C",
"bytes": "5470"
},
{
"name": "Python",
"bytes": "11627827"
},
{
"name": "Roff",
"bytes": "153"
},
{
"name": "Shell",
"bytes": "349257"
}
],
"symlink_target": ""
}
|
import platform
import re
from awscli.compat import urlopen, URLError
from awscli.customizations.codedeploy.systems import System, Ubuntu, Windows, RHEL
from socket import timeout
MAX_INSTANCE_NAME_LENGTH = 100
MAX_TAGS_PER_INSTANCE = 10
MAX_TAG_KEY_LENGTH = 128
MAX_TAG_VALUE_LENGTH = 256
INSTANCE_NAME_PATTERN = r'^[A-Za-z0-9+=,.@_-]+$'
IAM_USER_ARN_PATTERN = r'^arn:aws:iam::[0-9]{12}:user/[A-Za-z0-9/+=,.@_-]+$'
INSTANCE_NAME_ARG = {
'name': 'instance-name',
'synopsis': '--instance-name <instance-name>',
'required': True,
'help_text': (
'Required. The name of the on-premises instance.'
)
}
IAM_USER_ARN_ARG = {
'name': 'iam-user-arn',
'synopsis': '--iam-user-arn <iam-user-arn>',
'required': False,
'help_text': (
'Optional. The IAM user associated with the on-premises instance.'
)
}
def validate_region(params, parsed_globals):
if parsed_globals.region:
params.region = parsed_globals.region
else:
params.region = params.session.get_config_variable('region')
if not params.region:
raise RuntimeError('Region not specified.')
def validate_instance_name(params):
if params.instance_name:
if not re.match(INSTANCE_NAME_PATTERN, params.instance_name):
raise ValueError('Instance name contains invalid characters.')
if params.instance_name.startswith('i-'):
raise ValueError('Instance name cannot start with \'i-\'.')
if len(params.instance_name) > MAX_INSTANCE_NAME_LENGTH:
raise ValueError(
'Instance name cannot be longer than {0} characters.'.format(
MAX_INSTANCE_NAME_LENGTH
)
)
def validate_tags(params):
if params.tags:
if len(params.tags) > MAX_TAGS_PER_INSTANCE:
raise ValueError(
'Instances can only have a maximum of {0} tags.'.format(
MAX_TAGS_PER_INSTANCE
)
)
for tag in params.tags:
if len(tag['Key']) > MAX_TAG_KEY_LENGTH:
raise ValueError(
'Tag Key cannot be longer than {0} characters.'.format(
MAX_TAG_KEY_LENGTH
)
)
            if len(tag['Value']) > MAX_TAG_VALUE_LENGTH:
raise ValueError(
'Tag Value cannot be longer than {0} characters.'.format(
MAX_TAG_VALUE_LENGTH
)
)
def validate_iam_user_arn(params):
if params.iam_user_arn and \
not re.match(IAM_USER_ARN_PATTERN, params.iam_user_arn):
raise ValueError('Invalid IAM user ARN.')
def validate_instance(params):
if platform.system() == 'Linux':
if 'Ubuntu' in platform.linux_distribution()[0]:
params.system = Ubuntu(params)
if 'Red Hat Enterprise Linux Server' in platform.linux_distribution()[0]:
params.system = RHEL(params)
elif platform.system() == 'Windows':
params.system = Windows(params)
if 'system' not in params:
raise RuntimeError(
System.UNSUPPORTED_SYSTEM_MSG
)
try:
urlopen('http://169.254.169.254/latest/meta-data/', timeout=1)
raise RuntimeError('Amazon EC2 instances are not supported.')
except (URLError, timeout):
pass
def validate_s3_location(params, arg_name):
arg_name = arg_name.replace('-', '_')
if arg_name in params:
s3_location = getattr(params, arg_name)
if s3_location:
matcher = re.match('s3://(.+?)/(.+)', str(s3_location))
if matcher:
params.bucket = matcher.group(1)
params.key = matcher.group(2)
else:
raise ValueError(
'--{0} must specify the Amazon S3 URL format as '
's3://<bucket>/<key>.'.format(
arg_name.replace('_', '-')
)
)
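# Illustrative behavior of validate_s3_location(); the argument name is
# hypothetical:
#   with --some-s3-arg set to 's3://my-bucket/releases/app.zip',
#   params.bucket becomes 'my-bucket' and params.key becomes
#   'releases/app.zip'; a plain path like '/tmp/app.zip' raises a
#   ValueError asking for the s3://<bucket>/<key> format.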
|
{
"content_hash": "31c4054f7cbdc76a4f18e663d9c6e66a",
"timestamp": "",
"source": "github",
"line_count": 121,
"max_line_length": 82,
"avg_line_length": 33.33057851239669,
"alnum_prop": 0.5648400694272254,
"repo_name": "LockScreen/Backend",
"id": "7aee4bb77e4053905e00a50f80ec6ecba85b7dc0",
"size": "4595",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "venv/lib/python2.7/site-packages/awscli/customizations/codedeploy/utils.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1411"
},
{
"name": "C",
"bytes": "5939"
},
{
"name": "CSS",
"bytes": "59046"
},
{
"name": "HTML",
"bytes": "449"
},
{
"name": "JavaScript",
"bytes": "21987"
},
{
"name": "Python",
"bytes": "14239313"
},
{
"name": "Shell",
"bytes": "5692"
},
{
"name": "TeX",
"bytes": "1527"
}
],
"symlink_target": ""
}
|
from plankton import codec
# A generic object.
class Object(object):
def __init__(self, serial):
self.header = None
self.payload = None
self.serial = serial
def set_header(self, value):
self.header = value
return self
def set_payload(self, value):
self.payload = value
return self
def __str__(self):
return "Object(#%i)" % self.serial
class EnvironmentReference(object):
def __init__(self, key, serial):
self.key = key
self.serial = serial
# Creates a new, initially empty, object and if an id is specified registers it
# under that id.
def new_object(id=None):
return get_active().new_object(id)
# Returns the object registered under the given id.
def ref(id):
return get_active().get_ref(id)
# Creates a new environment reference that resolves to the given value.
def env(key, id=None):
return get_active().new_env(key, id)
ACTIVE = None
# Returns the currently active assembler.
def get_active():
global ACTIVE
return ACTIVE
# Sets the currently active assembler
def set_active(value):
global ACTIVE
ACTIVE = value
# The machinery used to generate a test case.
class TestAssembler(object):
def __init__(self, generator):
self.name = None
self.refs = {}
self.input = None
self.assembly = None
self.generator = generator
self.next_serial = 0
# Sets the descriptive name of this test
def set_name(self, value):
    self.name = value
return self
# Add a reference to the given value.
def add_ref(self, id, value):
self.refs[id] = value
# Returns the value with the given id.
def get_ref(self, id):
return self.refs[id]
def get_next_serial(self):
result = self.next_serial
self.next_serial += 1
return result
def new_object(self, id):
result = Object(self.get_next_serial())
if not id is None:
self.add_ref(id, result)
return result
def new_env(self, key, id):
result = EnvironmentReference(key, self.get_next_serial())
if not id is None:
self.add_ref(id, result)
return result
# Sets the input to check for.
def set_input(self, value):
self.input = value
return self
  # Returns the high-level assembly corresponding to the input.
def get_assembly(self):
return self.assembly
# Registers this as the active assembler.
def begin(self):
assert get_active() == None
set_active(self)
# Unregisters this as the active assembler.
def end(self):
assert get_active() == self
set_active(None)
# Wrapper that escapes a value given to the output stream's append and ensures
# that it gets emitted using the generator rather than just added as a string.
class E(object):
def __init__(self, value):
self.value = value
class OutputStream(object):
def __init__(self, generator):
self.generator = generator
self.level = 0
self.pending_newline = False
self.refs = {}
self.chars = []
def append(self, *parts):
for part in parts:
if isinstance(part, E):
self.generator.emit_value(part.value, self)
else:
self.append_chunk(part)
return self
def append_chunk(self, part):
for c in part:
if c == '\n':
self.flush_pending_newline()
self.pending_newline = True
else:
self.append_char(c)
def append_char(self, c):
self.flush_pending_newline()
self.chars.append(c)
def flush_pending_newline(self):
if self.pending_newline:
self.pending_newline = False
self.append_char('\n')
      for i in range(0, self.level):
self.append_char(' ')
def indent(self, delta):
self.level += delta
return self
def flush(self):
self.flush_pending_newline()
return "".join(self.chars)
class AbstractAssembly(object):
def string(self, str):
return self.tag(tSTRING).uint32(len(str)).blob(bytearray(str))
def int(self, value):
return self.tag(tINT32).int32(value)
def array(self, n):
return self.tag(tARRAY).uint32(n)
def map(self, n):
return self.tag(tMAP).uint32(n)
def object(self):
return self.tag(tOBJECT)
def ref(self, n):
return self.tag(tREF).uint32(n)
def null(self):
return self.tag(tNULL)
def true(self):
return self.tag(tTRUE)
def false(self):
return self.tag(tFALSE)
def env(self):
return self.tag(tENV)
# Tag values.
tINT32 = codec._INT32_TAG
tSTRING = codec._STRING_TAG
tNULL = codec._NULL_TAG
tTRUE = codec._TRUE_TAG
tFALSE = codec._FALSE_TAG
tARRAY = codec._ARRAY_TAG
tMAP = codec._MAP_TAG
tOBJECT = codec._OBJECT_TAG
tREF = codec._REFERENCE_TAG
tENV = codec._ENVIRONMENT_TAG
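# --- Illustrative usage sketch (not part of the original module) ---
# How a test might drive TestAssembler; the generator argument and the
# ids are hypothetical:
#
# asm = TestAssembler(generator=None)
# asm.begin()
# try:
#     root = new_object('root').set_payload({'env_value': env('my.key')})
#     asm.set_input(root)
# finally:
#     asm.end()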
|
{
"content_hash": "48a62c160d56a0374459640512f821e8",
"timestamp": "",
"source": "github",
"line_count": 219,
"max_line_length": 79,
"avg_line_length": 21.164383561643834,
"alnum_prop": 0.6586839266450917,
"repo_name": "plesner/neutrino",
"id": "1513133ccafba02c358b4f2cfe6c936c0aaa356f",
"size": "4799",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tests/gen/plankton/framework.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "889170"
},
{
"name": "C++",
"bytes": "162362"
},
{
"name": "Nemerle",
"bytes": "64810"
},
{
"name": "PHP",
"bytes": "1009"
},
{
"name": "Python",
"bytes": "139652"
},
{
"name": "Shell",
"bytes": "5026"
}
],
"symlink_target": ""
}
|
from django.contrib import admin
from django.utils.decorators import method_decorator
from django.views.decorators.http import require_POST
from django.utils.translation import ugettext_lazy as _
from solo.admin import SingletonModelAdmin
from allink_apps.config.models import Config
require_POST = method_decorator(require_POST)
@admin.register(Config)
class ConfigAdmin(SingletonModelAdmin):
pass
|
{
"content_hash": "cef4495786a66c6eb387b214cbde385e",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 55,
"avg_line_length": 27.2,
"alnum_prop": 0.8259803921568627,
"repo_name": "allink/allink-apps",
"id": "7cd1c8585c90eef5f74df8a780d1b814f53f1677",
"size": "432",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "config/admin.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "994"
},
{
"name": "HTML",
"bytes": "47533"
},
{
"name": "Python",
"bytes": "183917"
}
],
"symlink_target": ""
}
|
import logging
import inspect
import imp
import os
import operator
import re
import sys
import time
import traceback
from clint.textui import colored, puts, indent
from os.path import abspath, dirname
from multiprocessing import Process, Queue
import bottle
from listener import WillXMPPClientMixin
from mixins import ScheduleMixin, StorageMixin, ErrorMixin, HipChatMixin,\
RoomMixin, PluginModulesLibraryMixin, EmailMixin
from scheduler import Scheduler
import settings
from utils import show_valid, error, warn, print_head
# Force UTF8
if sys.version_info < (3, 0):
reload(sys)
sys.setdefaultencoding('utf8')
else:
raw_input = input
# Update path
PROJECT_ROOT = abspath(os.path.join(dirname(__file__)))
PLUGINS_ROOT = abspath(os.path.join(PROJECT_ROOT, "plugins"))
TEMPLATES_ROOT = abspath(os.path.join(PROJECT_ROOT, "templates"))
PROJECT_TEMPLATE_ROOT = abspath(os.path.join(os.getcwd(), "templates"))
sys.path.append(PROJECT_ROOT)
sys.path.append(os.path.join(PROJECT_ROOT, "will"))
class WillBot(EmailMixin, WillXMPPClientMixin, StorageMixin, ScheduleMixin,
ErrorMixin, RoomMixin, HipChatMixin, PluginModulesLibraryMixin):
def __init__(self, **kwargs):
if "template_dirs" in kwargs:
warn("template_dirs is now depreciated")
if "plugin_dirs" in kwargs:
warn("plugin_dirs is now depreciated")
log_level = getattr(settings, 'LOGLEVEL', logging.ERROR)
logging.basicConfig(
level=log_level,
format='%(levelname)-8s %(message)s'
)
# Find all the PLUGINS modules
plugins = settings.PLUGINS
self.plugins_dirs = {}
# Set template dirs.
full_path_template_dirs = []
for t in settings.TEMPLATE_DIRS:
full_path_template_dirs.append(os.path.abspath(t))
# Add will's templates_root
if TEMPLATES_ROOT not in full_path_template_dirs:
full_path_template_dirs += [TEMPLATES_ROOT, ]
# Add this project's templates_root
if PROJECT_TEMPLATE_ROOT not in full_path_template_dirs:
full_path_template_dirs += [PROJECT_TEMPLATE_ROOT, ]
# Convert those to dirs
for plugin in plugins:
path_name = None
for mod in plugin.split('.'):
if path_name is not None:
path_name = [path_name]
file_name, path_name, description = imp.find_module(mod, path_name)
# Add, uniquely.
self.plugins_dirs[os.path.abspath(path_name)] = plugin
if os.path.exists(os.path.join(os.path.abspath(path_name), "templates")):
full_path_template_dirs.append(
os.path.join(os.path.abspath(path_name), "templates")
)
# Key by module name
self.plugins_dirs = dict(zip(self.plugins_dirs.values(), self.plugins_dirs.keys()))
# Storing here because storage hasn't been bootstrapped yet.
os.environ["WILL_TEMPLATE_DIRS_PICKLED"] =\
";;".join(full_path_template_dirs)
def bootstrap(self):
print_head()
self.verify_environment()
self.load_config()
self.bootstrap_storage_mixin()
self.bootstrap_plugins()
self.verify_plugin_settings()
puts("Bootstrapping complete.")
puts("\nStarting core processes:")
# Scheduler
scheduler_thread = Process(target=self.bootstrap_scheduler)
# scheduler_thread.daemon = True
# Bottle
bottle_thread = Process(target=self.bootstrap_bottle)
# bottle_thread.daemon = True
# XMPP Listener
xmpp_thread = Process(target=self.bootstrap_xmpp)
# xmpp_thread.daemon = True
with indent(2):
try:
# Start up threads.
xmpp_thread.start()
scheduler_thread.start()
bottle_thread.start()
errors = self.get_startup_errors()
if len(errors) > 0:
default_room = self.get_room_from_name_or_id(settings.DEFAULT_ROOM)["room_id"]
error_message = "FYI, I ran into some problems while starting up:"
for err in errors:
error_message += "\n%s\n" % err
self.send_room_message(default_room, error_message, color="yellow")
puts(colored.red(error_message))
while True:
time.sleep(100)
except (KeyboardInterrupt, SystemExit):
scheduler_thread.terminate()
bottle_thread.terminate()
xmpp_thread.terminate()
print '\n\nReceived keyboard interrupt, quitting threads.',
while (scheduler_thread.is_alive() or
bottle_thread.is_alive() or
xmpp_thread.is_alive()):
sys.stdout.write(".")
sys.stdout.flush()
time.sleep(0.5)
def verify_individual_setting(self, test_setting, quiet=False):
if not test_setting.get("only_if", True):
return True
if hasattr(settings, test_setting["name"][5:]):
with indent(2):
show_valid(test_setting["name"])
return True
else:
error("%(name)s... missing!" % test_setting)
with indent(2):
puts("""To obtain a %(name)s: \n%(obtain_at)s
To set your %(name)s:
1. On your local machine, add this to your virtual environment's bin/postactivate file:
export %(name)s=YOUR_ACTUAL_%(name)s
2. If you've deployed will on heroku, run
heroku config:set %(name)s=YOUR_ACTUAL_%(name)s
""" % test_setting)
return False
def verify_environment(self):
missing_settings = False
required_settings = [
{
"name": "WILL_USERNAME",
"obtain_at": """1. Go to hipchat, and create a new user for will.
2. Log into will, and go to Account settings>XMPP/Jabber Info.
3. On that page, the 'Jabber ID' is the value you want to use.""",
},
{
"name": "WILL_PASSWORD",
"obtain_at": (
"1. Go to hipchat, and create a new user for will. "
"Note that password - this is the value you want. "
"It's used for signing in via XMPP."
),
},
{
"name": "WILL_V2_TOKEN",
"obtain_at": """1. Log into hipchat using will's user.
2. Go to https://your-org.hipchat.com/account/api
3. Create a token.
4. Copy the value - this is the WILL_V2_TOKEN.""",
},
{
"name": "WILL_REDIS_URL",
"only_if": getattr(settings, "STORAGE_BACKEND", "redis") == "redis",
"obtain_at": """1. Set up an accessible redis host locally or in production
2. Set WILL_REDIS_URL to its full value, i.e. redis://localhost:6379/7""",
},
]
puts("")
puts("Verifying environment...")
for r in required_settings:
if not self.verify_individual_setting(r):
missing_settings = True
if missing_settings:
error(
"Will was unable to start because some required environment "
"variables are missing. Please fix them and try again!"
)
sys.exit(1)
else:
puts("")
puts("Verifying credentials...")
# Parse 11111_222222@chat.hipchat.com into id, where 222222 is the id.
user_id = settings.USERNAME.split('@')[0].split('_')[1]
# Splitting into a thread. Necessary because *BSDs (including OSX) don't have threadsafe DNS.
# http://stackoverflow.com/questions/1212716/python-interpreter-blocks-multithreaded-dns-requests
q = Queue()
p = Process(target=self.get_hipchat_user, args=(user_id,), kwargs={"q": q, })
p.start()
user_data = q.get()
p.join()
if "error" in user_data:
error("We ran into trouble: '%(message)s'" % user_data["error"])
sys.exit(1)
with indent(2):
show_valid("%s authenticated" % user_data["name"])
os.environ["WILL_NAME"] = user_data["name"]
show_valid("@%s verified as handle" % user_data["mention_name"])
os.environ["WILL_HANDLE"] = user_data["mention_name"]
puts("")
def load_config(self):
puts("Loading configuration...")
with indent(2):
settings.import_settings(quiet=False)
puts("")
def verify_rooms(self):
puts("Verifying rooms...")
# If we're missing ROOMS, join all of them.
with indent(2):
if settings.ROOMS is None:
# Yup. Thanks, BSDs.
q = Queue()
p = Process(target=self.update_available_rooms, args=(), kwargs={"q": q, })
p.start()
rooms_list = q.get()
show_valid("Joining all %s known rooms." % len(rooms_list))
os.environ["WILL_ROOMS"] = ";".join(rooms_list)
p.join()
settings.import_settings()
else:
show_valid(
"Joining the %s room%s specified." % (
len(settings.ROOMS),
"s" if len(settings.ROOMS) > 1 else ""
)
)
puts("")
def verify_plugin_settings(self):
puts("Verifying settings requested by plugins...")
missing_settings = False
missing_setting_error_messages = []
with indent(2):
for name, meta in self.required_settings_from_plugins.items():
if not hasattr(settings, name):
                    error_message = (
                        "%(setting_name)s is missing. It's required by the "
                        "%(plugin_name)s plugin's '%(function_name)s' method."
                    ) % meta
puts(colored.red("✗ %(setting_name)s" % meta))
missing_setting_error_messages.append(error_message)
missing_settings = True
else:
show_valid("%(setting_name)s" % meta)
if missing_settings:
puts("")
warn(
"Will is missing settings required by some plugins. "
"He's starting up anyway, but you will run into errors"
" if you try to use those plugins!"
)
self.add_startup_error("\n".join(missing_setting_error_messages))
else:
puts("")
def bootstrap_storage_mixin(self):
puts("Bootstrapping storage...")
try:
self.bootstrap_storage()
with indent(2):
show_valid("Bootstrapped!")
puts("")
except ImportError, e:
module_name = traceback.format_exc(e).split(" ")[-1]
error("Unable to bootstrap storage - attempting to load %s" % module_name)
puts(traceback.format_exc(e))
sys.exit(1)
except Exception, e:
error("Unable to bootstrap storage!")
puts(traceback.format_exc(e))
sys.exit(1)
def bootstrap_scheduler(self):
bootstrapped = False
try:
self.save("plugin_modules_library", self._plugin_modules_library)
Scheduler.clear_locks(self)
self.scheduler = Scheduler()
for plugin_info, fn, function_name in self.periodic_tasks:
meta = fn.will_fn_metadata
self.add_periodic_task(
plugin_info["full_module_name"],
plugin_info["name"],
function_name,
meta["sched_args"],
meta["sched_kwargs"],
meta["function_name"],
)
for plugin_info, fn, function_name in self.random_tasks:
meta = fn.will_fn_metadata
self.add_random_tasks(
plugin_info["full_module_name"],
plugin_info["name"],
function_name,
meta["start_hour"],
meta["end_hour"],
meta["day_of_week"],
meta["num_times_per_day"]
)
bootstrapped = True
except Exception, e:
self.startup_error("Error bootstrapping scheduler", e)
if bootstrapped:
show_valid("Scheduler started.")
self.scheduler.start_loop(self)
def bootstrap_bottle(self):
bootstrapped = False
try:
for cls, function_name in self.bottle_routes:
instantiated_cls = cls()
instantiated_fn = getattr(instantiated_cls, function_name)
bottle_route_args = {}
for k, v in instantiated_fn.will_fn_metadata.items():
if "bottle_" in k and k != "bottle_route":
bottle_route_args[k[len("bottle_"):]] = v
bottle.route(instantiated_fn.will_fn_metadata["bottle_route"], **bottle_route_args)(instantiated_fn)
bootstrapped = True
except Exception, e:
self.startup_error("Error bootstrapping bottle", e)
if bootstrapped:
show_valid("Web server started.")
bottle.run(host='0.0.0.0', port=settings.HTTPSERVER_PORT, server='cherrypy', quiet=True)
def bootstrap_xmpp(self):
bootstrapped = False
try:
self.start_xmpp_client()
sorted_help = {}
for k, v in self.help_modules.items():
sorted_help[k] = sorted(v)
self.save("help_modules", sorted_help)
self.save("all_listener_regexes", self.all_listener_regexes)
self.connect()
bootstrapped = True
except Exception, e:
self.startup_error("Error bootstrapping xmpp", e)
if bootstrapped:
show_valid("Chat client started.")
show_valid("Will is running.")
self.process(block=True)
def bootstrap_plugins(self):
puts("Bootstrapping plugins...")
OTHER_HELP_HEADING = "Other"
plugin_modules = {}
plugin_modules_library = {}
# NOTE: You can't access self.storage here, or it will deadlock when the threads try to access redis.
with indent(2):
parent_help_text = None
for plugin_name, plugin_root in self.plugins_dirs.items():
for root, dirs, files in os.walk(plugin_root, topdown=False):
for f in files:
if f[-3:] == ".py" and f != "__init__.py":
try:
module_path = os.path.join(root, f)
path_components = os.path.split(module_path)
module_name = path_components[-1][:-3]
full_module_name = ".".join(path_components)
# Need to pass along module name, path all the way through
combined_name = ".".join([plugin_name, module_name])
# Check blacklist.
blacklisted = False
for b in settings.PLUGIN_BLACKLIST:
if b in combined_name:
blacklisted = True
try:
plugin_modules[full_module_name] = imp.load_source(module_name, module_path)
except:
# If it's blacklisted, don't worry if this blows up.
if blacklisted:
pass
else:
raise
parent_mod = path_components[-2].split("/")[-1]
parent_help_text = parent_mod.title()
try:
parent_root = os.path.join(root, "__init__.py")
parent = imp.load_source(parent_mod, parent_root)
parent_help_text = getattr(parent, "MODULE_DESCRIPTION", parent_help_text)
except:
# If it's blacklisted, don't worry if this blows up.
if blacklisted:
pass
else:
raise
plugin_modules_library[full_module_name] = {
"full_module_name": full_module_name,
"file_path": module_path,
"name": module_name,
"parent_name": plugin_name,
"parent_module_name": parent_mod,
"parent_help_text": parent_help_text,
"blacklisted": blacklisted,
}
except Exception, e:
self.startup_error("Error loading %s" % (module_path,), e)
self.plugins = []
for name, module in plugin_modules.items():
try:
for class_name, cls in inspect.getmembers(module, predicate=inspect.isclass):
try:
if hasattr(cls, "is_will_plugin") and cls.is_will_plugin and class_name != "WillPlugin":
self.plugins.append({
"name": class_name,
"class": cls,
"module": module,
"full_module_name": name,
"parent_name": plugin_modules_library[name]["parent_name"],
"parent_module_name": plugin_modules_library[name]["parent_module_name"],
"parent_help_text": plugin_modules_library[name]["parent_help_text"],
"blacklisted": plugin_modules_library[name]["blacklisted"],
})
except Exception, e:
self.startup_error("Error bootstrapping %s" % (class_name,), e)
except Exception, e:
self.startup_error("Error bootstrapping %s" % (name,), e)
self._plugin_modules_library = plugin_modules_library
# Sift and Sort.
self.message_listeners = []
self.periodic_tasks = []
self.random_tasks = []
self.bottle_routes = []
self.all_listener_regexes = []
self.help_modules = {}
self.help_modules[OTHER_HELP_HEADING] = []
self.some_listeners_include_me = False
self.plugins.sort(key=operator.itemgetter("parent_module_name"))
self.required_settings_from_plugins = {}
last_parent_name = None
for plugin_info in self.plugins:
try:
if last_parent_name != plugin_info["parent_help_text"]:
friendly_name = "%(parent_help_text)s " % plugin_info
module_name = " %(parent_name)s" % plugin_info
# Justify
friendly_name = friendly_name.ljust(50, '-')
module_name = module_name.rjust(40, '-')
puts("")
puts("%s%s" % (friendly_name, module_name))
last_parent_name = plugin_info["parent_help_text"]
with indent(2):
plugin_name = plugin_info["name"]
# Just a little nicety
if plugin_name[-6:] == "Plugin":
plugin_name = plugin_name[:-6]
if plugin_info["blacklisted"]:
puts("✗ %s (blacklisted)" % plugin_name)
else:
plugin_instances = {}
for function_name, fn in inspect.getmembers(
plugin_info["class"],
predicate=inspect.ismethod
):
try:
# Check for required_settings
with indent(2):
if hasattr(fn, "will_fn_metadata"):
meta = fn.will_fn_metadata
if "required_settings" in meta:
for s in meta["required_settings"]:
self.required_settings_from_plugins[s] = {
"plugin_name": plugin_name,
"function_name": function_name,
"setting_name": s,
}
if (
"listens_to_messages" in meta and
meta["listens_to_messages"] and
"listener_regex" in meta
):
# puts("- %s" % function_name)
regex = meta["listener_regex"]
if not meta["case_sensitive"]:
regex = "(?i)%s" % regex
help_regex = meta["listener_regex"]
if meta["listens_only_to_direct_mentions"]:
help_regex = "@%s %s" % (settings.HANDLE, help_regex)
self.all_listener_regexes.append(help_regex)
if meta["__doc__"]:
pht = plugin_info.get("parent_help_text", None)
if pht:
if pht in self.help_modules:
self.help_modules[pht].append(meta["__doc__"])
else:
self.help_modules[pht] = [meta["__doc__"]]
else:
self.help_modules[OTHER_HELP_HEADING].append(meta["__doc__"])
if meta["multiline"]:
compiled_regex = re.compile(regex, re.MULTILINE | re.DOTALL)
else:
compiled_regex = re.compile(regex)
if plugin_info["class"] in plugin_instances:
instance = plugin_instances[plugin_info["class"]]
else:
instance = plugin_info["class"]()
plugin_instances[plugin_info["class"]] = instance
self.message_listeners.append({
"function_name": function_name,
"class_name": plugin_info["name"],
"regex_pattern": meta["listener_regex"],
"regex": compiled_regex,
"fn": getattr(instance, function_name),
"args": meta["listener_args"],
"include_me": meta["listener_includes_me"],
"direct_mentions_only": meta["listens_only_to_direct_mentions"],
"admin_only": meta["listens_only_to_admin"],
"acl": meta["listeners_acl"],
})
if meta["listener_includes_me"]:
self.some_listeners_include_me = True
elif "periodic_task" in meta and meta["periodic_task"]:
# puts("- %s" % function_name)
self.periodic_tasks.append((plugin_info, fn, function_name))
elif "random_task" in meta and meta["random_task"]:
# puts("- %s" % function_name)
self.random_tasks.append((plugin_info, fn, function_name))
elif "bottle_route" in meta:
# puts("- %s" % function_name)
self.bottle_routes.append((plugin_info["class"], function_name))
except Exception, e:
error(plugin_name)
self.startup_error(
"Error bootstrapping %s.%s" % (
plugin_info["class"],
function_name,
), e
)
show_valid(plugin_name)
except Exception, e:
self.startup_error("Error bootstrapping %s" % (plugin_info["class"],), e)
puts("")
|
{
"content_hash": "5ea0730dccd71f5bb8b5d809778bd0ac",
"timestamp": "",
"source": "github",
"line_count": 586,
"max_line_length": 120,
"avg_line_length": 46.339590443686006,
"alnum_prop": 0.448388878659547,
"repo_name": "mvanbaak/will",
"id": "ab3e09c9655d477bce47204dedc0b6734b873780",
"size": "27184",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "will/main.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "1768"
},
{
"name": "Python",
"bytes": "143505"
},
{
"name": "Shell",
"bytes": "280"
}
],
"symlink_target": ""
}
|
from django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.static import static
urlpatterns = [
# Examples:
# url(r'^$', 'evetool.views.home', name='home'),
url(r'^', include('users.urls')),
url(r'^', include('admins.urls')),
url(r'^', include('apis.urls')),
url(r'^', include('characters.urls')),
] + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT) + static(
settings.MEDIA_URL, document_root=settings.MEDIA_ROOT
)
|
{
"content_hash": "c62c5a136e8898ec9c2b446c5371355a",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 77,
"avg_line_length": 35.642857142857146,
"alnum_prop": 0.6693386773547094,
"repo_name": "Sult/evehub",
"id": "f683fb024cfec72e3e6eca04b2eefe731643215f",
"size": "499",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "evehub/urls.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "7974"
},
{
"name": "HTML",
"bytes": "30605"
},
{
"name": "JavaScript",
"bytes": "546"
},
{
"name": "Python",
"bytes": "290574"
}
],
"symlink_target": ""
}
|
"""Support for monitoring the state of Vultr Subscriptions."""
import logging
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import CONF_MONITORED_CONDITIONS, CONF_NAME
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from . import (
ATTR_CURRENT_BANDWIDTH_USED,
ATTR_PENDING_CHARGES,
CONF_SUBSCRIPTION,
DATA_VULTR,
)
_LOGGER = logging.getLogger(__name__)
DEFAULT_NAME = "Vultr {} {}"
MONITORED_CONDITIONS = {
ATTR_CURRENT_BANDWIDTH_USED: [
"Current Bandwidth Used",
"GB",
"mdi:chart-histogram",
],
ATTR_PENDING_CHARGES: ["Pending Charges", "US$", "mdi:currency-usd"],
}
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_SUBSCRIPTION): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(
CONF_MONITORED_CONDITIONS, default=list(MONITORED_CONDITIONS)
): vol.All(cv.ensure_list, [vol.In(MONITORED_CONDITIONS)]),
}
)
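# An illustrative configuration.yaml entry for this platform; the
# subscription id is made up and the condition keys are whatever the
# imported ATTR_* constants resolve to:
#
# sensor:
#   - platform: vultr
#     subscription: "576965"
#     monitored_conditions:
#       - pending_charges
#       - current_bandwidth_gb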
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Vultr subscription (server) sensor."""
vultr = hass.data[DATA_VULTR]
subscription = config.get(CONF_SUBSCRIPTION)
name = config.get(CONF_NAME)
monitored_conditions = config.get(CONF_MONITORED_CONDITIONS)
if subscription not in vultr.data:
_LOGGER.error("Subscription %s not found", subscription)
return
sensors = []
for condition in monitored_conditions:
sensors.append(VultrSensor(vultr, subscription, condition, name))
add_entities(sensors, True)
class VultrSensor(Entity):
"""Representation of a Vultr subscription sensor."""
def __init__(self, vultr, subscription, condition, name):
"""Initialize a new Vultr sensor."""
self._vultr = vultr
self._condition = condition
self._name = name
self.subscription = subscription
self.data = None
condition_info = MONITORED_CONDITIONS[condition]
self._condition_name = condition_info[0]
self._units = condition_info[1]
self._icon = condition_info[2]
@property
def name(self):
"""Return the name of the sensor."""
try:
return self._name.format(self._condition_name)
except IndexError:
try:
return self._name.format(self.data["label"], self._condition_name)
except (KeyError, TypeError):
return self._name
@property
def icon(self):
"""Return the icon used in the frontend if any."""
return self._icon
@property
def unit_of_measurement(self):
"""Return the unit of measurement to present the value in."""
return self._units
@property
def state(self):
"""Return the value of this given sensor type."""
try:
return round(float(self.data.get(self._condition)), 2)
except (TypeError, ValueError):
return self.data.get(self._condition)
def update(self):
"""Update state of sensor."""
self._vultr.update()
self.data = self._vultr.data[self.subscription]
|
{
"content_hash": "f6a28d481bfefb9ee8c6587c4fc78d75",
"timestamp": "",
"source": "github",
"line_count": 111,
"max_line_length": 82,
"avg_line_length": 29.28828828828829,
"alnum_prop": 0.6419563211319594,
"repo_name": "Teagan42/home-assistant",
"id": "fec912f00d8150d36be3cccee595c54d09798311",
"size": "3251",
"binary": false,
"copies": "6",
"ref": "refs/heads/dev",
"path": "homeassistant/components/vultr/sensor.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "19774313"
},
{
"name": "Shell",
"bytes": "6846"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
import django_resized.forms
import phoenix.custom_storages
import pyotp
import uuid
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Badge',
fields=[
('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('name', models.CharField(max_length=100)),
('secret_key', models.CharField(default=pyotp.random_base32, max_length=16)),
('image', django_resized.forms.ResizedImageField(storage=phoenix.custom_storages.SecureStorage(), upload_to='BadgePictures')),
],
options={
'permissions': (('view_badge_secret', 'Can view badge secret'),),
},
),
migrations.CreateModel(
name='CrawledBadge',
fields=[
('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('achieved_at', models.DateTimeField(default=django.utils.timezone.now, editable=False)),
('badge', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='crawled', to='boothcrawl.Badge')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='crawledbadges', to=settings.AUTH_USER_MODEL)),
],
),
]
|
{
"content_hash": "a239d1edd9374fbbcdb3d4c5eb7d3b3d",
"timestamp": "",
"source": "github",
"line_count": 43,
"max_line_length": 148,
"avg_line_length": 39.41860465116279,
"alnum_prop": 0.6324483775811209,
"repo_name": "vchrisb/emc_phoenix3",
"id": "20980894f611c4311fd079f39ff464f76a81d8c2",
"size": "1768",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "boothcrawl/migrations/0001_initial.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "175838"
},
{
"name": "HTML",
"bytes": "60569"
},
{
"name": "JavaScript",
"bytes": "2082"
},
{
"name": "Python",
"bytes": "91711"
},
{
"name": "Shell",
"bytes": "4651"
}
],
"symlink_target": ""
}
|
"""
These tests contain real-life-ish code snippets with different
problematic input configs that should cause the expected errors.
"""
from unittest import TestCase
from jsoncfg import loads_config, expect_object, expect_array, JSONConfigNodeTypeError
class ServerConfigLoader(object):
"""
This is the workhorse that we will use with different
input configs for testing.
"""
def __init__(self, json_string):
self.logs = []
self.cfg = loads_config(json_string)
def __call__(self):
self._load_servers(expect_array(self.cfg.servers))
self._load_users(expect_object(self.cfg.users))
return self.logs
def _log(self, msg):
self.logs.append(msg)
def _load_servers(self, servers_cfg):
for server_cfg in servers_cfg:
expect_object(server_cfg)
self._log('%s|%s|%s' % (server_cfg.ip_address(), server_cfg.port(8000),
server_cfg.wwwroot()))
def _load_users(self, users_cfg):
for username, user_cfg in users_cfg:
expect_object(user_cfg)
self._log('%s|%s|%s' % (username, user_cfg.password(), user_cfg.is_admin(False)))
class TestServerConfigExample(TestCase):
def test_successful_loading(self):
cfg = """
{
servers: [
{
ip_address: "127.0.0.1",
// The port is optional and defaults to 8000
//port: 8080,
wwwroot: "/home/tron/www/root0",
},
{
ip_address: "127.0.0.1",
port: 8081,
wwwroot: "/home/tron/www/root1",
}
],
users: {
tron: {
password: "trons_hashed_pwd",
is_admin: true,
},
tom: {
password: "toms_hashed_pwd",
// is_admin is optional, the config loader uses a default of false
// is_admin: false,
},
"jerry the mouse": {
password: "jerrys_hashed_pwd",
is_admin: false,
},
},
}"""
logs = ServerConfigLoader(cfg)()
self.assertSetEqual(set(logs), {
'127.0.0.1|8000|/home/tron/www/root0',
'127.0.0.1|8081|/home/tron/www/root1',
'tron|trons_hashed_pwd|True',
'tom|toms_hashed_pwd|False',
'jerry the mouse|jerrys_hashed_pwd|False',
})
def test_servers_is_not_list(self):
cfg = '{servers: {}, users: {}}'
self.assertRaises(JSONConfigNodeTypeError, ServerConfigLoader(cfg))
def test_users_is_not_dict(self):
cfg = '{servers: [], users: []}'
self.assertRaises(JSONConfigNodeTypeError, ServerConfigLoader(cfg))
|
{
"content_hash": "34c193114c61d62dafef5eb3801bb88a",
"timestamp": "",
"source": "github",
"line_count": 88,
"max_line_length": 93,
"avg_line_length": 34.70454545454545,
"alnum_prop": 0.4931237721021611,
"repo_name": "pasztorpisti/json-cfg",
"id": "d757023c1f055827ca507867dc94c66d8f624d53",
"size": "3054",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_library_usage.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "101660"
}
],
"symlink_target": ""
}
|