function stringlengths 11 56k | repo_name stringlengths 5 60 | features list |
|---|---|---|
def test_delete_versioned_submission_past(self):
    """Deleting an old versioned ``Submission`` should fail"""
    # Publishing a newer version makes the original a "past" version.
    newer = self.create_submission(title='b')
    self.parent.update_version(newer)
    self.client.login(username='alex', password='alex')
    assert_equal(self.client.post(self.delete_path, {}).status_code, 404)
13,
15,
13,
6,
1319036985
] |
def test_delete_versioned_submission(self):
    """Deleting a versioned ``Submission`` should take down all the related
    content"""
    newer = self.create_submission(title='b')
    self.parent.update_version(newer)
    self.client.login(username='alex', password='alex')
    result = self.client.post(newer.get_delete_url(), {})
    # Everything owned by alex should now be gone.
    related = {'submission__created_by': self.alex_profile}
    assert_equal(Submission.objects
                 .filter(created_by=self.alex_profile).count(), 0)
    assert_equal(SubmissionParent.objects.filter(**related).count(), 0)
    assert_equal(SubmissionVersion.objects.filter(**related).count(), 0)
13,
15,
13,
6,
1319036985
] |
def setUp(self):
    """Create one submission (with parent) and the help-page fixtures."""
    challenge_setup()
    profile_list = create_users()
    self.phase = Phase.objects.all()[0]
    self.alex = profile_list[0]
    self.category = Category.objects.all()[0]
    create_submissions(1, self.phase, self.alex)
    # Exactly one Submission exists at this point, so .get() is safe.
    self.submission_a = Submission.objects.get()
    self.parent = self.submission_a.parent
    self.help_url = reverse('entry_help', args=[self.parent.slug])
    # Payload accepted by the help form in the tests below.
    self.valid_data = {
        'notes': 'Help Wanted',
        'status': SubmissionHelp.PUBLISHED,
    }
13,
15,
13,
6,
1319036985
] |
def create_submission_help(self, **kwargs):
    """Get or create a ``SubmissionHelp`` for ``self.parent``.

    Keyword arguments override the defaults (published status on the
    test's parent).  Returns the instance whether or not it was newly
    created.
    """
    defaults = {'parent': self.parent,
                'status': SubmissionHelp.PUBLISHED}
    # dict.update with empty kwargs is a no-op, so no guard is needed.
    defaults.update(kwargs)
    # The "created" flag is irrelevant to callers.
    instance, _ = SubmissionHelp.objects.get_or_create(**defaults)
    return instance
13,
15,
13,
6,
1319036985
] |
def test_submission_help_not_owner(self):
    """A non-owner must receive 404 on both GET and POST."""
    self.client.login(username='bob', password='bob')
    get_response = self.client.get(self.help_url)
    eq_(get_response.status_code, 404)
    post_response = self.client.post(self.help_url, self.valid_data)
    eq_(post_response.status_code, 404)
13,
15,
13,
6,
1319036985
] |
def test_submission_help_listing(self):
    """The help listing should show the single published entry."""
    self.create_submission_help()
    listing = self.client.get(reverse('entry_help_list'))
    eq_(listing.status_code, 200)
    eq_(listing.context['page'].paginator.count, 1)
13,
15,
13,
6,
1319036985
] |
def store_rendered_templates(store, signal, sender, template, context, **kwargs):
    """
    Stores templates and contexts that are rendered.
    The context is copied so that it is an accurate representation at the time
    of rendering.
    Entirely based on the Django Test Client
    https://github.com/django/django/blob/master/django/test/client.py#L88
    """
    templates = store.setdefault('templates', [])
    templates.append(template)
    contexts = store.setdefault('context', ContextList())
    contexts.append(copy(context))
13,
15,
13,
6,
1319036985
] |
def __init__(self, *args, **kwargs):
    """Connect a template_rendered receiver so tests can inspect renders."""
    super(TestAddSubmissionView, self).__init__(*args, **kwargs)
    # Add context and template to the response
    on_template_render = curry(store_rendered_templates, {})
    # dispatch_uid prevents the receiver being connected more than once.
    signals.template_rendered.connect(on_template_render,
                                      dispatch_uid="template-render")
13,
15,
13,
6,
1319036985
] |
def test_add_submission_get(self):
    """An anonymous GET of the add-submission view should render OK."""
    request = self.factory.get('/')
    request.user = AnonymousUser()
    request.development = development_mock
    view_response = views.add_submission(request, self.ideation)
    eq_(view_response.status_code, 200)
13,
15,
13,
6,
1319036985
] |
def __init__(self, check_x_type : Union[PointerType, LLVMType, None]) -> None:
    """Remember the expected type of ``x`` (None means no type check)."""
    super().__init__()
    # Stored for later use by the contract's specification.
    self.check_x_type = check_x_type
408,
63,
408,
366,
1429220372
] |
def test_points_to_at_type(self):
    """Verify ``f`` against points-to contracts with varying type checks."""
    connect(reset_server=True)
    if __name__ == "__main__": view(LogResults())
    bcname = str(Path('tests','saw','test-files', 'points_to_at_type.bc'))
    mod = llvm_load_module(bcname)
    # No explicit check type: verification should succeed.
    result = llvm_verify(mod, "f", FPointsToContract(None))
    self.assertIs(result.is_success(), True)
    # Matching explicit type: verification should also succeed.
    result = llvm_verify(mod, "f", FPointsToContract(array_ty(2, i32)))
    self.assertIs(result.is_success(), True)
    # Negative cases kept for reference (currently disabled):
    # with self.assertRaises(VerificationError):
    #     llvm_verify(mod, "f", FPointsToContract(PointerType()))
    # with self.assertRaises(VerificationError):
    #     llvm_verify(mod, "f", FPointsToContract(ty.array(3, ty.i32)))
408,
63,
408,
366,
1429220372
] |
def __init__(self, *args, **kwargs):
    """Initialise the base class, then fetch the account's devices."""
    super(MobileMe, self).__init__(*args, **kwargs)
    # Device registries; presumably filled in by _get_devices() —
    # that helper is defined elsewhere, TODO confirm.
    self.devices = set()
    self._devices = {}
    self._get_devices()
2,
2,
2,
1,
1258709282
] |
def _auth(self, passwd):
    """Complete base-class auth, then prime the Find-My-iPhone session."""
    super(MobileMe, self)._auth(passwd)
    data = {
        'anchor': 'findmyiphone',
        'lang': 'en',
    }
    # NOTE(review): this GET appears to establish the findmyiphone web
    # session; the version header looks service-required — unverified.
    self._get('https://secure.me.com/wo/WebObjects/Account2.woa', data, headers={'X-Mobileme-Version': '1.0'})
2,
2,
2,
1,
1258709282
] |
def _add_device(self, id, type, cls, osver):
self._devices[id] = {
'id': id,
'type': type,
'class': cls,
'osver': osver,
} | wrboyce/autolat | [
2,
2,
2,
1,
1258709282
] |
def get_device(self, id=None):
    """Return the details dict for a device.

    If *id* is omitted it is inferred, which only works when exactly one
    device is registered; otherwise ``MobileMe.MultipleDevicesFound`` is
    raised.  Raises ``KeyError`` for an unknown *id*.
    """
    if id is None:
        if len(self._devices) == 1:
            # next(iter(...)) works on both Python 2 and 3 and avoids
            # materialising a key list (dict.keys()[0] is Python-2 only).
            id = next(iter(self._devices))
        else:
            self._logger.error('Multiple devices found and no ID specified, bailing.')
            raise MobileMe.MultipleDevicesFound('Device ID must be specified.')
    return self._devices[id]
2,
2,
2,
1,
1258709282
] |
def msg_device(self, msg, alarm=False, device_id=None):
    """Push a text message (optionally with an alarm sound) to a device.

    Returns True on success; on failure logs the response and returns None.
    """
    device = self.get_device(device_id)
    self._logger.info('Sending "%s" to device "%s" with%s alarm', msg, device['id'], 'out' if not alarm else '')
    # Payload shape expected by the MobileMe sendMessage endpoint.
    body = {
        'deviceClass': device['class'],
        'deviceId': device['id'],
        'deviceOsVersion': device['osver'],
        'deviceType': device['type'],
        'message': msg,
        'playAlarm': 'Y' if alarm else 'N',
    }
    data = {'postBody': json.dumps(body)}
    resp = self._js_post('https://secure.me.com/wo/WebObjects/DeviceMgmt.woa/wa/SendMessageAction/sendMessage', data)
    resp_data = json.loads(resp.read())
    # status == 1 appears to indicate success — TODO confirm against API.
    if resp_data['status'] == 1:
        return True
    self._logger.error('Sending message to device "%s" failed!', device['id'])
    self._logger.debug('%s', resp_data)
2,
2,
2,
1,
1258709282
] |
def __init__(self, json_data):
    """Build the object from a JSON blob.

    Every key except 'date'/'time' becomes a snake_case attribute; those
    two keys are combined into ``self.datetime`` and ``self.timestamp``.
    """
    data = json.loads(json_data)
    for k, v in data.iteritems():  # Python 2 dict API
        if k not in ('date', 'time'):
            setattr(self, self._uncamel(k), v)
    # Input format example: "January 1, 2010 5:30 PM".
    self.datetime = datetime.strptime('%s %s' % (data['date'], data['time']), '%B %d, %Y %I:%M %p')
    self.timestamp = int(time.mktime(self.datetime.timetuple()))
2,
2,
2,
1,
1258709282
] |
def _uncamel(self, str):
return ''.join('_%s' % c.lower() if c.isupper() else c for c in str) | wrboyce/autolat | [
2,
2,
2,
1,
1258709282
] |
def __init__(self, *args, **kwargs):
    """Extend the base action's parser with MobileMe credential options."""
    super(MobileMeAction, self).__init__(*args, **kwargs)
    credential_flags = (
        ('-m', '--mobileme-user', 'm_user',
         'MobileMe username, will be prompted for if not provided',
         'MOBILEMEUSER'),
        ('-M', '--mobileme-pass', 'm_pass',
         'MobileMe password, will be prompted for if not provided',
         'MOBILEMEPASS'),
    )
    for short_opt, long_opt, dest, help_text, metavar in credential_flags:
        self.parser.add_argument(short_opt, long_opt, dest=dest,
                                 help=help_text, metavar=metavar)
2,
2,
2,
1,
1258709282
] |
def setup(self):
self.parser.add_argument('-D', '--device', dest='device', help='Device ID', metavar='DEVICE')
self.parser.add_argument('-a', '--alarm', dest='alarm', action='store_true', help='Play a sound for 2 minutes with this message')
self.parser.add_argument('message', nargs='+', help='Message to be sent to device') | wrboyce/autolat | [
2,
2,
2,
1,
1258709282
] |
def setup(self):
self.parser.add_argument('-D', '--device', dest='device', help='Device ID', metavar='DEVICE') | wrboyce/autolat | [
2,
2,
2,
1,
1258709282
] |
def setup(self):
self.parser.add_argument('-D', '--device', dest='device', help='Device ID', metavar='DEVICE')
self.parser.add_argument('pin', type=int, help='PIN to lock the device with', metavar='PIN') | wrboyce/autolat | [
2,
2,
2,
1,
1258709282
] |
def extractThatbadtranslatorWordpressCom(item):
    '''
    Parser for 'thatbadtranslator.wordpress.com'
    '''
    title = item['title']
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
    # Ignore items without chapter/volume info, and ignore previews.
    if "preview" in title.lower() or not (chp or vol):
        return None
    known_tags = (
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    )
    for tagname, name, tl_type in known_tags:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp,
                                               frag=frag, postfix=postfix,
                                               tl_type=tl_type)
    return False
191,
16,
191,
3,
1437712243
] |
def extract_alleles( input_file, output_file=None, reference_file=None,
                     alignment_file=None,
                     method=METHOD,
                     sort=SORT,
                     loci=LOCI ):
    """Pick the top 2 Amplicon Analysis consensus seqs per group from a Fasta

    Sequences are grouped by locus, barcode, both, or not at all
    (``method``), ordered within each group by read count or accuracy
    (``sort``), and the selected subset is written to ``output_file``
    (derived from ``input_file`` when omitted).  Returns the output path.
    Raises ValueError for an unrecognised ``method`` or ``sort``.
    """
    # Explicit None/empty arguments fall back to the module defaults.
    method = method or METHOD
    loci = loci or LOCI
    # Set the output file if not specified
    output_file = output_file or _get_output_file( input_file )
    output_type = get_file_type( output_file )
    # If align to reference for breaking ties
    alignment_file = get_alignment_file( input_file, reference_file, alignment_file )
    alignments = list( BlasrReader( alignment_file ))
    # Run the appropriate grouping
    if method == 'locus':
        groups = _group_by_locus( alignments, loci )
    elif method == 'barcode':
        groups = _group_by_barcode( alignments )
    elif method == 'both':
        groups = _group_by_both( alignments, loci )
    elif method == 'all':
        # Degenerate grouping: every alignment is its own group.
        groups = {a.qname: [a] for a in alignments}
    else:
        msg = "Invalid Selection Metric: %s" % method
        log.error( msg )
        raise ValueError( msg )
    # Read the input sequences and use them to generate our sorting data
    sequences = read_sequences( input_file )
    if sort == 'num_reads':
        sorting_data = {s.name: consensus_size(s) for s in sequences}
    elif sort == 'accuracy':
        # Accuracy needs quality values, which only FASTQ provides.
        assert get_file_type(input_file) == 'fastq'
        sorting_data = {s.name: record_accuracy(s) for s in sequences}
    else:
        msg = "Invalid Sorting Metric: %s" % sort
        log.error( msg )
        raise ValueError( msg )
    log.info('Sorting sequences for selection according to "%s"' % sort)
    ordered = _sort_groups( groups, sorting_data )
    log.info('Selecting top sequences from %s according to the "%s" policy' % (input_file, method))
    selected = list( _select_sequences( ordered ))
    log.info('Selected %s sequences from %s total for further analysis' % (len(selected), len(sequences)))
    log.info('Writing the selected sequences out to %s' % output_file)
    subset = list( _subset_sequences( sequences, selected ))
    _write_output( subset, output_file, output_type )
    return output_file
21,
5,
21,
2,
1365456399
] |
def _group_by_barcode( alignments ):
"""Group reads by their barcode"""
groups = {}
for alignment in alignments:
name = alignment.qname
if name.startswith('Barcode'):
name = name[7:]
if name.startswith('_'):
name = name[1:]
barcode = name.split('_Cluster')[0]
try:
groups[barcode].append( alignment )
except KeyError:
groups[barcode] = [ alignment ]
return groups | bnbowman/HlaTools | [
21,
5,
21,
2,
1365456399
] |
def _sort_groups( groups, sorting_data ):
    """Order each group of records individually"""
    def rank( record ):
        # Higher sorting_data values sort first (reverse order below).
        return sorting_data[record.qname]
    return {locus: sorted( group, key=rank, reverse=True )
            for locus, group in groups.iteritems()}
21,
5,
21,
2,
1365456399
] |
def _subset_sequences( sequences, selected ):
"""Subset only the sequences that match"""
for record in sequences:
name = record.name.split()[0]
if name in selected:
yield record | bnbowman/HlaTools | [
21,
5,
21,
2,
1365456399
] |
def _get_output_file( input_file ):
    """Derive '<basename>.selected.<type>' from the input filename."""
    # rpartition drops the final extension, like '.'.join(split('.')[:-1]).
    root = input_file.rpartition('.')[0]
    return '%s.selected.%s' % (root, get_file_type( input_file ))
21,
5,
21,
2,
1365456399
] |
def __init__(self, autoencoders, preact_cors=None, postact_cors=None,
             layer_samplers=None):
    """Assemble a GSN from autoencoders plus per-layer callables.

    Parameters
    ----------
    autoencoders : list
        One Autoencoder per pair of adjacent layers.
    preact_cors : list
        Per-layer pre-activation corruptors; None entries become identity.
    postact_cors : list
        Per-layer post-activation corruptors; None entries become identity.
    layer_samplers : list
        Per-layer samplers; None entries become identity.
    """
    super(GSN, self).__init__(autoencoders)
    # only for convenience
    self.aes = self._layers
    # easy way to turn off corruption (True => corrupt, False => don't)
    self._corrupt_switch = True
    # easy way to turn off sampling
    self._sample_switch = True
    # easy way to not use bias (True => use bias, False => don't)
    self._bias_switch = True
    # check that autoencoders are the correct sizes by looking at previous
    # layer. We can't do this for the first ae, so we skip it.
    for i in xrange(1, len(self.aes)):
        assert (self.aes[i].weights.get_value().shape[0] ==
                self.aes[i - 1].nhid)
    # do some type checking and convert None's to identity function
    def _make_callable_list(previous):
        """Validate one per-layer list, mapping None -> identity."""
        if len(previous) != self.nlayers:
            raise ValueError("Need same number of corruptors/samplers as layers")
        if not all(map(lambda x: callable(x) or x is None, previous)):
            raise ValueError("All elements must either be None or be a callable")
        return map(lambda x: identity if x is None else x, previous)
    self._preact_cors = _make_callable_list(preact_cors)
    self._postact_cors = _make_callable_list(postact_cors)
    self._layer_samplers = _make_callable_list(layer_samplers)
2738,
1110,
2738,
201,
1290448850
] |
def _make_aes(layer_sizes, activation_funcs, tied=True):
    """
    Creates the Autoencoder objects needed by the GSN.
    Parameters
    ----------
    layer_sizes : list of int
        Layer widths; len(layer_sizes) - 1 autoencoders are built.
    activation_funcs : list
        One activation per layer: index 0 is the visible layer's decoder
        activation, index i+1 is the encoder activation of autoencoder i.
    tied : bool
        Whether each autoencoder uses tied weights.
    """
    aes = []
    assert len(activation_funcs) == len(layer_sizes)
    for i in xrange(len(layer_sizes) - 1):
        # activation for visible layer is aes[0].act_dec
        act_enc = activation_funcs[i + 1]
        act_dec = act_enc if i != 0 else activation_funcs[0]
        aes.append(
            Autoencoder(layer_sizes[i], layer_sizes[i + 1],
                        act_enc, act_dec, tied_weights=tied)
        )
    return aes
2738,
1110,
2738,
201,
1290448850
] |
def new(cls,
        layer_sizes,
        activation_funcs,
        pre_corruptors,
        post_corruptors,
        layer_samplers,
        tied=True):
    """
    An easy (and recommended) way to initialize a GSN.
    Parameters
    ----------
    layer_sizes : list
        A list of integers. The i_th element in the list is the size of
        the i_th layer of the network, and the network will have
        len(layer_sizes) layers.
    activation_funcs : list
        activation_funcs must be a list of the same length as layer_sizes
        where the i_th element is the activation function for the i_th
        layer. Each component of the list must refer to an activation
        function in such a way that the Autoencoder class recognizes the
        function. Valid values include a callable (which takes a symbolic
        tensor), a string that refers to a Theano activation function, or
        None (which gives the identity function).
    pre_corruptors : list
        pre_corruptors follows exactly the same format as the
        activation_funcs argument.
    post_corruptors : list
        post_corruptors follows exactly the same format as the
        activation_funcs argument.
    layer_samplers : list
        layer_samplers follows exactly the same format as the
        activation_funcs argument.
    tied : bool
        Indicates whether the network should use tied weights.
    Notes
    -----
    The GSN classes applies functions in the following order:
    - pre-activation corruption
    - activation
    - clamping applied
    - sampling
    - post-activation corruption
    All setting and returning of values occurs after applying the
    activation function (or clamping if clamping is used) but before
    applying sampling.
    """
    # Bug fix: activation_funcs was previously omitted from this list, so
    # it escaped the type/length validation the error messages promise.
    args = [layer_sizes, activation_funcs, pre_corruptors,
            post_corruptors, layer_samplers]
    if not all(isinstance(arg, list) for arg in args):
        raise TypeError("All arguments except for tied must be lists")
    if not all(len(arg) == len(args[0]) for arg in args):
        lengths = map(len, args)
        raise ValueError("All list arguments must be of the same length. " +
                         "Current lengths are %s" % lengths)
    aes = cls._make_aes(layer_sizes, activation_funcs, tied=tied)
    return cls(aes,
               preact_cors=pre_corruptors,
               postact_cors=post_corruptors,
               layer_samplers=layer_samplers)
2738,
1110,
2738,
201,
1290448850
] |
def get_params(self):
    """Collect the deduplicated parameters of every autoencoder."""
    unique_params = set()
    for autoencoder in self.aes:
        unique_params |= set(autoencoder.get_params())
    return list(unique_params)
2738,
1110,
2738,
201,
1290448850
] |
def nlayers(self):
    """
    Returns how many layers the GSN has.
    """
    # One more layer than autoencoders: each AE spans two adjacent layers.
    return 1 + len(self.aes)
2738,
1110,
2738,
201,
1290448850
] |
def _make_or_get_compiled(self, indices, clamped=False):
    """
    Compiles, wraps, and caches Theano functions for non-symbolic calls
    to get_samples.

    Parameters
    ----------
    indices : list of int
        Layer indices that receive data; part of the cache key.
    clamped : bool, optional
        Whether the step function must also take clamping inputs.

    Returns
    -------
    (f_init, f_step) : pair of callables
        Each wrapped function splits its raw output into two equal
        halves and returns them as a pair of lists.
    """
    def compile_f_init():
        # One input matrix per supplied layer index.
        mb = T.matrices(len(indices))
        zipped = safe_zip(indices, mb)
        f_init = theano.function(mb,
                                 self._set_activations(zipped, corrupt=True),
                                 allow_input_downcast=True)
        # handle splitting of concatenated data
        def wrap_f_init(*args):
            data = f_init(*args)
            length = len(data) / 2  # Python 2 integer division
            return data[:length], data[length:]
        return wrap_f_init
    def compile_f_step():
        prev = T.matrices(self.nlayers)
        if clamped:
            _initial = T.matrices(len(indices))
            _clamps = T.matrices(len(indices))
            z = self._update(copy.copy(prev),
                             clamped=safe_zip(indices, _initial, _clamps),
                             return_activations=True)
            f = theano.function(prev + _initial + _clamps, z,
                                on_unused_input='ignore',
                                allow_input_downcast=True)
        else:
            z = self._update(copy.copy(prev), return_activations=True)
            f = theano.function(prev, z, on_unused_input='ignore',
                                allow_input_downcast=True)
        def wrapped(*args):
            data = f(*args)
            length = len(data) / 2  # Python 2 integer division
            return data[:length], data[length:]
        return wrapped
    # things that require re-compiling everything
    state = (self._corrupt_switch, self._sample_switch, self._bias_switch)
    if hasattr(self, '_compiled_cache') and state == self._compiled_cache[0]:
        # already have some cached functions
        if indices == self._compiled_cache[1]:
            # everything is cached, return all but state and indices
            return self._compiled_cache[2:]
        else:
            # indices have changed, need to recompile f_init
            f_init = compile_f_init()
            cc = self._compiled_cache
            self._compiled_cache = (state, indices, f_init, cc[3])
            return self._compiled_cache[2:]
    else:
        # have no cached function (or incorrect state)
        f_init = compile_f_init()
        f_step = compile_f_step()
        self._compiled_cache = (state, indices, f_init, f_step)
        return self._compiled_cache[2:]
2738,
1110,
2738,
201,
1290448850
] |
def reconstruct(self, minibatch):
    """Reconstruct a one-entry minibatch (autoencoder-style interface)."""
    # included for compatibility with cost functions for autoencoders,
    # so assumes model is in unsupervised mode
    assert len(minibatch) == 1
    layer_idx = minibatch[0][0]
    return self.get_samples(minibatch, walkback=0, indices=[layer_idx])
2738,
1110,
2738,
201,
1290448850
] |
def _set_activations(self, minibatch, set_val=True, corrupt=False):
    """
    Initializes the GSN as specified by minibatch.
    Parameters
    ----------
    minibatch : list of (int, tensor_like)
        The minibatch parameter must be a list of tuples of form
        (int, tensor_like), where the int component represents the index
        of the layer (so 0 for visible, -1 for top/last layer) and the
        tensor_like represents the activation at that level. Layer
        indices not included in the minibatch will be set to 0. For
        tuples included in the minibatch, the tensor_like component can
        actually be None; this will result in that layer getting set to 0
        initially.
    set_val : bool, optional
        Determines whether the method sets self.activations.
    corrupt : bool, optional
        Instructs the method to return both a non-corrupted and corrupted
        set of activations rather than just non-corrupted.
    Notes
    -----
    This method creates a new list, not modifying an existing list.
    This method also does the first odd step in the network.
    """
    activations = [None] * self.nlayers
    mb_size = minibatch[0][1].shape[0]
    first_layer_size = self.aes[0].weights.shape[0]
    # zero out activations to start
    activations[0] = T.alloc(0, mb_size, first_layer_size)
    for i in xrange(1, len(activations)):
        # zeros_like(dot(...)) sizes layer i without hardcoding widths.
        activations[i] = T.zeros_like(
            T.dot(activations[i - 1], self.aes[i - 1].weights)
        )
    # set minibatch
    for i, val in minibatch:
        if val is not None:
            activations[i] = val
    indices = [t[0] for t in minibatch if t[1] is not None]
    # First odd step; the layers that were just set are skipped.
    self._update_odds(activations, skip_idxs=indices, corrupt=False)
    if set_val:
        self.activations = activations
    if corrupt:
        # Clean activations followed by a corrupted copy of them.
        return (activations +
                self.apply_postact_corruption(activations[:],
                                              xrange(len(activations))))
    else:
        return activations
2738,
1110,
2738,
201,
1290448850
] |
def _update_evens(self, activations, clamped=None):
    """
    Updates just the even layers of the network.

    Parameters
    ----------
    activations : list of tensor_likes
        Current activations; updated in place.
    clamped : list of tuples, optional
        Clamping spec as accepted by ``_apply_clamping``.

    Returns
    -------
    list of (int, tensor_like)
        Snapshot of even-layer activations taken *before*
        post-activation corruption mutates them.
    """
    evens = xrange(0, len(activations), 2)
    self._update_activations(activations, evens)
    if clamped is not None:
        self._apply_clamping(activations, clamped)
    # Snapshot pre-corruption values; corruption then mutates in place.
    evens_copy = [(i, activations[i]) for i in evens]
    self.apply_postact_corruption(activations, evens)
    return evens_copy
2738,
1110,
2738,
201,
1290448850
] |
def _apply_clamping(activations, clamped, symbolic=True):
"""
Resets the value of some layers within the network.
Parameters
----------
activations : list
List of symbolic tensors representing the current activations.
clamped : list of (int, matrix, matrix or None) tuples
The first component of each tuple is an int representing the
index of the layer to clamp.
The second component is a matrix of the initial values for that
layer (ie what we are resetting the values to).
The third component is a matrix mask indicated which indices in
the minibatch to clamp (1 indicates clamping, 0 indicates not).
The value of None is equivalent to the 0 matrix (so no clamping).
If symbolic is true then matrices are Theano tensors, otherwise
they should be numpy matrices.
symbolic : bool, optional
Whether to execute with symbolic Theano tensors or numpy matrices.
"""
for idx, initial, clamp in clamped:
if clamp is None:
continue
# take values from initial
clamped_val = clamp * initial
# zero out values in activations
if symbolic:
activations[idx] = T.switch(clamp, initial, activations[idx])
else:
activations[idx] = np.switch(clamp, initial, activations[idx])
return activations | lisa-lab/pylearn2 | [
2738,
1110,
2738,
201,
1290448850
] |
def _apply_corruption(activations, corruptors, idx_iter):
"""
Applies a list of corruptor functions to all layers.
Parameters
----------
activations : list of tensor_likes
Generally gsn.activations
corruptors : list of callables
Generally gsn.postact_cors or gsn.preact_cors
idx_iter : iterable
An iterable of indices into self.activations. The indexes
indicate which layers the post activation corruptors should be
applied to.
"""
assert len(corruptors) == len(activations)
for i in idx_iter:
activations[i] = corruptors[i](activations[i])
return activations | lisa-lab/pylearn2 | [
2738,
1110,
2738,
201,
1290448850
] |
def apply_postact_corruption(self, activations, idx_iter, sample=True):
    """Optionally sample, then corrupt, the given layers in place."""
    if sample:
        self.apply_sampling(activations, idx_iter)
    if not self._corrupt_switch:
        return activations
    # _apply_corruption mutates and returns the same list.
    return self._apply_corruption(activations, self._postact_cors,
                                  idx_iter)
2738,
1110,
2738,
201,
1290448850
] |
def _update_activations(self, activations, idx_iter):
    """
    Actually computes the activations for all indices in idx_iters.
    This method computes the values for a layer by computing a linear
    combination of the neighboring layers (dictated by the weight matrices),
    applying the pre-activation corruption, and then applying the layer's
    activation function.
    Parameters
    ----------
    activations : list of tensor_likes
        The activations to update (could be self.activations). Updates
        in-place.
    idx_iter : iterable
        An iterable of indices into self.activations. The indexes
        indicate which layers should be updated.
        Must be able to iterate over idx_iter multiple times.
    """
    # Contribution from the layer above (decoder direction).
    from_above = lambda i: ((self.aes[i].visbias if self._bias_switch else 0) +
                            T.dot(activations[i + 1],
                                  self.aes[i].w_prime))
    # Contribution from the layer below (encoder direction).
    from_below = lambda i: ((self.aes[i - 1].hidbias if self._bias_switch else 0) +
                            T.dot(activations[i - 1],
                                  self.aes[i - 1].weights))
    for i in idx_iter:
        # first compute the hidden activation
        if i == 0:
            # Bottom layer only has a neighbor above.
            activations[i] = from_above(i)
        elif i == len(activations) - 1:
            # Top layer only has a neighbor below.
            activations[i] = from_below(i)
        else:
            activations[i] = from_below(i) + from_above(i)
    self.apply_preact_corruption(activations, idx_iter)
    for i in idx_iter:
        # Using the activation function from lower autoencoder
        act_func = None
        if i == 0:
            act_func = self.aes[0].act_dec
        else:
            act_func = self.aes[i - 1].act_enc
        # ACTIVATION
        # None implies linear
        if act_func is not None:
            activations[i] = act_func(activations[i])
2738,
1110,
2738,
201,
1290448850
] |
def convert(cls, gsn, input_idx=0, label_idx=None):
"""
'convert' essentially serves as the constructor for JointGSN.
Parameters
----------
gsn : GSN
input_idx : int
The index of the layer which serves as the "input" to the
network. During classification, this layer will be given.
Defaults to 0.
label_idx : int
The index of the layer which serves as the "output" of the
network. This label is predicted during classification.
Defaults to top layer of network.
"""
gsn = copy.copy(gsn)
gsn.__class__ = cls
gsn.input_idx = input_idx
gsn.label_idx = label_idx or (gsn.nlayers - 1)
return gsn | lisa-lab/pylearn2 | [
2738,
1110,
2738,
201,
1290448850
] |
def _get_aggregate_classification(self, minibatch, trials=10, skip=0):
    """
    See classify method.
    Returns the prediction vector aggregated over all time steps where
    axis 0 is the minibatch item and axis 1 is the output for the label.
    """
    # Clamp the entire input layer (mask of all ones).
    clamped = np.ones(minibatch.shape, dtype=np.float32)
    data = self.get_samples([(self.input_idx, minibatch)],
                            walkback=self.calc_walkback(trials + skip),
                            indices=[self.label_idx],
                            clamped=[clamped],
                            symbolic=False)
    # 3d tensor: axis 0 is time step, axis 1 is minibatch item,
    # axis 2 is softmax output for label (after slicing)
    data = np.asarray(data[skip:skip+trials])[:, 0, :, :]
    # Average predictions over the retained time steps.
    return data.mean(axis=0)
2738,
1110,
2738,
201,
1290448850
] |
def read_leda(path):
    """Read a graph in LEDA.GRAPH format from path.
    Returns the graph produced by parse_leda.
    (The previous docstring incorrectly said GraphML.)"""
    fh=_get_fh(path,mode='r')
    G=parse_leda(fh)
    return G
9,
2,
9,
48,
1409685514
] |
def parse_leda(lines):
    """Parse LEDA.GRAPH format from string or iterable.
    Returns a Graph or DiGraph.

    NOTE(review): this block appears truncated in this view — the
    node/edge parsing and the return statement are not visible.
    """
    if is_string_like(lines): lines=iter(lines.split('\n'))
    # Drop comment ('#...') and blank lines before parsing.
    lines = iter([line.rstrip('\n') for line in lines \
            if not (line.startswith('#') or line.startswith('\n') or line=='')])
    # Skip three header lines (presumably the LEDA preamble — unverified).
    for i in range(3):
        lines.next()  # Python 2 iterator API
    # Graph
    du = int(lines.next()) # -1 directed, -2 undirected
    if du==-1:
        G = networkx.DiGraph()
    else:
        G = networkx.Graph()
9,
2,
9,
48,
1409685514
] |
def createserver(host="127.0.0.1", port=10123,
handler_factory=bjsonrpc.handlers.NullHandler,
sock=None, http=False):
"""
Creates a *bjson.server.Server* object linked to a listening socket. | deavid/bjsonrpc | [
40,
20,
40,
2,
1292884398
] |
def connect(host="127.0.0.1", port=10123,
sock=None, handler_factory=bjsonrpc.handlers.NullHandler):
"""
Creates a *bjson.connection.Connection* object linked to a connected
socket. | deavid/bjsonrpc | [
40,
20,
40,
2,
1292884398
] |
def extractYuzukiteaWordpressCom(item):
    '''
    Parser for 'yuzukitea.wordpress.com'
    '''
    title = item['title']
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
    # Skip items with no chapter/volume info, and skip previews.
    if "preview" in title.lower() or not (chp or vol):
        return None
    for tagname, name, tl_type in (('PRC', 'PRC', 'translated'),
                                   ('Loiterous', 'Loiterous', 'oel')):
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp,
                                               frag=frag, postfix=postfix,
                                               tl_type=tl_type)
    return False
191,
16,
191,
3,
1437712243
] |
def init(i):
    """
    Input:  {}

    Output: {
              return  - return code = 0, if successful
                                      > 0, if error
              (error) - error text if return > 0
            }
    """
    # Nothing to initialize; report success in the standard CK format.
    return {'return': 0}
511,
83,
511,
31,
1415207683
] |
def detect(i):
    """
    Thin alias for the "check" API — see that function for details.
    """
    return check(i)
511,
83,
511,
31,
1415207683
] |
def internal_detect(i):
    """
    Input:  {
              (host_os)           - host OS (detect, if omitted)
              (target_os)         - target OS (detect, if omitted)
              (target_device_id)  - target device ID (detect, if omitted)

              (data_uoa) or (uoa) - software UOA entry
                 or
              (tags)              - search UOA by tags (separated by comma)

              (tool)              - force this tool name
              (env)               - if !='', use this env string before calling compiler (to set up env)
              (show)              - if 'yes', show output
              (force_version)     - if !='', use this version
            }

    Output: {
              return      - return code = 0, if successful
                                          > 0, if error
              (error)     - error text if return > 0

              version_str - version as string
              version_lst - version as list of strings
              version_raw - raw list of strings (output of --version)
            }
    """
    import os
    o=i.get('out','')
    # Check host/target OS/CPU
    hos=i.get('host_os','')
    tos=i.get('target_os','')
    tdid=i.get('target_device_id','')
    r=ck.access({'action':'detect',
                 'module_uoa':cfg['module_deps']['platform.os'],
                 'host_os':hos,
                 'target_os':tos,
                 'target_device_id':tdid,
                 'skip_info_collection':'yes'})
    if r['return']>0: return r
    hos=r['host_os_uid']
    hosx=r['host_os_uoa']
    hosd=r['host_os_dict']
    tos=r['os_uid']
    tosx=r['os_uoa']
    tosd=r['os_dict']
    hplat=hosd['ck_name']
    tplat=tosd['ck_name']
    env=i.get('env','')
    # Platform-specific shell conventions (variable markers, separators...).
    ubtr=hosd.get('use_bash_to_run','')
    svarb=hosd.get('env_var_start','')
    svarb1=hosd.get('env_var_extra1','')
    svare=hosd.get('env_var_stop','')
    svare1=hosd.get('env_var_extra2','')
    sexe=hosd.get('set_executable','')
    sbp=hosd.get('bin_prefix','')
    envsep=hosd.get('env_separator','')
    scall=hosd.get('env_call','')
    sext=hosd.get('script_ext','')
    # Check soft UOA
    duoa=i.get('uoa','')
    if duoa=='': duoa=i.get('data_uoa','')
    if duoa=='':
        # Search by tags instead of an explicit UOA; first hit wins.
        tags=i.get('tags','')
        if tags!='':
            r=ck.access({'action':'search',
                         'module_uoa':work['self_module_uid'],
                         'tags':tags})
            if r['return']>0: return r
            l=r['lst']
            if len(l)>0:
                duid=l[0].get('data_uid')
                duoa=duid
    if duoa=='':
        return {'return':1, 'error':'software entry was not found'}
    # Load the software description entry.
    r=ck.access({'action':'load',
                 'module_uoa':work['self_module_uid'],
                 'data_uoa':duoa})
    if r['return']>0: return r
    d=r['dict']
    p=r['path']
    duoa=r['data_uoa']
    duid=r['data_uid']
    if o=='con':
        x=duoa
        if duid!=duoa: x+=' ('+duid+')'
        ck.out('Software description entry found: '+x)
    # Check if customize script is redirected into another entry:
    #
    another_entry_with_customize_script=d.get('use_customize_script_from_another_entry', None)
    if another_entry_with_customize_script:
        r=ck.access({'action':'find',
                     'module_uoa': another_entry_with_customize_script.get('module_uoa', work['self_module_uid']),
                     'data_uoa': another_entry_with_customize_script.get('data_uoa','')
                    })
        if r['return']>0: return r
        customization_script_path = r['path']
    else:
        customization_script_path = p
    cs=None
    rx=ck.load_module_from_path({'path':customization_script_path, 'module_code_name':cfg['custom_script_name'], 'skip_init':'yes'})
    if rx['return']==0:
        cs=rx['code']
    elif another_entry_with_customize_script or not rx['error'].startswith("can't find module code"):
        # A missing script is tolerated only for the entry's own optional
        # customize module; an explicit redirect must resolve.
        return rx
    # Checking name
    cus=d.get('customize',{})
    tool=i.get('tool','')
    if tool=='':
        if cus.get('soft_file_as_env','')!='':
            tool=svarb+cus['soft_file_as_env']+svare
        if cus.get('soft_file_not_tool','')!='yes':
            ry=prepare_target_name({'host_os_dict':hosd,
                                    'target_os_dict':tosd,
                                    'cus':cus})
            if ry['return']>0: return ry
            tool=ry['tool']
    # Preparing CMD
    soft_version_cmd=cus.get('soft_version_cmd',{}).get(hplat,'')
    if o=='con':
        ck.out('')
        ck.out('Prepared cmd: '+soft_version_cmd+' ...')
    # Check version (via customized script) ...
    ver=''
    lst=[]
    ii={'full_path':tool,
        'bat':env,
        'host_os_dict':hosd,
        'target_os_dict':tosd,
        'cmd':soft_version_cmd,
        'use_locale':cus.get('use_locale_for_version',''),
        'customize':cus,
        'custom_script_obj':cs,
        'data_uid': duid
       }
    if ck.cfg.get('minimize_soft_detect_output','')!='yes':
        ii['out']=o
    rx=get_version(ii)
    if rx['return']==0:
        ver=rx['version']
        lst=rx['version_lst']
    if ver=='':
        return {'return':16, 'error':'version was not detected'}
    # Split version into its components.
    rx=split_version({'version':ver})
    if rx['return']>0: return rx
    sver=rx['version_split']
    if i.get('show','')=='yes':
        ck.out('Output:')
        ck.out('')
        for q in lst:
            ck.out(' '+q)
    if o=='con':
        ck.out('')
        ck.out('Version detected: '+ver)
    return {'return':0, 'version_str':ver,
            'version_lst':sver,
            'version_raw':lst}
511,
83,
511,
31,
1415207683
] |
def setup(i):
"""
Input: {
(host_os) - host OS (detect, if omitted)
(target_os) - target OS (detect, if omitted)
(target_device_id) - target device ID (detect, if omitted)
(data_uoa) or (uoa) - soft configuration UOA
or
(tags) - search UOA by tags (separated by comma)
(soft_name) - use this user friendly name for environment entry
(soft_add_name) - add extra name to above name (such as anaconda)
(customize) - dict with custom parameters
(usually passed to customize script)
skip_add_dirs
skip_add_to_path
skip_add_to_bin
skip_add_to_ld_path
add_include_path
skip_path - skiping installation path (for local versions)
version - add this version
skip_version - if 'yes', do not add version
(skip_path) - skiping installation path (for local versions)
(env) - update default env with this dict
(ienv) - supply extra install_env overrides via this mechanism
(deps) - list with dependencies (in special format, possibly resolved (from package))
(install_path) - path with soft is installed
(full_path) - full path to a tool or library (install_path will be calculated automatically)
(bat_file) - if !='', record environment to this bat file,
instead of creating env entry
(quiet) - if 'yes', minimize questions
(env_data_uoa) - use this data UOA to record (new) env
(env_repo_uoa) - use this repo to record new env
(env_new) - if 'yes', do not search for environment (was already done in package, for example)
(package_uoa) - if called from package, record package_uoa just in case
(reset_env) - if 'yes', do not use environment from existing entry, but use original one
(extra_version) - add extra version, when registering software
(for example, -trunk-20160421)
(skip_device_info_collection) - if 'yes', do not collect device info
}
Output: {
return - return code = 0, if successful
> 0, if error
(error) - error text if return > 0
env_data_uoa - environment entry UOA
env_data_uid - environment entry UID
deps - resolved dependencies (if any)
}
"""
import os
import json
o=i.get('out','')
env_new=i.get('env_new','')
########################################################################
# Check host/target OS/CPU
hos=i.get('host_os','')
tos=i.get('target_os','')
tdid=i.get('target_device_id','')
if tdid=='': tdid=i.get('device_id','')
ii={'action':'detect',
'module_uoa':cfg['module_deps']['platform.os'],
'host_os':hos,
'target_os':tos,
'target_device_id':tdid,
'skip_info_collection':'no'}
if i.get('skip_device_info_collection','')=='yes':
ii['skip_info_collection']='yes'
r=ck.access(ii)
if r['return']>0: return r
features=r.get('features',{})
hos=r['host_os_uid']
hosx=r['host_os_uoa']
hosd=r['host_os_dict']
tos=r['os_uid']
tosx=r['os_uoa']
tosd=r['os_dict']
tbits=tosd.get('bits','')
hplat=hosd['ck_name']
tplat=tosd['ck_name']
# Check if base is different
x1=hosd.get('base_uid','')
x2=hosd.get('base_uoa','')
if x1!='' and x2!='':
hos=x1
hosx=x2
x1=tosd.get('base_uid','')
x2=tosd.get('base_uoa','')
if x1!='' and x2!='':
tos=x1
tosx=x2
# Check soft UOA
duoa=i.get('uoa','')
if duoa=='': duoa=i.get('data_uoa','')
tags=i.get('tags','')
if duoa=='':
xcids=i.get('xcids',[])
if len(xcids)>0:
duoa=xcids[0].get('data_uoa','')
duid=duoa
if duoa=='' and tags!='':
r=ck.access({'action':'search',
'module_uoa':work['self_module_uid'],
'tags':tags})
if r['return']>0: return r
l=r['lst']
if len(l)>0:
duid=l[0].get('data_uid')
duoa=duid
d={}
p=''
########################################################################
if duoa=='':
# Try to detect CID in current path
rx=ck.detect_cid_in_current_path({})
if rx['return']==0:
duoa=rx.get('data_uoa','')
if duoa!='':
# Load defined or found soft entry
r=ck.access({'action':'load',
'module_uoa':work['self_module_uid'],
'data_uoa':duoa})
if r['return']>0: return r
d=r['dict']
p=r['path']
duoa=r['data_uoa']
duid=r['data_uid']
if duoa=='':
try:
p=os.getcwd()
except OSError:
os.chdir('..')
p=os.getcwd()
pc=os.path.join(p, ck.cfg['subdir_ck_ext'], ck.cfg['file_meta'])
found=False
if os.path.isfile(pc):
r=ck.load_json_file({'json_file':pc})
if r['return']==0:
d=r['dict']
found=True
if not found:
return {'return':1, 'error':'software UOA (data_uoa) is not defined'}
if o=='con':
if duoa!='' and duid!='':
x=': '+duoa
if duid!=duoa: x+=' ('+duid+')'
else:
x=' in local directory'
ck.out(' Software entry found'+x)
# Check deps, customize, install path
ltags=d.get('tags',[])
deps=d.get('deps',{})
env=d.get('env',{})
cus=d.get('customize',{})
pi=''
csp=d.get('can_skip_path','')
extra_version=i.get('extra_version', cus.get('extra_version',''))
# Add tags from the search!
for q in tags.split(','):
q1=q.strip()
if q1!='' and q1 not in ltags: ltags.append(q1)
# Finish tags
tg='host-os-'+hosx
if tg not in ltags: ltags.append(tg)
tg='target-os-'+tosx
if tg not in ltags: ltags.append(tg)
tg=tbits+'bits'
if tg not in ltags: ltags.append(tg)
########################################################################
# Check if environment already set (preload to update)
enduoa=i.get('env_data_uoa','')
enruoa=i.get('env_repo_uoa','')
update=False
if enduoa!='':
rx=ck.access({'action':'load',
'module_uoa':cfg['module_deps']['env'],
'data_uoa':enduoa,
'repo_uoa':enruoa})
if rx['return']==0:
update=True
edx=rx['dict']
cus.update(edx.get('customize',{}))
deps=edx.get('deps',{})
if i.get('reset_env','')!='yes' and 'tmp' not in edx.get('tags',[]):
env=edx.get('env',{})
pi=cus.get('path_install','')
# Update from input
udeps=i.get('deps',{})
deps.update(udeps)
uenv=i.get('env',{})
env.update(uenv)
ucus=i.get('customize',{})
cus.update(ucus)
envp=cus.get('env_prefix','')
envps=envp+'_SET'
if i.get('soft_name','')!='': # (direct input overrides meta-data)
dname = i['soft_name']
else:
dname = d.get('soft_name','') + cus.get('package_extra_name', '')
dname += i.get('soft_add_name','')
ienv=i.get('ienv',{}) # override install_env using command-line options
for ienv_key in ienv:
if 'install_env' not in cus: # manual vivification
cus['install_env'] = {}
cus['install_env'][ienv_key] = ienv[ienv_key]
pi1=i.get('install_path','')
if pi1!='': pi=pi1
fp=i.get('full_path','')
########################################################################
# Check meta
setup={'host_os_uoa':hos,
'target_os_uoa':tos,
'target_os_bits':tbits}
# Resolve deps (if not ignored, such as when installing local version with all dependencies set)
if cus.get('ignore_deps','')=='yes':
deps={}
sdeps=''
sdeps1=''
if len(deps)>0:
ii={'action':'resolve',
'module_uoa':cfg['module_deps']['env'],
'host_os':hos,
'target_os':tos,
'target_device_id':tdid,
'repo_uoa':enruoa,
'deps':deps}
if o=='con': ii['out']='con'
rx=ck.access(ii)
if rx['return']>0: return rx
sdeps=rx['bat']
sdeps1=rx['cut_bat']
deps=rx['deps'] # Update deps (add UOA)
for q in deps:
v=deps[q]
vuoa=v.get('uoa','') # can be undefined if OS specific
if vuoa!='': setup['deps_'+q]=vuoa
# Check if customize script is redirected into another entry:
#
another_entry_with_customize_script=d.get('use_customize_script_from_another_entry', None)
if another_entry_with_customize_script:
r=ck.access({'action':'find',
'module_uoa': another_entry_with_customize_script.get('module_uoa', work['self_module_uid']),
'data_uoa': another_entry_with_customize_script.get('data_uoa','')
})
if r['return']>0: return r
customization_script_path = r['path']
else:
customization_script_path = p
cs=None
rx=ck.load_module_from_path({'path':customization_script_path, 'module_code_name':cfg['custom_script_name'], 'data_uoa':duoa ,'cfg':d, 'skip_init':'yes'})
if rx['return']==0:
cs=rx['code']
elif another_entry_with_customize_script or not rx['error'].startswith("can't find module code"):
return rx
########################################################################
########################################################################
########################################################################
########################################################################
# Starting processing soft
# Check via full path first
if pi=='' and fp=='' and o=='con' and cus.get('skip_path','')!='yes' and i.get('skip_path','')!='yes' and not update:
ck.out('')
ry=prepare_target_name({'host_os_dict':hosd,
'target_os_dict':tosd,
'cus':cus})
if ry['return']>0: return ry
sname=ry['tool']
y0='installed library, tool or script'
if sname!='':
suname=d.get('soft_name','')
if cus.get('skip_soft_file_is_asked','')=='yes':
if suname!='': y0=suname
else:
y0=sname
if suname!='': y0=suname+' ('+sname+')'
y1='full path to '+y0
y2=''
y3=cus.get('soft_path_example',{}).get(hplat,'')
if y3!='': y2=' (example: '+y3+')'
r=ck.inp({'text':'Enter '+y1+y2+': '})
fp=r['string'].strip()
# Check if file really exists and check version if a tool
ver=cus.get('version','')
vercus=ver
if fp!='':
if cus.get('skip_file_check','')!='yes' and not os.path.exists(fp):
return {'return':1, 'error':'software not found in a specified path ('+fp+')'}
skip_existing='no'
if cus.get('force_cmd_version_detection','')=='yes':
skip_existing='yes'
ver=''
if ver=='':
soft_version_cmd=cus.get('soft_version_cmd',{}).get(hplat,'')
if o=='con':
ck.out('')
ck.out(' Attempting to detect version automatically (if supported) ...')
# Check version (via customized script) ...
ii={'full_path':fp,
'bat':sdeps,
'host_os_dict':hosd,
'target_os_dict':tosd,
'cmd':soft_version_cmd,
'customize':cus,
'custom_script_obj':cs,
'skip_existing':skip_existing,
'skip_add_target_file':cus.get('soft_version_skip_add_target_file',''),
'use_locale':cus.get('use_locale_for_version',''),
'data_uid': duid,
'deps': deps,
}
if ck.cfg.get('minimize_soft_detect_output','')!='yes':
ii['out']=o
rx=get_version(ii)
if rx['return']==0:
ver=rx['version']
if o=='con':
ck.out('')
ck.out(' Detected version: '+ver)
elif rx['return']!=16 and rx['return']!=22:
return rx
else:
if o=='con':
ck.out('')
ck.out(' WARNING: didn\'t manage to automatically detect software version!')
########################################################################
# Get various git info ...
ss1=''
ss2=''
ss3=''
ss4=''
ss5=''
ver_to_search=ver
if cus.get('use_git_revision','')=='yes':
import datetime
psrc=cus.get('git_src_dir','')
dfp=i.get('full_path_install','')
if dfp!='':
if psrc!='':
dfp=os.path.join(dfp, psrc)
try:
pwd1=os.getcwd()
except OSError:
os.chdir('..')
pwd1=os.getcwd()
if os.path.isdir(dfp):
os.chdir(dfp)
r=ck.access({'action':'run_and_get_stdout',
'module_uoa':cfg['module_deps']['os'],
'cmd':['git','rev-parse','--short','HEAD']})
if r['return']==0 and r['return_code']==0:
ss1=r['stdout'].strip()
r=ck.access({'action':'run_and_get_stdout',
'module_uoa':cfg['module_deps']['os'],
'cmd':['git','log','-1','--format=%cd']})
if r['return']==0 and r['return_code']==0:
ss2=r['stdout'].strip()
if ss2!='':
ss2x=ss2
j=ss2x.find(' +')
if j<0:
j=ss2x.find(' -')
if j>0:
ss2x=ss2[:j]
x=datetime.datetime.strptime(ss2x, '%a %b %d %H:%M:%S %Y')
ss3=x.isoformat()
ss4=ss3[:10].replace('-','')
if ss1!='':
ss5=ss4+'-'+ss1
if 'git_info' not in cus:
cus['git_info']={}
cus['git_info']['revision']=ss1
cus['git_info']['datetime']=ss2
cus['git_info']['iso_datetime']=ss3
cus['git_info']['iso_datetime_cut']=ss4
cus['git_info']['iso_datetime_cut_revision']=ss5
if o=='con':
ck.out('')
if ss1!='':
ck.out('Detected GIT revision: '+ss1)
if ss2!='':
ck.out('Detected GIT date time of last commit: '+ss2)
os.chdir(pwd1)
ver+='-'+ss1
########################################################################
# Check if force version
x=i.get('force_version','').strip()
if x!='': ver=i['force_version']
########################################################################
# Ask for version if was not detected or is not explicitly specified (for example, from a package)
if ver=='' and cus.get('skip_version','')!='yes' and o=='con':
ck.out('')
r=ck.inp({'text':'Enter version of this software (for example, 3.21.6-2 or press Enter if default/unknown): '})
ver=r['string'].strip().lower()
ver_to_search=ver
# Add extra, if needed (useful for changing trunks)
if extra_version!='':
ver+=extra_version
ver_to_search+=extra_version
# If customized version has changed, try to check env again ...
if vercus!=ver:
env_new='no'
# Split version
rx=split_version({'version':ver})
if rx['return']>0: return rx
sver=rx['version_split']
# Add version to setup and separate into tags
setup['version']=ver_to_search
setup['version_split']=sver
# Prepare tags from version
if ver!='':
x=''
for q in sver:
if x!='':x+='.'
x+=str(q)
tg='v'+x
if tg not in ltags:
ltags.append(tg)
unsplit_version_tag_prefix = cus.get('unsplit_version_to_tags_prefixed_with')
if unsplit_version_tag_prefix != None: # NB: empty string is treated differently from absence!
ltags.append( unsplit_version_tag_prefix + ver_to_search )
tags_csv = ','.join( [t.strip() for t in ltags if t] )
########################################################################
# Search if environment is already registered for this version
# (to delete or reuse it)
finish=False
if enduoa=='' and env_new!='yes':
if o=='con':
ck.out('')
ck.out('Searching if environment already exists using:')
ck.out(' * Tags: '+tags_csv)
if len(deps)>0:
for q in deps:
v=deps[q]
vuoa=v.get('uoa','')
if vuoa!='':
ck.out(' * Dependency: '+q+'='+v.get('uoa',''))
r=ck.access({'action':'search',
'module_uoa':cfg['module_deps']['env'],
'repo_uoa':enruoa,
'tags':tags_csv,
'search_dict':{'setup':setup}})
if r['return']>0: return r
lst=r['lst']
if len(lst)>0:
fe=lst[0]
enduoa=fe['data_uoa']
enduid=fe['data_uid']
if o=='con':
x=enduoa
if enduid!=enduoa: x+=' ('+enduid+')'
ck.out('')
ck.out('Environment already registered for this version: '+x)
update=False
if i.get('update','')=='yes':
update=True
if not update:
if o=='con':
ck.out('')
if i.get('quiet','')=='yes':
dl='y'
else:
r=ck.inp({'text':'Would you like to delete this entry and re-register environment (Y/n): '})
dl=r['string'].strip().lower()
if dl=='' or dl=='y' or dl=='yes':
update=False
rx=ck.access({'action':'delete',
'module_uoa':cfg['module_deps']['env'],
'data_uoa':enduoa,
'repo_uoa':enruoa})
if rx['return']>0: return rx
else:
ck.out('')
r=ck.inp({'text':'Would you like to update this entry (Y/n): '})
upd=r['string'].strip().lower()
if upd=='' or upd=='y' or upd=='yes':
update=True
else:
finish=True
if update:
rx=ck.access({'action':'load',
'module_uoa':cfg['module_deps']['env'],
'data_uoa':enduoa,
'repo_uoa':enruoa})
if rx['return']>0: return rx
edx=rx['dict']
cus1=edx.get('customize',{})
deps1=edx.get('deps',{})
env1=edx.get('env',{})
cus.update(cus1)
deps.update(deps1)
env.update(env1)
pi=cus.get('path_install','')
else:
if o=='con':
ck.out('')
ck.out(' Environment with above tags is not yet registered in CK ...')
############################################################
if not finish:
# Prepare environment and batch
sb=''
if o=='out':
ck.out('')
ck.out('Preparing environment and batch file ...')
sdirs=hosd.get('dir_sep','')
wb=tosd.get('windows_base','')
rem=hosd.get('rem','')
eset=hosd.get('env_set','')
svarb=hosd.get('env_var_start','')
svare=hosd.get('env_var_stop','')
evs=hosd.get('env_var_separator','')
eifs=hosd.get('env_quotes_if_space','')
ellp=hosd.get('env_ld_library_path','')
if ellp=='': ellp='LD_LIBRARY_PATH'
elp=hosd.get('env_library_path','')
if elp=='': elp='LIBRARY_PATH'
# Check installation path
if fp=='' and cus.get('skip_path','')!='yes' and i.get('skip_path','')!='yes' and not update:
if o=='con': | ctuning/ck | [
511,
83,
511,
31,
1415207683
] |
def search_tool(i):
"""
Input: {
path_list - path list
file_name - name of file to find (can be with patterns)
(recursion_level_max) - if >0, limit dir recursion
(can_be_dir) - if 'yes', return matched directories as well
(return_symlinks) - if 'yes', symlinks are returned as-is. Otherwise, they're resolved
}
Output: {
return - return code = 0, if successful
> 0, if error
(error) - error text if return > 0
list - list of file (see ck.list_all_files)
elapsed_time - elapsed time
}
"""
o=i.get('out','')
import time
import os
start_time = time.time()
pl=i['path_list']
fn=i['file_name']
pt=''
rlm=i.get('recursion_level_max',0)
cbd=i.get('can_be_dir','')
return_symlinks = i.get('return_symlinks','')
if fn.find('?')>=0 or fn.find('*')>=0:
pt=fn
fn=''
lst=[]
for p in pl:
if o=='con':
ck.out(' * Searching in '+p+' ...')
r=list_all_files({'path':p,
'file_name':fn,
'pattern':pt,
'can_be_dir':cbd,
'recursion_level_max':rlm})
if r['return']>0: return r
lst.extend( r['list'] ) | ctuning/ck | [
511,
83,
511,
31,
1415207683
] |
def _internal_check_encoded_path_is_dir( path ):
"""
Need this complex structure to support UTF-8 file names in Python 2.7
"""
import os
import sys
try:
if os.path.isdir( path ):
return path
except Exception as e:
try:
path = path.encode('utf-8')
if os.path.isdir( path ):
return path
except Exception as e:
try:
path = path.encode(sys.stdin.encoding)
if os.path.isdir(p):
return path
except Exception as e:
pass
return None | ctuning/ck | [
511,
83,
511,
31,
1415207683
] |
def list_all_files(i):
    """
    Recursively collect files (and optionally directories) under a top-level path.

    Input:  {
              path                  - top level path
              (file_name)           - search for a specific file name
              (pattern)             - return only files with this pattern
              (path_ext)            - path extension (needed for recursion)
              (can_be_dir)          - if 'yes', return matched directories as well
              (recursion_level_max) - if >0, limit dir recursion
            }

    Output: {
              return       - return code =  0, if successful
                                         >  0, if error
              (error)      - error text if return > 0

              list         - list of found files
            }
    """

    import sys
    import os

    list_of_results=[]

    fname = i.get('file_name', '')
    # True when the requested file name itself contains a path separator;
    # in that case it is matched as a relative sub-path of each directory
    fname_with_sep_bool = fname.find(os.sep)>=0

    can_be_dir = i.get('can_be_dir', '')
    can_be_dir_bool = can_be_dir == 'yes'

    pattern=i.get('pattern','')
    if pattern!='':
        # fnmatch only needed when a wildcard pattern is used
        import fnmatch

    pe = i.get('path_ext', '')
    po = i.get('path', '')
    # Python 2 compatibility: force unicode so os.listdir returns unicode names
    if sys.version_info[0]<3: po=unicode(po)

    rl=i.get('recursion_level',0)
    rlm=i.get('recursion_level_max',0)

    # Depth limit: with the default rlm=0 only the top-level directory is scanned
    if rl>rlm:
        return {'return':0, 'list':[]}

    try:
        dirList=os.listdir(po)
    except Exception as e:
        # Unreadable directory - silently skip (best-effort scan)
        pass
    else:
        for fn in dirList:
            p=''
            try:
                p=os.path.join(po, fn)
            except Exception as e:
                # Undecodable file name - skip this entry
                pass

            if p!='':
                candidate = None
                if fname!='':
                    if fname_with_sep_bool and os.path.isdir(p):
                        # file_name contains a separator: check it as a
                        # relative path under the current directory
                        deep_candidate = os.path.join(po, fname)
                        if os.path.exists( deep_candidate ):
                            candidate = deep_candidate
                    elif fname==fn:
                        candidate = p
                elif pattern!='' and fnmatch.fnmatch(fn, pattern):
                    candidate = p

                # Deduplicate and keep only files (or dirs when allowed)
                if candidate and (candidate not in list_of_results):
                    if os.path.isfile( candidate ) or (can_be_dir_bool and os.path.isdir( candidate )):
                        list_of_results.append( candidate )

                # Recurse into subdirectories (encoding-tolerant dir check)
                if _internal_check_encoded_path_is_dir(p):
                    r=list_all_files({'path':p, 'path_ext':os.path.join(pe, fn),
                        'pattern':pattern, 'file_name':fname, 'can_be_dir':can_be_dir,
                        'recursion_level':rl+1, 'recursion_level_max':rlm})
                    if r['return']>0: return r
                    list_of_results.extend( r.get('list',[]) )

    return {'return':0, 'list':list_of_results}
511,
83,
511,
31,
1415207683
] |
def check(i):
"""
Input: {
(target) - if specified, use info from 'machine' module
or
(host_os) - host OS (detect, if omitted)
(target_os) - target OS (detect, if omitted)
(target_device_id) - target device ID (detect, if omitted)
(data_uoa) or (uoa) - software UOA entry
or
(tags) - search UOA by tags (separated by comma)
(interactive) - if 'yes', and has questions, ask user
(quiet) - if 'yes', do not ask questions but select default value
(default_selection) - default value for the selection from the menu
(first_match) - in case of match ambiguity in menu selection, just take the first match
(skip_help) - if 'yes', skip print help if not detected (when called from env setup)
(deps) - already resolved deps (if called from env)
(dep_add_tags.{KEY}) - extra tags added to specific subdictionary of deps{} for this particular resolution session
(extra_version) - add extra version, when registering software
(for example, -trunk-20160421)
Be careful - if there is auto version detection,
CK will say that version has changed and will try to remove entry!
(extra_tags) - add extra tags to separate created entry from others
(for example Python 2.7 vs Anaconda Python 2.7)
(extra_name) - add extra name to soft (such as anaconda)
(force_env_data_uoa) - force which env UID to use when regstering detect software -
useful when reinstalling broken env entry to avoid breaking
all dependencies of other software ...
(search_dirs) - extra directories where to search soft (string separated by comma)
(search_dir) - search only in this directory (useful for Spack and EasyBuild)
(search_depth) - force directory recursive search depth when detecting installed software
(soft_name) - name to search explicitly
(version_from) - check version starting from ... (string or list of numbers)
(version_to) - check version up to ... (string list of numbers)
(force_version) - if !='', use this version
(full_path) - force full path (rather than searching in all directories)
}
Output: {
return - return code = 0, if successful
> 0, if error
(error) - error text if return > 0
path_install - path to the detected software
cus - dict with filled in info for the software
}
"""
import os
import json
import copy
o=i.get('out','')
oo=''
if o=='con': oo=o
# Check if target
if i.get('target','')!='':
r=ck.access({'action':'init',
'module_uoa':cfg['module_deps']['machine'],
'input':i})
if r['return']>0: return r
device_cfg=i.get('device_cfg',{})
# Check host/target OS/CPU
hos=i.get('host_os','')
tos=i.get('target_os','')
tdid=i.get('target_device_id','')
r=ck.access({'action':'detect',
'module_uoa':cfg['module_deps']['platform.os'],
'host_os':hos,
'target_os':tos,
'target_device_id':tdid,
'skip_info_collection':'yes'})
if r['return']>0: return r
hos=r['host_os_uid']
hosx=r['host_os_uoa']
hosd=r['host_os_dict']
tos=r['os_uid']
tosx=r['os_uoa']
tosd=r['os_dict']
tosd.update(device_cfg.get('update_target_os_dict',{}))
tbits=tosd.get('bits','')
hplat=hosd.get('ck_name','')
tplat=tosd.get('ck_name','')
# Check versions
vfrom=i.get('version_from',[])
vto=i.get('version_to',[])
if type(vfrom)!=list:
rx=split_version({'version':vfrom})
if rx['return']>0: return rx
vfrom=rx['version_split']
if type(vto)!=list:
rx=split_version({'version':vto})
if rx['return']>0: return rx
vto=rx['version_split']
tags=i.get('tags','')
# Check soft UOA
duoa=i.get('uoa', '')
if duoa=='': duoa=i.get('data_uoa','')
requested_muoa=i.get('module_uoa','')
if duoa=='' and requested_muoa=='':
# Try to detect CID in current path
rx=ck.detect_cid_in_current_path({})
if rx['return']==0:
duoa=rx.get('data_uoa','')
if tags: # if tags are available, try searching both in tags and variations
r=ck.access({'action': 'search_in_variations',
'data_uoa': duoa,
'module_uoa': 'misc',
'query_module_uoa': work['self_module_uid'],
'tags': tags,
})
if r['return']>0: return r
l=r['lst']
if len(l)>1: # FIXME: we could be smarter and assume several soft_candidates from the very start,
# merging all the options found into one big selector.
ck.out("Found {} soft candidate entries matching tags '{}' :".format(len(l), tags))
for candidate in l:
candidate_tags = candidate['meta']['tags']
required_variations = candidate['required_variations']
ck.out("\tck detect soft:{:<42} # --tags={}".format(candidate['data_uoa'], ','.join(candidate_tags+required_variations)) )
return {'return':1, 'error': "Please use a command that uniquely defines a soft: entry"}
elif len(l)==1:
r=l[0]
soft_entry_dict=r['meta']
required_variations=r['required_variations']
else:
return {'return':1, 'error':'software entry was not found'}
elif duoa: # if tags were not available, try to load directly
r=ck.access({'action':'load',
'module_uoa':work['self_module_uid'],
'data_uoa':duoa,
})
if r['return']>0: return r
soft_entry_dict=r['dict']
required_variations = []
else:
return {'return':1, 'error':'please define either --data_uoa or --tags or both to get going'}
duoa=r['data_uoa']
duid=r['data_uid']
soft_entry_path=r['path']
cus=soft_entry_dict.get('customize',{})
########################################################################
# Check env from input
envx=copy.deepcopy(i.get('env',{}))
ienv=copy.deepcopy(i.get('install_env',i.get('ienv',{}))) # parse install_env overrides out of install_env{}, install_env.XXX and ienv.XXX CLI options
for q in i:
if q.startswith('env.'):
envx[q[len('env.'):]]=i[q]
elif q.startswith('ienv.'):
ienv[q[len('ienv.'):]]=i[q]
elif q.startswith('install_env.'):
ienv[q[len('install_env.'):]]=i[q]
supported_variations = soft_entry_dict.get('variations', {})
missing_variations = set(required_variations) - set(supported_variations)
if missing_variations:
return {'return':1, 'error':'Variations {} are not supported by soft:{}'.format(missing_variations, duoa)}
# Update this cus from all the supported variations.
# Detect if an incompatible mix of variation tags was required
# that would lead to undefined behaviour, and bail out if so.
#
if required_variations:
extra_env_from_variations = {}
extra_cus_from_variations = {}
for req_variation in required_variations:
extra_env = supported_variations[req_variation].get('extra_env',{})
colliding_vars = set(extra_env_from_variations.keys()) & set(extra_env.keys()) # non-empty intersection means undefined behaviour
for coll_var in colliding_vars: # have to check actual values to detect a mismatch
if extra_env_from_variations[coll_var] != extra_env[coll_var]:
return { 'return':1,
'error':'contradiction on variable ({}) detected when adding "{}" variation tag'.format(coll_var,req_variation)}
extra_cus = supported_variations[req_variation].get('extra_customize',{})
colliding_cuss = set(extra_cus_from_variations.keys()) & set(extra_cus.keys()) # non-empty intersection means undefined behaviour
for coll_cus in colliding_cuss: # have to check actual values to detect a mismatch
if extra_cus_from_variations[coll_cus] != extra_env[coll_cus]:
return { 'return':1,
'error':'contradiction on customize ({}) detected when adding "{}" variation tag'.format(coll_cus,req_variation)}
extra_env_from_variations.update( extra_env ) # merge of one particular variation
extra_cus_from_variations.update( extra_cus )
ienv.update( extra_env_from_variations ) # merge of all variations
cus.update( extra_cus_from_variations )
extra_version=i.get('extra_version', cus.get('extra_version',''))
# Check if restricts dependency to a given host or target OS
rx=check_target({'dict':cus,
'host_os_uoa':hosx,
'host_os_dict':hosd,
'target_os_uoa':tosx,
'target_os_dict':tosd})
if rx['return']>0: return rx
# Check if need to resolve dependencies
deps=i.get('deps',{})
dep_add_tags = i.get('dep_add_tags', {})
for q in i:
if q.startswith('deps.'):
preset_deps[q[5:]]=i[q].split(':')[-1]
elif q.startswith('dep_add_tags.'):
_ , dep_name = q.split('.')
dep_add_tags[dep_name] = i[q]
sbat=''
if len(deps)==0:
deps=soft_entry_dict.get('deps',{})
if len(deps)>0:
ii={'action':'resolve',
'module_uoa':cfg['module_deps']['env'],
'host_os':hos,
'target_os':tos,
'target_device_id':tdid,
'deps':deps, | ctuning/ck | [
511,
83,
511,
31,
1415207683
] |
def get_version(i):
"""
Input: {
full_path
bat
cmd
custom_script_obj
host_os_dict
(show) - if 'yes', show output file
(skip_existing) - if 'yes', force detecting version again
(skip_add_target_file) - if 'yes', do not add target file at the beginning
of CMD to detect version
(use_locale) - if 'yes', use locale to decode output
}
Output: {
return - return code = 0, if successful
> 0, if error
(error) - error text if return > 0
version - string version
version_lst - raw output (as list)
}
"""
import os
o=i.get('out','')
fp=i.get('full_path','')
sb=i.get('bat','')
soft_version_cmd=i.get('cmd')
data_uid = i.get('data_uid')
cs=i.get('custom_script_obj', None)
cus=i.get('customize',{}) # should we be more strict [] here?
hosd=i.get('host_os_dict',{})
tosd=i.get('target_os_dict',{})
bprefix=hosd.get('batch_prefix','')
ubtr=hosd.get('use_bash_to_run','')
svarb=hosd.get('env_var_start','')
svarb1=hosd.get('env_var_extra1','')
svare=hosd.get('env_var_stop','')
svare1=hosd.get('env_var_extra2','')
sexe=hosd.get('set_executable','')
sbp=hosd.get('bin_prefix','')
envsep=hosd.get('env_separator','')
scall=hosd.get('env_call','')
sext=hosd.get('script_ext','')
eifsc=hosd.get('env_quotes_if_space_in_call','')
nout=hosd.get('no_output','')
deps=i.get('deps',{})
sb=bprefix+sb
ver=''
lst=[]
cmd=''
# Attempt to check via CK config file
if i.get('skip_existing','')!='yes':
rx=find_config_file({'full_path':fp, 'data_uid': data_uid})
if rx['return']>0: return rx
found=rx['found']
if found=='yes':
ver=rx['dict'].get('customize',{}).get('version','')
if ver=='':
# Preparing CMD
if 'version_cmd' in dir(cs):
rx=cs.version_cmd({'full_path':fp,
'host_os_dict':hosd,
'target_os_dict':tosd,
'cmd':soft_version_cmd,
'ck_kernel':ck,
'customize':cus,
'out':o,
'deps':deps})
if rx['return']>0: return rx
cmd=rx.get('cmd','')
ver=rx.get('version','')
elif soft_version_cmd:
if eifsc!='' and fp.find(' ')>=0 and not fp.startswith(eifsc):
fp=eifsc+fp+eifsc
if o!='con':
cmd+=nout
if i.get('skip_add_target_file','')=='yes':
cmd+=' '+soft_version_cmd
else:
cmd+=fp+' '+soft_version_cmd
if ver=='' and cmd:
# Generate tmp file
rx=ck.gen_tmp_file({})
if rx['return']>0: return rx
ftmp=rx['file_name'] | ctuning/ck | [
511,
83,
511,
31,
1415207683
] |
def internal_get_val(lst, index, default_value):
    """Return lst[index] when the index is within bounds, else default_value."""
    return lst[index] if index < len(lst) else default_value
511,
83,
511,
31,
1415207683
] |
def print_help(i):
    """
    Print installation notes for a software entry, or offer its wiki page.

    Reads 'install.txt' and 'install.<platform>.txt' from the software entry's
    directory; if neither exists, interactively offers to open the wiki page.

    Input:  {
              data_uoa - data UOA to get help
              platform - platform name
            }

    Output: {
              return       - return code =  0, if successful
                                         >  0, if error
              (error)      - error text if return > 0
            }
    """

    import os

    duoa=i['data_uoa']
    hplat=i['platform']

    ti=''

    # If only one related software entry found, try to read text notes from it
    rx=ck.access({'action':'find',
                  'module_uoa':work['self_module_uid'],
                  'data_uoa':duoa})
    if rx['return']>0: return rx

    pppx=rx['path']

    # Generic installation notes (all platforms)
    ppx=os.path.join(pppx,'install.txt')
    if os.path.isfile(ppx):
        rx=ck.load_text_file({'text_file':ppx})
        if rx['return']==0:
            ti+=rx['string']

    # Platform-specific installation notes
    ppx=os.path.join(pppx,'install.'+hplat+'.txt')
    if os.path.isfile(ppx):
        rx=ck.load_text_file({'text_file':ppx})
        if rx['return']==0:
            if ti!='': ti+='\n'
            ti+=rx['string']

    if ti!='':
        # FIX: removed unused local assignment 'read=True'
        ck.out('****** Installation notes: ******')
        ck.out(ti)
        ck.out('*********************************')
    else:
        # No local notes - offer to show possible Wiki page instead
        rx=ck.inp({'text':' Would you like to open wiki pages about installation and other info (if exists) (Y/n): '})
        x=rx['string'].strip().lower()

        if x!='n' and x!='no':
            ck.out('')

            rx=ck.access({'action':'wiki',
                          'module_uoa':work['self_module_uid'],
                          'data_uoa':duoa})
            if rx['return']>0: return rx

            ck.out('')

    return {'return':0}
511,
83,
511,
31,
1415207683
] |
def _any_tag_present(required_tags, os_tags):
    # True if at least one of required_tags is found in os_tags
    return any(t in os_tags for t in required_tags)

def check_target(i):
    """
    Check whether a software entry supports the given host and target OS.

    Input:  {
              dict           - dictionary with info about supported host and target OS
              host_os_uoa    - host OS UOA (already resolved)
              host_os_dict   - host OS dict (already resolved)
              target_os_uoa  - target OS UOA (already resolved)
              target_os_dict - target OS UOA (already resolved)
            }

    Output: {
              return       - return code =  0, if successful
                                         >  0, if error (OS not supported)
              (error)      - error text if return > 0
            }
    """

    cus=i['dict']

    hosx=i['host_os_uoa']
    hosd=i['host_os_dict']

    tosx=i['target_os_uoa']
    tosd=i['target_os_dict']

    # Exact host OS restriction
    only_hos=cus.get('only_for_host_os',[])
    if len(only_hos)>0 and hosx not in only_hos:
        return {'return':1, 'error':'host OS is not supported by this software'}

    # Host OS family restriction (tag-based, any match accepted)
    only_hos1=cus.get('only_for_host_os_tags',[])
    if len(only_hos1)>0 and not _any_tag_present(only_hos1, hosd.get('tags',[])):
        return {'return':1, 'error':'host OS family is not supported by this software'}

    # Exact target OS restriction
    only_tos=cus.get('only_for_target_os',[])
    if len(only_tos)>0 and tosx not in only_tos:
        return {'return':1, 'error':'target OS is not supported by this software'}

    # Target OS family restriction (tag-based, any match accepted)
    only_tos1=cus.get('only_for_target_os_tags',[])
    if len(only_tos1)>0 and not _any_tag_present(only_tos1, tosd.get('tags',[])):
        return {'return':1, 'error':'target OS family is not supported by this software'}

    return {'return':0}
511,
83,
511,
31,
1415207683
] |
def split_version(i):
    """
    Split a version string into a list of numeric components.

    The string is split on '.', '-' and '_'. Each component is converted
    to int; non-numeric components are replaced with 0 so the resulting
    list can always be compared element-wise with other split versions.

    Input:  {
              version - string version
            }

    Output: {
              return        - return code =  0, if successful
                                          >  0, if error
              (error)       - error text if return > 0

              version_split - split version (list of ints)
            }
    """

    import re

    ver=i['version']

    # Split version
    sver=[]
    if ver!='':
        # FIX: raw string avoids invalid escape sequences ('\-', '\_'), which
        # raise SyntaxWarning/DeprecationWarning on modern Python; also removed
        # a redundant duplicated "if ver!='':" check
        for q in re.split(r'\.|\-|\_', ver):
            try:
                x=int(q)
            except ValueError:
                # Keeping strings here causes problems when mixing
                # strings and ints during comparison - map to 0 instead
                x=0
            sver.append(x)

    return {'return':0, 'version_split':sver}
511,
83,
511,
31,
1415207683
] |
def show(i):
"""
Input: {
(the same as list; can use wildcards)
(out_file) - output to file (for mediawiki)
}
Output: {
return - return code = 0, if successful
> 0, if error
(error) - error text if return > 0
}
"""
import os
import copy
o=i.get('out','')
of=i.get('out_file','')
if of!='':
xof=os.path.splitext(of)
html=False
if o=='html' or i.get('web','')=='yes':
html=True
h=''
h2=''
if i.get('new','')=='yes':
ii=copy.deepcopy(i)
ii['action']='preload_html_for_lists'
ii['module_uoa']=cfg['module_deps']['misc']
ii['ck_title']='Shared CK software detection plugins'
r=ck.access(ii)
if r['return']>0: return r
h=r['html_start']+'\n'
h2=r['html_stop']+'\n'
unique_repo=False
if i.get('repo_uoa','')!='': unique_repo=True
ii=copy.deepcopy(i)
ii['out']=''
ii['action']='list'
ii['add_meta']='yes'
ii['time_out']=-1
rx=ck.access(ii)
if rx['return']>0: return rx
ll=sorted(rx['lst'], key=lambda k: k['data_uoa'])
if html:
h+='<h2>Please check our new <a href="http://ReuseResearch.com/c.php?c=soft">beta browser</a> for CK components!</h2>\n'
h+='<br>\n'
h+='You can detect installed software and register it in the CK as follows:\n'
h+='<pre>\n'
h+=' ck pull repo:{Repo UOA - see below}\n'
h+=' ck detect soft:{Soft UOA - see below}\n'
h+='</pre>\n'
h+='using tags:\n'
h+='<pre>\n'
h+=' ck detect soft --tags={some tags from below}\n'
h+='</pre>\n'
h+='in an unusual path:\n'
h+='<pre>\n'
h+=' ck detect soft:{Soft UOA - see below} --search_dirs={path to this software}\n'
h+='</pre>\n'
h+='or for a different OS target (Android):\n'
h+='<pre>\n'
h+=' ck ls os:android* | sort\n'
h+=' ck detect soft:{Soft UOA - see below} --target_os={OS UOA from above}\n'
h+='</pre>\n'
h+='You can see or use registered virtual CK environments as follows:\n'
h+='<pre>\n'
h+=' ck show env\n'
h+=' ck show env --tags={some tags from below}\n'
h+='\n'
h+=' ck virtual env:{UID from above}\n'
h+=' ck virtual env --tags={some tags from below}\n'
h+='</pre>\n'
h+='<p>\n'
h+='See <pre>ck detect soft --help</pre> for more detection options.\n'
h+='See <a href="http://cKnowledge.org/shared-packages.html">related CK packages</a>,\n'
h+=' <a href="https://github.com/mlcommons/ck/wiki">CK documentation</a>,\n'
h+=' <a href="https://github.com/mlcommons/ck/wiki#contributing">"how to contribute" guide</a>,\n'
h+=' <a href="https://portalparts.acm.org/3230000/3229762/fm/frontmatter.pdf">ACM ReQuEST-ASPLOS\'18 report</a>\n'
h+=' and the latest <a href="http://cKnowledge.org/rpi-crowd-tuning">CK paper</a> for further details.\n'
h+='<p>\n'
h+='<table cellpadding="4" border="1" style="border-collapse: collapse; border: 1px solid black">\n'
h+=' <tr>\n'
h+=' <td nowrap><b>#</b></td>\n'
h+=' <td nowrap><b>Soft UOA</b></td>\n'
h+=' <td nowrap><b>Template?</b></td>\n'
h+=' <td nowrap><b>Repo UOA</b></td>\n'
h+=' <td><b>Tags</b></td>\n'
h+=' <td><b>Host OS</b></td>\n'
h+=' <td><b>Target OS</b></td>\n'
h+=' <td><b>Notes</b></td>\n'
h+=' </tr>\n'
repo_url={}
repo_private={}
size=0
isize=1
private=''
num=0
for l in ll:
ln=l['data_uoa']
lr=l['repo_uoa']
lr_uid=l['repo_uid']
url=''
if lr=='default':
url='' #'http://github.com/mlcommons/ck'
elif lr_uid in repo_url:
url=repo_url[lr_uid]
else:
rx=ck.load_repo_info_from_cache({'repo_uoa':lr_uid})
if rx['return']>0: return rx
url=rx.get('dict',{}).get('url','')
repo_private[lr_uid]=rx.get('dict',{}).get('private','')
repo_url[lr_uid]=url
private=repo_private.get(lr_uid,'')
if lr not in cfg.get('skip_repos',[]) and private!='yes' and url!='':
num+=1
lm=l['meta']
ld=lm.get('desc','')
soft_name=lm.get('soft_name','')
cus=lm.get('customize',{})
ad=lm.get('auto_detect','')
if ad!='yes': ad='no'
ep=cus.get('env_prefix','')
xhos=cus.get('only_for_host_os_tags',[])
xtos=cus.get('only_for_target_os_tags',[])
tmpl=lm.get('template','')
template=lm.get('template_type','')
if tmpl=='yes' and template=='':
template='yes'
tags=lm.get('tags',[])
ytags=','.join(tags)
yhos=''
ytos=''
for q in xhos:
if yhos!='': yhos+=','
yhos+=q
for q in xtos:
if ytos!='': ytos+=','
ytos+=q
if yhos=='':
yhos='any'
else:
yhos=yhos.replace('linux','linux,macos')
if ytos=='':
ytos='any'
else:
ytos=ytos.replace('linux','linux,macos')
if lr=='default':
to_get=''
elif url.find('github.com/ctuning/')>0:
to_get='ck pull repo:'+lr
else:
to_get='ck pull repo --url='+url
x=lr
y=''
yh=''
if url!='':
url2=url
if url2.endswith('.git'):
url2=url2[:-4]
yh=url2+'/tree/master/soft/'+ln
x='['+url2+' '+lr+']'
y='['+yh+' link]'
###############################################################
if html:
h+=' <tr>\n'
x1=''
x2=''
z1=''
if url!='':
x1='<a href="'+url+'">'
x2='</a>'
z1='<a href="'+yh+'">'
z11='<a href="'+yh+'/.cm/meta.json">'
h+=' <td nowrap valign="top"><a name="'+ln+'">'+str(num)+'</b></td>\n'
h+=' <td nowrap valign="top">'+z1+ln+x2+'</b> <i>('+z11+'CK meta'+x2+')</i></td>\n'
h+=' <td nowrap valign="top">'+template+'</td>\n'
h+=' <td nowrap valign="top">'+x1+lr+x2+'</td>\n'
h+=' <td valign="top"><small>'+ytags+'</small>\n'
h+=' <td valign="top"><small>'+yhos+'</small>\n'
h+=' <td valign="top"><small>'+ytos+'</small>\n'
h1='Auto-detect? '+ad+'<br>\n'
h1+='Environment variable: '+ep+'<br>\n'
if ld!='':
h1+='<p>\n'+ld
h+=' <td valign="top">'+h1+'\n'
h+='</td>\n'
h+=' </tr>\n'
###############################################################
elif o=='mediawiki':
s=''
s+='\n'
s+='=== '+ln+' ('+soft_name+') ===\n'
s+='\n'
s+='Auto-detect?: '+ad+'\n'
s+='<br>Environment variable: <b>'+ep+'</b>\n'
s+='\n'
s+='Tags: <i>'+ytags+'</i>\n'
s+='<br>Host OS tags: <i>'+yhos+'</i>\n'
s+='<br>Target OS tags: <i>'+ytos+'</i>\n'
if y!='':
s+='\n'
s+='Software entry with meta: <i>'+y+'</i>\n'
s+='\n'
s+='Which CK repo: '+x+'\n'
if to_get!='':
s+='<br>How to get: <i>'+to_get+'</i>\n'
if to_get!='':
s+='\n'
s+='How to detect: <b>ck detect soft:'+ln+' (--target_os={CK OS UOA})</b>\n'
s+='\n'
if of=='':
ck.out(s)
else:
with open(of, "a") as ff:
ff.write(s) | ctuning/ck | [
511,
83,
511,
31,
1415207683
] |
def find_config_file(i):
    """
    Walk up from a path and find the nearest CK installation config file.

    Input:  {
              full_path - where to start search
            }

    Output: {
              return   - return code = 0, if successful
                                     > 0, if error
              (error)  - error text if return > 0

              found    - 'yes' if found
              dict     - loaded dict with the configuration ...
              filename - filename
              path     - path
            }
    """

    import os

    start = i['full_path']
    filter_data_uid = i.get('data_uid', '')

    cur = start
    parent = os.path.dirname(cur)

    found = 'no'
    d = {}
    fn = ''
    pf2 = ''

    # Walk towards the filesystem root until a config file is found.
    while parent != cur and parent != '':
        # Try the primary config file first; the saved one is considered
        # only when the primary file does not exist in this directory.
        for fn in (cfg['ck_install_file'], cfg['ck_install_file_saved']):
            pf2 = os.path.join(parent, fn)
            if os.path.isfile(pf2):
                rx = ck.load_json_file({'json_file': pf2})
                if rx['return'] == 0:
                    found = 'yes'
                    d = rx['dict']
                # An existing candidate ends the scan for this directory,
                # whether or not its JSON loaded successfully.
                break

        if found == 'yes':
            break

        cur = parent
        parent = os.path.dirname(cur)

    # Optionally require that the found config belongs to a given entry.
    config_data_uid = d.get('data_uoa', '')
    if filter_data_uid and (config_data_uid != filter_data_uid):
        found = 'no'
        d = {}

    return {'return': 0, 'found': found, 'dict': d, 'filename': fn, 'path': pf2}
511,
83,
511,
31,
1415207683
] |
def compare_versions(i):
    """
    Compare two split version lists component by component.

    Input:  {
              version1 - version 1 to compare against version2 (list such as [1,62])
              version2 - (list such as [1,63])
            }

    Output: {
              return  - return code = 0, if successful
                                    > 0, if error
              (error) - error text if return > 0

              result  "<" - version 1 <  version 2
                      "=" - version 1 == version 2
                      ">" - version 1 >  version 2
            }
    """

    v1 = list(i['version1'])
    v2 = list(i['version2'])

    # Zero-pad the shorter list so both have the same number of components.
    if len(v1) < len(v2):
        v1.extend([0] * (len(v2) - len(v1)))
    else:
        v2.extend([0] * (len(v1) - len(v2)))

    result = '='
    for a, b in zip(v1, v2):
        if type(a) == type(b):
            # Natural comparison within the same type.
            if a < b:
                result = '<'
                break
            if a > b:
                result = '>'
                break
        elif type(a) == int:
            # Any integer component ranks above any letter combination.
            result = '>'
            break
        elif type(b) == int:
            result = '<'
            break

    if i.get('out', '') == 'con':
        ck.out(result)

    return {'return': 0, 'result': result}
511,
83,
511,
31,
1415207683
] |
def prepare_target_name(i):
    """
    Resolve the expected software file name for the selected platform.

    Input:  {
              host_os_dict   - host OS dict
              target_os_dict - target OS dict
              cus            - custom meta
            }

    Output: {
              return  - return code = 0, if successful
                                    > 0, if error
              (error) - error text if return > 0

              tool    - tool name
            }
    """

    cus = i['cus']
    hosd = i['host_os_dict']
    tosd = i['target_os_dict']

    hplat = hosd['ck_name']
    tplat = tosd['ck_name']

    sep = hosd.get('dir_sep', '')

    # By default the file name is selected for the target platform, unless
    # the meta says the software file comes from the host OS.
    plat = hplat if cus.get('soft_file_from_host_os', '') == 'yes' else tplat

    tool = cus.get('soft_file_universal', '')
    if tool == '':
        tool = cus.get('soft_file', {}).get(plat, '')

    # Substitute OS-specific file extensions
    # (for example, to specialize .dylib vs .so for MacOS).
    for ext_key, ext_val in hosd.get('file_extensions', {}).items():
        tool = tool.replace('$#file_ext_' + ext_key + '#$', ext_val)

    tool = tool.replace('$#sep#$', sep)

    return {'return': 0, 'tool': tool}
511,
83,
511,
31,
1415207683
] |
def add(i):
    """
    Add a new software detection plugin (possibly from a template).

    Input:  {
              (template) - if !='', use this program as template!
              (tags)     - if !='', use these tags
            }

    Output: {
              return  - return code = 0, if successful
                                    > 0, if error
              (error) - error text if return > 0
            }
    """

    o = i.get('out', '')

    # Delegate entry creation to the universal template mechanism in 'misc'.
    muoa = i['module_uoa']
    i['original_module_uoa'] = muoa
    i['module_uoa'] = cfg['module_deps']['misc']
    i['action'] = 'prepare_entry_template'
    i.pop('cid', None)

    r = ck.access(i)
    if r['return'] > 0:
        return r

    duid = r['data_uid']
    duoa = r['data_uoa']
    ruid = r['repo_uid']

    # Strip template markers from the newly created entry meta.
    dd = r['dict']
    dd.pop('template', None)
    dd.pop('template_type', None)

    xtags = i.get('tags', '')
    if xtags == '':
        ck.out('')
        r = ck.inp({'text': 'Enter tags for your new soft detection plugin separated by comma (for example lib,tflite): '})
        xtags = r['string'].strip()

    if xtags != '':
        # User-supplied tags: split on commas, trim and de-duplicate.
        tags = []
        for t in xtags.split(','):
            t = t.strip()
            if t not in tags:
                tags.append(t)
    else:
        # Keep the template's tags except the 'template' marker itself.
        tags = [t for t in dd.get('tags', []) if t != 'template']

    dd['tags'] = tags

    ii = {'action': 'update',
          'module_uoa': muoa,
          'data_uoa': duid,
          'repo_uoa': ruid,
          'dict': dd,
          'substitute': 'yes',
          'sort_keys': 'yes',
          'ignore_update': 'yes'
          }

    if o == 'con':
        ck.out('')
        ck.out('Further details about how to update meta.json and customize.py of your new software detection plugin:')
        ck.out('')
        ck.out(' * https://github.com/mlcommons/ck/wiki/Adding-new-workflows')

    return ck.access(ii)
511,
83,
511,
31,
1415207683
] |
def setUpClass(cls):
    """Define the upstream/downstream domain names shared by all tests."""
    super(LinkDomainsTests, cls).setUpClass()
    cls.upstream_domain, cls.downstream_domain = 'upstream', 'downstream'
465,
201,
465,
202,
1247158807
] |
def mock_handler(domain):
    # Report success for every domain except the downstream one under test.
    return self.downstream_domain != domain
465,
201,
465,
202,
1247158807
] |
def test_exception_raised_if_domain_link_already_exists(self):
    """Linking must fail fast when an active link between the domains already exists."""
    # Patch the view-level helpers: the domain exists and an active link is
    # found, so link_domains is expected to raise DomainLinkAlreadyExists.
    with patch('corehq.apps.linked_domain.views.domain_exists', return_value=True),\
            patch('corehq.apps.linked_domain.views.get_active_domain_link', return_value=Mock()),\
            self.assertRaises(DomainLinkAlreadyExists):
        link_domains(Mock(), self.upstream_domain, self.downstream_domain)
465,
201,
465,
202,
1247158807
] |
def mock_handler(downstream, upstream):
    # Simulate a failing link handler regardless of the domains involved.
    raise DomainLinkError
465,
201,
465,
202,
1247158807
] |
def test_exception_raised_if_user_is_not_admin_in_both_domains(self):
    """Linking must be refused when the user lacks admin access in a domain."""
    # Domain exists and no active link yet, but the admin-access check fails,
    # so link_domains is expected to raise DomainLinkNotAllowed.
    with patch('corehq.apps.linked_domain.views.domain_exists', return_value=True),\
            patch('corehq.apps.linked_domain.views.get_active_domain_link', return_value=None),\
            patch('corehq.apps.linked_domain.views.user_has_admin_access_in_all_domains', return_value=False),\
            self.assertRaises(DomainLinkNotAllowed):
        link_domains(Mock(), self.upstream_domain, self.downstream_domain)
465,
201,
465,
202,
1247158807
] |
def setUp(self):
    """No per-test setup is required for this suite."""
40,
32,
40,
388,
1423429150
] |
def testReplaceShellName(self):
    """
    Test the utility function for string manipulation
    """
    # NOTE(review): no assertion on `result` below — this body looks
    # truncated; confirm against the full test suite before relying on it.
    param_name = "test [123]"
    value = "replaced"
    result = FittingUtilities.replaceShellName(param_name, value)
40,
32,
40,
388,
1423429150
] |
def testGetIterParams(self):
    """
    Assure the right multishell parameters are returned
    """
    # Use a single-shell parameter
    model_name = "barbell"
    kernel_module = generate.load_kernel_module(model_name)
    barbell_parameters = modelinfo.make_parameter_table(getattr(kernel_module, 'parameters', []))

    params = FittingUtilities.getIterParams(barbell_parameters)
    # returns empty list
    self.assertEqual(params, [])

    # Use a multi-shell parameter
    model_name = "core_multi_shell"
    kernel_module = generate.load_kernel_module(model_name)
    multishell_parameters = modelinfo.make_parameter_table(getattr(kernel_module, 'parameters', []))

    params = FittingUtilities.getIterParams(multishell_parameters)
    # returns a non-empty list
    self.assertNotEqual(params, [])
    # Spot-check that shell-specific parameters show up in the result.
    self.assertIn('sld', str(params))
    self.assertIn('thickness', str(params))
40,
32,
40,
388,
1423429150
] |
def testAddParametersToModel(self):
    """
    Checks the QModel update from Sasmodel parameters
    """
    # Use a single-shell parameter
    model_name = "barbell"
    models = load_standard_models()
    kernel_module = generate.load_kernel_module(model_name)
    kernel_module_o = None
    # Find the matching instantiated sasmodels model object.
    for model in models:
        if model.name == model_name:
            kernel_module_o = model()
    self.assertIsNotNone(kernel_module_o)
    barbell_parameters = modelinfo.make_parameter_table(getattr(kernel_module, 'parameters', []))

    params = FittingUtilities.addParametersToModel(barbell_parameters, kernel_module_o, True)

    # Test the resulting model
    self.assertEqual(len(params), 7)
    self.assertEqual(len(params[0]), 5)
    self.assertTrue(params[0][0].isCheckable())
    self.assertEqual(params[0][0].text(), "sld")
    self.assertEqual(params[1][0].text(), "sld_solvent")

    # Use a multi-shell parameter to see that the method includes shell params
    model_name = "core_multi_shell"
    kernel_module = generate.load_kernel_module(model_name)
    kernel_module_o = None
    for model in models:
        if model.name == model_name:
            kernel_module_o = model()
    self.assertIsNotNone(kernel_module_o)
    multi_parameters = modelinfo.make_parameter_table(getattr(kernel_module, 'parameters', []))

    params = FittingUtilities.addParametersToModel(multi_parameters, kernel_module_o, False)

    # Test the resulting model
    self.assertEqual(len(params), 3)
    self.assertEqual(len(params[0]), 5)
    self.assertTrue(params[0][0].isCheckable())
    self.assertEqual(params[0][0].text(), "sld_core")
    self.assertEqual(params[1][0].text(), "radius")
40,
32,
40,
388,
1423429150
] |
def testAddCheckedListToModel(self):
    """
    Test for inserting a checkboxed item into a QModel
    """
    model = QtGui.QStandardItemModel()
    params = ["row1", "row2", "row3"]
    FittingUtilities.addCheckedListToModel(model, params)

    # Check the model
    # All three values are expected in a single row, one per column.
    self.assertEqual(model.rowCount(), 1)
    self.assertTrue(model.item(0).isCheckable())
    self.assertEqual(model.item(0, 0).text(), params[0])
    self.assertEqual(model.item(0, 1).text(), params[1])
    self.assertEqual(model.item(0, 2).text(), params[2])
40,
32,
40,
388,
1423429150
] |
def testCalculate1DChi2(self):
    """
    Test the chi2 calculator for Data1D
    """
    reference_data = Data1D(x=[0.1, 0.2], y=[0.0, 0.0])

    # 1. identical data
    current_data = Data1D(x=[0.1, 0.2], y=[0.0, 0.0])
    weights = None

    chi = FittingUtilities.calculateChi2(reference_data, current_data, weights)

    # Should be zero
    self.assertAlmostEqual(chi, 0.0, 8)

    # 2. far data
    current_data = Data1D(x=[0.1, 0.2], y=[200.0, 150.0])

    chi = FittingUtilities.calculateChi2(reference_data, current_data, weights)

    # Should not be zero
    self.assertAlmostEqual(chi, 31250.0, 8)

    # 3. Wrong data
    # Mismatched lengths: the chi2 value is expected to stay the same as
    # in case 2 (the extra point presumably ignored — see the assertion).
    current_data = Data1D(x=[0.1, 0.2], y=[200.0, 150.0, 200.0])
    chi = FittingUtilities.calculateChi2(reference_data, current_data, weights)
    # Should remain unchanged
    self.assertAlmostEqual(chi, 31250.0, 8)
40,
32,
40,
388,
1423429150
] |
def notestAddHeadersToModel(self):
    '''Check to see if headers are correctly applied'''
    # NOTE(review): the 'notest' prefix keeps unittest's default discovery
    # (methods starting with 'test') from running this — confirm whether it
    # is deliberately disabled.
    #test model
    model = QtGui.QStandardItemModel()
    FittingUtilities.addHeadersToModel(model)

    # Assure we have properly named columns
    names = FittingUtilities.model_header_captions
    names_from_model = [model.headerData(i, QtCore.Qt.Horizontal) for i in range(len(names))]
    self.assertEqual(names, names_from_model)

    # Add another model
    model2 = QtGui.QStandardItemModel()
    # Add headers again
    FittingUtilities.addHeadersToModel(model2)

    # We still should have only the original names
    names_from_model2 = [model2.headerData(i, QtCore.Qt.Horizontal) for i in range(len(names))]
    self.assertEqual(names, names_from_model2)
40,
32,
40,
388,
1423429150
] |
def test_all():
md = 'Some *markdown* **text** ~xyz~'
c_md = pf.convert_text(md)
b_md = [pf.Para(pf.Str("Some"), pf.Space,
pf.Emph(pf.Str("markdown")), pf.Space,
pf.Strong(pf.Str("text")), pf.Space,
pf.Subscript(pf.Str("xyz")))]
print("Benchmark MD:")
print(b_md)
print("Converted MD:")
print(c_md)
assert repr(c_md) == repr(b_md)
with io.StringIO() as f:
doc = pf.Doc(*c_md)
pf.dump(doc, f)
c_md_dump = f.getvalue()
with io.StringIO() as f:
doc = pf.Doc(*b_md)
pf.dump(doc, f)
b_md_dump = f.getvalue()
assert c_md_dump == b_md_dump
# ----------------------
print()
tex = r'Some $x^y$ or $x_n = \sqrt{a + b}$ \textit{a}'
c_tex = pf.convert_text(tex)
b_tex = [pf.Para(pf.Str("Some"), pf.Space,
pf.Math("x^y", format='InlineMath'), pf.Space,
pf.Str("or"), pf.Space,
pf.Math(r"x_n = \sqrt{a + b}", format='InlineMath'),
pf.Space, pf.RawInline(r"\textit{a}", format='tex'))]
print("Benchmark TEX:")
print(b_tex)
print("Converted TEX:")
print(c_tex)
assert repr(c_tex) == repr(b_tex)
with io.StringIO() as f:
doc = pf.Doc(*c_tex)
pf.dump(doc, f)
c_tex_dump = f.getvalue()
with io.StringIO() as f:
doc = pf.Doc(*b_tex)
pf.dump(doc, f)
b_tex_dump = f.getvalue()
assert c_tex_dump == b_tex_dump
print("\nBack and forth conversions... md->json->md")
md = 'Some *markdown* **text** ~xyz~'
print("[MD]", md)
md2json = pf.convert_text(md, input_format='markdown', output_format='json')
print("[JSON]", md2json)
md2json2md = pf.convert_text(md2json, input_format='json', output_format='markdown')
print("[MD]", md2json2md)
assert md == md2json2md
print("\nBack and forth conversions... md->panflute->md")
md = 'Some *markdown* **text** ~xyz~'
print("[MD]", md)
md2panflute = pf.convert_text(md, input_format='markdown', output_format='panflute')
print("[PANFLUTE]", md2panflute)
md2panflute2md = pf.convert_text(md2panflute, input_format='panflute', output_format='markdown')
print("[MD]", md2panflute2md)
assert md == md2panflute2md
print("\nBack and forth conversions... md->panflute(standalone)->md")
md = 'Some *markdown* **text** ~xyz~'
print("[MD]", md)
md2panflute = pf.convert_text(md, input_format='markdown', output_format='panflute', standalone=True)
print("[PANFLUTE]", md2panflute)
md2panflute2md = pf.convert_text(md2panflute, input_format='panflute', output_format='markdown')
print("[MD]", md2panflute2md)
assert md == md2panflute2md
print("\nBack and forth conversions... md table -> json(standalone) -> md table")
md = """lorem
--- ---
x y
--- --- | sergiocorreia/panflute | [
399,
55,
399,
14,
1459303667
] |
def _measure(d, sources, target, niter=25, bound=None):
    """
    This computes unique information as S(X_0 >-< Y || X_1).

    Parameters
    ----------
    d : Distribution
        The distribution to compute I_SKAR for.
    sources : iterable of iterables
        The source variables.
    target : iterable
        The target variable.

    Returns
    -------
    i_skar_nw : dict
        The value of I_SKAR_nw for each individual source.
    """
    uniques = {}
    for src in sources:
        # All sources except the current one act as the conditioning side.
        rest = list(sources)
        rest.remove(src)
        uniques[src] = no_communication_skar(d, src, target, list(flatten(rest)))
    return uniques
430,
78,
430,
33,
1380495831
] |
def _measure(d, sources, target, niter=25, bound=None):
    """
    This computes unique information as S(X_0 >-> Y || X_1).

    Parameters
    ----------
    d : Distribution
        The distribution to compute I_SKAR for.
    sources : iterable of iterables
        The source variables.
    target : iterable
        The target variable.

    Returns
    -------
    i_skar_owa : dict
        The value of I_SKAR_owa for each individual source.
    """
    uniques = {}
    for src in sources:
        # All sources except the current one act as the conditioning side.
        rest = list(sources)
        rest.remove(src)
        uniques[src] = one_way_skar(d, src, target, list(flatten(rest)))
    return uniques
430,
78,
430,
33,
1380495831
] |
def _measure(d, sources, target, niter=25, bound=None):
    """
    This computes unique information as S(X_0 <-< Y || X_1).

    Parameters
    ----------
    d : Distribution
        The distribution to compute I_SKAR for.
    sources : iterable of iterables
        The source variables.
    target : iterable
        The target variable.

    Returns
    -------
    i_skar_owb : dict
        The value of I_SKAR_owb for each individual source.
    """
    uniques = {}
    for src in sources:
        # All sources except the current one act as the conditioning side.
        # NOTE: target and source are deliberately swapped vs. the 'owa'
        # variant (one_way_skar(d, target, source, ...)).
        rest = list(sources)
        rest.remove(src)
        uniques[src] = one_way_skar(d, target, src, list(flatten(rest)))
    return uniques
430,
78,
430,
33,
1380495831
] |
def setUp(self):
    # Group required for account creation; reuse it if it already exists.
    self.group, _ = Group.objects.get_or_create(name='Hydroshare Author')
    self.user = hydroshare.create_account(
        'scrawley@byu.edu',
        username='scrawley',
        first_name='Shawn',
        last_name='Crawley',
        superuser=False,
        groups=[self.group]
    )
    # Tolerance presumably used for floating-point comparisons in the tests.
    self.allowance = 0.00001
    # Script resource under test.
    self.resScript = hydroshare.create_resource(
        resource_type='ScriptResource',
        owner=self.user,
        title='Test R Script Resource',
        keywords=['kw1', 'kw2']
    )
163,
31,
163,
204,
1412216381
] |
def test_receivers(self):
    """Exercise the ScriptResource pre-create/pre-update signal handlers."""
    request = HttpRequest()

    # ScriptSpecificMetadata
    # Valid POST data should pass pre-create validation.
    request.POST = {'scriptLanguage': 'R', 'languageVersion': '3.5'}
    data = script_metadata_pre_create_handler(sender=ScriptResource,
                                              element_name="ScriptSpecificMetadata",
                                              request=request)
    self.assertTrue(data["is_valid"])

    # Missing POST data should fail validation.
    request.POST = None
    data = script_metadata_pre_create_handler(sender=ScriptResource,
                                              element_name="ScriptSpecificMetadata",
                                              request=request)
    self.assertFalse(data["is_valid"])

    # Resource-level pre-create should seed empty script metadata.
    data = script_pre_create(sender=ScriptResource,
                             metadata=[], source_names=[],
                             files=None)
    self.assertEqual(data[0]['scriptspecificmetadata'], {})

    # The pre-update handler follows the same validation rules.
    request.POST = {'scriptLanguage': 'R', 'languageVersion': '3.5'}
    data = script_metadata_pre_update_handler(sender=ScriptResource,
                                              element_name="ScriptSpecificMetadata",
                                              request=request)
    self.assertTrue(data["is_valid"])

    request.POST = None
    data = script_metadata_pre_update_handler(sender=ScriptResource,
                                              element_name="ScriptSpecificMetadata",
                                              request=request)
    self.assertFalse(data["is_valid"])
163,
31,
163,
204,
1412216381
] |
def __init__ (self, filepath, needle):
self.filepath = filepath
self.needle = needle | pizzapanther/Neutron-IDE | [
73,
27,
73,
7,
1308021766
] |
def replace (self, rstr, rlines):
fh = open(self.filepath, 'r')
newlines = '' | pizzapanther/Neutron-IDE | [
73,
27,
73,
7,
1308021766
] |
def results (self):
ret = []
fh = open(self.filepath, 'r')
if ide.utils.istext(fh.read(512)):
fh.seek(0)
linenum = 0
while 1:
line = fh.readline()
if line:
for match in self.needle.finditer(line):
ret.append((linenum, match.start(), match.end())) | pizzapanther/Neutron-IDE | [
73,
27,
73,
7,
1308021766
] |
def del_stat_fields(self, generic):
    """Strip MongoDB collStats bookkeeping fields and humanize 'capped'."""
    # Internal stat fields that should not be exposed; pop with a default
    # so missing keys are silently ignored.
    for key in ("ns", "numExtents", "nindexes", "lastExtentSize",
                "paddingFactor", "flags", "totalIndexSize",
                "indexSizes", "max", "ok"):
        generic.pop(key, None)

    # Replace the numeric capped flag with a readable Yes/No value
    # (the 'capped' key is required to be present).
    generic["capped"] = "Yes" if generic["capped"] == 1 else "No"
3,
3,
3,
15,
1359416012
] |
def make_color_table(in_class):
"""Build a set of color attributes in a class.
Helper function for building the *TermColors classes.""" | santisiri/popego | [
5,
2,
5,
1,
1476320366
] |
def __init__(self, __scheme_name_, colordict=None, **colormap):
    """Create a named color scheme from a dict or from keyword colors."""
    self.name = __scheme_name_
    # A ready-made color dict takes precedence over keyword arguments.
    self.colors = Struct(**colormap) if colordict is None else Struct(colordict)
5,
2,
5,
1,
1476320366
] |
def __init__(self,scheme_list=None,default_scheme=''):
"""Create a table of color schemes.
The table can be created empty and manually filled or it can be
created with a list of valid color schemes AND the specification for
the default active scheme.
""" | santisiri/popego | [
5,
2,
5,
1,
1476320366
] |
def copy(self):
    """Return full copy of object"""
    schemes = self.values()
    return ColorSchemeTable(schemes, self.active_scheme_name)
5,
2,
5,
1,
1476320366
] |
def get(self):
    """Renders the UI with the form fields."""
    # Delegate to the shared static-page renderer with this page's template.
    self.RenderStaticHtml('create_health_report.html')
21,
16,
21,
3,
1435959644
] |
def _GetTableConfigList(self):
    """Write the stored table-config names as JSON to the response."""
    query = table_config.TableConfig.query()
    keys = query.fetch(keys_only=True)
    # Only the entity ids (config names) are needed by the caller.
    self.response.out.write(json.dumps({
        'table_config_list': [key.id() for key in keys],
    }))
21,
16,
21,
3,
1435959644
] |
def _CreateTableConfig(self):
    """Creates a table config. Writes a valid name or an error message."""
    self._ValidateToken()
    # All fields come from the POSTed form; bots/tests are one per line.
    name = self.request.get('tableName')
    master_bot = self.request.get('tableBots').splitlines()
    tests = self.request.get('tableTests').splitlines()
    table_layout = self.request.get('tableLayout')
    override = int(self.request.get('override'))
    user = users.get_current_user()

    # Every field (and a signed-in user) is required.
    if not name or not master_bot or not tests or not table_layout or not user:
        self.response.out.write(json.dumps({
            'error': 'Please fill out the form entirely.'
        }))
        return

    try:
        created_table = table_config.CreateTableConfig(
            name=name, bots=master_bot, tests=tests, layout=table_layout,
            username=user.email(), override=override)
    except table_config.BadRequestError as error:
        # NOTE(review): `error.message` is Python-2 style; on Python 3 this
        # would raise AttributeError — confirm the runtime before porting.
        self.response.out.write(json.dumps({
            'error': error.message,
        }))
        logging.error('BadRequestError: %r', error.message)
        return

    if created_table:
        # Success: echo the created table name back to the client.
        self.response.out.write(json.dumps({
            'name': name,
        }))
    else:
        self.response.out.write(json.dumps({
            'error': 'Could not create table.',
        }))
        logging.error('Could not create table.')
21,
16,
21,
3,
1435959644
] |
def __init__(self):
    # No provider-specific state to initialize; defer to the base class.
    super(Linkedin, self).__init__()
55,
7,
55,
20,
1330896831
] |
def provides_members(self):
    # Capability flag: this provider can expand a profile into member
    # items (see member_items).
    return True
55,
7,
55,
20,
1330896831
] |
def provides_biblio(self):
    # Capability flag: this provider reports that it supplies biblio data.
    return True
55,
7,
55,
20,
1330896831
] |
def member_items(self,
                 linkedin_url,
                 provider_url_template=None,
                 cache_enabled=True):
    # A LinkedIn profile URL maps directly to a single ("url", ...) alias;
    # provider_url_template and cache_enabled are accepted for interface
    # compatibility but unused here.
    return [("url", linkedin_url)]
55,
7,
55,
20,
1330896831
] |
def provides_aliases(self):
    # Capability flag: this provider reports that it supplies aliases.
    return True
55,
7,
55,
20,
1330896831
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.