hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
99f38b9134e0d4bb07db5b0c8fea46d8053e0a32
| 427
|
py
|
Python
|
schedule/transformData/transformContext.py
|
JaviMiot/employeeSchedule
|
0ace8086ba8aeb1426f0252558b307e0e03bc7d8
|
[
"MIT"
] | null | null | null |
schedule/transformData/transformContext.py
|
JaviMiot/employeeSchedule
|
0ace8086ba8aeb1426f0252558b307e0e03bc7d8
|
[
"MIT"
] | null | null | null |
schedule/transformData/transformContext.py
|
JaviMiot/employeeSchedule
|
0ace8086ba8aeb1426f0252558b307e0e03bc7d8
|
[
"MIT"
] | null | null | null |
from .transformData import TransformData
class TransformContext:
def __init__(self, strategy: TransformData):
self._strategy = strategy
@property
def strategy(self) -> TransformData:
return self._strategy
@strategy.setter
def strategy(self, strategy: TransformData):
self._strategy = strategy
def execute(self, data: dict()):
return self._strategy.convertDict(data)
| 23.722222
| 48
| 0.693208
| 42
| 427
| 6.857143
| 0.380952
| 0.25
| 0.208333
| 0.201389
| 0.3125
| 0.3125
| 0
| 0
| 0
| 0
| 0
| 0
| 0.222482
| 427
| 17
| 49
| 25.117647
| 0.86747
| 0
| 0
| 0.166667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.083333
| 0.166667
| 0.666667
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
8202fd54d93144fe6f61ba4eb9664d5d36f7ff46
| 92
|
py
|
Python
|
src/python/tools/tool1.py
|
tuh8888/hpl-util
|
e8eea3e3eb326dc94e4392db1df77a02afd052d6
|
[
"Apache-2.0"
] | null | null | null |
src/python/tools/tool1.py
|
tuh8888/hpl-util
|
e8eea3e3eb326dc94e4392db1df77a02afd052d6
|
[
"Apache-2.0"
] | 1
|
2020-07-01T15:29:01.000Z
|
2020-07-01T15:29:01.000Z
|
src/python/tools/tool1.py
|
tuh8888/hpl-util
|
e8eea3e3eb326dc94e4392db1df77a02afd052d6
|
[
"Apache-2.0"
] | null | null | null |
bool x(int a, int b)
{
}
bool y(int a, int b)
{
}
bool z(int c)
{
}
| 9.2
| 20
| 0.380435
| 16
| 92
| 2.1875
| 0.5
| 0.228571
| 0.4
| 0.457143
| 0.685714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.467391
| 92
| 9
| 21
| 10.222222
| 0.714286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
414a7c413af0641fca98dfe5572e7ad9d8611b83
| 23,632
|
py
|
Python
|
winners/nontargeted-attack/teaflow/cleverhans/attacks.py
|
geekpwn/caad2018
|
a788132f74cbfdd3d09a0a75fada135f50ae9a8b
|
[
"Apache-2.0"
] | 50
|
2018-11-20T11:59:18.000Z
|
2021-11-01T18:01:42.000Z
|
winners/nontargeted-attack/teaflow/cleverhans/attacks.py
|
geekpwn/caad2018
|
a788132f74cbfdd3d09a0a75fada135f50ae9a8b
|
[
"Apache-2.0"
] | 1
|
2019-10-09T23:55:21.000Z
|
2019-10-09T23:55:21.000Z
|
winners/nontargeted-attack/teaflow/cleverhans/attacks.py
|
geekpwn/caad2018
|
a788132f74cbfdd3d09a0a75fada135f50ae9a8b
|
[
"Apache-2.0"
] | 13
|
2019-03-15T20:01:39.000Z
|
2021-01-11T02:39:38.000Z
|
from abc import ABCMeta
import numpy as np
from six.moves import xrange
import warnings
import collections
import cleverhans.utils as utils
from cleverhans.model import Model, CallableModelWrapper
class Attack(object):
"""
Abstract base class for all attack classes.
"""
__metaclass__ = ABCMeta
def __init__(self, model, back='tf', sess=None):
"""
:param model: An instance of the Model class.
:param back: The backend to use. Either 'tf' (default) or 'th'.
:param sess: The tf session to run graphs in (use None for Theano)
"""
if not(back == 'tf' or back == 'th'):
raise ValueError("Backend argument must either be 'tf' or 'th'.")
if back == 'th' and sess is not None:
raise Exception("A session should not be provided when using th.")
if not isinstance(model, Model):
if hasattr(model, '__call__'):
pass
else:
raise ValueError("The model argument should be an instance of"
" the Model class.")
if back == 'th':
warnings.warn("CleverHans support for Theano is deprecated and "
"will be dropped on 2017-11-08.")
# Prepare attributes
self.model = model
self.back = back
self.sess = sess
# We are going to keep track of old graphs and cache them.
self.graphs = {}
# When calling generate_np, arguments in the following set should be
# fed into the graph, as they are not structural items that require
# generating a new graph.
# This dict should map names of arguments to the types they should
# have.
# (Usually, the target class will be a feedable keyword argument.)
self.feedable_kwargs = {}
# When calling generate_np, arguments in the following set should NOT
# be fed into the graph, as they ARE structural items that require
# generating a new graph.
# This list should contain the names of the structural arguments.
self.structural_kwargs = []
def generate(self, x, **kwargs):
"""
Generate the attack's symbolic graph for adversarial examples. This
method should be overriden in any child class that implements an
attack that is expressable symbolically. Otherwise, it will wrap the
numerical implementation as a symbolic operator.
:param x: The model's symbolic inputs.
:param **kwargs: optional parameters used by child classes.
:return: A symbolic representation of the adversarial examples.
"""
if self.back == 'th':
raise NotImplementedError('Theano version not implemented.')
error = "Sub-classes must implement generate."
raise NotImplementedError(error)
def construct_graph(self, fixed, feedable, x_val, hash_key):
# try our very best to create a TF placeholder for each of the
# feedable keyword arguments, and check the types are one of
# the allowed types
import tensorflow as tf
new_kwargs = dict(x for x in fixed.items())
for name, value in feedable.items():
given_type = self.feedable_kwargs[name]
if isinstance(value, np.ndarray):
new_shape = [None] + list(value.shape[1:])
new_kwargs[name] = tf.placeholder(given_type, new_shape)
elif isinstance(value, utils.known_number_types):
new_kwargs[name] = tf.placeholder(given_type, shape=[])
else:
raise ValueError("Could not identify type of argument " +
name + ": " + str(value))
# x is a special placeholder we always want to have
x_shape = [None] + list(x_val.shape)[1:]
x = tf.placeholder(tf.float32, shape=x_shape)
# now we generate the graph that we want
x_adv = self.generate(x, **new_kwargs)
self.graphs[hash_key] = (x, new_kwargs, x_adv)
if len(self.graphs) >= 10:
warnings.warn("Calling generate_np() with multiple different "
"structural paramaters is inefficient and should"
" be avoided. Calling generate() is preferred.")
def generate_np(self, x_val, **kwargs):
"""
Generate adversarial examples and return them as a Numpy array.
Sub-classes *should not* implement this method unless they must
perform special handling of arguments.
:param x_val: A Numpy array with the original inputs.
:param **kwargs: optional parameters used by child classes.
:return: A Numpy array holding the adversarial examples.
"""
if self.back == 'th':
raise NotImplementedError('Theano version not implemented.')
import tensorflow as tf
if self.sess is None:
raise ValueError("Cannot use `generate_np` when no `sess` was"
" provided")
# the set of arguments that are structural properties of the attack
# if these arguments are different, we must construct a new graph
fixed = dict((k, v) for k, v in kwargs.items()
if k in self.structural_kwargs)
# the set of arguments that are passed as placeholders to the graph
# on each call, and can change without constructing a new graph
feedable = dict((k, v) for k, v in kwargs.items()
if k in self.feedable_kwargs)
if len(fixed) + len(feedable) < len(kwargs):
warnings.warn("Supplied extra keyword arguments that are not "
"used in the graph computation. They have been "
"ignored.")
if not all(isinstance(value, collections.Hashable)
for value in fixed.values()):
# we have received a fixed value that isn't hashable
# this means we can't cache this graph for later use,
# and it will have to be discarded later
hash_key = None
else:
# create a unique key for this set of fixed paramaters
hash_key = tuple(sorted(fixed.items()))
if hash_key not in self.graphs:
self.construct_graph(fixed, feedable, x_val, hash_key)
x, new_kwargs, x_adv = self.graphs[hash_key]
feed_dict = {x: x_val}
for name in feedable:
feed_dict[new_kwargs[name]] = feedable[name]
return self.sess.run(x_adv, feed_dict)
def parse_params(self, params=None):
"""
Take in a dictionary of parameters and applies attack-specific checks
before saving them as attributes.
:param params: a dictionary of attack-specific parameters
:return: True when parsing was successful
"""
return True
class MultipleModelAttack(object):
"""
Abstract base class for all attack classes.
"""
__metaclass__ = ABCMeta
def __init__(self, models, back='tf', sess=None):
"""
:param models: An instance of the Model class.
:param back: The backend to use. Either 'tf' (default) or 'th'.
:param sess: The tf session to run graphs in (use None for Theano)
"""
if not(back == 'tf' or back == 'th'):
raise ValueError("Backend argument must either be 'tf' or 'th'.")
if back == 'th' and sess is not None:
raise Exception("A session should not be provided when using th.")
for model in models:
if not isinstance(model, Model):
if hasattr(model, '__call__'):
warnings.warn("CleverHans support for supplying a callable"
" instead of an instance of the Model class is"
" deprecated and will be dropped on 2018-01-11.")
else:
raise ValueError("The model argument should be an instance of"
" the Model class.")
if back == 'th':
warnings.warn("CleverHans support for Theano is deprecated and "
"will be dropped on 2017-11-08.")
# Prepare attributes
self.model1 = models[0]
self.model2 = models[1]
self.model3 = models[2]
self.back = back
self.sess = sess
# We are going to keep track of old graphs and cache them.
self.graphs = {}
# When calling generate_np, arguments in the following set should be
# fed into the graph, as they are not structural items that require
# generating a new graph.
# This dict should map names of arguments to the types they should
# have.
# (Usually, the target class will be a feedable keyword argument.)
self.feedable_kwargs = {}
# When calling generate_np, arguments in the following set should NOT
# be fed into the graph, as they ARE structural items that require
# generating a new graph.
# This list should contain the names of the structural arguments.
self.structural_kwargs = []
def generate(self, x, **kwargs):
"""
Generate the attack's symbolic graph for adversarial examples. This
method should be overriden in any child class that implements an
attack that is expressable symbolically. Otherwise, it will wrap the
numerical implementation as a symbolic operator.
:param x: The model's symbolic inputs.
:param **kwargs: optional parameters used by child classes.
:return: A symbolic representation of the adversarial examples.
"""
if self.back == 'th':
raise NotImplementedError('Theano version not implemented.')
error = "Sub-classes must implement generate."
raise NotImplementedError(error)
def construct_graph(self, fixed, feedable, x_val, hash_key):
# try our very best to create a TF placeholder for each of the
# feedable keyword arguments, and check the types are one of
# the allowed types
import tensorflow as tf
new_kwargs = dict(x for x in fixed.items())
for name, value in feedable.items():
given_type = self.feedable_kwargs[name]
if isinstance(value, np.ndarray):
new_shape = [None] + list(value.shape[1:])
new_kwargs[name] = tf.placeholder(given_type, new_shape)
elif isinstance(value, utils.known_number_types):
new_kwargs[name] = tf.placeholder(given_type, shape=[])
else:
raise ValueError("Could not identify type of argument " +
name + ": " + str(value))
# x is a special placeholder we always want to have
x_shape = [None] + list(x_val.shape)[1:]
x = tf.placeholder(tf.float32, shape=x_shape)
# now we generate the graph that we want
x_adv = self.generate(x, **new_kwargs)
self.graphs[hash_key] = (x, new_kwargs, x_adv)
if len(self.graphs) >= 10:
warnings.warn("Calling generate_np() with multiple different "
"structural paramaters is inefficient and should"
" be avoided. Calling generate() is preferred.")
def generate_np(self, x_val, **kwargs):
"""
Generate adversarial examples and return them as a Numpy array.
Sub-classes *should not* implement this method unless they must
perform special handling of arguments.
:param x_val: A Numpy array with the original inputs.
:param **kwargs: optional parameters used by child classes.
:return: A Numpy array holding the adversarial examples.
"""
if self.back == 'th':
raise NotImplementedError('Theano version not implemented.')
import tensorflow as tf
if self.sess is None:
raise ValueError("Cannot use `generate_np` when no `sess` was"
" provided")
# the set of arguments that are structural properties of the attack
# if these arguments are different, we must construct a new graph
fixed = dict((k, v) for k, v in kwargs.items()
if k in self.structural_kwargs)
# the set of arguments that are passed as placeholders to the graph
# on each call, and can change without constructing a new graph
feedable = dict((k, v) for k, v in kwargs.items()
if k in self.feedable_kwargs)
if len(fixed) + len(feedable) < len(kwargs):
warnings.warn("Supplied extra keyword arguments that are not "
"used in the graph computation. They have been "
"ignored.")
if not all(isinstance(value, collections.Hashable)
for value in fixed.values()):
# we have received a fixed value that isn't hashable
# this means we can't cache this graph for later use,
# and it will have to be discarded later
hash_key = None
else:
# create a unique key for this set of fixed paramaters
hash_key = tuple(sorted(fixed.items()))
if hash_key not in self.graphs:
self.construct_graph(fixed, feedable, x_val, hash_key)
x, new_kwargs, x_adv = self.graphs[hash_key]
feed_dict = {x: x_val}
for name in feedable:
feed_dict[new_kwargs[name]] = feedable[name]
return self.sess.run(x_adv, feed_dict)
def parse_params(self, params=None):
"""
Take in a dictionary of parameters and applies attack-specific checks
before saving them as attributes.
:param params: a dictionary of attack-specific parameters
:return: True when parsing was successful
"""
return True
class FastGradientMethod(Attack):
"""
This attack was originally implemented by Goodfellow et al. (2015) with the
infinity norm (and is known as the "Fast Gradient Sign Method"). This
implementation extends the attack to other norms, and is therefore called
the Fast Gradient Method.
Paper link: https://arxiv.org/abs/1412.6572
"""
def __init__(self, model, back='tf', sess=None):
"""
Create a FastGradientMethod instance.
"""
super(FastGradientMethod, self).__init__(model, back, sess)
self.feedable_kwargs = {'eps': np.float32,
'y': np.float32,
'clip_min': np.float32,
'clip_max': np.float32}
self.structural_kwargs = ['ord']
if not isinstance(self.model, Model):
self.model = CallableModelWrapper(self.model, 'probs')
def generate(self, x, **kwargs):
"""
Generate symbolic graph for adversarial examples and return.
:param x: The model's symbolic inputs.
:param eps: (optional float) attack step size (input variation)
:param ord: (optional) Order of the norm (mimics Numpy).
Possible values: np.inf, 1 or 2.
:param y: (optional) A tensor with the model labels. Only provide
this parameter if you'd like to use true labels when crafting
adversarial samples. Otherwise, model predictions are used as
labels to avoid the "label leaking" effect (explained in this
paper: https://arxiv.org/abs/1611.01236). Default is None.
Labels should be one-hot-encoded.
:param clip_min: (optional float) Minimum input component value
:param clip_max: (optional float) Maximum input component value
"""
# Parse and save attack-specific parameters
assert self.parse_params(**kwargs)
if self.back == 'tf':
from .attacks_tf import fgm
else:
from .attacks_th import fgm
return fgm(x, self.model.get_probs(x), y=self.y, eps=self.eps,
ord=self.ord, clip_min=self.clip_min,
clip_max=self.clip_max)
def parse_params(self, eps=0.3, ord=np.inf, y=None, clip_min=None,
clip_max=None, **kwargs):
"""
Take in a dictionary of parameters and applies attack-specific checks
before saving them as attributes.
Attack-specific parameters:
:param eps: (optional float) attack step size (input variation)
:param ord: (optional) Order of the norm (mimics Numpy).
Possible values: np.inf, 1 or 2.
:param y: (optional) A tensor with the model labels. Only provide
this parameter if you'd like to use true labels when crafting
adversarial samples. Otherwise, model predictions are used as
labels to avoid the "label leaking" effect (explained in this
paper: https://arxiv.org/abs/1611.01236). Default is None.
Labels should be one-hot-encoded.
:param clip_min: (optional float) Minimum input component value
:param clip_max: (optional float) Maximum input component value
"""
# Save attack-specific parameters
self.eps = eps
self.ord = ord
self.y = y
self.clip_min = clip_min
self.clip_max = clip_max
# Check if order of the norm is acceptable given current implementation
if self.ord not in [np.inf, int(1), int(2)]:
raise ValueError("Norm order must be either np.inf, 1, or 2.")
if self.back == 'th' and self.ord != np.inf:
raise NotImplementedError("The only FastGradientMethod norm "
"implemented for Theano is np.inf.")
return True
class MultiModelIterativeMethod(MultipleModelAttack):
"""
The Basic Iterative Method (Kurakin et al. 2016). The original paper used
hard labels for this attack; no label smoothing.
"""
def __init__(self, models, back='tf', sess=None):
"""
Create a BasicIterativeMethod instance.
"""
super(MultiModelIterativeMethod, self).__init__(models, back, sess)
self.feedable_kwargs = {'eps': np.float32,
'eps_iter': np.float32,
'y': np.float32,
'clip_min': np.float32,
'clip_max': np.float32}
self.structural_kwargs = ['ord', 'nb_iter']
if not isinstance(self.model1, Model):
self.model1 = CallableModelWrapper(self.model1, 'probs')
if not isinstance(self.model2, Model):
self.model2 = CallableModelWrapper(self.model2, 'probs')
if not isinstance(self.model3, Model):
self.model3 = CallableModelWrapper(self.model3, 'probs')
def generate(self, x, **kwargs):
"""
Generate symbolic graph for adversarial examples and return.
:param x: The model's symbolic inputs.
:param eps: (required float) maximum distortion of adversarial example
compared to original input
:param eps_iter: (required float) step size for each attack iteration
:param nb_iter: (required int) Number of attack iterations.
:param y: (required) A tensor with the model labels.
:param ord: (optional) Order of the norm (mimics Numpy).
Possible values: np.inf, 1 or 2.
:param clip_min: (optional float) Minimum input component value
:param clip_max: (optional float) Maximum input component value
"""
import tensorflow as tf
# Parse and save attack-specific parameters
assert self.parse_params(**kwargs)
# Initialize loop variables
eta = 0
# Fix labels to the first model predictions for loss computation
# model_preds1 = self.model1.get_probs(x)
# model_preds2 = self.model2.get_probs(x)
model_preds3 = self.model3.get_probs(x)
model_preds = model_preds3
preds_max = tf.reduce_max(model_preds, 1, keep_dims=True)
y = tf.to_float(tf.equal(model_preds, preds_max))
fgsm_params = {'eps': self.eps_iter, 'y': y, 'ord': self.ord}
for i in range(self.nb_iter):
FGSM1 = FastGradientMethod(self.model1, back=self.back, sess=self.sess)
FGSM2 = FastGradientMethod(self.model2, back=self.back, sess=self.sess)
FGSM3 = FastGradientMethod(self.model3, back=self.back, sess=self.sess)
# Compute this step's perturbation
eta1 = FGSM1.generate(x + eta, **fgsm_params) - x
eta2 = FGSM2.generate(x + eta, **fgsm_params) - x
eta3 = FGSM3.generate(x + eta, **fgsm_params) - x
eta = eta1 * 0.333 + eta2 * 0.333 + eta3 * 0.333
# Clipping perturbation eta to self.ord norm ball
if self.ord == np.inf:
eta = tf.clip_by_value(eta, -self.eps, self.eps)
elif self.ord in [1, 2]:
reduc_ind = list(xrange(1, len(eta.get_shape())))
if self.ord == 1:
norm = tf.reduce_sum(tf.abs(eta),
reduction_indices=reduc_ind,
keep_dims=True)
elif self.ord == 2:
norm = tf.sqrt(tf.reduce_sum(tf.square(eta),
reduction_indices=reduc_ind,
keep_dims=True))
eta = eta * self.eps / norm
# Define adversarial example (and clip if necessary)
adv_x = x + eta
if self.clip_min is not None and self.clip_max is not None:
adv_x = tf.clip_by_value(adv_x, self.clip_min, self.clip_max)
return adv_x
def parse_params(self, eps=0.3, eps_iter=0.05, nb_iter=10, y=None,
ord=np.inf, clip_min=None, clip_max=None, **kwargs):
"""
Take in a dictionary of parameters and applies attack-specific checks
before saving them as attributes.
Attack-specific parameters:
:param eps: (required float) maximum distortion of adversarial example
compared to original input
:param eps_iter: (required float) step size for each attack iteration
:param nb_iter: (required int) Number of attack iterations.
:param y: (required) A tensor with the model labels.
:param ord: (optional) Order of the norm (mimics Numpy).
Possible values: np.inf, 1 or 2.
:param clip_min: (optional float) Minimum input component value
:param clip_max: (optional float) Maximum input component value
"""
# Save attack-specific parameters
self.eps = eps
self.eps_iter = eps_iter
self.nb_iter = nb_iter
self.y = y
self.ord = ord
self.clip_min = clip_min
self.clip_max = clip_max
# Check if order of the norm is acceptable given current implementation
if self.ord not in [np.inf, 1, 2]:
raise ValueError("Norm order must be either np.inf, 1, or 2.")
if self.back == 'th':
error_string = "BasicIterativeMethod is not implemented in Theano"
raise NotImplementedError(error_string)
return True
| 42.351254
| 83
| 0.598764
| 2,960
| 23,632
| 4.701351
| 0.128041
| 0.007545
| 0.005174
| 0.005174
| 0.823872
| 0.816111
| 0.803176
| 0.797715
| 0.780109
| 0.773929
| 0
| 0.010553
| 0.322317
| 23,632
| 557
| 84
| 42.427289
| 0.858383
| 0.375127
| 0
| 0.69084
| 0
| 0
| 0.130648
| 0
| 0
| 0
| 0
| 0
| 0.007634
| 1
| 0.061069
| false
| 0.003817
| 0.053435
| 0
| 0.167939
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
415782ae944fda78b643762118b9da9141acb670
| 38,000
|
py
|
Python
|
Post-process/correlation/correl.py
|
michgz/vibration-record
|
854a21a135cc86e1ce6b1ed28caadb6a770fa2c6
|
[
"MIT"
] | null | null | null |
Post-process/correlation/correl.py
|
michgz/vibration-record
|
854a21a135cc86e1ce6b1ed28caadb6a770fa2c6
|
[
"MIT"
] | null | null | null |
Post-process/correlation/correl.py
|
michgz/vibration-record
|
854a21a135cc86e1ce6b1ed28caadb6a770fa2c6
|
[
"MIT"
] | null | null | null |
# Run with Python 2.7
##
from trace import Trace
from trace import ReadIn
import datetime
import os
import sys
import zipfile
import shutil
import math
import getopt
import numpy
def ODSDate(dt):
return dt.strftime("<table:table-cell table:style-name=\"ce2\" office:value-type=\"date\" office:date-value=\"%Y-%m-%dT%H:%M:%S\" calcext:value-type=\"date\"><text:p>%d/%m/%Y %H:%M:%S</text:p></table:table-cell>")
def bIncludeTraces():
return True
def correl(infile1, infile2, intext1, intext2, output, diff = float('nan')):
# The time delta by which ADXL is _before_ LSM6.
#
# This was determined manually. Next big challenge: do it automatically!!
#
runningPath = os.path.dirname(os.path.abspath(__file__))
adxl = []
ax1 = ReadIn([infile1])
for ay in ax1:
az = ay.Calc()
adxl.append([ay.dt,az[1],az[2]])
lsm6 = []
ax2 = ReadIn([infile2])
for ay in ax2:
az = ay.Calc()
lsm6.append([ay.dt,az[1],az[2]])
if (math.isnan(diff)):
# Here were determine the time delta automatically.
#
# First, ensure that the two traces at least overlap in time.
if (min([a[0] for a in adxl]) < max([a[0] for a in lsm6])) and (min([a[0] for a in lsm6]) < max([a[0] for a in adxl])):
pass # we're ok
else:
sys.exit(6) # now okay
# Now choose the top 25%
min_len = min([len(adxl), len(lsm6)])
if (min_len <= 10):
choose_len = min_len
else:
choose_len = min_len /4;
def sortKey(e):
return e[1] # or e[2]
sorted_1 = sorted(adxl, key=sortKey, reverse=True)[0:choose_len]
sorted_2 = sorted(lsm6, key=sortKey, reverse=True)[0:choose_len]
#The following are in seconds
epsilon = 20
step_big = 10 # best if it's about 1/2 epsilon
step_small = 1 # usually 1
vals = range(-4000,4000,step_big)
# First pass
v3 = []
for v in vals:
b3 = 0
for b1 in sorted_1:
for b2 in sorted_2:
if (abs(b1[0]+datetime.timedelta(seconds=v)-b2[0]).total_seconds() < epsilon):
b3 += 1
break
v3.append(b3)
max_at = vals[v3.index(max(v3))]
# Second pass
vals = range(max_at - 2*step_big, max_at + 2*step_big, step_small)
v3 = []
for v in vals:
b3 = 0
for b1 in sorted_1:
for b2 in sorted_2:
if (abs(b1[0]+datetime.timedelta(seconds=v)-b2[0]).total_seconds() < epsilon):
b3 += 1
break
v3.append(b3)
max_val = max(v3)
# Now take the average of all values with the same maximum value
max_at = [i for i, j in enumerate(v3) if j == max_val]
time_diff = datetime.timedelta( seconds=vals[sum(max_at)/len(max_at)] )
else:
time_diff = datetime.timedelta( seconds=diff )
print "Using time difference of " + str(time_diff.total_seconds()) + " seconds"
# The furtherest two times can be and still count as the same
#
epsilon = datetime.timedelta(seconds=15)
print len(adxl)
print len(lsm6)
adxl_idx = len(adxl)*[-1]
lsm6_idx = len(lsm6)*[-1]
a1_idx = 0
## Find matching indices
##
for a1 in adxl:
targ_time = a1[0] + time_diff
best_idx = 999999 # Just any value
best_diff = datetime.timedelta.max
a2_idx = 0
for a2 in lsm6:
diff = targ_time - a2[0]
if (abs(diff) < best_diff):
best_diff = abs(diff)
best_idx = a2_idx
a2_idx += 1
if (best_diff < epsilon) and (lsm6_idx[best_idx] == -1):
adxl_idx[a1_idx] = best_idx
lsm6_idx[best_idx] = a1_idx
a1_idx += 1
## Choose some representative points
##
bx2 = []
bx_idx = 0
for a1 in adxl:
bx2.append(a1[1]) # or [2]
bx_idx += 1
bx1 = numpy.argsort(numpy.array(bx2)) # sorted indices
## Select only those for which a match exists
#
bx0 = []
for b0 in bx1:
if adxl_idx[b0]>=0:
bx0.append(b0)
## Limit the number
#
num_bx = min(len(bx0) / 2, 100) # number of indices to use.
bx1 = bx0[0:num_bx]
if os.path.abspath(output):
theName = output
else:
theName = os.path.join(runningPath, output)
axis_labels = True
if True:
# Now output to a file
i3 = 0
for i1 in range(0,len(adxl_idx)):
if (adxl_idx[i1] >= 0):
i3 += 1
noRows = i3 - 1
print "noRows = %d" % noRows
with open( runningPath + "/content_local_obj1.xml", "w") as dest:
with open( runningPath + "/6_2.xml", "r") as source:
while True:
copy_buff = source.read(4096)
if not copy_buff:
break
dest.write(copy_buff)
dest.write( "<office:body>" )
dest.write( "<office:chart>" )
dest.write( "<chart:chart svg:width=\"25.17cm\" svg:height=\"14.295cm\" xlink:href=\"..\" xlink:type=\"simple\" chart:class=\"chart:scatter\" chart:style-name=\"ch1\">" )
dest.write( "<chart:plot-area chart:style-name=\"ch2\" table:cell-range-address=\"Data.C2:Data.C%d Data.G2:Data.G%d\" svg:x=\"0.538cm\" svg:y=\"0.284cm\" svg:width=\"24.124cm\" svg:height=\"13.514cm\">" % (noRows+2, noRows+2) )
dest.write( "<chartooo:coordinate-region svg:x=\"1.345cm\" svg:y=\"0.483cm\" svg:width=\"22.853cm\" svg:height=\"12.668cm\"/>" )
dest.write( "<chart:axis chart:dimension=\"x\" chart:name=\"primary-x\" chart:style-name=\"ch3\"" )
if (axis_labels):
dest.write( ">" )
#dest.write( "<chart:title svg:x=\"11.511cm\" svg:y=\"13.735cm\" chart:style-name=\"ch4\">" )
dest.write( "<chart:title svg:x=\"11.511cm\" svg:y=\"13.735cm\">" )
dest.write( "<text:p>%s</text:p>" % intext1 )
dest.write( "</chart:title>" )
dest.write( "</chart:axis>" )
else:
dest.write( "/>" )
dest.write( "<chart:axis chart:dimension=\"y\" chart:name=\"primary-y\" chart:style-name=\"ch3\"" )
if (axis_labels):
dest.write( ">" )
#dest.write( "<chart:title svg:x=\"0cm\" svg:y=\"8.13cm\" chart:style-name=\"ch5\">" )
dest.write( "<chart:title svg:x=\"0cm\" svg:y=\"8.13cm\">" )
dest.write( "<text:p>%s</text:p>" % intext2 )
dest.write( "</chart:title>" )
else:
dest.write( ">" )
dest.write( "<chart:grid chart:style-name=\"ch4\" chart:class=\"major\"/></chart:axis>" )
dest.write( "<chart:series chart:style-name=\"ch5\" chart:values-cell-range-address=\"Data.G2:Data.G%d\" chart:class=\"chart:scatter\">" % (noRows+2) )
dest.write( "<chart:domain table:cell-range-address=\"Data.C2:Data.C%d\"/><chart:data-point chart:repeated=\"%d\"/>" % (noRows+2, noRows) )
dest.write( "</chart:series>" )
dest.write( "<chart:wall chart:style-name=\"ch6\"/><chart:floor chart:style-name=\"ch7\"/></chart:plot-area><table:table table:name=\"local-table\"><table:table-header-columns><table:table-column/>" )
dest.write( "</table:table-header-columns>" )
dest.write( "<table:table-columns><table:table-column table:number-columns-repeated=\"2\"/></table:table-columns>" )
dest.write( "<table:table-header-rows>" )
dest.write( "<table:table-row>" )
dest.write( "<table:table-cell><text:p/></table:table-cell>" )
dest.write( "<table:table-cell office:value-type=\"string\"><text:p>Column C</text:p></table:table-cell><table:table-cell office:value-type=\"string\"><text:p>Column G</text:p></table:table-cell>" )
dest.write( "</table:table-row>" )
dest.write( "</table:table-header-rows>" )
dest.write( "<table:table-rows>" )
for i1 in range(0,len(adxl_idx)):
if (adxl_idx[i1] >= 0):
i3 += 1
dest.write( "<table:table-row>" )
dest.write( "<table:table-cell office:value-type=\"string\"><text:p>%d</text:p></table:table-cell>" % i3)
f3 = adxl[i1][1]
dest.write( "<table:table-cell office:value-type=\"float\" office:value=\"%0.3f\"><text:p>%0.3f</text:p>" % (f3, f3) )
if (i3 ==1 ):
dest.write( "<draw:g><svg:desc>Data.C2:Data.C%d</svg:desc></draw:g>" % (noRows+2) )
dest.write( "</table:table-cell>" )
f3 = lsm6[adxl_idx[i1]][1]
dest.write( "<table:table-cell office:value-type=\"float\" office:value=\"%0.3f\"><text:p>%0.3f</text:p>" % (f3, f3) )
if (i3 ==1 ):
dest.write( "<draw:g><svg:desc>Data.G2:Data.G%d</svg:desc></draw:g>" % (noRows+2) )
dest.write( "</table:table-cell>" )
dest.write( "</table:table-row>" )
dest.write( "</table:table-rows></table:table></chart:chart></office:chart></office:body></office:document-content>" )
with open( runningPath + "/content_local_obj2.xml", "w") as dest:
with open( runningPath + "/7_2.xml", "r") as source:
while True:
copy_buff = source.read(4096)
if not copy_buff:
break
dest.write(copy_buff)
dest.write( "<office:body>" )
dest.write( "<office:chart>" )
dest.write( "<chart:chart svg:width=\"25.17cm\" svg:height=\"14.295cm\" xlink:href=\"..\" xlink:type=\"simple\" chart:class=\"chart:scatter\" chart:style-name=\"ch1\">" )
dest.write( "<chart:plot-area chart:style-name=\"ch2\" table:cell-range-address=\"Data.B2:Data.B%d Data.F2:Data.F%d\" svg:x=\"0.503cm\" svg:y=\"0.285cm\" svg:width=\"24.164cm\" svg:height=\"13.725cm\">" % (noRows+2, noRows+2) )
dest.write( "<chartooo:coordinate-region svg:x=\"1.124cm\" svg:y=\"0.482cm\" svg:width=\"23.076cm\" svg:height=\"12.512cm\"/>" )
dest.write( "<chart:axis chart:dimension=\"x\" chart:name=\"primary-x\" chart:style-name=\"ch3\"" )
if (axis_labels):
dest.write( ">" )
#dest.write( "<chart:title svg:x=\"11.538cm\" svg:y=\"13.735cm\" chart:style-name=\"ch4\">" )
dest.write( "<chart:title svg:x=\"11.538cm\" svg:y=\"13.735cm\">" )
dest.write( "<text:p>%s</text:p>" % intext1 )
dest.write( "</chart:title>" )
dest.write( "</chart:axis>" )
else:
dest.write( "/>" )
dest.write( "<chart:axis chart:dimension=\"y\" chart:name=\"primary-y\" chart:style-name=\"ch3\">" )
if (axis_labels):
#dest.write( "<chart:title svg:x=\"0cm\" svg:y=\"8.051cm\" chart:style-name=\"ch5\">" )
dest.write( "<chart:title svg:x=\"0cm\" svg:y=\"8.051cm\">" )
dest.write( "<text:p>%s</text:p>" % intext2 )
dest.write( "</chart:title>" )
dest.write( "<chart:grid chart:style-name=\"ch4\" chart:class=\"major\"/></chart:axis>" )
dest.write( "<chart:series chart:style-name=\"ch5\" chart:values-cell-range-address=\"Data.F2:Data.F%d\" chart:class=\"chart:scatter\">" % (noRows+2) )
dest.write( "<chart:domain table:cell-range-address=\"Data.B2:Data.B%d\"/><chart:data-point chart:repeated=\"%d\"/>" % (noRows+2, noRows) )
dest.write( "</chart:series>" )
dest.write( "<chart:wall chart:style-name=\"ch6\"/><chart:floor chart:style-name=\"ch7\"/></chart:plot-area><table:table table:name=\"local-table\"><table:table-header-columns><table:table-column/>" )
dest.write( "</table:table-header-columns>" )
dest.write( "<table:table-columns><table:table-column table:number-columns-repeated=\"2\"/></table:table-columns>" )
dest.write( "<table:table-header-rows>" )
dest.write( "<table:table-row>" )
dest.write( "<table:table-cell><text:p/></table:table-cell>" )
dest.write( "<table:table-cell office:value-type=\"string\"><text:p>Column B</text:p></table:table-cell><table:table-cell office:value-type=\"string\"><text:p>Column F</text:p></table:table-cell>" )
dest.write( "</table:table-row>" )
dest.write( "</table:table-header-rows>" )
dest.write( "<table:table-rows>" )
i4 = 0
for i1 in range(0,len(adxl_idx)):
if (adxl_idx[i1] >= 0):
i4 += 1
dest.write( "<table:table-row>" )
dest.write( "<table:table-cell office:value-type=\"string\"><text:p>%d</text:p></table:table-cell>" % i4)
f3 = adxl[i1][2]
dest.write( "<table:table-cell office:value-type=\"float\" office:value=\"%0.3f\"><text:p>%0.3f</text:p>" % (f3, f3) )
if (i4 ==1 ):
dest.write( "<draw:g><svg:desc>Data.B1:Data.B%d</svg:desc></draw:g>" % noRows )
dest.write( "</table:table-cell>" )
f3 = lsm6[adxl_idx[i1]][2]
dest.write( "<table:table-cell office:value-type=\"float\" office:value=\"%0.3f\"><text:p>%0.3f</text:p>" % (f3, f3) )
if (i4 ==1 ):
dest.write( "<draw:g><svg:desc>Data.F1:Data.F%d</svg:desc></draw:g>" % noRows )
dest.write( "</table:table-cell>" )
dest.write( "</table:table-row>" )
dest.write( "</table:table-rows></table:table></chart:chart></office:chart></office:body></office:document-content>" )
with open( runningPath + "/content_local_obj3.xml", "w") as dest:
with open( runningPath + "/3_1.xml", "r") as source:
while True:
copy_buff = source.read(4096)
if not copy_buff:
break
dest.write(copy_buff)
dest.write( intext1 )
with open( runningPath + "/3_2.xml", "r") as source:
while True:
copy_buff = source.read(4096)
if not copy_buff:
break
dest.write(copy_buff)
with open( runningPath + "/content_local_obj4.xml", "w") as dest:
with open( runningPath + "/4_1.xml", "r") as source:
while True:
copy_buff = source.read(4096)
if not copy_buff:
break
dest.write(copy_buff)
dest.write( intext2 )
with open( runningPath + "/4_2.xml", "r") as source:
while True:
copy_buff = source.read(4096)
if not copy_buff:
break
dest.write(copy_buff)
######################################################
### Write Header copperplate to the contents file ##
with open( runningPath + "/content_local.xml", "w") as dest:
with open( runningPath + "/../template_content/3.xml", "r") as source:
while True:
copy_buff = source.read(4096)
if not copy_buff:
break
dest.write(copy_buff)
dest.write( "<office:body>" )
dest.write( "<office:spreadsheet>" )
dest.write( "<table:calculation-settings table:automatic-find-labels=\"false\"/>" )
dest.write( "<table:table table:name=\"Raw\" table:style-name=\"ta1\">" )
dest.write( "<table:table-column table:style-name=\"co1\" table:default-cell-style-name=\"ce3\"/>" )
dest.write( "<table:table-column table:style-name=\"co2\" table:number-columns-repeated=\"3\" table:default-cell-style-name=\"Default\"/>" )
dest.write( "<table:table-column table:style-name=\"co1\" table:default-cell-style-name=\"Default\"/>" )
dest.write( "<table:table-column table:style-name=\"co2\" table:number-columns-repeated=\"2\" table:default-cell-style-name=\"Default\"/>" )
# Show the file names
dest.write( "<table:table-row table:style-name=\"ro1\">" )
dest.write( "<table:table-cell office:value-type=\"string\" calcext:value-type=\"string\"><text:p>%s</text:p></table:table-cell>" % (intext1) )
dest.write( "<table:table-cell/><table:table-cell/><table:table-cell/>" )
dest.write( "<table:table-cell office:value-type=\"string\" calcext:value-type=\"string\"><text:p>%s</text:p></table:table-cell>" % (intext2) )
dest.write( "<table:table-cell/><table:table-cell/>" )
dest.write( "</table:table-row>" )
if bIncludeTraces():
for bi in bx1:
## Find the corresponding point
ci = adxl_idx[bi]
dest.write( "<table:table-row table:style-name=\"ro1\">" )
dest.write( ODSDate(ax1[bi].dt) )
dest.write( "<table:table-cell/><table:table-cell/><table:table-cell/>" )
dest.write( ODSDate(ax2[ci].dt) )
dest.write( "<table:table-cell/><table:table-cell/></table:table-row>" )
for ii in range(0, 500):
dest.write( "<table:table-row table:style-name=\"ro1\">" )
if (ii < len(ax1[bi].x)):
f3 = ax1[bi].x[ii]
dest.write( "<table:table-cell office:value-type=\"float\" office:value=\"%0.3f\" calcext:value-type=\"float\"><text:p>%0.3f</text:p></table:table-cell>" % (f3, f3) )
f3 = ax1[bi].y[ii]
dest.write( "<table:table-cell office:value-type=\"float\" office:value=\"%0.3f\" calcext:value-type=\"float\"><text:p>%0.3f</text:p></table:table-cell>" % (f3, f3) )
f3 = ax1[bi].z[ii]
dest.write( "<table:table-cell office:value-type=\"float\" office:value=\"%0.3f\" calcext:value-type=\"float\"><text:p>%0.3f</text:p></table:table-cell>" % (f3, f3) )
else:
dest.write( "<table:table-cell/><table:table-cell/><table:table-cell/>" )
dest.write( "<table:table-cell/>" )
if (ii < len(ax2[ci].x)):
f3 = ax2[ci].x[ii]
dest.write( "<table:table-cell office:value-type=\"float\" office:value=\"%0.3f\" calcext:value-type=\"float\"><text:p>%0.3f</text:p></table:table-cell>" % (f3, f3) )
f3 = ax2[ci].y[ii]
dest.write( "<table:table-cell office:value-type=\"float\" office:value=\"%0.3f\" calcext:value-type=\"float\"><text:p>%0.3f</text:p></table:table-cell>" % (f3, f3) )
f3 = ax2[ci].z[ii]
dest.write( "<table:table-cell office:value-type=\"float\" office:value=\"%0.3f\" calcext:value-type=\"float\"><text:p>%0.3f</text:p></table:table-cell>" % (f3, f3) )
else:
dest.write( "<table:table-cell/><table:table-cell/><table:table-cell/>" )
dest.write( "</table:table-row>" )
full_len = len(bx1)*501
dest.write( "</table:table>" )
dest.write( "<table:table table:name=\"Data\" table:style-name=\"ta1\">" )
dest.write( "<table:shapes>" )
dest.write( "<draw:frame draw:z-index=\"0\" draw:style-name=\"gr1\" draw:text-style-name=\"P1\" svg:width=\"251.69mm\" svg:height=\"142.94mm\" svg:x=\"223.06mm\" svg:y=\"7.17mm\">" )
dest.write( "<draw:object draw:notify-on-update-of-ranges=\"Data.C2:Data.C%d Data.G2:Data.G%d\" xlink:href=\"./Object 1\" xlink:type=\"simple\" xlink:show=\"embed\" xlink:actuate=\"onLoad\">" % (noRows+2, noRows+2) )
dest.write( "<loext:p/>" )
dest.write( "</draw:object>" )
dest.write( "<draw:image xlink:href=\"./ObjectReplacements/Object 1\" xlink:type=\"simple\" xlink:show=\"embed\" xlink:actuate=\"onLoad\"/>" )
dest.write( "</draw:frame>" )
dest.write( "<draw:frame draw:z-index=\"1\" draw:style-name=\"gr1\" draw:text-style-name=\"P1\" svg:width=\"251.69mm\" svg:height=\"142.94mm\" svg:x=\"409.83mm\" svg:y=\"79.95mm\">" )
dest.write( "<draw:object draw:notify-on-update-of-ranges=\"Data.B2:Data.B%d Data.F2:Data.F%d\" xlink:href=\"./Object 2\" xlink:type=\"simple\" xlink:show=\"embed\" xlink:actuate=\"onLoad\">" % (noRows+2, noRows+2) )
dest.write( "<loext:p/>" )
dest.write( "</draw:object>" )
dest.write( "<draw:image xlink:href=\"./ObjectReplacements/Object 2\" xlink:type=\"simple\" xlink:show=\"embed\" xlink:actuate=\"onLoad\"/>" )
dest.write( "</draw:frame>" )
dest.write( "</table:shapes>" )
dest.write( "<table:table-column table:style-name=\"co1\" table:default-cell-style-name=\"ce3\"/>" )
dest.write( "<table:table-column table:style-name=\"co2\" table:number-columns-repeated=\"3\" table:default-cell-style-name=\"Default\"/>" )
dest.write( "<table:table-column table:style-name=\"co1\" table:default-cell-style-name=\"Default\"/>" )
dest.write( "<table:table-column table:style-name=\"co2\" table:number-columns-repeated=\"2\" table:default-cell-style-name=\"Default\"/>" )
# Show the file names
dest.write( "<table:table-row table:style-name=\"ro1\">" )
dest.write( "<table:table-cell office:value-type=\"string\" calcext:value-type=\"string\"><text:p>%s</text:p></table:table-cell>" % (intext1) )
dest.write( "<table:table-cell/><table:table-cell/><table:table-cell/>" )
dest.write( "<table:table-cell office:value-type=\"string\" calcext:value-type=\"string\"><text:p>%s</text:p></table:table-cell>" % (intext2) )
dest.write( "<table:table-cell/><table:table-cell/>" )
dest.write( "</table:table-row>" )
for i1 in range(0,len(adxl_idx)):
if (adxl_idx[i1] >= 0):
dest.write( "<table:table-row table:style-name=\"ro1\">" )
dest.write( ODSDate(adxl[i1][0]) )
f3 = adxl[i1][1]
dest.write( "<table:table-cell office:value-type=\"float\" office:value=\"%0.3f\" calcext:value-type=\"float\"><text:p>%0.3f</text:p></table:table-cell>" % (f3, f3) )
f3 = adxl[i1][2]
dest.write( "<table:table-cell office:value-type=\"float\" office:value=\"%0.3f\" calcext:value-type=\"float\"><text:p>%0.3f</text:p></table:table-cell>" % (f3, f3) )
dest.write("<table:table-cell/>")
dest.write( ODSDate(lsm6[adxl_idx[i1]][0]) )
f3 = lsm6[adxl_idx[i1]][1]
dest.write( "<table:table-cell office:value-type=\"float\" office:value=\"%0.3f\" calcext:value-type=\"float\"><text:p>%0.3f</text:p></table:table-cell>" % (f3, f3) )
f3 = lsm6[adxl_idx[i1]][2]
dest.write( "<table:table-cell office:value-type=\"float\" office:value=\"%0.3f\" calcext:value-type=\"float\"><text:p>%0.3f</text:p></table:table-cell>" % (f3, f3) )
dest.write("<table:table-cell/>")
#dest.write( "<table:table-cell office:value-type=\"string\" calcext:value-type=\"string\"><text:p>%s</text:p></table:table-cell>" % ("Hello!") )
dest.write( "</table:table-row>" )
# Write empty row
dest.write( "<table:table-row table:style-name=\"ro1\"><table:table-cell/><table:table-cell/><table:table-cell/><table:table-cell/><table:table-cell/><table:table-cell/><table:table-cell/></table:table-row>" )
dest.write( "<table:table-row table:style-name=\"ro1\">" )
dest.write( "<table:table-cell office:value-type=\"string\" calcext:value-type=\"string\"><text:p>%s</text:p></table:table-cell>" % ("Unmatched:") )
dest.write( "<table:table-cell/><table:table-cell/><table:table-cell/>" )
dest.write( "<table:table-cell office:value-type=\"string\" calcext:value-type=\"string\"><text:p>%s</text:p></table:table-cell>" % ("Unmatched:") )
dest.write( "<table:table-cell/><table:table-cell/>" )
dest.write( "</table:table-row>" )
# Now write out the remaining values (that haven't been paired).
i1 = 0
i2 = 0
while(True):
while(i1 < len(adxl_idx)):
if (adxl_idx[i1] != -1):
i1 += 1
else:
break
while(i2 < len(lsm6_idx)):
if (lsm6_idx[i2] != -1):
i2 += 1
else:
break
if(i1 >= len(adxl_idx)) and (i2 >= len(lsm6_idx)):
# Finished!
break
else:
# Something still to do!
dest.write( "<table:table-row table:style-name=\"ro1\">" )
if(i1 < len(adxl_idx)):
dest.write( ODSDate(adxl[i1][0]) )
f3 = adxl[i1][1]
dest.write( "<table:table-cell office:value-type=\"float\" office:value=\"%0.3f\" calcext:value-type=\"float\"><text:p>%0.3f</text:p></table:table-cell>" % (f3, f3) )
f3 = adxl[i1][2]
dest.write( "<table:table-cell office:value-type=\"float\" office:value=\"%0.3f\" calcext:value-type=\"float\"><text:p>%0.3f</text:p></table:table-cell>" % (f3, f3) )
i1 += 1
else:
dest.write( "<table:table-cell/><table:table-cell/><table:table-cell/>" )
dest.write( "<table:table-cell/>" )
if(i2 < len(lsm6_idx)):
dest.write( ODSDate(lsm6[i2][0]) )
f3 = lsm6[i2][1]
dest.write( "<table:table-cell office:value-type=\"float\" office:value=\"%0.3f\" calcext:value-type=\"float\"><text:p>%0.3f</text:p></table:table-cell>" % (f3, f3) )
f3 = lsm6[i2][2]
dest.write( "<table:table-cell office:value-type=\"float\" office:value=\"%0.3f\" calcext:value-type=\"float\"><text:p>%0.3f</text:p></table:table-cell>" % (f3, f3) )
i2 += 1
else:
dest.write( "<table:table-cell/><table:table-cell/><table:table-cell/>" )
dest.write( "</table:table-row>" )
dest.write( "</table:table>" )
## Now write out the third table sheet
dest.write( "<table:table table:name=\"Compare\" table:style-name=\"ta1\">" )
dest.write( "<table:shapes>" )
dest.write( "<draw:frame draw:z-index=\"0\" draw:style-name=\"gr1\" draw:text-style-name=\"P1\" svg:width=\"278.64mm\" svg:height=\"168.14mm\" svg:x=\"85.08mm\" svg:y=\"19.65mm\">" )
dest.write( "<draw:object draw:notify-on-update-of-ranges=\"Compare.D3:Compare.D502 Compare.E3:Compare.E502 Compare.D3:Compare.D502 Compare.F3:Compare.F502 Compare.D3:Compare.D502 Compare.G3:Compare.G502\" xlink:href=\"./Object 3\" xlink:type=\"simple\" xlink:show=\"embed\" xlink:actuate=\"onLoad\"><loext:p/>" )
dest.write( "</draw:object>" )
dest.write( "<draw:image xlink:href=\"./ObjectReplacements/Object 3\" xlink:type=\"simple\" xlink:show=\"embed\" xlink:actuate=\"onLoad\"/>" )
dest.write( "</draw:frame>" )
dest.write( "<draw:frame draw:z-index=\"1\" draw:style-name=\"gr1\" draw:text-style-name=\"P1\" svg:width=\"306.74mm\" svg:height=\"172.65mm\" svg:x=\"372.25mm\" svg:y=\"17.8mm\">" )
dest.write( "<draw:object draw:notify-on-update-of-ranges=\"Compare.I3:Compare.I502 Compare.J3:Compare.J502 Compare.I3:Compare.I502 Compare.K3:Compare.K502 Compare.I3:Compare.I502 Compare.L3:Compare.L502\" xlink:href=\"./Object 4\" xlink:type=\"simple\" xlink:show=\"embed\" xlink:actuate=\"onLoad\"><loext:p/>" )
dest.write( "</draw:object>" )
dest.write( "<draw:image xlink:href=\"./ObjectReplacements/Object 4\" xlink:type=\"simple\" xlink:show=\"embed\" xlink:actuate=\"onLoad\"/>" )
dest.write( "</draw:frame>" )
dest.write( "</table:shapes>" )
dest.write( "<table:table-column table:style-name=\"co2\" table:number-columns-repeated=\"4\" table:default-cell-style-name=\"Default\"/>" )
dest.write( "<table:table-column table:style-name=\"co1\" table:default-cell-style-name=\"ce3\"/>" )
dest.write( "<table:table-column table:style-name=\"co2\" table:number-columns-repeated=\"4\" table:default-cell-style-name=\"Default\"/>" )
dest.write( "<table:table-column table:style-name=\"co1\" table:default-cell-style-name=\"Default\"/>" )
dest.write( "<table:table-column table:style-name=\"co2\" table:number-columns-repeated=\"2\" table:default-cell-style-name=\"Default\"/>" )
# Show the file names
dest.write( "<table:table-row table:style-name=\"ro1\"><table:table-cell/><table:table-cell/><table:table-cell/><table:table-cell/>" )
dest.write( "<table:table-cell office:value-type=\"string\" calcext:value-type=\"string\"><text:p>%s</text:p></table:table-cell>" % (intext1) )
dest.write( "<table:table-cell/><table:table-cell/><table:table-cell/><table:table-cell/>" )
dest.write( "<table:table-cell office:value-type=\"string\" calcext:value-type=\"string\"><text:p>%s</text:p></table:table-cell>" % (intext2) )
dest.write( "<table:table-cell/><table:table-cell/>" )
dest.write( "</table:table-row>" )
dest.write( "<table:table-row table:style-name=\"ro1\"><table:table-cell/>" )
dest.write( "<table:table-cell office:value-type=\"float\" office:value=\"1\" calcext:value-type=\"float\"><text:p>1</text:p></table:table-cell>" )
dest.write( "<table:table-cell/>" )
dest.write( "<table:table-cell table:formula=\"of:=INDEX([.$A$3:.$C$%d];[.$B$2];2)\" office:value-type=\"float\" office:value=\"502\" calcext:value-type=\"float\"><text:p>502</text:p></table:table-cell>" % (500) )
dest.write( "<table:table-cell table:style-name=\"ce2\" table:formula=\"of:=INDEX([$Raw.$A$1:.$C$%d];[.$D$2];1)\" " % (full_len + 1) )
dt_x = datetime.datetime.now()
dest.write( dt_x.strftime("office:value-type=\"date\" office:date-value=\"%Y-%m-%dT%H:%M:%S\" calcext:value-type=\"date\"><text:p>%d/%m/%Y %H:%M:%S</text:p></table:table-cell>") )
#dest.write( "<table:table-cell/>" )
dest.write( "<table:table-cell/><table:table-cell/><table:table-cell/><table:table-cell/>" )
dest.write( "<table:table-cell table:style-name=\"ce2\" table:formula=\"of:=INDEX([$Raw.$E$1:.$G$%d];[.$D$2];1)\" " % (full_len + 1) )
dt_x = datetime.datetime.now()
dest.write( dt_x.strftime("office:value-type=\"date\" office:date-value=\"%Y-%m-%dT%H:%M:%S\" calcext:value-type=\"date\"><text:p>%d/%m/%Y %H:%M:%S</text:p></table:table-cell>") )
dest.write( "<table:table-cell/>" )
dest.write( "<table:table-cell table:style-name=\"ce5\" table:formula=\"of:=24*60*60*([.J2]-[.E2])\" office:value-type=\"float\" office:value=\"0\" calcext:value-type=\"float\"><text:p>0.00</text:p></table:table-cell>" )
dest.write( "</table:table-row>" )
for yi in range(0,500):
dest.write( "<table:table-row table:style-name=\"ro1\">" )
## Limit the number. What if num_bx > 500?? That can't happpen at the moment because (above) it
# is limited to 100, but maybe in future?
#
if yi <= num_bx:
dest.write( "<table:table-cell office:value-type=\"float\" office:value=\"%d\" calcext:value-type=\"float\"><text:p>%d</text:p></table:table-cell>" % (yi+1, yi+1) )
dest.write( "<table:table-cell office:value-type=\"float\" office:value=\"%d\" calcext:value-type=\"float\"><text:p>%d</text:p></table:table-cell>" % (yi*501+2, yi*501+2) )
dest.write( "<table:table-cell/>" )
else:
dest.write( "<table:table-cell/><table:table-cell/><table:table-cell/>" )
dest.write( "<table:table-cell office:value-type=\"float\" office:value=\"%d\" calcext:value-type=\"float\"><text:p>%d</text:p></table:table-cell>" % (yi+1, yi+1) )
dest.write( "<table:table-cell table:formula=\"of:=INDEX([$Raw.$A$1:.$C$%d];[.$D$2]+[.$D%d];1)\" office:value-type=\"float\" office:value=\"0.0000\" calcext:value-type=\"float\"><text:p>0.0000</text:p></table:table-cell>" % (full_len+1, yi+3) )
dest.write( "<table:table-cell table:formula=\"of:=INDEX([$Raw.$A$1:.$C$%d];[.$D$2]+[.$D%d];2)\" office:value-type=\"float\" office:value=\"0.0000\" calcext:value-type=\"float\"><text:p>0.0000</text:p></table:table-cell>" % (full_len+1, yi+3) )
dest.write( "<table:table-cell table:formula=\"of:=INDEX([$Raw.$A$1:.$C$%d];[.$D$2]+[.$D%d];3)\" office:value-type=\"float\" office:value=\"0.0000\" calcext:value-type=\"float\"><text:p>0.0000</text:p></table:table-cell>" % (full_len+1, yi+3) )
dest.write( "<table:table-cell/>" )
dest.write( "<table:table-cell office:value-type=\"float\" office:value=\"%d\" calcext:value-type=\"float\"><text:p>%d</text:p></table:table-cell>" % (yi+1, yi+1) )
dest.write( "<table:table-cell table:formula=\"of:=INDEX([$Raw.$E$1:.$G$%d];[.$D$2]+[.$I%d];1)\" office:value-type=\"float\" office:value=\"0.0000\" calcext:value-type=\"float\"><text:p>0.0000</text:p></table:table-cell>" % (full_len+1, yi+3) )
dest.write( "<table:table-cell table:formula=\"of:=INDEX([$Raw.$E$1:.$G$%d];[.$D$2]+[.$I%d];2)\" office:value-type=\"float\" office:value=\"0.0000\" calcext:value-type=\"float\"><text:p>0.0000</text:p></table:table-cell>" % (full_len+1, yi+3) )
dest.write( "<table:table-cell table:formula=\"of:=INDEX([$Raw.$E$1:.$G$%d];[.$D$2]+[.$I%d];3)\" office:value-type=\"float\" office:value=\"0.0000\" calcext:value-type=\"float\"><text:p>0.0000</text:p></table:table-cell>" % (full_len+1, yi+3) )
dest.write( "</table:table-row>" )
dest.write( "</table:table>" )
## Finish the document
dest.write( "</office:spreadsheet></office:body></office:document-content>" )
##### Now write to the ZIP archive ####
shutil.copy2( runningPath + "/6_2.ODS", theName) # 6_2 includes plots, 5_1 does not.
with zipfile.ZipFile(theName, "a") as z:
z.write( runningPath + "/content_local.xml", "content.xml", zipfile.ZIP_DEFLATED )
z.write( runningPath + "/content_local_obj1.xml", "Object 1/content.xml", zipfile.ZIP_DEFLATED )
z.write( runningPath + "/content_local_obj2.xml", "Object 2/content.xml", zipfile.ZIP_DEFLATED )
z.write( runningPath + "/content_local_obj3.xml", "Object 3/content.xml", zipfile.ZIP_DEFLATED )
z.write( runningPath + "/content_local_obj4.xml", "Object 4/content.xml", zipfile.ZIP_DEFLATED )
z.close()
os.remove( runningPath + "/content_local.xml" )
os.remove( runningPath + "/content_local_obj1.xml" )
os.remove( runningPath + "/content_local_obj2.xml" )
os.remove( runningPath + "/content_local_obj3.xml" )
os.remove( runningPath + "/content_local_obj4.xml" )
######################################################################
## START MAIN PROGRAM ##
######################################################################
def usage():
    """Print command-line usage information to stdout."""
    # Parenthesised print with a single string argument behaves
    # identically under Python 2 and Python 3.
    print("-h help")
    print("-o output")
    print("-d time delta to use (if omitted, calculate automatically)")
def _input_label(arg, full_path):
    """Return a short display label for one input file.

    If the argument as typed had no directory part, return it unchanged;
    otherwise show one parent-directory level plus the file name.
    """
    if arg == os.path.basename(full_path):
        # The input didn't contain a directory; don't bother showing one.
        return arg
    return os.path.join(os.path.basename(os.path.dirname(full_path)),
                        os.path.basename(full_path))

def main():
    """Entry point: parse command-line options and run the correlation.

    Expects exactly two positional arguments, both ending in .CSV, plus a
    mandatory -o/--output option.  -d/--delta/--diff optionally supplies a
    fixed time offset; when omitted, the offset is calculated automatically.
    Exits with status 2 on a bad option, 3 on a non-CSV input, 4 on a
    missing output name or wrong argument count.
    """
    try:
        opts, args = getopt.gnu_getopt(sys.argv[1:], "ho:d:",
                                       ["help", "output=", "delta=", "diff="])
    except getopt.GetoptError as err:
        # Print help information and exit; err reads like
        # "option -a not recognized".
        print(str(err))
        usage()
        sys.exit(2)
    Output = None
    Diff = float('nan')  # NaN marks "no delta given" -> auto-calculate below
    for o, a in opts:
        if o in ("-h", "--help"):
            usage()
            sys.exit()
        elif o in ("-o", "--output"):
            Output = a
        elif o in ("-d", "--delta", "--diff"):
            Diff = float(a)
        else:
            assert False, "unhandled option"
    if Output is None:
        # Have no output. Fails.
        sys.exit(4)
    if len(args) != 2:
        # Wrong number of args. Fails.
        sys.exit(4)
    if args[0].upper().endswith('.CSV'):
        input1 = os.path.abspath(args[0])
    else:
        sys.exit(3)
    if args[1].upper().endswith('.CSV'):
        input2 = os.path.abspath(args[1])
    else:
        sys.exit(3)
    # Determine the text shown as a representation of each input: either
    # the file name alone, or one parent-directory level plus the name.
    if os.path.dirname(input1) == os.path.dirname(input2):
        # If the directory is the same for both, don't bother showing it.
        input1_text = os.path.basename(input1)
        input2_text = os.path.basename(input2)
    else:
        input1_text = _input_label(args[0], input1)
        input2_text = _input_label(args[1], input2)
    if math.isnan(Diff):
        correl(input1, input2, input1_text, input2_text, output=Output)
    else:
        correl(input1, input2, input1_text, input2_text, output=Output,
               diff=Diff)
# Run the program only when executed as a script, not when imported.
if __name__ == "__main__":
    main()
| 47.559449
| 325
| 0.554605
| 5,166
| 38,000
| 4.042199
| 0.095432
| 0.118763
| 0.112633
| 0.123743
| 0.796475
| 0.765157
| 0.743176
| 0.722201
| 0.718657
| 0.711187
| 0
| 0.036194
| 0.237289
| 38,000
| 798
| 326
| 47.619048
| 0.684298
| 0.063342
| 0
| 0.537143
| 0
| 0.028571
| 0.388006
| 0.15501
| 0
| 0
| 0
| 0
| 0.001905
| 0
| null | null | 0.001905
| 0.019048
| null | null | 0.015238
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
4176c5f533f8633531f10c7f3c52202e1e27c414
| 41
|
py
|
Python
|
app/wakkerdam/__init__.py
|
mofferthond/flask-base
|
9f463d1a665b7c3b0466ff14430b4abecf1079d4
|
[
"MIT"
] | null | null | null |
app/wakkerdam/__init__.py
|
mofferthond/flask-base
|
9f463d1a665b7c3b0466ff14430b4abecf1079d4
|
[
"MIT"
] | null | null | null |
app/wakkerdam/__init__.py
|
mofferthond/flask-base
|
9f463d1a665b7c3b0466ff14430b4abecf1079d4
|
[
"MIT"
] | null | null | null |
from app.wakkerdam.views import wakkerdam
| 41
| 41
| 0.878049
| 6
| 41
| 6
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.073171
| 41
| 1
| 41
| 41
| 0.947368
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
4185ce815f8dfa14376e6a225fd603f542e8c076
| 34
|
py
|
Python
|
plugins/share_post/__init__.py
|
mohnjahoney/website_source
|
edc86a869b90ae604f32e736d9d5ecd918088e6a
|
[
"MIT"
] | 13
|
2020-01-27T09:02:25.000Z
|
2022-01-20T07:45:26.000Z
|
plugins/share_post/__init__.py
|
mohnjahoney/website_source
|
edc86a869b90ae604f32e736d9d5ecd918088e6a
|
[
"MIT"
] | 29
|
2020-03-22T06:57:57.000Z
|
2022-01-24T22:46:42.000Z
|
plugins/share_post/__init__.py
|
mohnjahoney/website_source
|
edc86a869b90ae604f32e736d9d5ecd918088e6a
|
[
"MIT"
] | 6
|
2020-07-10T00:13:30.000Z
|
2022-01-26T08:22:33.000Z
|
from .share_post import * # noqa
| 17
| 33
| 0.705882
| 5
| 34
| 4.6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.205882
| 34
| 1
| 34
| 34
| 0.851852
| 0.117647
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
41985f623c3e94929ed152014af0e6125cd6984d
| 35
|
py
|
Python
|
prosemirror/schema/list/__init__.py
|
p7g/prosemirror-py
|
ac22f3f93daff7dde896f797eb856890b65a3e46
|
[
"BSD-3-Clause"
] | 18
|
2019-06-19T04:38:45.000Z
|
2020-11-28T03:40:03.000Z
|
prosemirror/schema/list/__init__.py
|
p7g/prosemirror-py
|
ac22f3f93daff7dde896f797eb856890b65a3e46
|
[
"BSD-3-Clause"
] | 115
|
2019-06-19T04:52:00.000Z
|
2020-12-18T10:39:36.000Z
|
prosemirror/schema/list/__init__.py
|
p7g/prosemirror-py
|
ac22f3f93daff7dde896f797eb856890b65a3e46
|
[
"BSD-3-Clause"
] | 2
|
2020-06-03T16:48:02.000Z
|
2020-12-14T16:33:41.000Z
|
from .schema_list import * # noqa
| 17.5
| 34
| 0.714286
| 5
| 35
| 4.8
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.2
| 35
| 1
| 35
| 35
| 0.857143
| 0.114286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
419bf390a3579c1c5382fe57283e2c3543e13272
| 36
|
py
|
Python
|
backend/api/db/schemas/users.py
|
kkevinn114/Yacht
|
b354290501b24dc2220aa9562cfcf1725bff2fdf
|
[
"MIT"
] | 1
|
2020-10-23T18:52:17.000Z
|
2020-10-23T18:52:17.000Z
|
backend/api/db/schemas/users.py
|
ptTrR/Yacht
|
396a59f7a1b25e96c52c33cc7b0986f2d8dedb1c
|
[
"MIT"
] | null | null | null |
backend/api/db/schemas/users.py
|
ptTrR/Yacht
|
396a59f7a1b25e96c52c33cc7b0986f2d8dedb1c
|
[
"MIT"
] | null | null | null |
from fastapi_users import models
| 7.2
| 32
| 0.805556
| 5
| 36
| 5.6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.194444
| 36
| 4
| 33
| 9
| 0.965517
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
41a4003aad2cf0f40e0b4d48ec2bdb82d31bcdad
| 121
|
py
|
Python
|
teste.py
|
j0nathan-calist0/Aula-18_03
|
fdb70f961531e4f1dfc2bcfea53d37527c997770
|
[
"Apache-2.0"
] | null | null | null |
teste.py
|
j0nathan-calist0/Aula-18_03
|
fdb70f961531e4f1dfc2bcfea53d37527c997770
|
[
"Apache-2.0"
] | null | null | null |
teste.py
|
j0nathan-calist0/Aula-18_03
|
fdb70f961531e4f1dfc2bcfea53d37527c997770
|
[
"Apache-2.0"
] | null | null | null |
import pytest
from principal import somar
from principal import subtrair
def test_somar():
assert somar (2,4)==6
| 20.166667
| 30
| 0.743802
| 18
| 121
| 4.944444
| 0.666667
| 0.292135
| 0.426966
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.030612
| 0.190083
| 121
| 6
| 31
| 20.166667
| 0.877551
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.2
| 1
| 0.2
| true
| 0
| 0.6
| 0
| 0.8
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
68eef5d5d55d826d566cbd739e745eb8f70fa496
| 206
|
py
|
Python
|
navmenu/io/__init__.py
|
rashidsh/navmenu
|
ec67b820462cc102417e214cd74eb7b1b97ad1f1
|
[
"MIT"
] | null | null | null |
navmenu/io/__init__.py
|
rashidsh/navmenu
|
ec67b820462cc102417e214cd74eb7b1b97ad1f1
|
[
"MIT"
] | null | null | null |
navmenu/io/__init__.py
|
rashidsh/navmenu
|
ec67b820462cc102417e214cd74eb7b1b97ad1f1
|
[
"MIT"
] | null | null | null |
from navmenu.io.base import BaseIO
from navmenu.io.console import ConsoleIO
from navmenu.io.telegram import TelegramIO
from navmenu.io.vk import VKIO
__all__ = 'BaseIO', 'ConsoleIO', 'TelegramIO', 'VKIO',
| 29.428571
| 54
| 0.786408
| 29
| 206
| 5.448276
| 0.448276
| 0.278481
| 0.329114
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.11165
| 206
| 6
| 55
| 34.333333
| 0.863388
| 0
| 0
| 0
| 0
| 0
| 0.140777
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.8
| 0
| 0.8
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
68f2fe93b25bfa63e997898c7873e060907977c8
| 226
|
py
|
Python
|
package/awesome_panel/database/__init__.py
|
mycarta/awesome-panel
|
dae17d11f686daaedd48b8e74ac4307c89e2b031
|
[
"Apache-2.0"
] | 1
|
2020-05-08T21:44:37.000Z
|
2020-05-08T21:44:37.000Z
|
package/awesome_panel/database/__init__.py
|
mycarta/awesome-panel
|
dae17d11f686daaedd48b8e74ac4307c89e2b031
|
[
"Apache-2.0"
] | null | null | null |
package/awesome_panel/database/__init__.py
|
mycarta/awesome-panel
|
dae17d11f686daaedd48b8e74ac4307c89e2b031
|
[
"Apache-2.0"
] | null | null | null |
"""Imports to be exposed to the user of the package are listed here"""
from awesome_panel.database.authors import AUTHORS
from awesome_panel.database.resources import RESOURCES
from awesome_panel.database.tags import TAGS
| 45.2
| 71
| 0.818584
| 34
| 226
| 5.352941
| 0.558824
| 0.181319
| 0.263736
| 0.395604
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.128319
| 226
| 4
| 72
| 56.5
| 0.923858
| 0.283186
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
ec34627f156e83b3561bb7af1ed9a09033caac07
| 145
|
py
|
Python
|
hexa/catalog/management/commands/sync_datasources_worker.py
|
qgerome/openhexa-app
|
8c9377b2ad972121d8e9575f5d52420212b52ed4
|
[
"MIT"
] | 4
|
2021-07-19T12:53:21.000Z
|
2022-01-26T17:45:02.000Z
|
hexa/catalog/management/commands/sync_datasources_worker.py
|
qgerome/openhexa-app
|
8c9377b2ad972121d8e9575f5d52420212b52ed4
|
[
"MIT"
] | 20
|
2021-05-17T12:27:06.000Z
|
2022-03-30T11:35:26.000Z
|
hexa/catalog/management/commands/sync_datasources_worker.py
|
qgerome/openhexa-app
|
8c9377b2ad972121d8e9575f5d52420212b52ed4
|
[
"MIT"
] | 2
|
2021-09-07T04:19:59.000Z
|
2022-02-08T15:33:29.000Z
|
from dpq.commands import Worker
from hexa.catalog.queue import datasource_sync_queue
class Command(Worker):
queue = datasource_sync_queue
| 18.125
| 52
| 0.813793
| 20
| 145
| 5.7
| 0.6
| 0.245614
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.137931
| 145
| 7
| 53
| 20.714286
| 0.912
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
6b760839f7a3ed2433a5fa321df2e001c61bfc99
| 103
|
py
|
Python
|
web-server/model/__init__.py
|
sanfengliao/DeepNavi
|
dc405ac0010075c2eea63083528db7cb765ad161
|
[
"Apache-2.0"
] | null | null | null |
web-server/model/__init__.py
|
sanfengliao/DeepNavi
|
dc405ac0010075c2eea63083528db7cb765ad161
|
[
"Apache-2.0"
] | null | null | null |
web-server/model/__init__.py
|
sanfengliao/DeepNavi
|
dc405ac0010075c2eea63083528db7cb765ad161
|
[
"Apache-2.0"
] | null | null | null |
from .map import *
from .edge import *
from .point import *
from .basic_pb2 import *
from .loc import *
| 20.6
| 24
| 0.718447
| 16
| 103
| 4.5625
| 0.5
| 0.547945
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.011905
| 0.184466
| 103
| 5
| 25
| 20.6
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
6bb2e813c09a7126a6db693aaaf9dfaeab9e0033
| 182
|
py
|
Python
|
src/python/pyllars/cppparser/generation/clang/tranlation_unit.py
|
nak/pyllars
|
b4b3b131c61e6ba6a916df37129269f91ad1cc89
|
[
"Apache-2.0"
] | 2
|
2015-12-20T06:19:11.000Z
|
2020-07-28T04:17:57.000Z
|
src/python/pyllars/cppparser/generation/clang/tranlation_unit.py
|
nak/pyllars
|
b4b3b131c61e6ba6a916df37129269f91ad1cc89
|
[
"Apache-2.0"
] | null | null | null |
src/python/pyllars/cppparser/generation/clang/tranlation_unit.py
|
nak/pyllars
|
b4b3b131c61e6ba6a916df37129269f91ad1cc89
|
[
"Apache-2.0"
] | null | null | null |
from pyllars.cppparser.parser.clang_translator import NodeType
from .generator import Generator
class TranslationUnitDeclGenerator(Generator):
def generate(self):
pass
| 22.75
| 62
| 0.796703
| 19
| 182
| 7.578947
| 0.789474
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.148352
| 182
| 8
| 63
| 22.75
| 0.929032
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0.2
| 0.4
| 0
| 0.8
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
6bd5b898b24cdf3f8f87d646b9e714601420f9eb
| 166
|
py
|
Python
|
django_sendgrid_tracking/signals.py
|
MattFanto/django-sendgrid-tracking
|
9438a8146a6522654bbd9e56d98555ab1b5374c6
|
[
"MIT"
] | 5
|
2020-08-29T19:00:10.000Z
|
2020-10-20T00:11:27.000Z
|
django_sendgrid_tracking/signals.py
|
MattFanto/django-sendgrid-tracking
|
9438a8146a6522654bbd9e56d98555ab1b5374c6
|
[
"MIT"
] | 3
|
2020-08-30T10:32:59.000Z
|
2020-12-17T23:08:12.000Z
|
django_sendgrid_tracking/signals.py
|
MattFanto/django-sendgrid-tracking
|
9438a8146a6522654bbd9e56d98555ab1b5374c6
|
[
"MIT"
] | null | null | null |
from sendgrid_backend.signals import sendgrid_email_sent
from django_sendgrid_tracking.mail import create_send_email
sendgrid_email_sent.connect(create_send_email)
| 27.666667
| 59
| 0.903614
| 24
| 166
| 5.791667
| 0.541667
| 0.18705
| 0.244604
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.066265
| 166
| 5
| 60
| 33.2
| 0.896774
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
d41e700a3233421c0d38f8e709a48805e6430de6
| 1,406
|
py
|
Python
|
tests/conftest.py
|
Murthy10/pyGeoTile
|
c744e540ba698fbe0d822616a62702918d24f71e
|
[
"MIT"
] | 93
|
2017-04-24T10:49:20.000Z
|
2022-03-30T00:12:09.000Z
|
tests/conftest.py
|
Murthy10/pyGeoTile
|
c744e540ba698fbe0d822616a62702918d24f71e
|
[
"MIT"
] | 12
|
2017-04-24T09:40:54.000Z
|
2021-12-09T16:26:19.000Z
|
tests/conftest.py
|
Murthy10/pyGeoTile
|
c744e540ba698fbe0d822616a62702918d24f71e
|
[
"MIT"
] | 9
|
2017-11-14T08:16:02.000Z
|
2021-03-07T13:23:29.000Z
|
import pytest
'''
Chicago, IL
LatLng: (41.85, -87.64999999999998)
Zoom level: 19
World Coordinate: (65.67111111111113, 95.17492654697409)
Pixel Coordinate: (34430575, 49899071)
Tile Coordinate: (134494, 194918)
'''
@pytest.fixture(scope="session", autouse=True)
def chicago_latitude_longitude():
return 41.85, -87.65
@pytest.fixture(scope="session", autouse=True)
def chicago_zoom():
return 19
@pytest.fixture(scope="session", autouse=True)
def chicago_pixel():
return 34430575, 49899071
@pytest.fixture(scope="session", autouse=True)
def chicago_meters():
return -9757148.442088600, 5138517.444985110
@pytest.fixture(scope="session", autouse=True)
def chicago_pixel_bounds():
return (34430464, 49899264), (34430720, 49899008)
@pytest.fixture(scope="session", autouse=True)
def chicago_meter_bounds():
return (-9757186.660602748, 5138479.226470973), (-9757110.223574463, 5138555.663499258)
@pytest.fixture(scope="session", autouse=True)
def chicago_latitude_longitude_bounds():
return (41.8496161693754, -87.65029907226562), (41.85012764855732, -87.64961242675781)
@pytest.fixture(scope="session", autouse=True)
def chicago_google():
return 134494, 194918
@pytest.fixture(scope="session", autouse=True)
def chicago_tms():
return 134494, 329369
@pytest.fixture(scope="session", autouse=True)
def chicago_quad_tree():
return '0302222310303211330'
| 23.04918
| 91
| 0.748222
| 168
| 1,406
| 6.166667
| 0.369048
| 0.125483
| 0.173745
| 0.241313
| 0.509653
| 0.509653
| 0.509653
| 0.509653
| 0.287645
| 0.189189
| 0
| 0.274258
| 0.113087
| 1,406
| 60
| 92
| 23.433333
| 0.556536
| 0
| 0
| 0.322581
| 0
| 0
| 0.073798
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.322581
| true
| 0
| 0.032258
| 0.322581
| 0.677419
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
d4706d698eb0cb0a37fd98c2beeccef0f36632bc
| 44
|
py
|
Python
|
discord/ext/voice_recv/common/__init__.py
|
schlopp/Novus
|
5f468c1a438a6f38dff7eea8b7741fab93897e99
|
[
"MIT"
] | 61
|
2021-08-30T05:30:31.000Z
|
2022-03-24T11:24:38.000Z
|
discord/ext/voice_recv/common/__init__.py
|
schlopp/Novus
|
5f468c1a438a6f38dff7eea8b7741fab93897e99
|
[
"MIT"
] | 30
|
2021-08-31T10:16:42.000Z
|
2022-03-09T22:53:15.000Z
|
discord/ext/voice_recv/common/__init__.py
|
schlopp/Novus
|
5f468c1a438a6f38dff7eea8b7741fab93897e99
|
[
"MIT"
] | 46
|
2018-06-27T15:05:33.000Z
|
2022-03-21T16:58:23.000Z
|
# -*- coding: utf-8 -*-
from .rtp import *
| 11
| 23
| 0.522727
| 6
| 44
| 3.833333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.029412
| 0.227273
| 44
| 3
| 24
| 14.666667
| 0.647059
| 0.477273
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
2e0f5737550b1b2a5137f4593753db60ebb38afe
| 1,966
|
py
|
Python
|
web/pipeline/migrations/0064_auto_20200826_2058.py
|
stevenstuber/CIT
|
8c485e72084c06da6db45da1cb402bac26411ec2
|
[
"Apache-2.0"
] | 10
|
2020-11-12T15:13:40.000Z
|
2022-03-05T22:33:08.000Z
|
web/pipeline/migrations/0064_auto_20200826_2058.py
|
stevenstuber/CIT
|
8c485e72084c06da6db45da1cb402bac26411ec2
|
[
"Apache-2.0"
] | 28
|
2020-07-17T16:33:55.000Z
|
2022-03-21T16:24:25.000Z
|
web/pipeline/migrations/0064_auto_20200826_2058.py
|
stevenstuber/CIT
|
8c485e72084c06da6db45da1cb402bac26411ec2
|
[
"Apache-2.0"
] | 5
|
2020-11-02T23:39:53.000Z
|
2022-03-01T19:09:45.000Z
|
# Generated by Django 2.2.13 on 2020-08-26 20:58
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('pipeline', '0063_merge_20200826_2021'),
]
operations = [
migrations.RenameField(
model_name='censussubdivision',
old_name='households_owner_spending_30_pct_income',
new_name='households_owner_pct_spending_30_pct_income',
),
migrations.RenameField(
model_name='censussubdivision',
old_name='households_tenant_spending_30_pct_income',
new_name='households_tenant_pct_spending_30_pct_income',
),
migrations.AddField(
model_name='censussubdivision',
name='households_owner_count_mortgage',
field=models.IntegerField(null=True),
),
migrations.AddField(
model_name='censussubdivision',
name='households_owner_count_spending_30_pct_income',
field=models.IntegerField(null=True),
),
migrations.AddField(
model_name='censussubdivision',
name='households_tenant_count_spending_30_pct_income',
field=models.IntegerField(null=True),
),
migrations.AddField(
model_name='censussubdivision',
name='households_tenant_count_subsidized_housing',
field=models.IntegerField(null=True),
),
migrations.AddField(
model_name='censussubdivision',
name='pop_count_0_14',
field=models.IntegerField(null=True),
),
migrations.AddField(
model_name='censussubdivision',
name='pop_count_14_65',
field=models.IntegerField(null=True),
),
migrations.AddField(
model_name='censussubdivision',
name='pop_count_65',
field=models.IntegerField(null=True),
),
]
| 33.322034
| 68
| 0.618006
| 184
| 1,966
| 6.25
| 0.277174
| 0.070435
| 0.203478
| 0.164348
| 0.821739
| 0.821739
| 0.753043
| 0.690435
| 0.57913
| 0.515652
| 0
| 0.037884
| 0.288403
| 1,966
| 58
| 69
| 33.896552
| 0.784132
| 0.023398
| 0
| 0.653846
| 1
| 0
| 0.289885
| 0.184567
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.019231
| 0
| 0.076923
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
2e450bd05243b1b789ff418a19c0fa31b4c7692e
| 16
|
py
|
Python
|
first.py
|
FilipCvetko/Testingrepo
|
549d94f30688b4f5285d3439018e386ca80d9ac7
|
[
"MIT"
] | null | null | null |
first.py
|
FilipCvetko/Testingrepo
|
549d94f30688b4f5285d3439018e386ca80d9ac7
|
[
"MIT"
] | null | null | null |
first.py
|
FilipCvetko/Testingrepo
|
549d94f30688b4f5285d3439018e386ca80d9ac7
|
[
"MIT"
] | null | null | null |
print("H222I.")
| 8
| 15
| 0.625
| 2
| 16
| 5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.2
| 0.0625
| 16
| 1
| 16
| 16
| 0.466667
| 0
| 0
| 0
| 0
| 0
| 0.375
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 6
|
cf0cbe64671b340adb5716534ec48ec7e1d078e2
| 48
|
py
|
Python
|
config/__init__.py
|
kunal-sanghvi/flask-app
|
10182024bfc298fd055e4a75ce73849da30003f7
|
[
"MIT"
] | null | null | null |
config/__init__.py
|
kunal-sanghvi/flask-app
|
10182024bfc298fd055e4a75ce73849da30003f7
|
[
"MIT"
] | null | null | null |
config/__init__.py
|
kunal-sanghvi/flask-app
|
10182024bfc298fd055e4a75ce73849da30003f7
|
[
"MIT"
] | null | null | null |
from .settings import *
from .constants import *
| 24
| 24
| 0.770833
| 6
| 48
| 6.166667
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.145833
| 48
| 2
| 24
| 24
| 0.902439
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
cf0e075691493027f09c6c36b8cb22bb0a0c72e2
| 3,936
|
py
|
Python
|
tests/data/token_indexers/elmo_indexer_test.py
|
unendin/allennlp
|
0dcbaea6dbc6cc43e24a3564d6d37f8a1421484c
|
[
"Apache-2.0"
] | 1
|
2018-06-14T10:11:20.000Z
|
2018-06-14T10:11:20.000Z
|
tests/data/token_indexers/elmo_indexer_test.py
|
unendin/allennlp
|
0dcbaea6dbc6cc43e24a3564d6d37f8a1421484c
|
[
"Apache-2.0"
] | 1
|
2018-07-02T18:19:41.000Z
|
2018-07-02T19:37:31.000Z
|
tests/data/token_indexers/elmo_indexer_test.py
|
unendin/allennlp
|
0dcbaea6dbc6cc43e24a3564d6d37f8a1421484c
|
[
"Apache-2.0"
] | 1
|
2022-03-27T19:45:13.000Z
|
2022-03-27T19:45:13.000Z
|
# pylint: disable=no-self-use
from allennlp.common.testing import AllenNlpTestCase
from allennlp.data import Token, Vocabulary
from allennlp.data.token_indexers import ELMoTokenCharactersIndexer
class TestELMoTokenCharactersIndexer(AllenNlpTestCase):
def test_bos_to_char_ids(self):
indexer = ELMoTokenCharactersIndexer()
indices = indexer.token_to_indices(Token('<S>'), Vocabulary())
expected_indices = [259, 257, 260, 261, 261, 261, 261, 261, 261,
261, 261, 261, 261, 261, 261, 261, 261, 261,
261, 261, 261, 261, 261, 261, 261, 261, 261,
261, 261, 261, 261, 261, 261, 261, 261, 261,
261, 261, 261, 261, 261, 261, 261, 261, 261,
261, 261, 261, 261, 261]
assert indices == expected_indices
def test_eos_to_char_ids(self):
indexer = ELMoTokenCharactersIndexer()
indices = indexer.token_to_indices(Token('</S>'), Vocabulary())
expected_indices = [259, 258, 260, 261, 261, 261, 261, 261, 261,
261, 261, 261, 261, 261, 261, 261, 261, 261,
261, 261, 261, 261, 261, 261, 261, 261, 261,
261, 261, 261, 261, 261, 261, 261, 261, 261,
261, 261, 261, 261, 261, 261, 261, 261, 261,
261, 261, 261, 261, 261]
assert indices == expected_indices
def test_unicode_to_char_ids(self):
indexer = ELMoTokenCharactersIndexer()
indices = indexer.token_to_indices(Token(chr(256) + 't'), Vocabulary())
expected_indices = [259, 197, 129, 117, 260, 261, 261, 261, 261,
261, 261, 261, 261, 261, 261, 261, 261, 261,
261, 261, 261, 261, 261, 261, 261, 261, 261,
261, 261, 261, 261, 261, 261, 261, 261, 261,
261, 261, 261, 261, 261, 261, 261, 261, 261,
261, 261, 261, 261, 261]
assert indices == expected_indices
def test_elmo_as_array_produces_token_sequence(self): # pylint: disable=invalid-name
indexer = ELMoTokenCharactersIndexer()
indices = [
indexer.token_to_indices(Token(token), Vocabulary())
for token in ['Second', '.']
]
padded_tokens = indexer.pad_token_sequence(indices,
desired_num_tokens=3,
padding_lengths={})
expected_padded_tokens = [[259, 84, 102, 100, 112, 111, 101, 260, 261,
261, 261, 261, 261, 261, 261, 261, 261, 261,
261, 261, 261, 261, 261, 261, 261, 261, 261,
261, 261, 261, 261, 261, 261, 261, 261, 261,
261, 261, 261, 261, 261, 261, 261, 261, 261,
261, 261, 261, 261, 261],
[259, 47, 260, 261, 261, 261, 261, 261, 261,
261, 261, 261, 261, 261, 261, 261, 261, 261,
261, 261, 261, 261, 261, 261, 261, 261, 261,
261, 261, 261, 261, 261, 261, 261, 261, 261,
261, 261, 261, 261, 261, 261, 261, 261, 261,
261, 261, 261, 261, 261],
[0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0]]
assert padded_tokens == expected_padded_tokens
| 56.228571
| 88
| 0.455793
| 447
| 3,936
| 3.914989
| 0.152125
| 0.764571
| 1.121143
| 1.460571
| 0.694286
| 0.694286
| 0.694286
| 0.694286
| 0.656571
| 0.656571
| 0
| 0.359964
| 0.433943
| 3,936
| 69
| 89
| 57.043478
| 0.425494
| 0.014228
| 0
| 0.580645
| 0
| 0
| 0.003869
| 0
| 0
| 0
| 0
| 0
| 0.064516
| 1
| 0.064516
| false
| 0
| 0.048387
| 0
| 0.129032
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
cf40c00a70de431ce34f646176599a64d698aa0f
| 15,067
|
py
|
Python
|
beneficiaries/beneficiaries/doctype/beneficiary_request/beneficiary_request.py
|
baidalala/beneficiaries
|
b7299e0a7da91e90c607e70d76994ec0aebae402
|
[
"MIT"
] | null | null | null |
beneficiaries/beneficiaries/doctype/beneficiary_request/beneficiary_request.py
|
baidalala/beneficiaries
|
b7299e0a7da91e90c607e70d76994ec0aebae402
|
[
"MIT"
] | null | null | null |
beneficiaries/beneficiaries/doctype/beneficiary_request/beneficiary_request.py
|
baidalala/beneficiaries
|
b7299e0a7da91e90c607e70d76994ec0aebae402
|
[
"MIT"
] | 1
|
2021-08-31T18:47:58.000Z
|
2021-08-31T18:47:58.000Z
|
# -*- coding: utf-8 -*-
# Copyright (c) 2021, Baida and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
import json
from frappe.model.naming import set_name_by_naming_series
from frappe import _, msgprint, throw
import frappe.defaults
from frappe.utils import flt, cint, cstr, today
from frappe.desk.reportview import build_match_conditions, get_filters_cond
from erpnext.utilities.transaction_base import TransactionBase
from erpnext.accounts.party import validate_party_accounts, get_dashboard_info, get_timeline_data # keep this
from frappe.contacts.address_and_contact import load_address_and_contact, delete_contact_and_address
from frappe.model.rename_doc import update_linked_doctypes
from frappe.model.mapper import get_mapped_doc
from frappe.model.document import Document
from datetime import datetime,date
from dateutil.relativedelta import relativedelta
from frappe.permissions import add_user_permission, remove_user_permission, \
set_user_permission_if_allowed, has_permission
from frappe.utils.password import update_password as _update_password
from frappe.utils import random_string
from frappe.utils.data import add_months
from frappe.utils import cint, cstr, formatdate, flt, getdate, nowdate, get_link_to_form
from erpnext.setup.doctype.item_group.item_group import get_item_group_defaults
from erpnext.stock import get_warehouse_account_map
from erpnext.assets.doctype.asset_category.asset_category import get_asset_category_account
from erpnext.accounts.utils import get_fiscal_year
class BeneficiaryRequest(Document):
def validate(self):
self.is_deserve()
# self.validate_values()
self.created_by = frappe.session.user
self.date_of_registration=date.today()
def validate_values(self):
if (self.number_of_needed_members_in_family > self.number_of_family) :
frappe.throw('عدد الافراد المعالين اكبر من عدد افراد الاسرة')
if (self.number_of_wives > self.number_of_family) :
frappe.throw('عدد الزوجات اكبر من عدد افراد الاسرة')
if (self.the_number_of_household_workers > self.number_of_family) :
frappe.throw('عدد الافراد العاملين في المنزل اكبر من عدد افراد الاسرة')
if ( self.the_number_of_professional_workers > self.number_of_family) :
frappe.throw('عدد الافراد العاملين اكبر من عدد افراد الاسرة')
if self.date_of_expired < self.date_of_issue:
frappe.throw('تاريخ انتهاء الهوية أقل من تاريخ اصدارها')
def get_max_number_of_members(self):
return frappe.db.sql("""select max(number_of_members) as members from `tabThe Base`""", as_dict=True)
def get_base(self):
max_member=self.get_max_number_of_members()[0].members
if self.number_of_needed_members_in_family:
if self.number_of_needed_members_in_family > int (max_member):
members=max_member
else:
members=self.number_of_needed_members_in_family
"""
Returns list of active beneficiary based on selected criteria
and for which type exists
"""
return frappe.db.sql("""select live_base as live_base,rent_base as rent_base,rent_in_year as rent_in_year,rent_in_five_year as rent_in_five_year
from `tabThe Base` where number_of_members= %s""",members, as_dict=True)
else:
return
def is_deserve(self):
check_is_deserve = self.get_base()
if not check_is_deserve:
return
fee_sum=0
for m in self.get("fees"):
m.fee_in_year=flt(m.fee_in_month * 12)
fee_sum +=m.fee_in_year
self.fee_total=fee_sum
obl_sum=0
for m in self.get("obligation"):
obl_sum +=m.amount
self.obligations_total=obl_sum
result = self.fee_total - self.obligations_total
if (self.territory=="Unaizah" or self.territory=="عنيزة") and (self.nationality=="Saudi" or self.nationality=="Syrian" or self.nationality=="سوري" or
self.nationality=="سعودي")and result <= check_is_deserve[0].live_base:
self.deserve_according_to_base=True
self.live_base=check_is_deserve[0].live_base
if self.home_type== "Rent":
self.rent_base=check_is_deserve[0].rent_base
else:
self.rent_base=0
self.rent_in_year=check_is_deserve[0].rent_in_year
self.rent_in_five_year=check_is_deserve[0].rent_in_five_year
elif (self.territory=="Unaizah" or self.territory=="عنيزة") and (self.nationality=="Saudi" or self.nationality=="Syrian" or self.nationality=="سوري" or
self.nationality=="سعودي" ) and result >= check_is_deserve[0].live_base and result <= check_is_deserve[0].rent_base:
self.deserve_according_to_base=True
self.live_base=0
if self.home_type== "Rent":
self.rent_base=check_is_deserve[0].rent_base
else:
self.rent_base=0
self.rent_in_year=check_is_deserve[0].rent_in_year
self.rent_in_five_year=check_is_deserve[0].rent_in_five_year
else:
self.deserve_according_to_base=False
self.live_base=0
self.rent_base=0
self.rent_in_five_year=0
self.rent_in_year=0
def add_beneficiary(self):
if self.employee==1:
beneficiary = frappe.new_doc('Beneficiary')
beneficiary.beneficiary_name = self.beneficiary_name
beneficiary.beneficiary_request = self.name
beneficiary.beneficiary_account=frappe.db.get_single_value('Beneficiary Settings', 'beneficiary_account')
beneficiary.marital_status = self.marital_status
beneficiary.nationality = self.nationality
beneficiary.territory=self.territory
beneficiary.address=self.address
beneficiary.gender=self.gender
beneficiary.phone=self.phone
beneficiary.mobile=self.mobile
beneficiary.email=self.email
beneficiary.beneficiary_state=self.beneficiary_state
beneficiary.owner=self.email
pwd=random_string(10)
beneficiary.default_password=pwd
beneficiary.id_type=self.id_type
beneficiary.the_number=self.the_number
beneficiary.date_of_issue=self.date_of_issue
beneficiary.date_of_expired=self.date_of_expired
for f in self.get("fees"):
beneficiary.append('fees', dict(fee_type=f.fee_type, fee_in_year=f.fee_in_year,fee_in_month=f.fee_in_month))
beneficiary.fee_total=self.fee_total
for ob in self.get("obligation"):
beneficiary.append('beneficiary_obligation', dict(beneficiary_obligation=ob.beneficiary_obligation,
obligation_to=ob.obligation_to,amount=ob.amount,number_of_pays=ob.number_of_pays,way_of_pay=ob.way_of_pay,reason_of_obligation=ob.reason_of_obligation,attach=ob.attach))
beneficiary.obligations_total=self.obligations_total
beneficiary.home_type=self.home_type
beneficiary.number_of_rooms=self.number_of_rooms
beneficiary.home_attach=self.home_type_attachment
beneficiary.home_state=self.state_of_home
beneficiary.number_of_family=self.number_of_family
beneficiary.number_of_wives=self.number_of_wives
beneficiary.number_of_needed_members_in_family=self.number_of_needed_members_in_family
beneficiary.the_number_of_professional_workers=self.the_number_of_professional_workers
beneficiary.the_number_of_household_workers=self.the_number_of_household_workers
beneficiary.number_of_unemployed_members=self.number_of_unemployed_members
beneficiary.beneficiary_notes=self.beneficiary_notes
beneficiary.deserve_according_to_base=self.deserve_according_to_base
beneficiary.live_base=self.live_base
beneficiary.rent_base=self.rent_base
beneficiary.rent_in_year=self.rent_in_year
beneficiary.rent_in_five_year=self.rent_in_five_year
for f in self.get("family_own"):
beneficiary.append('family_own', dict(own=f.own, note=f.note))
if not frappe.db.exists("Beneficiary", beneficiary.name):
beneficiary.insert()
# frappe.msgprint('Beneficiary Inserted Done :)')
# create contact from beneficiary
contact = frappe.new_doc('Contact')
contact.first_name = self.beneficiary_name
contact.email_id = self.email
contact.phone = self.phone
contact.mobile_no = self.mobile
contact.is_primary_contact = 1
contact.append('links', dict(link_doctype='Beneficiary', link_name=beneficiary.name))
if self.email:
contact.append('email_ids', dict(email_id=self.email, is_primary=1))
if self.phone:
contact.append('phone_nos', dict(phone=self.phone, is_primary_mobile_no=1))
contact.flags.ignore_permissions = self.flags.ignore_permissions
contact.autoname()
if not frappe.db.exists("Beneficiary", contact.name):
contact.insert()
# frappe.msgprint('Beneficiary contact Inserted Done :)')
# self.has_contact=1
# if self.has_contact==0:
# frappe.throw("Beneficiary doesn't add to contacts list",raise_exception)
# if self.has_contact==1:
user = frappe.get_doc({
"doctype": "User",
"first_name": self.beneficiary_name,
"email": self.email,
"language":"ar",
"user_type": "Website User",
"send_welcome_email": 1,
"role_profile_name":"Beneficiary"
}).insert(ignore_permissions = True)
frappe.get_doc("User", self.email).add_roles("Beneficiary")
_update_password(user=self.email, pwd=pwd, logout_all_sessions=0)
# user.new_password="1234"
# self.is_user=1
# if self.is_user==0:
# frappe.throw("Beneficiary doesn't add to Users list",raise_exception)
# if self.is_user==1 and self.has_contact==1:
userpermission = frappe.get_doc({
"doctype": "User Permission",
"user": user.email,
"for_value": beneficiary.name,
"allow": "Beneficiary",
"is_default":1,
"apply_to_all_doctypes":0,
"applicable_for":"Beneficiary"
}).insert()
# if frappe.db.exists("Beneficiary", beneficiary.name) and frappe.db.exists("Contact", contact.name) and frappe.db.exists("User", user.email) and frappe.db.exists("User Permission", userpermission.user):
self.inserted=True
# else:
# self.inserted=False
# self.has_user_permission=1
# if self.has_user_permission==0:
# frappe.throw("Beneficiary doesn't add to User Permission list",raise_exception)
@frappe.whitelist()
def set_multiple_request(names):
names = json.loads(names)
# frappe.msgprint(names)
for name in names:
req = frappe.get_doc("Beneficiary Request", name)
if not req.inserted:
add_beneficiary(req)
req.save()
else:
frappe.msgprint(req.beneficiary_name + "Already Beneficiary")
@frappe.whitelist()
def add_beneficiary(self):
    """Create a Beneficiary (plus Contact, User and User Permission) from a
    Beneficiary Request.

    ``self`` is the Beneficiary Request document. Nothing happens unless
    ``self.employee == 1`` (presumably "vetted by an employee" — TODO confirm
    with the doctype owner).

    Side effects:
        * inserts a ``Beneficiary`` copied field-by-field from the request,
        * inserts a ``Contact`` linked to that Beneficiary,
        * inserts a Website ``User`` with role "Beneficiary", a random
          password, and a welcome e-mail,
        * inserts a ``User Permission`` restricting the user to the new
          Beneficiary,
        * sets ``self.inserted = True`` (the caller is expected to save).
    """
    if self.employee == 1:
        # --- Beneficiary document -----------------------------------------
        beneficiary = frappe.new_doc('Beneficiary')
        beneficiary.beneficiary_name = self.beneficiary_name
        beneficiary.beneficiary_request = self.name
        beneficiary.beneficiary_account = frappe.db.get_single_value('Beneficiary Settings', 'beneficiary_account')
        beneficiary.marital_status = self.marital_status
        beneficiary.nationality = self.nationality
        beneficiary.territory = self.territory
        beneficiary.address = self.address
        beneficiary.gender = self.gender
        beneficiary.phone = self.phone
        beneficiary.mobile = self.mobile
        beneficiary.email = self.email
        beneficiary.beneficiary_state = self.beneficiary_state
        beneficiary.owner = self.email
        # Random login password; stored on the Beneficiary and applied to the
        # new User account below.
        pwd = random_string(10)
        beneficiary.default_password = pwd
        beneficiary.id_type = self.id_type
        beneficiary.the_number = self.the_number
        beneficiary.date_of_issue = self.date_of_issue
        beneficiary.date_of_expired = self.date_of_expired
        # Child tables: fees, obligations, family-owned assets.
        for f in self.get("fees"):
            beneficiary.append('fees', dict(fee_type=f.fee_type, fee_in_year=f.fee_in_year, fee_in_month=f.fee_in_month))
        beneficiary.fee_total = self.fee_total
        for ob in self.get("obligation"):
            beneficiary.append('beneficiary_obligation', dict(
                beneficiary_obligation=ob.beneficiary_obligation,
                obligation_to=ob.obligation_to,
                amount=ob.amount,
                number_of_pays=ob.number_of_pays,
                way_of_pay=ob.way_of_pay,
                reason_of_obligation=ob.reason_of_obligation,
                attach=ob.attach,
            ))
        beneficiary.obligations_total = self.obligations_total
        beneficiary.home_type = self.home_type
        beneficiary.number_of_rooms = self.number_of_rooms
        beneficiary.home_attach = self.home_type_attachment
        beneficiary.home_state = self.state_of_home
        beneficiary.number_of_family = self.number_of_family
        beneficiary.number_of_wives = self.number_of_wives
        beneficiary.number_of_needed_members_in_family = self.number_of_needed_members_in_family
        beneficiary.the_number_of_professional_workers = self.the_number_of_professional_workers
        beneficiary.the_number_of_household_workers = self.the_number_of_household_workers
        beneficiary.number_of_unemployed_members = self.number_of_unemployed_members
        beneficiary.beneficiary_notes = self.beneficiary_notes
        beneficiary.deserve_according_to_base = self.deserve_according_to_base
        beneficiary.live_base = self.live_base
        beneficiary.rent_base = self.rent_base
        beneficiary.rent_in_year = self.rent_in_year
        beneficiary.rent_in_five_year = self.rent_in_five_year
        for f in self.get("family_own"):
            beneficiary.append('family_own', dict(own=f.own, note=f.note))
        if not frappe.db.exists("Beneficiary", beneficiary.name):
            beneficiary.insert()

        # --- Contact linked to the beneficiary ----------------------------
        contact = frappe.new_doc('Contact')
        contact.first_name = self.beneficiary_name
        contact.email_id = self.email
        contact.phone = self.phone
        contact.mobile_no = self.mobile
        contact.is_primary_contact = 1
        contact.append('links', dict(link_doctype='Beneficiary', link_name=beneficiary.name))
        if self.email:
            contact.append('email_ids', dict(email_id=self.email, is_primary=1))
        if self.phone:
            contact.append('phone_nos', dict(phone=self.phone, is_primary_mobile_no=1))
        contact.flags.ignore_permissions = self.flags.ignore_permissions
        contact.autoname()
        # BUG FIX: the duplicate check previously queried the "Beneficiary"
        # doctype with a Contact name, so an existing Contact was never found.
        if not frappe.db.exists("Contact", contact.name):
            contact.insert()

        # --- Website user + role + password -------------------------------
        # NOTE(review): another variant of this routine also sets
        # "language": "ar" on the new User — confirm whether it belongs here.
        user = frappe.get_doc({
            "doctype": "User",
            "first_name": self.beneficiary_name,
            "email": self.email,
            "user_type": "Website User",
            "send_welcome_email": 1,
            "role_profile_name": "Beneficiary",
        }).insert(ignore_permissions=True)
        frappe.get_doc("User", self.email).add_roles("Beneficiary")
        _update_password(user=self.email, pwd=pwd, logout_all_sessions=0)

        # --- Restrict the new user to this beneficiary --------------------
        frappe.get_doc({
            "doctype": "User Permission",
            "user": user.email,
            "for_value": beneficiary.name,
            "allow": "Beneficiary",
            "is_default": 1,
            "apply_to_all_doctypes": 0,
            "applicable_for": "Beneficiary",
        }).insert()

        # Mark the request as processed; the caller saves the document.
        self.inserted = True
| 41.736842
| 208
| 0.773545
| 2,161
| 15,067
| 5.105969
| 0.127256
| 0.034076
| 0.020663
| 0.013957
| 0.783669
| 0.771252
| 0.761465
| 0.745695
| 0.73636
| 0.732645
| 0
| 0.00554
| 0.12544
| 15,067
| 360
| 209
| 41.852778
| 0.831828
| 0.120462
| 0
| 0.701818
| 0
| 0
| 0.103833
| 0.012216
| 0
| 0
| 0
| 0
| 0
| 1
| 0.029091
| false
| 0.018182
| 0.090909
| 0.003636
| 0.138182
| 0.007273
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
d85a02a8cdbddcf92896a538d73359101bcce28a
| 612
|
py
|
Python
|
dsaii/forms.py
|
khushi0205/DSAII
|
0a9c2b52346d8c71821854d8b84d52a92bf87449
|
[
"MIT"
] | null | null | null |
dsaii/forms.py
|
khushi0205/DSAII
|
0a9c2b52346d8c71821854d8b84d52a92bf87449
|
[
"MIT"
] | 1
|
2022-03-12T01:05:20.000Z
|
2022-03-12T01:05:20.000Z
|
dsaii/forms.py
|
khushi0205/DSAII
|
0a9c2b52346d8c71821854d8b84d52a92bf87449
|
[
"MIT"
] | null | null | null |
from django import forms
from .models import Comments
class CommentForm(forms.ModelForm):
    """ModelForm for submitting a comment (author ``name`` and ``body``)."""

    class Meta:
        model = Comments
        fields = ('name', 'body')
        # Bootstrap styling on both input widgets.
        widgets = {
            'name': forms.TextInput(attrs={'class': 'form-control'}),
            'body': forms.Textarea(attrs={'class': 'form-control'}),
        }
class CF(forms.ModelForm):
    """Comment form.

    NOTE(review): identical to ``CommentForm`` above — looks like a
    duplicate; confirm whether one of the two can be retired.
    """

    class Meta:
        model = Comments
        fields = ('name', 'body')
        # Bootstrap styling on both input widgets.
        widgets = {
            'name': forms.TextInput(attrs={'class': 'form-control'}),
            'body': forms.Textarea(attrs={'class': 'form-control'}),
        }
| 27.818182
| 70
| 0.54902
| 60
| 612
| 5.6
| 0.35
| 0.119048
| 0.166667
| 0.25
| 0.797619
| 0.797619
| 0.797619
| 0.797619
| 0.797619
| 0.797619
| 0
| 0
| 0.292484
| 612
| 22
| 71
| 27.818182
| 0.775982
| 0
| 0
| 0.666667
| 0
| 0
| 0.163132
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.111111
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
d88545224b176504b187f479a2ec07c0c5b512d9
| 430
|
py
|
Python
|
src/audio_utils/mel/__init__.py
|
stefantaubert/audio-utils
|
8a9a51ff7fc773e54037c356bcc6c7eaa9b00312
|
[
"MIT"
] | null | null | null |
src/audio_utils/mel/__init__.py
|
stefantaubert/audio-utils
|
8a9a51ff7fc773e54037c356bcc6c7eaa9b00312
|
[
"MIT"
] | null | null | null |
src/audio_utils/mel/__init__.py
|
stefantaubert/audio-utils
|
8a9a51ff7fc773e54037c356bcc6c7eaa9b00312
|
[
"MIT"
] | null | null | null |
from audio_utils.mel.main import (get_wav_tensor_segment, mel_to_numpy,
wav_to_float32_tensor)
from audio_utils.mel.mel_plot import (concatenate_mels, plot_melspec,
plot_melspec_np)
from audio_utils.mel.stft import STFT
from audio_utils.mel.taco_stft import STFTHParams, TacotronSTFT, TSTFTHParams
from audio_utils.mel.msd import align_mels_with_dtw, get_msd
| 61.428571
| 77
| 0.723256
| 60
| 430
| 4.8
| 0.45
| 0.15625
| 0.243056
| 0.295139
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.006024
| 0.227907
| 430
| 7
| 78
| 61.428571
| 0.861446
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.714286
| 0
| 0.714286
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
d8856adba30073b8c357e336409f2c4153d07693
| 11,589
|
py
|
Python
|
examples_and_tutorial/dD_examples.py
|
timotheehornek/sparsetorch
|
212c4e38dc352af15eea9e72f011c974fd43eb53
|
[
"MIT"
] | null | null | null |
examples_and_tutorial/dD_examples.py
|
timotheehornek/sparsetorch
|
212c4e38dc352af15eea9e72f011c974fd43eb53
|
[
"MIT"
] | null | null | null |
examples_and_tutorial/dD_examples.py
|
timotheehornek/sparsetorch
|
212c4e38dc352af15eea9e72f011c974fd43eb53
|
[
"MIT"
] | null | null | null |
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
import torch
from sparsetorch.dD_basis_functions import Tensorprod, Elemprod, Sparse
from sparsetorch.oneD_basis_functions import Hat, Gauss, Fourier, Chebyshev, Legendre
from sparsetorch.plotter import plot_3D_all
from sparsetorch.utils import get_equidist_coord, get_rand_coord
from sparsetorch.solver import Model, Solver
def f_dD(x):
    """Simple example function defined on interval `[0, 1]`

    Parameters
    ----------
    x : torch.Tensor
        coordinates for evaluation

    Returns
    -------
    torch.Tensor
        function evaluations
    """
    # Separable part: one parabola 4*t*(t - 1) per coordinate row of x.
    out = 4 * x[0] * (x[0] - 1)
    for row in x[1:]:
        out = out * (4 * row * (row - 1))
    # Non-separable coupling through the product of all coordinates.
    return out * torch.exp(2 * torch.prod(x, dim=0))
def g_dD(x):
    """Complicated example function defined on interval `[0, 6]`

    Parameters
    ----------
    x : torch.Tensor
        coordinates for evaluation

    Returns
    -------
    torch.Tensor
        function evaluations
    """
    # Separable part: one scaled parabola t*(t - 6)/9 per coordinate row.
    out = x[0] * (x[0] - 6) / 9
    for row in x[1:]:
        out = out * (row * (row - 6) / 9)
    # Oscillatory, non-separable coupling term.
    return out * torch.exp(torch.sin(torch.prod(x, dim=0)))
def step_dD(x):
    """Another example function defined on interval `[0, 1]`, discontinuous

    Parameters
    ----------
    x : torch.Tensor
        coordinates for evaluation

    Returns
    -------
    torch.Tensor
        function evaluations
    """
    # Each coordinate contributes round(2*t); the result is their product
    # along the coordinate axis (dim 0) — a piecewise-constant step surface.
    return torch.prod(torch.round(2 * x), dim=0)
def example_1():
    """Example with same equidistant basis functions in 2D and tensorprod combination"""
    #############
    # settings: #
    #############
    # basis function settings
    basis = Gauss  # Hat or Gauss
    bf_num = 30  # number of basis functions in one dimension
    BF_dD = Tensorprod  # Tensorprod, Elemprod, or Sparse
    # evaluation coordinates
    eval_num = 100  # number of function evaluations in one dimension
    # renamed from `input` to avoid shadowing the builtin of the same name
    coords = get_equidist_coord(torch.zeros(2), torch.ones(2),
                                torch.ones(2) * eval_num)
    # function evaluations
    target = f_dD(coords)
    #############
    # create 1D basis with equidistant basis functions
    bf_1D = basis.equidist(bf_num)
    bfs_1D = [bf_1D] * 2
    # create dD basis with above declared 1D basis functions
    bf_dD = BF_dD(bfs_1D)
    # create model
    model = Model(bf_dD, bf_dD.bf_num)
    # create solver
    solver = Solver(model, coords, target)
    # solve linear equation / least squares
    solver.le()
    # plot
    plot_3D_all(model, f_dD, "Example 1")
def example_2():
    """Example with different equidistant basis functions in 2D,
    tensorprod combination and different number of basis functions
    in different dimensions"""
    #############
    # settings: #
    #############
    # basis function settings
    basis_x = Hat  # Hat or Gauss
    basis_y = Gauss  # Hat or Gauss
    bf_num_x = 7  # number of basis functions in x direction
    bf_num_y = 3  # number of basis functions in y direction
    BF_dD = Tensorprod  # Tensorprod, Elemprod, or Sparse
    # evaluation coordinates
    eval_num_x = 50  # number of function evaluations in x direction
    eval_num_y = 60  # number of function evaluations in y direction
    # renamed from `input` to avoid shadowing the builtin of the same name
    coords = get_equidist_coord(torch.zeros(2), torch.ones(2),
                                torch.tensor([eval_num_x, eval_num_y]))
    # function evaluations
    target = f_dD(coords)
    #############
    # create 1D basis with equidistant basis functions
    bf_1D_x = basis_x.equidist(bf_num_x)
    bf_1D_y = basis_y.equidist(bf_num_y)
    bfs_1D = [bf_1D_x, bf_1D_y]
    # create dD basis with above declared 1D basis functions
    bf_dD = BF_dD(bfs_1D)
    # create model
    model = Model(bf_dD, bf_dD.bf_num)
    # create solver
    solver = Solver(model, coords, target)
    # solve linear equation / least squares
    solver.le()
    # plot
    plot_3D_all(model, f_dD, "Example 2")
def example_3():
    """Example with custom basis functions and elemprod combination"""
    #############
    # settings: #
    #############
    # basis function settings
    basis_x = Hat  # Hat or Gauss
    basis_y = Gauss  # Hat or Gauss
    bf_num = 50  # number of basis functions
    torch.manual_seed(332)  # reproducible random positions/widths
    # position and width parameters of basis functions
    mu_x = torch.rand(bf_num)
    h_x = torch.rand(bf_num)
    mu_y = torch.rand(bf_num)
    h_y = torch.rand(bf_num)
    BF_dD = Elemprod  # Tensorprod, Elemprod, or Sparse
    # evaluation coordinates
    eval_num = 60  # number of function evaluations in one dimension
    # renamed from `input` to avoid shadowing the builtin of the same name
    coords = get_equidist_coord(torch.zeros(2), torch.ones(2),
                                torch.ones(2) * eval_num)
    # function evaluations
    target = f_dD(coords)
    #############
    # create 1D basis from the custom position/width parameters
    bf_1D_x = basis_x(mu_x, h_x)
    bf_1D_y = basis_y(mu_y, h_y)
    bfs_1D = [bf_1D_x, bf_1D_y]
    # create dD basis with above declared 1D basis functions
    bf_dD = BF_dD(bfs_1D)
    # create model
    model = Model(bf_dD, bf_dD.bf_num)
    # create solver
    solver = Solver(model, coords, target)
    # solve linear equation / least squares
    solver.le()
    # plot
    plot_3D_all(model, f_dD, "Example 3")
def example_4():
    """Example with same hierarchical basis functions in 2D, sparse combination
    and approximated function nonzero on boundary
    """
    #############
    # settings: #
    #############
    # basis function settings
    basis = Hat  # Hat or Gauss
    level = 5  # highest level of basis functions in one dimension
    BF_dD = Sparse  # Tensorprod, Elemprod, or Sparse
    # evaluation coordinates
    eval_num = 100  # number of function evaluations in one dimension
    # renamed from `input` to avoid shadowing the builtin of the same name
    coords = get_equidist_coord(torch.zeros(2), torch.ones(2),
                                torch.ones(2) * eval_num)
    # function evaluations
    target = step_dD(coords)
    #############
    # create 1D basis with hierarchical basis functions
    bf_1D = basis.hierarchical(level, boundary=True)
    bfs_1D = [bf_1D] * 2
    # create dD basis with above declared 1D basis functions
    bf_dD = BF_dD(bfs_1D)
    # create model
    model = Model(bf_dD, bf_dD.bf_num)
    # create solver
    solver = Solver(model, coords, target)
    # solve linear equation / least squares
    solver.le()
    # plot
    plot_3D_all(model, step_dD, "Example 4")
def example_5():
    """Example with hierarchical basis functions in 2D, sparse combination
    and approximated function nonzero on boundary
    """
    #############
    # settings: #
    #############
    # basis function settings
    basis = Hat  # Hat or Gauss
    level_x = 4  # highest level of basis functions in x direction
    level_y = 5  # highest level of basis functions in y direction
    BF_dD = Sparse  # Tensorprod, Elemprod, or Sparse
    # evaluation coordinates
    eval_num = 100  # number of function evaluations in one dimension
    # renamed from `input` to avoid shadowing the builtin of the same name
    coords = get_equidist_coord(torch.zeros(2), torch.ones(2),
                                torch.ones(2) * eval_num)
    # function evaluations
    target = step_dD(coords)
    #############
    # create 1D basis with hierarchical basis functions
    bf_1D_x = basis.hierarchical(level_x, boundary=True)
    bf_1D_y = basis.hierarchical(level_y, boundary=True)
    bfs_1D = [bf_1D_x, bf_1D_y]
    # create dD basis with above declared 1D basis functions
    bf_dD = BF_dD(bfs_1D)
    # create model
    model = Model(bf_dD, bf_dD.bf_num)
    # create solver
    solver = Solver(model, coords, target)
    # solve linear equation / least squares
    solver.le()
    # plot
    plot_3D_all(model, step_dD, "Example 5")
def example_6():
    """Example with orthogonal basis functions in 2D, sparse combination
    and approximated function nonzero on boundary
    """
    #############
    # settings: #
    #############
    # basis function settings
    basis = Chebyshev  # Fourier, Chebyshev, or Legendre
    n_max = 40  # maximum level of basis functions
    BF_dD = Sparse  # Tensorprod, Elemprod, or Sparse
    # evaluation coordinates
    eval_num = 100  # number of function evaluations in one dimension
    # renamed from `input` to avoid shadowing the builtin of the same name
    coords = get_equidist_coord(torch.zeros(2), torch.ones(2),
                                torch.ones(2) * eval_num)
    # function evaluations
    target = step_dD(coords)
    #############
    # create 1D basis with orthogonal basis functions
    bfs_1D = [basis(n_max)] * 2
    # create dD basis with above declared 1D basis functions
    bf_dD = BF_dD(bfs_1D)
    # create model
    model = Model(bf_dD, bf_dD.bf_num)
    # create solver
    solver = Solver(model, coords, target)
    # solve linear equation / least squares
    solver.le()
    # plot
    plot_3D_all(model, step_dD, "Example 6")
def example_7():
    """Example with challenging function, orthogonal basis functions,
    sparse combination and approximated function nonzero on boundary
    """
    #############
    # settings: #
    #############
    # basis function settings
    basis = Fourier  # Fourier, Chebyshev, or Legendre
    n_max = 16  # maximum level of basis functions
    BF_dD = Sparse  # Tensorprod, Elemprod, or Sparse
    # evaluation coordinates
    eval_num = 100  # number of function evaluations in one dimension
    # renamed from `input` to avoid shadowing the builtin of the same name
    coords = get_equidist_coord(torch.zeros(2), 6 * torch.ones(2),
                                torch.ones(2) * eval_num)
    # function evaluations
    target = g_dD(coords)
    #############
    # create 1D basis with orthogonal basis functions on [0, 6]
    bfs_1D = [basis(n_max, a=0.0, b=6.0)] * 2
    # create dD basis with above declared 1D basis functions
    bf_dD = BF_dD(bfs_1D)
    # create model
    model = Model(bf_dD, bf_dD.bf_num)
    # create solver
    solver = Solver(model, coords, target)
    # solve linear equation / least squares with regularization
    solver.le()
    # plot
    plot_3D_all(
        model,
        g_dD,
        "Example 7",
        x_min=0,
        x_max=6,
        y_min=0,
        y_max=6,
        steps=2 * eval_num,
    )
def example_8():
    """Example with challenging function, hierarchical basis functions,
    sparse combination and approximated function nonzero on boundary
    """
    #############
    # settings: #
    #############
    # basis function settings
    basis = Hat  # Hat or Gauss
    level = 8  # highest level of basis functions in one dimension
    BF_dD = Sparse  # Tensorprod, Elemprod, or Sparse
    # evaluation coordinates
    eval_num = 150  # number of function evaluations in one dimension
    # renamed from `input` to avoid shadowing the builtin of the same name
    coords = get_equidist_coord(torch.zeros(2), 6 * torch.ones(2),
                                torch.ones(2) * eval_num)
    # function evaluations
    target = g_dD(coords)
    # create 1D basis with hierarchical basis functions on [0, 6]
    bf_1D = basis.hierarchical(level, boundary=False, a=0, b=6)
    bfs_1D = [bf_1D] * 2
    # create dD basis with above declared 1D basis functions
    bf_dD = BF_dD(bfs_1D)
    # create model
    model = Model(bf_dD, bf_dD.bf_num)
    # create solver
    solver = Solver(model, coords, target)
    # solve linear equation / least squares with regularization
    solver.le()
    # plot
    plot_3D_all(
        model,
        g_dD,
        "Example 8",
        x_min=0,
        x_max=6,
        y_min=0,
        y_max=6,
        steps=2 * eval_num,
    )
# Run the demo suite when executed as a script.
# NOTE(review): example_4() is defined above but never called here —
# confirm whether the omission is intentional.
if __name__ == "__main__":
    example_1()
    example_2()
    example_3()
    example_5()
    example_6()
    example_7()
    example_8()
| 27.791367
| 88
| 0.624471
| 1,530
| 11,589
| 4.560784
| 0.095425
| 0.022929
| 0.020636
| 0.018343
| 0.821152
| 0.791917
| 0.746632
| 0.730582
| 0.702207
| 0.702207
| 0
| 0.023804
| 0.267754
| 11,589
| 417
| 89
| 27.791367
| 0.798492
| 0.416343
| 0
| 0.554945
| 0
| 0
| 0.013074
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.06044
| false
| 0
| 0.043956
| 0
| 0.120879
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
d8ceb9eb1df51b67f5209566b941a43fe90517fb
| 19
|
py
|
Python
|
honeysnap/__init__.py
|
honeynet/honeysnap
|
9b5e9ab6b5557692b78efe788cdaf24404ddf1eb
|
[
"FSFAP"
] | 7
|
2016-06-30T14:19:27.000Z
|
2017-07-12T12:14:53.000Z
|
honeysnap/__init__.py
|
honeynet/honeysnap
|
9b5e9ab6b5557692b78efe788cdaf24404ddf1eb
|
[
"FSFAP"
] | null | null | null |
honeysnap/__init__.py
|
honeynet/honeysnap
|
9b5e9ab6b5557692b78efe788cdaf24404ddf1eb
|
[
"FSFAP"
] | 2
|
2017-02-03T19:46:28.000Z
|
2018-11-21T18:14:09.000Z
|
# $Id$
import main
| 6.333333
| 11
| 0.631579
| 3
| 19
| 4
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.210526
| 19
| 2
| 12
| 9.5
| 0.8
| 0.210526
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
2b321c5300cf221dabcd0a81ef2faf52a069cc00
| 5,935
|
py
|
Python
|
tools/abi.py
|
raiden-network/raiden-wizard
|
faea0f3075b748b6a1e204518e84b0fd1950d5b5
|
[
"MIT"
] | 9
|
2020-05-27T12:19:29.000Z
|
2022-03-20T05:55:36.000Z
|
tools/abi.py
|
raiden-network/raiden-wizard
|
faea0f3075b748b6a1e204518e84b0fd1950d5b5
|
[
"MIT"
] | 178
|
2020-02-26T17:03:22.000Z
|
2021-12-28T14:21:00.000Z
|
tools/abi.py
|
raiden-network/raiden-wizard
|
faea0f3075b748b6a1e204518e84b0fd1950d5b5
|
[
"MIT"
] | 9
|
2020-03-30T13:35:28.000Z
|
2022-03-01T17:24:20.000Z
|
import json
UDC_ABI = json.loads(
'[{"inputs":[{"internalType":"address","name":"_token_address","type":"address"},{"internalType":"uint256","name":"_whole_balance_limit","type":"uint256"}],"stateMutability":"nonpayable","type":"constructor"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"owner","type":"address"},{"indexed":false,"internalType":"uint256","name":"newBalance","type":"uint256"}],"name":"BalanceReduced","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"withdrawer","type":"address"},{"indexed":false,"internalType":"uint256","name":"plannedBalance","type":"uint256"}],"name":"WithdrawPlanned","type":"event"},{"inputs":[{"internalType":"address","name":"","type":"address"}],"name":"balances","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"contract_address","type":"address"}],"name":"contractExists","outputs":[{"internalType":"bool","name":"","type":"bool"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"beneficiary","type":"address"},{"internalType":"uint256","name":"new_total_deposit","type":"uint256"}],"name":"deposit","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"owner","type":"address"}],"name":"effectiveBalance","outputs":[{"internalType":"uint256","name":"remaining_balance","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"_msc_address","type":"address"},{"internalType":"address","name":"_one_to_n_address","type":"address"}],"name":"init","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"msc_address","outputs":[{"internalType":"address","name":"","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"one_to_n_address","outputs":[{"internalType":"address","name":"","type":"address"}],"s
tateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"amount","type":"uint256"}],"name":"planWithdraw","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"token","outputs":[{"internalType":"contract Token","name":"","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"","type":"address"}],"name":"total_deposit","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"sender","type":"address"},{"internalType":"address","name":"receiver","type":"address"},{"internalType":"uint256","name":"amount","type":"uint256"}],"name":"transfer","outputs":[{"internalType":"bool","name":"success","type":"bool"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"whole_balance","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"whole_balance_limit","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"amount","type":"uint256"}],"name":"withdraw","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"withdraw_delay","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"","type":"address"}],"name":"withdraw_plans","outputs":[{"internalType":"uint256","name":"amount","type":"uint256"},{"internalType":"uint256","name":"withdraw_block","type":"uint256"}],"stateMutability":"view","type":"function"}]'
)
ERC20_ABI = json.loads(
'[{"constant":true,"inputs":[],"name":"name","outputs":[{"name":"","type":"string"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"name":"_spender","type":"address"},{"name":"_value","type":"uint256"}],"name":"approve","outputs":[{"name":"","type":"bool"}],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":true,"inputs":[],"name":"totalSupply","outputs":[{"name":"","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"name":"_from","type":"address"},{"name":"_to","type":"address"},{"name":"_value","type":"uint256"}],"name":"transferFrom","outputs":[{"name":"","type":"bool"}],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":true,"inputs":[],"name":"decimals","outputs":[{"name":"","type":"uint8"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[{"name":"_owner","type":"address"}],"name":"balanceOf","outputs":[{"name":"","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[],"name":"symbol","outputs":[{"name":"","type":"string"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"name":"_to","type":"address"},{"name":"_value","type":"uint256"}],"name":"transfer","outputs":[{"name":"","type":"bool"}],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":true,"inputs":[{"name":"_owner","type":"address"},{"name":"_spender","type":"address"}],"name":"allowance","outputs":[{"name":"","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"anonymous":false,"inputs":[{"indexed":true,"name":"_from","type":"address"},{"indexed":true,"name":"_to","type":"address"},{"indexed":false,"name":"_value","type":"uint256"}],"name":"Transfer","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"name":"_owner","type":"address"},{"indexed":true,"name":"
_spender","type":"address"},{"indexed":false,"name":"_value","type":"uint256"}],"name":"Approval","type":"event"}]'
)
| 539.545455
| 3,749
| 0.657119
| 599
| 5,935
| 6.437396
| 0.12187
| 0.079876
| 0.1014
| 0.13667
| 0.785529
| 0.715249
| 0.634855
| 0.524378
| 0.480809
| 0.432313
| 0
| 0.019794
| 0.004044
| 5,935
| 10
| 3,750
| 593.5
| 0.632549
| 0
| 0
| 0
| 0
| 0.285714
| 0.986689
| 0.986521
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.142857
| 0
| 0.142857
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
9929fd9b7d310102378c54592da5654e2a18fd2e
| 695
|
py
|
Python
|
LED-control/software/scripts/setoff.py
|
jeremywrnr/life-of-the-party
|
b29310a1fcc31d5c0e8b93c18ab0fc91bb316613
|
[
"MIT"
] | 1
|
2015-06-29T22:28:58.000Z
|
2015-06-29T22:28:58.000Z
|
LED-control/software/scripts/setoff.py
|
jeremywrnr/life-of-the-party
|
b29310a1fcc31d5c0e8b93c18ab0fc91bb316613
|
[
"MIT"
] | null | null | null |
LED-control/software/scripts/setoff.py
|
jeremywrnr/life-of-the-party
|
b29310a1fcc31d5c0e8b93c18ab0fc91bb316613
|
[
"MIT"
] | null | null | null |
import liblo
import time
# One OSC endpoint per LED node: hosts 192.168.1.3 .. 192.168.1.17, all on
# port 2222.  (Was a 15-element hand-written list of liblo.Address calls.)
addresses = [liblo.Address("192.168.1." + str(host), "2222")
             for host in range(3, 18)]
# All-zero RGB turns every LED off.
r = 0
g = 0
b = 0
for address in addresses:
    # OSC path '22' takes three float arguments: red, green, blue.
    liblo.send(address, '22', ('f', r), ('f', g), ('f', b))
| 57.916667
| 562
| 0.646043
| 130
| 695
| 3.453846
| 0.253846
| 0.400891
| 0.501114
| 0.601336
| 0.759465
| 0.717149
| 0
| 0
| 0
| 0
| 0
| 0.292868
| 0.051799
| 695
| 11
| 563
| 63.181818
| 0.388467
| 0
| 0
| 0
| 0
| 0
| 0.347953
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.25
| 0
| 0.25
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
9948fce685f67f595cef3288dc2a0cd52489aa15
| 92
|
py
|
Python
|
getresponse/__init__.py
|
OpenAT/getresponse-python
|
8ab41bdbc794e8699ab4fb16af5cf73c6d9bafe3
|
[
"MIT"
] | 3
|
2019-08-21T19:51:49.000Z
|
2020-09-20T19:15:10.000Z
|
getresponse/__init__.py
|
OpenAT/getresponse-python
|
8ab41bdbc794e8699ab4fb16af5cf73c6d9bafe3
|
[
"MIT"
] | 4
|
2019-08-24T13:38:07.000Z
|
2021-02-05T11:30:54.000Z
|
getresponse/__init__.py
|
OpenAT/getresponse-python
|
8ab41bdbc794e8699ab4fb16af5cf73c6d9bafe3
|
[
"MIT"
] | 8
|
2018-06-23T15:00:32.000Z
|
2021-09-09T18:32:31.000Z
|
from getresponse.client import GetResponse
from getresponse.excs import UniquePropertyError
| 30.666667
| 48
| 0.891304
| 10
| 92
| 8.2
| 0.6
| 0.365854
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.086957
| 92
| 2
| 49
| 46
| 0.97619
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
999adbde87c4daf80434c3ab020651243a7c7c09
| 165
|
py
|
Python
|
Chapter09/filepickle2.py
|
kaushalkumarshah/Learn-Python-in-7-Days
|
2663656767c8959ace836f0c0e272f3e501bbe6e
|
[
"MIT"
] | 12
|
2018-07-09T16:20:31.000Z
|
2022-03-21T22:52:15.000Z
|
Chapter09/filepickle2.py
|
kaushalkumarshah/Learn-Python-in-7-Days
|
2663656767c8959ace836f0c0e272f3e501bbe6e
|
[
"MIT"
] | null | null | null |
Chapter09/filepickle2.py
|
kaushalkumarshah/Learn-Python-in-7-Days
|
2663656767c8959ace836f0c0e272f3e501bbe6e
|
[
"MIT"
] | 19
|
2018-01-09T12:49:06.000Z
|
2021-11-23T08:05:55.000Z
|
import pickle

# BUG FIX: pickled data must be read in binary mode ("rb"); text mode
# corrupts protocol >= 1 streams and fails outright on Python 3.
# The file holds two consecutive pickles: a name list then a skill list.
with open("emp1.dat", "rb") as pickle_file:
    name_list = pickle.load(pickle_file)
    skill_list = pickle.load(pickle_file)

# Modernized from the Python 2 print statement; output is identical
# (space-separated values around the newline).
print(name_list, "\n", skill_list)
| 33
| 38
| 0.733333
| 26
| 165
| 4.384615
| 0.5
| 0.263158
| 0.245614
| 0.350877
| 0.421053
| 0
| 0
| 0
| 0
| 0
| 0
| 0.006993
| 0.133333
| 165
| 5
| 39
| 33
| 0.79021
| 0
| 0
| 0
| 0
| 0
| 0.067901
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.2
| null | null | 0.2
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
513e2d7a28a8a1e899e16928c076d91f928b34c3
| 48
|
py
|
Python
|
snow-dots/utilities/time.py
|
cpizzica/Lab-Matlab-Control
|
252a0a3e7ab9c4e60223144806dcfc7d1119f95a
|
[
"Apache-2.0"
] | 6
|
2017-06-06T15:06:36.000Z
|
2018-12-05T21:09:33.000Z
|
snow-dots/utilities/time.py
|
cpizzica/Lab-Matlab-Control
|
252a0a3e7ab9c4e60223144806dcfc7d1119f95a
|
[
"Apache-2.0"
] | 4
|
2017-07-05T15:45:55.000Z
|
2019-04-23T20:37:32.000Z
|
snow-dots/utilities/time.py
|
cpizzica/Lab-Matlab-Control
|
252a0a3e7ab9c4e60223144806dcfc7d1119f95a
|
[
"Apache-2.0"
] | 3
|
2017-06-16T05:54:44.000Z
|
2018-08-14T01:05:14.000Z
|
#! python3
import sys, time

# Emit the current Unix timestamp (float seconds since the epoch) to stdout.
# NOTE(review): `sys` is imported but unused here — possibly a leftover.
print(time.time())
| 16
| 20
| 0.6875
| 7
| 48
| 4.714286
| 0.714286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.02439
| 0.145833
| 48
| 3
| 20
| 16
| 0.780488
| 0.1875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0.5
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 6
|
514c7a2a80b327383938bbf569f00a4063348ab9
| 5,418
|
py
|
Python
|
pocean/tests/dsg/trajectory/test_trajectory_cr.py
|
axiom-data-science/pocean-core
|
11ad6b8fc43a4c29fa8aa404bf52cb7d39a9c8b1
|
[
"MIT"
] | 13
|
2017-03-26T03:17:33.000Z
|
2021-05-14T12:20:28.000Z
|
pocean/tests/dsg/trajectory/test_trajectory_cr.py
|
axiom-data-science/pocean-core
|
11ad6b8fc43a4c29fa8aa404bf52cb7d39a9c8b1
|
[
"MIT"
] | 43
|
2017-02-21T14:45:33.000Z
|
2022-03-09T18:04:10.000Z
|
pocean/tests/dsg/trajectory/test_trajectory_cr.py
|
axiom-data-science/pocean-core
|
11ad6b8fc43a4c29fa8aa404bf52cb7d39a9c8b1
|
[
"MIT"
] | 10
|
2017-03-03T18:35:00.000Z
|
2021-03-28T22:37:41.000Z
|
#!python
# coding=utf-8
import os
import unittest
import tempfile
from os.path import join as jn
from os.path import dirname as dn
import pytest
from pocean.dsg import ContiguousRaggedTrajectory, get_calculated_attributes
from pocean.tests.dsg.test_new import test_is_mine
import logging
from pocean import logger
logger.level = logging.INFO
logger.handlers = [logging.StreamHandler()]
@pytest.mark.parametrize("fp", [
    # cr-single.nc is left commented out, i.e. excluded from this run.
    #jn(dn(__file__), 'resources', 'cr-single.nc'),
    jn(dn(__file__), 'resources', 'cr-multiple.nc'),
    jn(dn(__file__), 'resources', 'cr-oot-A.nc'),
    jn(dn(__file__), 'resources', 'cr-oot-B.nc'),
])
def test_crt_load(fp):
    # Smoke test: each fixture file should be identified as a
    # ContiguousRaggedTrajectory by the shared loader check.
    test_is_mine(ContiguousRaggedTrajectory, fp)
class TestContiguousRaggedTrajectory(unittest.TestCase):
    """Round-trip tests for ContiguousRaggedTrajectory.

    Each test loads a netCDF fixture, converts it to a DataFrame, writes it
    back out to a temporary netCDF file, and checks the dimensions of the
    result.
    """

    def setUp(self):
        # netCDF fixtures that live next to this test module.
        self.multi = jn(dn(__file__), 'resources', 'cr-multiple.nc')
        self.oot_A = jn(dn(__file__), 'resources', 'cr-oot-A.nc')
        self.oot_B = jn(dn(__file__), 'resources', 'cr-oot-B.nc')

    def test_crt_dataframe_multiple(self):
        axes = {
            't': 'time',
            'x': 'lon',
            'y': 'lat',
            'z': 'z',
        }
        fid, tmpnc = tempfile.mkstemp(suffix='.nc')
        # try/finally so the temp file is removed even when an assertion
        # fails (the original leaked the file descriptor and file on failure).
        try:
            with ContiguousRaggedTrajectory(self.multi) as ncd:
                df = ncd.to_dataframe(axes=axes)
                with ContiguousRaggedTrajectory.from_dataframe(df, tmpnc, axes=axes) as result_ncd:
                    assert 'trajectory' in result_ncd.dimensions
                test_is_mine(ContiguousRaggedTrajectory, tmpnc)  # Try to load it again
        finally:
            os.close(fid)
            os.remove(tmpnc)

    def test_crt_dataframe_multiple_unique_dims(self):
        axes = {
            't': 'time',
            'x': 'lon',
            'y': 'lat',
            'z': 'z',
        }
        fid, tmpnc = tempfile.mkstemp(suffix='.nc')
        try:
            with ContiguousRaggedTrajectory(self.multi) as ncd:
                df = ncd.to_dataframe(axes=axes)
                # unique_dims=True suffixes dimension names with "_dim".
                with ContiguousRaggedTrajectory.from_dataframe(df, tmpnc, axes=axes, unique_dims=True) as result_ncd:
                    assert 'trajectory_dim' in result_ncd.dimensions
                test_is_mine(ContiguousRaggedTrajectory, tmpnc)  # Try to load it again
        finally:
            os.close(fid)
            os.remove(tmpnc)

    def test_crt_dataframe_unlimited_dim(self):
        axes = {
            't': 'time',
            'x': 'lon',
            'y': 'lat',
            'z': 'z',
        }
        fid, tmpnc = tempfile.mkstemp(suffix='.nc')
        try:
            with ContiguousRaggedTrajectory(self.multi) as ncd:
                df = ncd.to_dataframe(axes=axes)
                # unlimited=True should make the obs dimension unlimited.
                with ContiguousRaggedTrajectory.from_dataframe(df, tmpnc, axes=axes, unlimited=True, unique_dims=True) as result_ncd:
                    assert 'trajectory_dim' in result_ncd.dimensions
                    assert 'obs_dim' in result_ncd.dimensions
                    assert result_ncd.dimensions['obs_dim'].isunlimited() is True
                test_is_mine(ContiguousRaggedTrajectory, tmpnc)  # Try to load it again
        finally:
            os.close(fid)
            os.remove(tmpnc)

    def test_crt_dataframe_oot_A(self):
        axes = {
            't': 'time',
            'x': 'lon',
            'y': 'lat',
            'z': 'depth',
            'sample': 'sample'
        }
        fid, tmpnc = tempfile.mkstemp(suffix='.nc')
        try:
            with ContiguousRaggedTrajectory(self.oot_A) as ncd:
                df = ncd.to_dataframe(axes=axes)
                df = df.sort_values(['trajectory', 'time'])
                attrs = get_calculated_attributes(df, axes=axes)
                with ContiguousRaggedTrajectory.from_dataframe(df, tmpnc, axes=axes, mode='a') as result_ncd:
                    assert 'sample' in result_ncd.dimensions
                    assert result_ncd.dimensions['sample'].size == 6610
                    assert 'trajectory' in result_ncd.dimensions
                    # This is removing null trajectories that have no data. Not much to do about this
                    # because there is no way to store this empty trajectory in a dataframe.
                    assert result_ncd.dimensions['trajectory'].size == 507
                    result_ncd.apply_meta(attrs)
                test_is_mine(ContiguousRaggedTrajectory, tmpnc)  # Try to load it again
        finally:
            os.close(fid)
            os.remove(tmpnc)

    def test_crt_dataframe_oot_B(self):
        axes = {
            't': 'time',
            'x': 'lon',
            'y': 'lat',
            'z': 'depth',
        }
        fid, tmpnc = tempfile.mkstemp(suffix='.nc')
        try:
            with ContiguousRaggedTrajectory(self.oot_B) as ncd:
                df = ncd.to_dataframe(axes=axes)
                df = df.sort_values(['trajectory', 'time'])
                attrs = get_calculated_attributes(df, axes=axes)
                with ContiguousRaggedTrajectory.from_dataframe(df, tmpnc, axes=axes, mode='a') as result_ncd:
                    assert 'obs' in result_ncd.dimensions
                    assert result_ncd.dimensions['obs'].size == 64116
                    assert 'trajectory' in result_ncd.dimensions
                    # This is removing null trajectories that have no data. Not much to do about this
                    # because there is no way to store this empty trajectory in a dataframe.
                    assert result_ncd.dimensions['trajectory'].size == 1000
                    result_ncd.apply_meta(attrs)
                test_is_mine(ContiguousRaggedTrajectory, tmpnc)  # Try to load it again
        finally:
            os.close(fid)
            os.remove(tmpnc)
| 38.7
| 129
| 0.601883
| 641
| 5,418
| 4.907956
| 0.184087
| 0.057216
| 0.078512
| 0.053401
| 0.812778
| 0.795296
| 0.786713
| 0.768277
| 0.737762
| 0.706612
| 0
| 0.004405
| 0.287745
| 5,418
| 139
| 130
| 38.978417
| 0.810832
| 0.087302
| 0
| 0.578947
| 0
| 0
| 0.076627
| 0
| 0
| 0
| 0
| 0
| 0.114035
| 1
| 0.061404
| false
| 0
| 0.087719
| 0
| 0.157895
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
514fe3c2fb78759b163edd5ac9aa605a08255b61
| 105
|
py
|
Python
|
src/rctgen/__init__.py
|
mballance/pyrctgen
|
eb47ed2039d36ab236b63e795b313feb499820bd
|
[
"Apache-2.0"
] | 1
|
2022-03-10T04:12:11.000Z
|
2022-03-10T04:12:11.000Z
|
src/rctgen/__init__.py
|
mballance/pyrctgen
|
eb47ed2039d36ab236b63e795b313feb499820bd
|
[
"Apache-2.0"
] | null | null | null |
src/rctgen/__init__.py
|
mballance/pyrctgen
|
eb47ed2039d36ab236b63e795b313feb499820bd
|
[
"Apache-2.0"
] | null | null | null |
from .activity_stmts import *
from .decorators import *
from .claims_refs import *
from .types import *
| 17.5
| 29
| 0.761905
| 14
| 105
| 5.571429
| 0.571429
| 0.384615
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.161905
| 105
| 6
| 30
| 17.5
| 0.886364
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
5ad9cdae1404490e2916a379acd371a0bfc35a37
| 71
|
py
|
Python
|
amocrm_api_client/token_provider/impl/standard/__init__.py
|
iqtek/amocrm_api_client
|
910ea42482698f5eb47d6b6e12d52ec09af77a3e
|
[
"MIT"
] | null | null | null |
amocrm_api_client/token_provider/impl/standard/__init__.py
|
iqtek/amocrm_api_client
|
910ea42482698f5eb47d6b6e12d52ec09af77a3e
|
[
"MIT"
] | null | null | null |
amocrm_api_client/token_provider/impl/standard/__init__.py
|
iqtek/amocrm_api_client
|
910ea42482698f5eb47d6b6e12d52ec09af77a3e
|
[
"MIT"
] | null | null | null |
from .StandardTokenProviderFactory import StandardTokenProviderFactory
| 35.5
| 70
| 0.929577
| 4
| 71
| 16.5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.056338
| 71
| 1
| 71
| 71
| 0.985075
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
519031449c1bac7a36ab804688d80a6765808e84
| 27
|
py
|
Python
|
buildcage/__init__.py
|
Zhang-Dante/buildcage
|
701a27caaa8c9dd43754abc0ff6fbc13fbd54012
|
[
"MIT"
] | null | null | null |
buildcage/__init__.py
|
Zhang-Dante/buildcage
|
701a27caaa8c9dd43754abc0ff6fbc13fbd54012
|
[
"MIT"
] | null | null | null |
buildcage/__init__.py
|
Zhang-Dante/buildcage
|
701a27caaa8c9dd43754abc0ff6fbc13fbd54012
|
[
"MIT"
] | null | null | null |
from buildcage import src
| 9
| 25
| 0.814815
| 4
| 27
| 5.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.185185
| 27
| 2
| 26
| 13.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
51e49ba38ee0da16fb739152e318c1b2e765c1c3
| 1,711
|
py
|
Python
|
tests/test_account_key.py
|
nichandy/flow-py-sdk
|
716c1690f38eeb78f479d1cf860b974cc6a53b04
|
[
"MIT"
] | 21
|
2020-11-25T16:30:53.000Z
|
2022-03-08T06:24:02.000Z
|
tests/test_account_key.py
|
nichandy/flow-py-sdk
|
716c1690f38eeb78f479d1cf860b974cc6a53b04
|
[
"MIT"
] | 29
|
2021-03-06T19:04:33.000Z
|
2022-03-18T15:16:44.000Z
|
tests/test_account_key.py
|
nichandy/flow-py-sdk
|
716c1690f38eeb78f479d1cf860b974cc6a53b04
|
[
"MIT"
] | 15
|
2021-03-06T18:36:40.000Z
|
2022-02-09T15:14:01.000Z
|
from unittest import TestCase
from flow_py_sdk import AccountKey, SignAlgo, HashAlgo
from flow_py_sdk.proto.flow.entities import AccountKey as ProtoAccountKey
class TestAccountKey(TestCase):
    """Tests for AccountKey serialization (RLP / hex) and proto deserialization."""

    # 64-byte uncompressed ECDSA secp256k1 public key shared by the
    # serialization tests (was duplicated inline in each test).
    _PUBLIC_KEY_HEX = "c51c02aa382d8d382a121178de8ac97eb6a562a1008660669ab6a220c96fce76e1d392b0c156380ae713b0aa18ad9cff7b85bcc44a9eb43fcddb467f456f0ec8"

    # Expected RLP encoding of the key built by _make_key().
    _EXPECTED_RLP_HEX = "f847b840c51c02aa382d8d382a121178de8ac97eb6a562a1008660669ab6a220c96fce76e1d392b0c156380ae713b0aa18ad9cff7b85bcc44a9eb43fcddb467f456f0ec803038203e8"

    def _make_key(self):
        """Build the fixed AccountKey fixture used by the serialization tests."""
        return AccountKey(
            public_key=bytes.fromhex(self._PUBLIC_KEY_HEX),
            sign_algo=SignAlgo.ECDSA_secp256k1,
            hash_algo=HashAlgo.SHA3_256,
            weight=AccountKey.weight_threshold,
        )

    def test_rlp(self):
        # key.rlp() returns the RLP byte encoding of the key.
        rlp = self._make_key().rlp()
        self.assertEqual(self._EXPECTED_RLP_HEX, rlp.hex())

    def test_hex(self):
        # key.hex() is the hex string form of the RLP encoding.
        rlp_hex = self._make_key().hex()
        self.assertEqual(self._EXPECTED_RLP_HEX, rlp_hex)

    def test_from_proto(self):
        # Only checks that from_proto() accepts a protobuf AccountKey without
        # raising; no return value is asserted.
        proto_account_key = ProtoAccountKey()
        proto_account_key.sign_algo = 2
        proto_account_key.hash_algo = 1
        AccountKey.from_proto(proto_account_key)
| 38.886364
| 175
| 0.739334
| 132
| 1,711
| 9.272727
| 0.310606
| 0.034314
| 0.045752
| 0.021242
| 0.743464
| 0.743464
| 0.743464
| 0.743464
| 0.743464
| 0.674837
| 0
| 0.259779
| 0.208065
| 1,711
| 43
| 176
| 39.790698
| 0.643542
| 0
| 0
| 0.484848
| 0
| 0
| 0.320281
| 0.320281
| 0
| 0
| 0
| 0
| 0.060606
| 1
| 0.090909
| false
| 0
| 0.090909
| 0
| 0.212121
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
cffa7838089942e1d4acbf22f359cff576dcbfe7
| 46
|
py
|
Python
|
grammar-pyml/__init__.py
|
lschaack/grammar-pyml
|
04fef15fa41bb6aca9fb062ea28b3b4105b38b1b
|
[
"MIT"
] | null | null | null |
grammar-pyml/__init__.py
|
lschaack/grammar-pyml
|
04fef15fa41bb6aca9fb062ea28b3b4105b38b1b
|
[
"MIT"
] | null | null | null |
grammar-pyml/__init__.py
|
lschaack/grammar-pyml
|
04fef15fa41bb6aca9fb062ea28b3b4105b38b1b
|
[
"MIT"
] | 1
|
2019-04-30T17:24:59.000Z
|
2019-04-30T17:24:59.000Z
|
from __future__ import division
import reader
| 15.333333
| 31
| 0.869565
| 6
| 46
| 6
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.130435
| 46
| 3
| 32
| 15.333333
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
5c84d431a9b5c62bbbbf76ef162b2113b2095179
| 159
|
py
|
Python
|
dev/local/optimizers/__init__.py
|
KeremTurgutlu/fast-kaggle
|
0ea341b44a58da2dfb606a0ae32bac166985b49e
|
[
"Apache-2.0"
] | 8
|
2019-10-02T05:52:10.000Z
|
2021-01-15T13:51:06.000Z
|
dev/local/optimizers/__init__.py
|
KeremTurgutlu/fast-kaggle
|
0ea341b44a58da2dfb606a0ae32bac166985b49e
|
[
"Apache-2.0"
] | 4
|
2019-10-02T06:13:13.000Z
|
2019-10-28T18:21:10.000Z
|
dev/local/optimizers/__init__.py
|
KeremTurgutlu/fast-kaggle
|
0ea341b44a58da2dfb606a0ae32bac166985b49e
|
[
"Apache-2.0"
] | 2
|
2019-12-07T16:59:01.000Z
|
2021-08-30T01:00:06.000Z
|
from .radam import *
from .novograd import *
from .ranger import *
from .ralamb import *
from .rangerlars import *
from .lookahead import *
from .lamb import *
| 22.714286
| 25
| 0.742138
| 21
| 159
| 5.619048
| 0.428571
| 0.508475
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.169811
| 159
| 7
| 26
| 22.714286
| 0.893939
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
5c925107f4121cae129176747bd8551beccc96fc
| 88,679
|
py
|
Python
|
cottonformation/res/imagebuilder.py
|
gitter-badger/cottonformation-project
|
354f1dce7ea106e209af2d5d818b6033a27c193c
|
[
"BSD-2-Clause"
] | null | null | null |
cottonformation/res/imagebuilder.py
|
gitter-badger/cottonformation-project
|
354f1dce7ea106e209af2d5d818b6033a27c193c
|
[
"BSD-2-Clause"
] | null | null | null |
cottonformation/res/imagebuilder.py
|
gitter-badger/cottonformation-project
|
354f1dce7ea106e209af2d5d818b6033a27c193c
|
[
"BSD-2-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
This module
"""
import attr
import typing
from ..core.model import (
Property, Resource, Tag, GetAtt, TypeHint, TypeCheck,
)
from ..core.constant import AttrMeta
#--- Property declaration ---
# NOTE(review): appears machine-generated from the CloudFormation resource
# spec; attr.ib declaration order defines the attrs-generated __init__
# signature, so do not reorder fields.
@attr.s
class ImagePipelineImageTestsConfiguration(Property):
    """
    AWS Object Type = "AWS::ImageBuilder::ImagePipeline.ImageTestsConfiguration"
    Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-imagepipeline-imagetestsconfiguration.html
    Property Document:
    - ``p_ImageTestsEnabled``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-imagepipeline-imagetestsconfiguration.html#cfn-imagebuilder-imagepipeline-imagetestsconfiguration-imagetestsenabled
    - ``p_TimeoutMinutes``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-imagepipeline-imagetestsconfiguration.html#cfn-imagebuilder-imagepipeline-imagetestsconfiguration-timeoutminutes
    """
    AWS_OBJECT_TYPE = "AWS::ImageBuilder::ImagePipeline.ImageTestsConfiguration"

    # Optional bool; validator accepts bool or the None default.
    p_ImageTestsEnabled: bool = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(bool)),
        metadata={AttrMeta.PROPERTY_NAME: "ImageTestsEnabled"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-imagepipeline-imagetestsconfiguration.html#cfn-imagebuilder-imagepipeline-imagetestsconfiguration-imagetestsenabled"""

    # Optional int; validator accepts int or the None default.
    p_TimeoutMinutes: int = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(int)),
        metadata={AttrMeta.PROPERTY_NAME: "TimeoutMinutes"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-imagepipeline-imagetestsconfiguration.html#cfn-imagebuilder-imagepipeline-imagetestsconfiguration-timeoutminutes"""
# NOTE(review): appears machine-generated from the CloudFormation resource
# spec; attr.ib declaration order defines the attrs-generated __init__
# signature, so do not reorder fields.
@attr.s
class ContainerRecipeComponentConfiguration(Property):
    """
    AWS Object Type = "AWS::ImageBuilder::ContainerRecipe.ComponentConfiguration"
    Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-containerrecipe-componentconfiguration.html
    Property Document:
    - ``p_ComponentArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-containerrecipe-componentconfiguration.html#cfn-imagebuilder-containerrecipe-componentconfiguration-componentarn
    """
    AWS_OBJECT_TYPE = "AWS::ImageBuilder::ContainerRecipe.ComponentConfiguration"

    # Optional string-like value (TypeCheck.intrinsic_str_type) or None.
    p_ComponentArn: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "ComponentArn"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-containerrecipe-componentconfiguration.html#cfn-imagebuilder-containerrecipe-componentconfiguration-componentarn"""
# NOTE(review): appears machine-generated from the CloudFormation resource
# spec; attr.ib declaration order defines the attrs-generated __init__
# signature, so do not reorder fields.
@attr.s
class ImageRecipeComponentConfiguration(Property):
    """
    AWS Object Type = "AWS::ImageBuilder::ImageRecipe.ComponentConfiguration"
    Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-imagerecipe-componentconfiguration.html
    Property Document:
    - ``p_ComponentArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-imagerecipe-componentconfiguration.html#cfn-imagebuilder-imagerecipe-componentconfiguration-componentarn
    """
    AWS_OBJECT_TYPE = "AWS::ImageBuilder::ImageRecipe.ComponentConfiguration"

    # Optional string-like value (TypeCheck.intrinsic_str_type) or None.
    p_ComponentArn: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "ComponentArn"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-imagerecipe-componentconfiguration.html#cfn-imagebuilder-imagerecipe-componentconfiguration-componentarn"""
# NOTE(review): appears machine-generated from the CloudFormation resource
# spec; attr.ib declaration order defines the attrs-generated __init__
# signature, so do not reorder fields.
@attr.s
class ContainerRecipeEbsInstanceBlockDeviceSpecification(Property):
    """
    AWS Object Type = "AWS::ImageBuilder::ContainerRecipe.EbsInstanceBlockDeviceSpecification"
    Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-containerrecipe-ebsinstanceblockdevicespecification.html
    Property Document:
    - ``p_DeleteOnTermination``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-containerrecipe-ebsinstanceblockdevicespecification.html#cfn-imagebuilder-containerrecipe-ebsinstanceblockdevicespecification-deleteontermination
    - ``p_Encrypted``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-containerrecipe-ebsinstanceblockdevicespecification.html#cfn-imagebuilder-containerrecipe-ebsinstanceblockdevicespecification-encrypted
    - ``p_Iops``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-containerrecipe-ebsinstanceblockdevicespecification.html#cfn-imagebuilder-containerrecipe-ebsinstanceblockdevicespecification-iops
    - ``p_KmsKeyId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-containerrecipe-ebsinstanceblockdevicespecification.html#cfn-imagebuilder-containerrecipe-ebsinstanceblockdevicespecification-kmskeyid
    - ``p_SnapshotId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-containerrecipe-ebsinstanceblockdevicespecification.html#cfn-imagebuilder-containerrecipe-ebsinstanceblockdevicespecification-snapshotid
    - ``p_VolumeSize``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-containerrecipe-ebsinstanceblockdevicespecification.html#cfn-imagebuilder-containerrecipe-ebsinstanceblockdevicespecification-volumesize
    - ``p_VolumeType``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-containerrecipe-ebsinstanceblockdevicespecification.html#cfn-imagebuilder-containerrecipe-ebsinstanceblockdevicespecification-volumetype
    """
    AWS_OBJECT_TYPE = "AWS::ImageBuilder::ContainerRecipe.EbsInstanceBlockDeviceSpecification"

    # All fields below are optional (default=None) and validated by type only.
    p_DeleteOnTermination: bool = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(bool)),
        metadata={AttrMeta.PROPERTY_NAME: "DeleteOnTermination"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-containerrecipe-ebsinstanceblockdevicespecification.html#cfn-imagebuilder-containerrecipe-ebsinstanceblockdevicespecification-deleteontermination"""

    p_Encrypted: bool = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(bool)),
        metadata={AttrMeta.PROPERTY_NAME: "Encrypted"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-containerrecipe-ebsinstanceblockdevicespecification.html#cfn-imagebuilder-containerrecipe-ebsinstanceblockdevicespecification-encrypted"""

    p_Iops: int = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(int)),
        metadata={AttrMeta.PROPERTY_NAME: "Iops"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-containerrecipe-ebsinstanceblockdevicespecification.html#cfn-imagebuilder-containerrecipe-ebsinstanceblockdevicespecification-iops"""

    p_KmsKeyId: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "KmsKeyId"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-containerrecipe-ebsinstanceblockdevicespecification.html#cfn-imagebuilder-containerrecipe-ebsinstanceblockdevicespecification-kmskeyid"""

    p_SnapshotId: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "SnapshotId"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-containerrecipe-ebsinstanceblockdevicespecification.html#cfn-imagebuilder-containerrecipe-ebsinstanceblockdevicespecification-snapshotid"""

    p_VolumeSize: int = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(int)),
        metadata={AttrMeta.PROPERTY_NAME: "VolumeSize"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-containerrecipe-ebsinstanceblockdevicespecification.html#cfn-imagebuilder-containerrecipe-ebsinstanceblockdevicespecification-volumesize"""

    p_VolumeType: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "VolumeType"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-containerrecipe-ebsinstanceblockdevicespecification.html#cfn-imagebuilder-containerrecipe-ebsinstanceblockdevicespecification-volumetype"""
# NOTE(review): appears machine-generated from the CloudFormation resource
# spec; attr.ib declaration order defines the attrs-generated __init__
# signature, so do not reorder fields.
@attr.s
class ImagePipelineSchedule(Property):
    """
    AWS Object Type = "AWS::ImageBuilder::ImagePipeline.Schedule"
    Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-imagepipeline-schedule.html
    Property Document:
    - ``p_PipelineExecutionStartCondition``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-imagepipeline-schedule.html#cfn-imagebuilder-imagepipeline-schedule-pipelineexecutionstartcondition
    - ``p_ScheduleExpression``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-imagepipeline-schedule.html#cfn-imagebuilder-imagepipeline-schedule-scheduleexpression
    """
    AWS_OBJECT_TYPE = "AWS::ImageBuilder::ImagePipeline.Schedule"

    # Optional string-like value (TypeCheck.intrinsic_str_type) or None.
    p_PipelineExecutionStartCondition: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "PipelineExecutionStartCondition"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-imagepipeline-schedule.html#cfn-imagebuilder-imagepipeline-schedule-pipelineexecutionstartcondition"""

    p_ScheduleExpression: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "ScheduleExpression"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-imagepipeline-schedule.html#cfn-imagebuilder-imagepipeline-schedule-scheduleexpression"""
# NOTE(review): appears machine-generated from the CloudFormation resource
# spec; attr.ib declaration order defines the attrs-generated __init__
# signature, so do not reorder fields.
@attr.s
class ImageImageTestsConfiguration(Property):
    """
    AWS Object Type = "AWS::ImageBuilder::Image.ImageTestsConfiguration"
    Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-image-imagetestsconfiguration.html
    Property Document:
    - ``p_ImageTestsEnabled``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-image-imagetestsconfiguration.html#cfn-imagebuilder-image-imagetestsconfiguration-imagetestsenabled
    - ``p_TimeoutMinutes``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-image-imagetestsconfiguration.html#cfn-imagebuilder-image-imagetestsconfiguration-timeoutminutes
    """
    AWS_OBJECT_TYPE = "AWS::ImageBuilder::Image.ImageTestsConfiguration"

    # Optional bool; validator accepts bool or the None default.
    p_ImageTestsEnabled: bool = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(bool)),
        metadata={AttrMeta.PROPERTY_NAME: "ImageTestsEnabled"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-image-imagetestsconfiguration.html#cfn-imagebuilder-image-imagetestsconfiguration-imagetestsenabled"""

    # Optional int; validator accepts int or the None default.
    p_TimeoutMinutes: int = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(int)),
        metadata={AttrMeta.PROPERTY_NAME: "TimeoutMinutes"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-image-imagetestsconfiguration.html#cfn-imagebuilder-image-imagetestsconfiguration-timeoutminutes"""
# NOTE(review): appears machine-generated from the CloudFormation resource
# spec; attr.ib declaration order defines the attrs-generated __init__
# signature, so do not reorder fields.
@attr.s
class InfrastructureConfigurationS3Logs(Property):
    """
    AWS Object Type = "AWS::ImageBuilder::InfrastructureConfiguration.S3Logs"
    Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-infrastructureconfiguration-s3logs.html
    Property Document:
    - ``p_S3BucketName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-infrastructureconfiguration-s3logs.html#cfn-imagebuilder-infrastructureconfiguration-s3logs-s3bucketname
    - ``p_S3KeyPrefix``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-infrastructureconfiguration-s3logs.html#cfn-imagebuilder-infrastructureconfiguration-s3logs-s3keyprefix
    """
    AWS_OBJECT_TYPE = "AWS::ImageBuilder::InfrastructureConfiguration.S3Logs"

    # Optional string-like value (TypeCheck.intrinsic_str_type) or None.
    p_S3BucketName: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "S3BucketName"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-infrastructureconfiguration-s3logs.html#cfn-imagebuilder-infrastructureconfiguration-s3logs-s3bucketname"""

    p_S3KeyPrefix: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "S3KeyPrefix"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-infrastructureconfiguration-s3logs.html#cfn-imagebuilder-infrastructureconfiguration-s3logs-s3keyprefix"""
# NOTE(review): appears machine-generated from the CloudFormation resource
# spec; attr.ib declaration order defines the attrs-generated __init__
# signature, so do not reorder fields.
@attr.s
class ContainerRecipeInstanceBlockDeviceMapping(Property):
    """
    AWS Object Type = "AWS::ImageBuilder::ContainerRecipe.InstanceBlockDeviceMapping"
    Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-containerrecipe-instanceblockdevicemapping.html
    Property Document:
    - ``p_DeviceName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-containerrecipe-instanceblockdevicemapping.html#cfn-imagebuilder-containerrecipe-instanceblockdevicemapping-devicename
    - ``p_Ebs``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-containerrecipe-instanceblockdevicemapping.html#cfn-imagebuilder-containerrecipe-instanceblockdevicemapping-ebs
    - ``p_NoDevice``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-containerrecipe-instanceblockdevicemapping.html#cfn-imagebuilder-containerrecipe-instanceblockdevicemapping-nodevice
    - ``p_VirtualName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-containerrecipe-instanceblockdevicemapping.html#cfn-imagebuilder-containerrecipe-instanceblockdevicemapping-virtualname
    """
    AWS_OBJECT_TYPE = "AWS::ImageBuilder::ContainerRecipe.InstanceBlockDeviceMapping"

    # Optional string-like value (TypeCheck.intrinsic_str_type) or None.
    p_DeviceName: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "DeviceName"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-containerrecipe-instanceblockdevicemapping.html#cfn-imagebuilder-containerrecipe-instanceblockdevicemapping-devicename"""

    # Accepts either an already-built property object or a plain dict; the
    # converter coerces dicts via from_dict before the validator runs.
    p_Ebs: typing.Union['ContainerRecipeEbsInstanceBlockDeviceSpecification', dict] = attr.ib(
        default=None,
        converter=ContainerRecipeEbsInstanceBlockDeviceSpecification.from_dict,
        validator=attr.validators.optional(attr.validators.instance_of(ContainerRecipeEbsInstanceBlockDeviceSpecification)),
        metadata={AttrMeta.PROPERTY_NAME: "Ebs"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-containerrecipe-instanceblockdevicemapping.html#cfn-imagebuilder-containerrecipe-instanceblockdevicemapping-ebs"""

    p_NoDevice: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "NoDevice"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-containerrecipe-instanceblockdevicemapping.html#cfn-imagebuilder-containerrecipe-instanceblockdevicemapping-nodevice"""

    p_VirtualName: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "VirtualName"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-containerrecipe-instanceblockdevicemapping.html#cfn-imagebuilder-containerrecipe-instanceblockdevicemapping-virtualname"""
# NOTE(review): appears machine-generated from the CloudFormation resource
# spec; attr.ib declaration order defines the attrs-generated __init__
# signature, so do not reorder fields.
@attr.s
class DistributionConfigurationLaunchTemplateConfiguration(Property):
    """
    AWS Object Type = "AWS::ImageBuilder::DistributionConfiguration.LaunchTemplateConfiguration"
    Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-distributionconfiguration-launchtemplateconfiguration.html
    Property Document:
    - ``p_AccountId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-distributionconfiguration-launchtemplateconfiguration.html#cfn-imagebuilder-distributionconfiguration-launchtemplateconfiguration-accountid
    - ``p_LaunchTemplateId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-distributionconfiguration-launchtemplateconfiguration.html#cfn-imagebuilder-distributionconfiguration-launchtemplateconfiguration-launchtemplateid
    - ``p_SetDefaultVersion``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-distributionconfiguration-launchtemplateconfiguration.html#cfn-imagebuilder-distributionconfiguration-launchtemplateconfiguration-setdefaultversion
    """
    AWS_OBJECT_TYPE = "AWS::ImageBuilder::DistributionConfiguration.LaunchTemplateConfiguration"

    # Optional string-like value (TypeCheck.intrinsic_str_type) or None.
    p_AccountId: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "AccountId"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-distributionconfiguration-launchtemplateconfiguration.html#cfn-imagebuilder-distributionconfiguration-launchtemplateconfiguration-accountid"""

    p_LaunchTemplateId: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "LaunchTemplateId"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-distributionconfiguration-launchtemplateconfiguration.html#cfn-imagebuilder-distributionconfiguration-launchtemplateconfiguration-launchtemplateid"""

    # Optional bool; validator accepts bool or the None default.
    p_SetDefaultVersion: bool = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(bool)),
        metadata={AttrMeta.PROPERTY_NAME: "SetDefaultVersion"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-distributionconfiguration-launchtemplateconfiguration.html#cfn-imagebuilder-distributionconfiguration-launchtemplateconfiguration-setdefaultversion"""
@attr.s
class ContainerRecipeInstanceConfiguration(Property):
    """
    AWS Object Type = "AWS::ImageBuilder::ContainerRecipe.InstanceConfiguration"
    Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-containerrecipe-instanceconfiguration.html
    Property Document:
    
    - ``p_BlockDeviceMappings``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-containerrecipe-instanceconfiguration.html#cfn-imagebuilder-containerrecipe-instanceconfiguration-blockdevicemappings
    - ``p_Image``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-containerrecipe-instanceconfiguration.html#cfn-imagebuilder-containerrecipe-instanceconfiguration-image
    """
    # Serialized CloudFormation type name for this property object.
    AWS_OBJECT_TYPE = "AWS::ImageBuilder::ContainerRecipe.InstanceConfiguration"
    # Optional ("p_") property: defaults to None; validated only when a value is given.
    # The converter coerces a list of plain dicts into typed
    # ContainerRecipeInstanceBlockDeviceMapping instances before validation.
    p_BlockDeviceMappings: typing.List[typing.Union['ContainerRecipeInstanceBlockDeviceMapping', dict]] = attr.ib(
        default=None,
        converter=ContainerRecipeInstanceBlockDeviceMapping.from_list,
        validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(ContainerRecipeInstanceBlockDeviceMapping), iterable_validator=attr.validators.instance_of(list))),
        metadata={AttrMeta.PROPERTY_NAME: "BlockDeviceMappings"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-containerrecipe-instanceconfiguration.html#cfn-imagebuilder-containerrecipe-instanceconfiguration-blockdevicemappings"""
    p_Image: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "Image"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-containerrecipe-instanceconfiguration.html#cfn-imagebuilder-containerrecipe-instanceconfiguration-image"""
@attr.s
class ContainerRecipeTargetContainerRepository(Property):
    """
    AWS Object Type = "AWS::ImageBuilder::ContainerRecipe.TargetContainerRepository"
    Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-containerrecipe-targetcontainerrepository.html
    Property Document:
    
    - ``p_RepositoryName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-containerrecipe-targetcontainerrepository.html#cfn-imagebuilder-containerrecipe-targetcontainerrepository-repositoryname
    - ``p_Service``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-containerrecipe-targetcontainerrepository.html#cfn-imagebuilder-containerrecipe-targetcontainerrepository-service
    """
    # Serialized CloudFormation type name for this property object.
    AWS_OBJECT_TYPE = "AWS::ImageBuilder::ContainerRecipe.TargetContainerRepository"
    # Both properties are optional ("p_"): default None, validated only when set.
    p_RepositoryName: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "RepositoryName"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-containerrecipe-targetcontainerrepository.html#cfn-imagebuilder-containerrecipe-targetcontainerrepository-repositoryname"""
    p_Service: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "Service"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-containerrecipe-targetcontainerrepository.html#cfn-imagebuilder-containerrecipe-targetcontainerrepository-service"""
@attr.s
class ImageRecipeEbsInstanceBlockDeviceSpecification(Property):
    """
    AWS Object Type = "AWS::ImageBuilder::ImageRecipe.EbsInstanceBlockDeviceSpecification"
    Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-imagerecipe-ebsinstanceblockdevicespecification.html
    Property Document:
    
    - ``p_DeleteOnTermination``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-imagerecipe-ebsinstanceblockdevicespecification.html#cfn-imagebuilder-imagerecipe-ebsinstanceblockdevicespecification-deleteontermination
    - ``p_Encrypted``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-imagerecipe-ebsinstanceblockdevicespecification.html#cfn-imagebuilder-imagerecipe-ebsinstanceblockdevicespecification-encrypted
    - ``p_Iops``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-imagerecipe-ebsinstanceblockdevicespecification.html#cfn-imagebuilder-imagerecipe-ebsinstanceblockdevicespecification-iops
    - ``p_KmsKeyId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-imagerecipe-ebsinstanceblockdevicespecification.html#cfn-imagebuilder-imagerecipe-ebsinstanceblockdevicespecification-kmskeyid
    - ``p_SnapshotId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-imagerecipe-ebsinstanceblockdevicespecification.html#cfn-imagebuilder-imagerecipe-ebsinstanceblockdevicespecification-snapshotid
    - ``p_VolumeSize``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-imagerecipe-ebsinstanceblockdevicespecification.html#cfn-imagebuilder-imagerecipe-ebsinstanceblockdevicespecification-volumesize
    - ``p_VolumeType``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-imagerecipe-ebsinstanceblockdevicespecification.html#cfn-imagebuilder-imagerecipe-ebsinstanceblockdevicespecification-volumetype
    """
    # Serialized CloudFormation type name for this property object.
    AWS_OBJECT_TYPE = "AWS::ImageBuilder::ImageRecipe.EbsInstanceBlockDeviceSpecification"
    # All properties are optional ("p_"): default None, validated only when set.
    # Booleans/ints are validated as concrete Python types; string-like fields
    # accept intrinsic-function values via TypeCheck.intrinsic_str_type.
    p_DeleteOnTermination: bool = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(bool)),
        metadata={AttrMeta.PROPERTY_NAME: "DeleteOnTermination"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-imagerecipe-ebsinstanceblockdevicespecification.html#cfn-imagebuilder-imagerecipe-ebsinstanceblockdevicespecification-deleteontermination"""
    p_Encrypted: bool = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(bool)),
        metadata={AttrMeta.PROPERTY_NAME: "Encrypted"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-imagerecipe-ebsinstanceblockdevicespecification.html#cfn-imagebuilder-imagerecipe-ebsinstanceblockdevicespecification-encrypted"""
    p_Iops: int = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(int)),
        metadata={AttrMeta.PROPERTY_NAME: "Iops"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-imagerecipe-ebsinstanceblockdevicespecification.html#cfn-imagebuilder-imagerecipe-ebsinstanceblockdevicespecification-iops"""
    p_KmsKeyId: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "KmsKeyId"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-imagerecipe-ebsinstanceblockdevicespecification.html#cfn-imagebuilder-imagerecipe-ebsinstanceblockdevicespecification-kmskeyid"""
    p_SnapshotId: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "SnapshotId"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-imagerecipe-ebsinstanceblockdevicespecification.html#cfn-imagebuilder-imagerecipe-ebsinstanceblockdevicespecification-snapshotid"""
    p_VolumeSize: int = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(int)),
        metadata={AttrMeta.PROPERTY_NAME: "VolumeSize"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-imagerecipe-ebsinstanceblockdevicespecification.html#cfn-imagebuilder-imagerecipe-ebsinstanceblockdevicespecification-volumesize"""
    p_VolumeType: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "VolumeType"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-imagerecipe-ebsinstanceblockdevicespecification.html#cfn-imagebuilder-imagerecipe-ebsinstanceblockdevicespecification-volumetype"""
@attr.s
class ImageRecipeInstanceBlockDeviceMapping(Property):
    """
    AWS Object Type = "AWS::ImageBuilder::ImageRecipe.InstanceBlockDeviceMapping"
    Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-imagerecipe-instanceblockdevicemapping.html
    Property Document:
    
    - ``p_DeviceName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-imagerecipe-instanceblockdevicemapping.html#cfn-imagebuilder-imagerecipe-instanceblockdevicemapping-devicename
    - ``p_Ebs``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-imagerecipe-instanceblockdevicemapping.html#cfn-imagebuilder-imagerecipe-instanceblockdevicemapping-ebs
    - ``p_NoDevice``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-imagerecipe-instanceblockdevicemapping.html#cfn-imagebuilder-imagerecipe-instanceblockdevicemapping-nodevice
    - ``p_VirtualName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-imagerecipe-instanceblockdevicemapping.html#cfn-imagebuilder-imagerecipe-instanceblockdevicemapping-virtualname
    """
    # Serialized CloudFormation type name for this property object.
    AWS_OBJECT_TYPE = "AWS::ImageBuilder::ImageRecipe.InstanceBlockDeviceMapping"
    # All properties are optional ("p_"): default None, validated only when set.
    p_DeviceName: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "DeviceName"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-imagerecipe-instanceblockdevicemapping.html#cfn-imagebuilder-imagerecipe-instanceblockdevicemapping-devicename"""
    # Accepts either a typed instance or a plain dict; from_dict coerces
    # the dict form into ImageRecipeEbsInstanceBlockDeviceSpecification.
    p_Ebs: typing.Union['ImageRecipeEbsInstanceBlockDeviceSpecification', dict] = attr.ib(
        default=None,
        converter=ImageRecipeEbsInstanceBlockDeviceSpecification.from_dict,
        validator=attr.validators.optional(attr.validators.instance_of(ImageRecipeEbsInstanceBlockDeviceSpecification)),
        metadata={AttrMeta.PROPERTY_NAME: "Ebs"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-imagerecipe-instanceblockdevicemapping.html#cfn-imagebuilder-imagerecipe-instanceblockdevicemapping-ebs"""
    p_NoDevice: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "NoDevice"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-imagerecipe-instanceblockdevicemapping.html#cfn-imagebuilder-imagerecipe-instanceblockdevicemapping-nodevice"""
    p_VirtualName: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "VirtualName"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-imagerecipe-instanceblockdevicemapping.html#cfn-imagebuilder-imagerecipe-instanceblockdevicemapping-virtualname"""
@attr.s
class InfrastructureConfigurationLogging(Property):
    """
    AWS Object Type = "AWS::ImageBuilder::InfrastructureConfiguration.Logging"
    Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-infrastructureconfiguration-logging.html
    Property Document:
    
    - ``p_S3Logs``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-infrastructureconfiguration-logging.html#cfn-imagebuilder-infrastructureconfiguration-logging-s3logs
    """
    # Serialized CloudFormation type name for this property object.
    AWS_OBJECT_TYPE = "AWS::ImageBuilder::InfrastructureConfiguration.Logging"
    # Optional ("p_") property: accepts a typed instance or a plain dict;
    # from_dict coerces the dict form into InfrastructureConfigurationS3Logs.
    p_S3Logs: typing.Union['InfrastructureConfigurationS3Logs', dict] = attr.ib(
        default=None,
        converter=InfrastructureConfigurationS3Logs.from_dict,
        validator=attr.validators.optional(attr.validators.instance_of(InfrastructureConfigurationS3Logs)),
        metadata={AttrMeta.PROPERTY_NAME: "S3Logs"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-infrastructureconfiguration-logging.html#cfn-imagebuilder-infrastructureconfiguration-logging-s3logs"""
@attr.s
class DistributionConfigurationDistribution(Property):
    """
    AWS Object Type = "AWS::ImageBuilder::DistributionConfiguration.Distribution"
    Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-distributionconfiguration-distribution.html
    Property Document:
    
    - ``rp_Region``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-distributionconfiguration-distribution.html#cfn-imagebuilder-distributionconfiguration-distribution-region
    - ``p_AmiDistributionConfiguration``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-distributionconfiguration-distribution.html#cfn-imagebuilder-distributionconfiguration-distribution-amidistributionconfiguration
    - ``p_ContainerDistributionConfiguration``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-distributionconfiguration-distribution.html#cfn-imagebuilder-distributionconfiguration-distribution-containerdistributionconfiguration
    - ``p_LaunchTemplateConfigurations``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-distributionconfiguration-distribution.html#cfn-imagebuilder-distributionconfiguration-distribution-launchtemplateconfigurations
    - ``p_LicenseConfigurationArns``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-distributionconfiguration-distribution.html#cfn-imagebuilder-distributionconfiguration-distribution-licenseconfigurationarns
    """
    # Serialized CloudFormation type name for this property object.
    AWS_OBJECT_TYPE = "AWS::ImageBuilder::DistributionConfiguration.Distribution"
    # Required property ("rp_"): validator is NOT wrapped in optional(), so
    # leaving it at the None default fails attrs validation at construction.
    rp_Region: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
        metadata={AttrMeta.PROPERTY_NAME: "Region"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-distributionconfiguration-distribution.html#cfn-imagebuilder-distributionconfiguration-distribution-region"""
    # Optional ("p_") properties below: default None, validated only when set.
    # These two are free-form dicts (no typed Property class wraps them here).
    p_AmiDistributionConfiguration: dict = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(dict)),
        metadata={AttrMeta.PROPERTY_NAME: "AmiDistributionConfiguration"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-distributionconfiguration-distribution.html#cfn-imagebuilder-distributionconfiguration-distribution-amidistributionconfiguration"""
    p_ContainerDistributionConfiguration: dict = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(dict)),
        metadata={AttrMeta.PROPERTY_NAME: "ContainerDistributionConfiguration"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-distributionconfiguration-distribution.html#cfn-imagebuilder-distributionconfiguration-distribution-containerdistributionconfiguration"""
    # from_list coerces a list of plain dicts into typed
    # DistributionConfigurationLaunchTemplateConfiguration instances.
    p_LaunchTemplateConfigurations: typing.List[typing.Union['DistributionConfigurationLaunchTemplateConfiguration', dict]] = attr.ib(
        default=None,
        converter=DistributionConfigurationLaunchTemplateConfiguration.from_list,
        validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(DistributionConfigurationLaunchTemplateConfiguration), iterable_validator=attr.validators.instance_of(list))),
        metadata={AttrMeta.PROPERTY_NAME: "LaunchTemplateConfigurations"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-distributionconfiguration-distribution.html#cfn-imagebuilder-distributionconfiguration-distribution-launchtemplateconfigurations"""
    p_LicenseConfigurationArns: typing.List[TypeHint.intrinsic_str] = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))),
        metadata={AttrMeta.PROPERTY_NAME: "LicenseConfigurationArns"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-imagebuilder-distributionconfiguration-distribution.html#cfn-imagebuilder-distributionconfiguration-distribution-licenseconfigurationarns"""
# --- Resource declarations (Property classes end here; Resource classes follow) ---
@attr.s
class Component(Resource):
    """
    AWS Object Type = "AWS::ImageBuilder::Component"
    Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-component.html
    Property Document:
    
    - ``rp_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-component.html#cfn-imagebuilder-component-name
    - ``rp_Platform``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-component.html#cfn-imagebuilder-component-platform
    - ``rp_Version``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-component.html#cfn-imagebuilder-component-version
    - ``p_ChangeDescription``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-component.html#cfn-imagebuilder-component-changedescription
    - ``p_Data``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-component.html#cfn-imagebuilder-component-data
    - ``p_Description``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-component.html#cfn-imagebuilder-component-description
    - ``p_KmsKeyId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-component.html#cfn-imagebuilder-component-kmskeyid
    - ``p_SupportedOsVersions``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-component.html#cfn-imagebuilder-component-supportedosversions
    - ``p_Uri``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-component.html#cfn-imagebuilder-component-uri
    - ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-component.html#cfn-imagebuilder-component-tags
    """
    # Serialized CloudFormation resource type name.
    AWS_OBJECT_TYPE = "AWS::ImageBuilder::Component"
    # Required properties ("rp_"): validator is NOT wrapped in optional(), so
    # leaving them at the None default fails attrs validation at construction.
    rp_Name: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
        metadata={AttrMeta.PROPERTY_NAME: "Name"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-component.html#cfn-imagebuilder-component-name"""
    rp_Platform: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
        metadata={AttrMeta.PROPERTY_NAME: "Platform"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-component.html#cfn-imagebuilder-component-platform"""
    rp_Version: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
        metadata={AttrMeta.PROPERTY_NAME: "Version"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-component.html#cfn-imagebuilder-component-version"""
    # Optional ("p_") properties below: default None, validated only when set.
    p_ChangeDescription: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "ChangeDescription"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-component.html#cfn-imagebuilder-component-changedescription"""
    p_Data: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "Data"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-component.html#cfn-imagebuilder-component-data"""
    p_Description: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "Description"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-component.html#cfn-imagebuilder-component-description"""
    p_KmsKeyId: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "KmsKeyId"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-component.html#cfn-imagebuilder-component-kmskeyid"""
    p_SupportedOsVersions: typing.List[TypeHint.intrinsic_str] = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))),
        metadata={AttrMeta.PROPERTY_NAME: "SupportedOsVersions"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-component.html#cfn-imagebuilder-component-supportedosversions"""
    p_Uri: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "Uri"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-component.html#cfn-imagebuilder-component-uri"""
    p_Tags: typing.Dict[str, TypeHint.intrinsic_str] = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.deep_mapping(key_validator=attr.validators.instance_of(str), value_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type))),
        metadata={AttrMeta.PROPERTY_NAME: "Tags"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-component.html#cfn-imagebuilder-component-tags"""
    # "rv_" properties expose CloudFormation return values as Fn::GetAtt references.
    @property
    def rv_Arn(self) -> GetAtt:
        """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-component.html#aws-resource-imagebuilder-component-return-values"""
        return GetAtt(resource=self, attr_name="Arn")
    
    @property
    def rv_Name(self) -> GetAtt:
        """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-component.html#aws-resource-imagebuilder-component-return-values"""
        return GetAtt(resource=self, attr_name="Name")
    
    @property
    def rv_Type(self) -> GetAtt:
        """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-component.html#aws-resource-imagebuilder-component-return-values"""
        return GetAtt(resource=self, attr_name="Type")
    
    @property
    def rv_Encrypted(self) -> GetAtt:
        """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-component.html#aws-resource-imagebuilder-component-return-values"""
        return GetAtt(resource=self, attr_name="Encrypted")
@attr.s
class InfrastructureConfiguration(Resource):
    """
    AWS Object Type = "AWS::ImageBuilder::InfrastructureConfiguration"
    Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-infrastructureconfiguration.html
    Property Document:
    
    - ``rp_InstanceProfileName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-infrastructureconfiguration.html#cfn-imagebuilder-infrastructureconfiguration-instanceprofilename
    - ``rp_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-infrastructureconfiguration.html#cfn-imagebuilder-infrastructureconfiguration-name
    - ``p_Description``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-infrastructureconfiguration.html#cfn-imagebuilder-infrastructureconfiguration-description
    - ``p_InstanceTypes``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-infrastructureconfiguration.html#cfn-imagebuilder-infrastructureconfiguration-instancetypes
    - ``p_KeyPair``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-infrastructureconfiguration.html#cfn-imagebuilder-infrastructureconfiguration-keypair
    - ``p_Logging``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-infrastructureconfiguration.html#cfn-imagebuilder-infrastructureconfiguration-logging
    - ``p_ResourceTags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-infrastructureconfiguration.html#cfn-imagebuilder-infrastructureconfiguration-resourcetags
    - ``p_SecurityGroupIds``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-infrastructureconfiguration.html#cfn-imagebuilder-infrastructureconfiguration-securitygroupids
    - ``p_SnsTopicArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-infrastructureconfiguration.html#cfn-imagebuilder-infrastructureconfiguration-snstopicarn
    - ``p_SubnetId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-infrastructureconfiguration.html#cfn-imagebuilder-infrastructureconfiguration-subnetid
    - ``p_TerminateInstanceOnFailure``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-infrastructureconfiguration.html#cfn-imagebuilder-infrastructureconfiguration-terminateinstanceonfailure
    - ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-infrastructureconfiguration.html#cfn-imagebuilder-infrastructureconfiguration-tags
    """
    # Serialized CloudFormation resource type name.
    AWS_OBJECT_TYPE = "AWS::ImageBuilder::InfrastructureConfiguration"
    # Required properties ("rp_"): validator is NOT wrapped in optional(), so
    # leaving them at the None default fails attrs validation at construction.
    rp_InstanceProfileName: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
        metadata={AttrMeta.PROPERTY_NAME: "InstanceProfileName"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-infrastructureconfiguration.html#cfn-imagebuilder-infrastructureconfiguration-instanceprofilename"""
    rp_Name: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
        metadata={AttrMeta.PROPERTY_NAME: "Name"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-infrastructureconfiguration.html#cfn-imagebuilder-infrastructureconfiguration-name"""
    # Optional ("p_") properties below: default None, validated only when set.
    p_Description: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "Description"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-infrastructureconfiguration.html#cfn-imagebuilder-infrastructureconfiguration-description"""
    p_InstanceTypes: typing.List[TypeHint.intrinsic_str] = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))),
        metadata={AttrMeta.PROPERTY_NAME: "InstanceTypes"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-infrastructureconfiguration.html#cfn-imagebuilder-infrastructureconfiguration-instancetypes"""
    p_KeyPair: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "KeyPair"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-infrastructureconfiguration.html#cfn-imagebuilder-infrastructureconfiguration-keypair"""
    # Accepts a typed instance or a plain dict; from_dict coerces the dict
    # form into InfrastructureConfigurationLogging.
    p_Logging: typing.Union['InfrastructureConfigurationLogging', dict] = attr.ib(
        default=None,
        converter=InfrastructureConfigurationLogging.from_dict,
        validator=attr.validators.optional(attr.validators.instance_of(InfrastructureConfigurationLogging)),
        metadata={AttrMeta.PROPERTY_NAME: "Logging"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-infrastructureconfiguration.html#cfn-imagebuilder-infrastructureconfiguration-logging"""
    # Tags applied to the EC2 resources Image Builder creates (distinct from p_Tags).
    p_ResourceTags: typing.Dict[str, TypeHint.intrinsic_str] = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.deep_mapping(key_validator=attr.validators.instance_of(str), value_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type))),
        metadata={AttrMeta.PROPERTY_NAME: "ResourceTags"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-infrastructureconfiguration.html#cfn-imagebuilder-infrastructureconfiguration-resourcetags"""
    p_SecurityGroupIds: typing.List[TypeHint.intrinsic_str] = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))),
        metadata={AttrMeta.PROPERTY_NAME: "SecurityGroupIds"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-infrastructureconfiguration.html#cfn-imagebuilder-infrastructureconfiguration-securitygroupids"""
    p_SnsTopicArn: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "SnsTopicArn"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-infrastructureconfiguration.html#cfn-imagebuilder-infrastructureconfiguration-snstopicarn"""
    p_SubnetId: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "SubnetId"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-infrastructureconfiguration.html#cfn-imagebuilder-infrastructureconfiguration-subnetid"""
    p_TerminateInstanceOnFailure: bool = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(bool)),
        metadata={AttrMeta.PROPERTY_NAME: "TerminateInstanceOnFailure"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-infrastructureconfiguration.html#cfn-imagebuilder-infrastructureconfiguration-terminateinstanceonfailure"""
    p_Tags: typing.Dict[str, TypeHint.intrinsic_str] = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.deep_mapping(key_validator=attr.validators.instance_of(str), value_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type))),
        metadata={AttrMeta.PROPERTY_NAME: "Tags"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-infrastructureconfiguration.html#cfn-imagebuilder-infrastructureconfiguration-tags"""
    # "rv_" properties expose CloudFormation return values as Fn::GetAtt references.
    @property
    def rv_Arn(self) -> GetAtt:
        """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-infrastructureconfiguration.html#aws-resource-imagebuilder-infrastructureconfiguration-return-values"""
        return GetAtt(resource=self, attr_name="Arn")
    
    @property
    def rv_Name(self) -> GetAtt:
        """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-infrastructureconfiguration.html#aws-resource-imagebuilder-infrastructureconfiguration-return-values"""
        return GetAtt(resource=self, attr_name="Name")
@attr.s
class ImagePipeline(Resource):
    """
    AWS Object Type = "AWS::ImageBuilder::ImagePipeline"

    Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-imagepipeline.html

    Property Document:

    - ``rp_InfrastructureConfigurationArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-imagepipeline.html#cfn-imagebuilder-imagepipeline-infrastructureconfigurationarn
    - ``rp_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-imagepipeline.html#cfn-imagebuilder-imagepipeline-name
    - ``p_ContainerRecipeArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-imagepipeline.html#cfn-imagebuilder-imagepipeline-containerrecipearn
    - ``p_Description``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-imagepipeline.html#cfn-imagebuilder-imagepipeline-description
    - ``p_DistributionConfigurationArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-imagepipeline.html#cfn-imagebuilder-imagepipeline-distributionconfigurationarn
    - ``p_EnhancedImageMetadataEnabled``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-imagepipeline.html#cfn-imagebuilder-imagepipeline-enhancedimagemetadataenabled
    - ``p_ImageRecipeArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-imagepipeline.html#cfn-imagebuilder-imagepipeline-imagerecipearn
    - ``p_ImageTestsConfiguration``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-imagepipeline.html#cfn-imagebuilder-imagepipeline-imagetestsconfiguration
    - ``p_Schedule``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-imagepipeline.html#cfn-imagebuilder-imagepipeline-schedule
    - ``p_Status``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-imagepipeline.html#cfn-imagebuilder-imagepipeline-status
    - ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-imagepipeline.html#cfn-imagebuilder-imagepipeline-tags
    """
    AWS_OBJECT_TYPE = "AWS::ImageBuilder::ImagePipeline"

    # Required properties (rp_*): the validator is NOT wrapped in optional(),
    # so leaving them at the default None raises at construction time.
    rp_InfrastructureConfigurationArn: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
        metadata={AttrMeta.PROPERTY_NAME: "InfrastructureConfigurationArn"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-imagepipeline.html#cfn-imagebuilder-imagepipeline-infrastructureconfigurationarn"""
    rp_Name: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
        metadata={AttrMeta.PROPERTY_NAME: "Name"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-imagepipeline.html#cfn-imagebuilder-imagepipeline-name"""
    # Optional properties (p_*): validators are wrapped in optional(), so the
    # default None is accepted.
    p_ContainerRecipeArn: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "ContainerRecipeArn"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-imagepipeline.html#cfn-imagebuilder-imagepipeline-containerrecipearn"""
    p_Description: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "Description"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-imagepipeline.html#cfn-imagebuilder-imagepipeline-description"""
    p_DistributionConfigurationArn: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "DistributionConfigurationArn"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-imagepipeline.html#cfn-imagebuilder-imagepipeline-distributionconfigurationarn"""
    p_EnhancedImageMetadataEnabled: bool = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(bool)),
        metadata={AttrMeta.PROPERTY_NAME: "EnhancedImageMetadataEnabled"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-imagepipeline.html#cfn-imagebuilder-imagepipeline-enhancedimagemetadataenabled"""
    p_ImageRecipeArn: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "ImageRecipeArn"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-imagepipeline.html#cfn-imagebuilder-imagepipeline-imagerecipearn"""
    # Accepts either an ImagePipelineImageTestsConfiguration instance or a plain
    # dict; the converter normalizes dicts via .from_dict before validation.
    p_ImageTestsConfiguration: typing.Union['ImagePipelineImageTestsConfiguration', dict] = attr.ib(
        default=None,
        converter=ImagePipelineImageTestsConfiguration.from_dict,
        validator=attr.validators.optional(attr.validators.instance_of(ImagePipelineImageTestsConfiguration)),
        metadata={AttrMeta.PROPERTY_NAME: "ImageTestsConfiguration"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-imagepipeline.html#cfn-imagebuilder-imagepipeline-imagetestsconfiguration"""
    p_Schedule: typing.Union['ImagePipelineSchedule', dict] = attr.ib(
        default=None,
        converter=ImagePipelineSchedule.from_dict,
        validator=attr.validators.optional(attr.validators.instance_of(ImagePipelineSchedule)),
        metadata={AttrMeta.PROPERTY_NAME: "Schedule"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-imagepipeline.html#cfn-imagebuilder-imagepipeline-schedule"""
    p_Status: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "Status"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-imagepipeline.html#cfn-imagebuilder-imagepipeline-status"""
    p_Tags: typing.Dict[str, TypeHint.intrinsic_str] = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.deep_mapping(key_validator=attr.validators.instance_of(str), value_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type))),
        metadata={AttrMeta.PROPERTY_NAME: "Tags"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-imagepipeline.html#cfn-imagebuilder-imagepipeline-tags"""

    # rv_* properties build GetAtt intrinsic references to the resource's
    # CloudFormation return values (they do not resolve the value locally).
    @property
    def rv_Arn(self) -> GetAtt:
        """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-imagepipeline.html#aws-resource-imagebuilder-imagepipeline-return-values"""
        return GetAtt(resource=self, attr_name="Arn")

    @property
    def rv_Name(self) -> GetAtt:
        """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-imagepipeline.html#aws-resource-imagebuilder-imagepipeline-return-values"""
        return GetAtt(resource=self, attr_name="Name")
@attr.s
class DistributionConfiguration(Resource):
    """
    AWS Object Type = "AWS::ImageBuilder::DistributionConfiguration"

    Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-distributionconfiguration.html

    Property Document:

    - ``rp_Distributions``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-distributionconfiguration.html#cfn-imagebuilder-distributionconfiguration-distributions
    - ``rp_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-distributionconfiguration.html#cfn-imagebuilder-distributionconfiguration-name
    - ``p_Description``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-distributionconfiguration.html#cfn-imagebuilder-distributionconfiguration-description
    - ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-distributionconfiguration.html#cfn-imagebuilder-distributionconfiguration-tags
    """
    AWS_OBJECT_TYPE = "AWS::ImageBuilder::DistributionConfiguration"

    # Required property (rp_*): validator is not wrapped in optional(), so the
    # default None raises at construction time. The list may contain plain
    # dicts; .from_list converts them to DistributionConfigurationDistribution.
    rp_Distributions: typing.List[typing.Union['DistributionConfigurationDistribution', dict]] = attr.ib(
        default=None,
        converter=DistributionConfigurationDistribution.from_list,
        validator=attr.validators.deep_iterable(member_validator=attr.validators.instance_of(DistributionConfigurationDistribution), iterable_validator=attr.validators.instance_of(list)),
        metadata={AttrMeta.PROPERTY_NAME: "Distributions"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-distributionconfiguration.html#cfn-imagebuilder-distributionconfiguration-distributions"""
    rp_Name: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
        metadata={AttrMeta.PROPERTY_NAME: "Name"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-distributionconfiguration.html#cfn-imagebuilder-distributionconfiguration-name"""
    # Optional properties (p_*): validators are wrapped in optional(), so the
    # default None is accepted.
    p_Description: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "Description"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-distributionconfiguration.html#cfn-imagebuilder-distributionconfiguration-description"""
    p_Tags: typing.Dict[str, TypeHint.intrinsic_str] = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.deep_mapping(key_validator=attr.validators.instance_of(str), value_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type))),
        metadata={AttrMeta.PROPERTY_NAME: "Tags"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-distributionconfiguration.html#cfn-imagebuilder-distributionconfiguration-tags"""

    # rv_* properties build GetAtt intrinsic references to the resource's
    # CloudFormation return values (they do not resolve the value locally).
    @property
    def rv_Arn(self) -> GetAtt:
        """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-distributionconfiguration.html#aws-resource-imagebuilder-distributionconfiguration-return-values"""
        return GetAtt(resource=self, attr_name="Arn")

    @property
    def rv_Name(self) -> GetAtt:
        """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-distributionconfiguration.html#aws-resource-imagebuilder-distributionconfiguration-return-values"""
        return GetAtt(resource=self, attr_name="Name")
@attr.s
class ContainerRecipe(Resource):
    """
    AWS Object Type = "AWS::ImageBuilder::ContainerRecipe"

    Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-containerrecipe.html

    Property Document:

    - ``rp_Components``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-containerrecipe.html#cfn-imagebuilder-containerrecipe-components
    - ``rp_ContainerType``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-containerrecipe.html#cfn-imagebuilder-containerrecipe-containertype
    - ``rp_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-containerrecipe.html#cfn-imagebuilder-containerrecipe-name
    - ``rp_ParentImage``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-containerrecipe.html#cfn-imagebuilder-containerrecipe-parentimage
    - ``rp_TargetRepository``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-containerrecipe.html#cfn-imagebuilder-containerrecipe-targetrepository
    - ``rp_Version``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-containerrecipe.html#cfn-imagebuilder-containerrecipe-version
    - ``p_Description``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-containerrecipe.html#cfn-imagebuilder-containerrecipe-description
    - ``p_DockerfileTemplateData``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-containerrecipe.html#cfn-imagebuilder-containerrecipe-dockerfiletemplatedata
    - ``p_DockerfileTemplateUri``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-containerrecipe.html#cfn-imagebuilder-containerrecipe-dockerfiletemplateuri
    - ``p_ImageOsVersionOverride``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-containerrecipe.html#cfn-imagebuilder-containerrecipe-imageosversionoverride
    - ``p_InstanceConfiguration``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-containerrecipe.html#cfn-imagebuilder-containerrecipe-instanceconfiguration
    - ``p_KmsKeyId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-containerrecipe.html#cfn-imagebuilder-containerrecipe-kmskeyid
    - ``p_PlatformOverride``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-containerrecipe.html#cfn-imagebuilder-containerrecipe-platformoverride
    - ``p_WorkingDirectory``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-containerrecipe.html#cfn-imagebuilder-containerrecipe-workingdirectory
    - ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-containerrecipe.html#cfn-imagebuilder-containerrecipe-tags
    """
    AWS_OBJECT_TYPE = "AWS::ImageBuilder::ContainerRecipe"

    # Required properties (rp_*): validators are not wrapped in optional(),
    # so the default None raises at construction time. The Components list may
    # contain plain dicts; .from_list converts them before validation.
    rp_Components: typing.List[typing.Union['ContainerRecipeComponentConfiguration', dict]] = attr.ib(
        default=None,
        converter=ContainerRecipeComponentConfiguration.from_list,
        validator=attr.validators.deep_iterable(member_validator=attr.validators.instance_of(ContainerRecipeComponentConfiguration), iterable_validator=attr.validators.instance_of(list)),
        metadata={AttrMeta.PROPERTY_NAME: "Components"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-containerrecipe.html#cfn-imagebuilder-containerrecipe-components"""
    rp_ContainerType: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
        metadata={AttrMeta.PROPERTY_NAME: "ContainerType"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-containerrecipe.html#cfn-imagebuilder-containerrecipe-containertype"""
    rp_Name: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
        metadata={AttrMeta.PROPERTY_NAME: "Name"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-containerrecipe.html#cfn-imagebuilder-containerrecipe-name"""
    rp_ParentImage: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
        metadata={AttrMeta.PROPERTY_NAME: "ParentImage"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-containerrecipe.html#cfn-imagebuilder-containerrecipe-parentimage"""
    # Accepts either a ContainerRecipeTargetContainerRepository instance or a
    # plain dict; .from_dict normalizes dicts before validation.
    rp_TargetRepository: typing.Union['ContainerRecipeTargetContainerRepository', dict] = attr.ib(
        default=None,
        converter=ContainerRecipeTargetContainerRepository.from_dict,
        validator=attr.validators.instance_of(ContainerRecipeTargetContainerRepository),
        metadata={AttrMeta.PROPERTY_NAME: "TargetRepository"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-containerrecipe.html#cfn-imagebuilder-containerrecipe-targetrepository"""
    rp_Version: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
        metadata={AttrMeta.PROPERTY_NAME: "Version"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-containerrecipe.html#cfn-imagebuilder-containerrecipe-version"""
    # Optional properties (p_*): validators are wrapped in optional(), so the
    # default None is accepted.
    p_Description: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "Description"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-containerrecipe.html#cfn-imagebuilder-containerrecipe-description"""
    p_DockerfileTemplateData: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "DockerfileTemplateData"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-containerrecipe.html#cfn-imagebuilder-containerrecipe-dockerfiletemplatedata"""
    p_DockerfileTemplateUri: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "DockerfileTemplateUri"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-containerrecipe.html#cfn-imagebuilder-containerrecipe-dockerfiletemplateuri"""
    p_ImageOsVersionOverride: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "ImageOsVersionOverride"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-containerrecipe.html#cfn-imagebuilder-containerrecipe-imageosversionoverride"""
    p_InstanceConfiguration: typing.Union['ContainerRecipeInstanceConfiguration', dict] = attr.ib(
        default=None,
        converter=ContainerRecipeInstanceConfiguration.from_dict,
        validator=attr.validators.optional(attr.validators.instance_of(ContainerRecipeInstanceConfiguration)),
        metadata={AttrMeta.PROPERTY_NAME: "InstanceConfiguration"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-containerrecipe.html#cfn-imagebuilder-containerrecipe-instanceconfiguration"""
    p_KmsKeyId: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "KmsKeyId"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-containerrecipe.html#cfn-imagebuilder-containerrecipe-kmskeyid"""
    p_PlatformOverride: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "PlatformOverride"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-containerrecipe.html#cfn-imagebuilder-containerrecipe-platformoverride"""
    p_WorkingDirectory: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "WorkingDirectory"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-containerrecipe.html#cfn-imagebuilder-containerrecipe-workingdirectory"""
    p_Tags: typing.Dict[str, TypeHint.intrinsic_str] = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.deep_mapping(key_validator=attr.validators.instance_of(str), value_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type))),
        metadata={AttrMeta.PROPERTY_NAME: "Tags"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-containerrecipe.html#cfn-imagebuilder-containerrecipe-tags"""

    # rv_* properties build GetAtt intrinsic references to the resource's
    # CloudFormation return values (they do not resolve the value locally).
    @property
    def rv_Arn(self) -> GetAtt:
        """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-containerrecipe.html#aws-resource-imagebuilder-containerrecipe-return-values"""
        return GetAtt(resource=self, attr_name="Arn")

    @property
    def rv_Name(self) -> GetAtt:
        """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-containerrecipe.html#aws-resource-imagebuilder-containerrecipe-return-values"""
        return GetAtt(resource=self, attr_name="Name")
@attr.s
class ImageRecipe(Resource):
    """
    AWS Object Type = "AWS::ImageBuilder::ImageRecipe"

    Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-imagerecipe.html

    Property Document:

    - ``rp_Components``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-imagerecipe.html#cfn-imagebuilder-imagerecipe-components
    - ``rp_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-imagerecipe.html#cfn-imagebuilder-imagerecipe-name
    - ``rp_ParentImage``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-imagerecipe.html#cfn-imagebuilder-imagerecipe-parentimage
    - ``rp_Version``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-imagerecipe.html#cfn-imagebuilder-imagerecipe-version
    - ``p_BlockDeviceMappings``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-imagerecipe.html#cfn-imagebuilder-imagerecipe-blockdevicemappings
    - ``p_Description``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-imagerecipe.html#cfn-imagebuilder-imagerecipe-description
    - ``p_WorkingDirectory``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-imagerecipe.html#cfn-imagebuilder-imagerecipe-workingdirectory
    - ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-imagerecipe.html#cfn-imagebuilder-imagerecipe-tags
    """
    AWS_OBJECT_TYPE = "AWS::ImageBuilder::ImageRecipe"

    # Required properties (rp_*): validators are not wrapped in optional(),
    # so the default None raises at construction time. The Components list may
    # contain plain dicts; .from_list converts them before validation.
    rp_Components: typing.List[typing.Union['ImageRecipeComponentConfiguration', dict]] = attr.ib(
        default=None,
        converter=ImageRecipeComponentConfiguration.from_list,
        validator=attr.validators.deep_iterable(member_validator=attr.validators.instance_of(ImageRecipeComponentConfiguration), iterable_validator=attr.validators.instance_of(list)),
        metadata={AttrMeta.PROPERTY_NAME: "Components"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-imagerecipe.html#cfn-imagebuilder-imagerecipe-components"""
    rp_Name: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
        metadata={AttrMeta.PROPERTY_NAME: "Name"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-imagerecipe.html#cfn-imagebuilder-imagerecipe-name"""
    rp_ParentImage: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
        metadata={AttrMeta.PROPERTY_NAME: "ParentImage"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-imagerecipe.html#cfn-imagebuilder-imagerecipe-parentimage"""
    rp_Version: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
        metadata={AttrMeta.PROPERTY_NAME: "Version"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-imagerecipe.html#cfn-imagebuilder-imagerecipe-version"""
    # Optional properties (p_*): validators are wrapped in optional(), so the
    # default None is accepted.
    p_BlockDeviceMappings: typing.List[typing.Union['ImageRecipeInstanceBlockDeviceMapping', dict]] = attr.ib(
        default=None,
        converter=ImageRecipeInstanceBlockDeviceMapping.from_list,
        validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(ImageRecipeInstanceBlockDeviceMapping), iterable_validator=attr.validators.instance_of(list))),
        metadata={AttrMeta.PROPERTY_NAME: "BlockDeviceMappings"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-imagerecipe.html#cfn-imagebuilder-imagerecipe-blockdevicemappings"""
    p_Description: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "Description"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-imagerecipe.html#cfn-imagebuilder-imagerecipe-description"""
    p_WorkingDirectory: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "WorkingDirectory"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-imagerecipe.html#cfn-imagebuilder-imagerecipe-workingdirectory"""
    p_Tags: typing.Dict[str, TypeHint.intrinsic_str] = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.deep_mapping(key_validator=attr.validators.instance_of(str), value_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type))),
        metadata={AttrMeta.PROPERTY_NAME: "Tags"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-imagerecipe.html#cfn-imagebuilder-imagerecipe-tags"""

    # rv_* properties build GetAtt intrinsic references to the resource's
    # CloudFormation return values (they do not resolve the value locally).
    @property
    def rv_Arn(self) -> GetAtt:
        """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-imagerecipe.html#aws-resource-imagebuilder-imagerecipe-return-values"""
        return GetAtt(resource=self, attr_name="Arn")

    @property
    def rv_Name(self) -> GetAtt:
        """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-imagerecipe.html#aws-resource-imagebuilder-imagerecipe-return-values"""
        return GetAtt(resource=self, attr_name="Name")
@attr.s
class Image(Resource):
    """
    AWS Object Type = "AWS::ImageBuilder::Image"

    Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-image.html

    Property Document:

    - ``rp_InfrastructureConfigurationArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-image.html#cfn-imagebuilder-image-infrastructureconfigurationarn
    - ``p_ContainerRecipeArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-image.html#cfn-imagebuilder-image-containerrecipearn
    - ``p_DistributionConfigurationArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-image.html#cfn-imagebuilder-image-distributionconfigurationarn
    - ``p_EnhancedImageMetadataEnabled``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-image.html#cfn-imagebuilder-image-enhancedimagemetadataenabled
    - ``p_ImageRecipeArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-image.html#cfn-imagebuilder-image-imagerecipearn
    - ``p_ImageTestsConfiguration``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-image.html#cfn-imagebuilder-image-imagetestsconfiguration
    - ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-image.html#cfn-imagebuilder-image-tags
    """
    AWS_OBJECT_TYPE = "AWS::ImageBuilder::Image"

    # Required property (rp_*): the validator is not wrapped in optional(),
    # so the default None raises at construction time.
    rp_InfrastructureConfigurationArn: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
        metadata={AttrMeta.PROPERTY_NAME: "InfrastructureConfigurationArn"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-image.html#cfn-imagebuilder-image-infrastructureconfigurationarn"""
    # Optional properties (p_*): validators are wrapped in optional(), so the
    # default None is accepted.
    p_ContainerRecipeArn: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "ContainerRecipeArn"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-image.html#cfn-imagebuilder-image-containerrecipearn"""
    p_DistributionConfigurationArn: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "DistributionConfigurationArn"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-image.html#cfn-imagebuilder-image-distributionconfigurationarn"""
    p_EnhancedImageMetadataEnabled: bool = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(bool)),
        metadata={AttrMeta.PROPERTY_NAME: "EnhancedImageMetadataEnabled"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-image.html#cfn-imagebuilder-image-enhancedimagemetadataenabled"""
    p_ImageRecipeArn: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "ImageRecipeArn"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-image.html#cfn-imagebuilder-image-imagerecipearn"""
    # Accepts either an ImageImageTestsConfiguration instance or a plain dict;
    # .from_dict normalizes dicts before validation.
    p_ImageTestsConfiguration: typing.Union['ImageImageTestsConfiguration', dict] = attr.ib(
        default=None,
        converter=ImageImageTestsConfiguration.from_dict,
        validator=attr.validators.optional(attr.validators.instance_of(ImageImageTestsConfiguration)),
        metadata={AttrMeta.PROPERTY_NAME: "ImageTestsConfiguration"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-image.html#cfn-imagebuilder-image-imagetestsconfiguration"""
    p_Tags: typing.Dict[str, TypeHint.intrinsic_str] = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.deep_mapping(key_validator=attr.validators.instance_of(str), value_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type))),
        metadata={AttrMeta.PROPERTY_NAME: "Tags"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-image.html#cfn-imagebuilder-image-tags"""

    # rv_* properties build GetAtt intrinsic references to the resource's
    # CloudFormation return values (they do not resolve the value locally).
    @property
    def rv_Arn(self) -> GetAtt:
        """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-image.html#aws-resource-imagebuilder-image-return-values"""
        return GetAtt(resource=self, attr_name="Arn")

    @property
    def rv_Name(self) -> GetAtt:
        """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-image.html#aws-resource-imagebuilder-image-return-values"""
        return GetAtt(resource=self, attr_name="Name")

    @property
    def rv_ImageId(self) -> GetAtt:
        """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-imagebuilder-image.html#aws-resource-imagebuilder-image-return-values"""
        return GetAtt(resource=self, attr_name="ImageId")
| 73.28843
| 272
| 0.788011
| 8,773
| 88,679
| 7.86846
| 0.018808
| 0.03048
| 0.041909
| 0.064769
| 0.943561
| 0.941489
| 0.923149
| 0.899247
| 0.899247
| 0.899247
| 0
| 0.000385
| 0.092378
| 88,679
| 1,209
| 273
| 73.349049
| 0.857269
| 0.345798
| 0
| 0.590258
| 0
| 0
| 0.088216
| 0.059564
| 0
| 0
| 0
| 0
| 0
| 1
| 0.024355
| false
| 0
| 0.005731
| 0
| 0.277937
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
5c9f8a7b98571a2dbf32ff7b31132c196849bf5f
| 561
|
py
|
Python
|
fincorpy/__init__.py
|
Fincor-Blockchain/fincorpy
|
f80f4ea234cdc3bd79bb45b915ca35843dabf6ab
|
[
"MIT"
] | null | null | null |
fincorpy/__init__.py
|
Fincor-Blockchain/fincorpy
|
f80f4ea234cdc3bd79bb45b915ca35843dabf6ab
|
[
"MIT"
] | null | null | null |
fincorpy/__init__.py
|
Fincor-Blockchain/fincorpy
|
f80f4ea234cdc3bd79bb45b915ca35843dabf6ab
|
[
"MIT"
] | null | null | null |
from hdwallets import BIP32DerivationError as BIP32DerivationError # noqa: F401
from fincorpy._transaction import Transaction as Transaction # noqa: F401
from fincorpy._wallet import generate_wallet as generate_wallet # noqa: F401
from fincorpy._wallet import privkey_to_address as privkey_to_address # noqa: F401
from fincorpy._wallet import privkey_to_pubkey as privkey_to_pubkey # noqa: F401
from fincorpy._wallet import pubkey_to_address as pubkey_to_address # noqa: F401
from fincorpy._wallet import seed_to_privkey as seed_to_privkey # noqa: F401
| 62.333333
| 83
| 0.83779
| 80
| 561
| 5.575
| 0.2
| 0.125561
| 0.161435
| 0.269058
| 0.439462
| 0.439462
| 0.295964
| 0.295964
| 0
| 0
| 0
| 0.05102
| 0.12656
| 561
| 8
| 84
| 70.125
| 0.859184
| 0.135472
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
5cd0c6cac5b5887e53d0aaf29b3b1d5a86a6267b
| 1,931
|
py
|
Python
|
tests/data/test_bo_remove_user_from_group.py
|
c17r/TagTrain
|
5aa1ca36439cc5e81d0c691f905a4bb879b78399
|
[
"MIT"
] | null | null | null |
tests/data/test_bo_remove_user_from_group.py
|
c17r/TagTrain
|
5aa1ca36439cc5e81d0c691f905a4bb879b78399
|
[
"MIT"
] | 7
|
2020-03-24T17:54:31.000Z
|
2021-09-21T12:34:34.000Z
|
tests/data/test_bo_remove_user_from_group.py
|
c17r/TagTrain
|
5aa1ca36439cc5e81d0c691f905a4bb879b78399
|
[
"MIT"
] | null | null | null |
import pytest
from . import db
from .db import database
from tagtrain import data
def test_unknown_owner(database):
    """An unknown owner name must raise ``Group.DoesNotExist``."""
    with pytest.raises(data.Group.DoesNotExist):
        # Return value is irrelevant here; the call is expected to raise.
        # (Previously bound to an unused local `group` — flake8 F841.)
        data.by_owner.remove_user_from_group('non-existent', db.GROUP_NAME, 'doesnt-matter')
def test_unknown_group(database):
    """An unknown group name must raise ``Group.DoesNotExist``."""
    with pytest.raises(data.Group.DoesNotExist):
        # Return value is irrelevant here; the call is expected to raise.
        # (Previously bound to an unused local `group` — flake8 F841.)
        data.by_owner.remove_user_from_group(db.OWNER_NAME, 'non-existent', 'doesnt-matter')
def test_unknown_member(database):
    """Removing a user who is not a member must raise ``Member.DoesNotExist``."""
    with pytest.raises(data.Member.DoesNotExist):
        # Return value is irrelevant here; the call is expected to raise.
        # (Previously bound to an unused local `group` — flake8 F841.)
        data.by_owner.remove_user_from_group(db.OWNER_NAME, db.GROUP_NAME, 'non-existent')
def test_good_non_empty(database):
    """Removing one member shrinks the group by exactly one and shifts the list."""
    # Baseline: fixture group starts with four members.
    grp = data.by_owner.find_group(db.OWNER_NAME, db.GROUP_NAME)
    assert grp.member_count == 4
    assert len(list(grp.members)) == 4

    # Remove the first member; the returned group reflects the removal.
    grp = data.by_owner.remove_user_from_group(db.OWNER_NAME, db.GROUP_NAME, 'one')
    assert grp.name == db.GROUP_NAME
    assert grp.reddit_name == db.OWNER_NAME
    assert grp.member_count == 3
    assert len(list(grp.members)) == 3
    assert grp.members[0].reddit_name == 'two'

    # Re-query to confirm the removal was persisted, not just reflected in memory.
    grp = data.by_owner.find_group(db.OWNER_NAME, db.GROUP_NAME)
    assert grp.member_count == 3
    assert len(list(grp.members)) == 3
def test_good_empty(database):
    """Removing every member one by one leaves an empty but still-queryable group."""
    # Baseline: fixture group starts with four members.
    grp = data.by_owner.find_group(db.OWNER_NAME, db.GROUP_NAME)
    assert grp.member_count == 4
    assert len(list(grp.members)) == 4

    # Snapshot the names first so we never mutate the collection while iterating it.
    doomed = [member.reddit_name for member in grp.members]
    for reddit_name in doomed:
        grp = data.by_owner.remove_user_from_group(db.OWNER_NAME, db.GROUP_NAME, reddit_name)

    assert grp.name == db.GROUP_NAME
    assert grp.reddit_name == db.OWNER_NAME
    assert grp.member_count == 0
    assert len(list(grp.members)) == 0

    # Re-query to confirm the empty state was persisted.
    grp = data.by_owner.find_group(db.OWNER_NAME, db.GROUP_NAME)
    assert grp.member_count == 0
    assert len(list(grp.members)) == 0
| 31.145161
| 100
| 0.726567
| 297
| 1,931
| 4.478114
| 0.144781
| 0.081203
| 0.082707
| 0.108271
| 0.795489
| 0.735338
| 0.735338
| 0.735338
| 0.735338
| 0.735338
| 0
| 0.008025
| 0.161056
| 1,931
| 61
| 101
| 31.655738
| 0.812963
| 0
| 0
| 0.55
| 0
| 0
| 0.035215
| 0
| 0
| 0
| 0
| 0
| 0.425
| 1
| 0.125
| false
| 0
| 0.1
| 0
| 0.225
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
5ce2735e1bca58dfe0b4deef4facbe13970e7371
| 179
|
py
|
Python
|
insect/models/__init__.py
|
Kradukman/beesUlb
|
1234658af3aff7d2f580212c01d8acec96167078
|
[
"MIT"
] | null | null | null |
insect/models/__init__.py
|
Kradukman/beesUlb
|
1234658af3aff7d2f580212c01d8acec96167078
|
[
"MIT"
] | null | null | null |
insect/models/__init__.py
|
Kradukman/beesUlb
|
1234658af3aff7d2f580212c01d8acec96167078
|
[
"MIT"
] | null | null | null |
from . import super_family
from . import family
from . import sub_family
from . import tribe
from . import genus
from . import specie
from . import sub_specie
from . import wizard
| 22.375
| 26
| 0.782123
| 27
| 179
| 5.074074
| 0.333333
| 0.583942
| 0.350365
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.173184
| 179
| 8
| 27
| 22.375
| 0.925676
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
5ce3a6c18aeb5d27ee3d7881af1a388d7243469c
| 40
|
py
|
Python
|
nanopores/py4gmsh/__init__.py
|
mitschabaude/nanopores
|
b1a7effed8e99ef862dd24cd9aada577d6ce28e1
|
[
"MIT"
] | 8
|
2016-09-07T01:59:31.000Z
|
2021-03-06T12:14:31.000Z
|
nanopores/py4gmsh/__init__.py
|
mitschabaude/nanopores
|
b1a7effed8e99ef862dd24cd9aada577d6ce28e1
|
[
"MIT"
] | null | null | null |
nanopores/py4gmsh/__init__.py
|
mitschabaude/nanopores
|
b1a7effed8e99ef862dd24cd9aada577d6ce28e1
|
[
"MIT"
] | 4
|
2017-12-06T17:43:01.000Z
|
2020-05-01T05:41:14.000Z
|
from basic import *
from extra import *
| 13.333333
| 19
| 0.75
| 6
| 40
| 5
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.2
| 40
| 2
| 20
| 20
| 0.9375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
7aa7deccc37ef3c0f83a23d08d44347e3c31a196
| 28
|
py
|
Python
|
odoo-13.0/venv/lib/python3.8/site-packages/ImageDraw.py
|
VaibhavBhujade/Blockchain-ERP-interoperability
|
b5190a037fb6615386f7cbad024d51b0abd4ba03
|
[
"MIT"
] | 3
|
2015-11-20T08:44:42.000Z
|
2016-12-14T01:40:03.000Z
|
odoo-13.0/venv/lib/python3.8/site-packages/ImageDraw.py
|
VaibhavBhujade/Blockchain-ERP-interoperability
|
b5190a037fb6615386f7cbad024d51b0abd4ba03
|
[
"MIT"
] | 1
|
2017-09-04T14:04:32.000Z
|
2020-05-26T19:04:00.000Z
|
odoo-13.0/venv/lib/python3.8/site-packages/ImageDraw.py
|
VaibhavBhujade/Blockchain-ERP-interoperability
|
b5190a037fb6615386f7cbad024d51b0abd4ba03
|
[
"MIT"
] | null | null | null |
from PIL.ImageDraw import *
| 14
| 27
| 0.785714
| 4
| 28
| 5.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 28
| 1
| 28
| 28
| 0.916667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
7aac396d0a9fa3abcb66dd784ddcd242ebbffd97
| 96
|
py
|
Python
|
venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/version.py
|
Retraces/UkraineBot
|
3d5d7f8aaa58fa0cb8b98733b8808e5dfbdb8b71
|
[
"MIT"
] | 2
|
2022-03-13T01:58:52.000Z
|
2022-03-31T06:07:54.000Z
|
venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/version.py
|
DesmoSearch/Desmobot
|
b70b45df3485351f471080deb5c785c4bc5c4beb
|
[
"MIT"
] | 19
|
2021-11-20T04:09:18.000Z
|
2022-03-23T15:05:55.000Z
|
venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydev_ipython/version.py
|
DesmoSearch/Desmobot
|
b70b45df3485351f471080deb5c785c4bc5c4beb
|
[
"MIT"
] | null | null | null |
/home/runner/.cache/pip/pool/94/b0/52/47c9ad945d5e0b3c3039e8e58dc840c9f4b2d28a43f1bd30fd08d1f7b4
| 96
| 96
| 0.895833
| 9
| 96
| 9.555556
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.40625
| 0
| 96
| 1
| 96
| 96
| 0.489583
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
8fb74bb038b11fd78998cf6abe7960c6b6f197f3
| 8,533
|
py
|
Python
|
models/dino.py
|
guilhermesurek/3D-ResNets-PyTorch
|
e90d1a7c9904a54b576566d4769d491121cad3c5
|
[
"MIT"
] | null | null | null |
models/dino.py
|
guilhermesurek/3D-ResNets-PyTorch
|
e90d1a7c9904a54b576566d4769d491121cad3c5
|
[
"MIT"
] | null | null | null |
models/dino.py
|
guilhermesurek/3D-ResNets-PyTorch
|
e90d1a7c9904a54b576566d4769d491121cad3c5
|
[
"MIT"
] | null | null | null |
import torch
import torch.nn as nn
import torch.nn.functional as F
class Head(nn.Module):
    """Temporal classification head.

    Runs a 2-layer LSTM over a sequence of per-frame feature vectors and
    applies a linear classifier to the last timestep's output.

    Args:
        n_classes: number of output classes.
        n_inputs: per-frame feature dimension (LSTM input and hidden size).
        dropout: inter-layer LSTM dropout probability.

    NOTE(review): ``n_inputs`` and ``dropout`` default to None, but
    ``nn.LSTM`` rejects None for both — callers must always supply them.
    Several earlier convolutional variants ("conf1".."conf6") that lived
    here as commented-out dead code have been removed; see VCS history.
    """

    def __init__(self, n_classes, n_inputs=None, dropout=None):
        super().__init__()
        self.n_inputs = n_inputs
        self.n_classes = n_classes
        self.n_layers = 2
        # Consolidate temporal features with a stacked LSTM (batch_first: N x T x F).
        self.lstm = nn.LSTM(input_size=self.n_inputs, hidden_size=self.n_inputs,
                            num_layers=self.n_layers, dropout=dropout, batch_first=True)
        self.fc = nn.Linear(self.n_inputs, self.n_classes)

    def forward(self, x):
        """Map (N, T, n_inputs) feature sequences to (N, n_classes) logits."""
        # Hidden state is zero-initialized each call: sequences are independent
        # between iterations, so no state is carried over.
        x, _ = self.lstm(x)
        x = x[:, -1, :]  # keep only the last timestep as the sequence summary
        x = self.fc(x)
        return x
class Dino_ResNet(nn.Module):
    """Frozen DINO ResNet-50 backbone followed by a trainable ``Head`` classifier.

    Args:
        n_classes: number of output classes.

    NOTE(review): downloads the backbone from torch.hub at construction time
    (network access). Also, ``Head(n_classes=...)`` is called without
    ``n_inputs``/``dropout``, which ``Head`` currently requires — confirm
    intended feature width (ResNet-50 emits 2048-d features) before use.
    """

    def __init__(self, n_classes=400):
        super().__init__()
        # Feature extractor: pretrained, frozen (no gradients flow into it).
        self.dino = torch.hub.load('facebookresearch/dino:main', 'dino_resnet50')
        for param in self.dino.parameters():
            param.requires_grad = False
        self.head = Head(n_classes=n_classes)

    def forward(self, x):
        """Classify a batch of clips shaped (N, C, T, H, W)."""
        # (N, C, T, H, W) -> (N, T, C, H, W) so each clip's frames feed the 2D backbone.
        x = x.permute(0, 2, 1, 3, 4)
        # One backbone pass per clip: dino maps (T, C, H, W) -> (T, feat).
        # (Replaces the old manual c==0 / torch.cat accumulation loop, which
        # also contained the `c=+1` typo for `c += 1` — same result, clearer.)
        feats = torch.stack([self.dino(clip) for clip in x], dim=0)  # N x T x feat
        # Insert a channel axis after batch: N x 1 x T x feat.
        # (Original note: drop this reshape when the Head is an LSTM.)
        x = torch.stack((feats,), 0).permute(1, 0, 2, 3)
        x = self.head(x)
        return x
class Dino_ViT(nn.Module):
    """Frozen DINO ViT-S/16 backbone followed by a trainable LSTM ``Head``.

    Args:
        n_classes: number of output classes.
        dropout_factor: inter-layer dropout for the Head's LSTM.

    NOTE(review): downloads the backbone from torch.hub at construction time
    (network access). ViT-S/16 emits 384-d features, matching ``n_inputs=384``.
    """

    def __init__(self, n_classes=400, dropout_factor=0.5):
        super().__init__()
        # Feature extractor: pretrained, frozen (no gradients flow into it).
        self.dino = torch.hub.load('facebookresearch/dino:main', 'dino_vits16')
        for param in self.dino.parameters():
            param.requires_grad = False
        self.head = Head(n_classes=n_classes, n_inputs=384, dropout=dropout_factor)

    def forward(self, x):
        """Classify a batch of clips shaped (N, C, T, H, W)."""
        # (N, C, T, H, W) -> (N, T, C, H, W) so each clip's frames feed the 2D backbone.
        x = x.permute(0, 2, 1, 3, 4)
        # One backbone pass per clip: dino maps (T, C, H, W) -> (T, 384).
        # (Replaces the old manual c==0 / torch.cat accumulation loop, which
        # also contained the `c=+1` typo for `c += 1` — same result, clearer.)
        feats = torch.stack([self.dino(clip) for clip in x], dim=0)  # N x T x 384
        # The LSTM head consumes (N, T, feat) directly — no extra reshape.
        x = self.head(feats)
        return x
def generate_model(model_arch, **kwargs):
    """Factory for DINO-based video classifiers.

    Args:
        model_arch: one of 'dino_resnet' or 'dino_vit'.
        **kwargs: forwarded to the chosen model's constructor.

    Returns:
        The constructed model.

    Raises:
        ValueError: if ``model_arch`` is not recognized. (Previously an
        unknown arch fell through and raised ``UnboundLocalError`` on the
        ``return model`` line.)
    """
    if model_arch == 'dino_resnet':
        model = Dino_ResNet(**kwargs)
    elif model_arch == 'dino_vit':
        model = Dino_ViT(**kwargs)
    else:
        raise ValueError(f"unknown model_arch: {model_arch!r}")
    return model
| 36.004219
| 147
| 0.535802
| 1,184
| 8,533
| 3.71875
| 0.121622
| 0.062457
| 0.054508
| 0.049057
| 0.804224
| 0.773563
| 0.772428
| 0.760164
| 0.751079
| 0.735635
| 0
| 0.061323
| 0.321575
| 8,533
| 237
| 148
| 36.004219
| 0.699257
| 0.530529
| 0
| 0.552239
| 0
| 0
| 0.024791
| 0.01357
| 0
| 0
| 0
| 0
| 0
| 1
| 0.104478
| false
| 0
| 0.044776
| 0
| 0.253731
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
8fb7daea755df835e924dc2fe78f7b3d386f330d
| 3,081
|
py
|
Python
|
tests/test_operator/test_parse_comparison.py
|
gruebel/pycep
|
063a17310d79b19ca2c154344854826bcc1a7020
|
[
"Apache-2.0"
] | 2
|
2022-03-03T15:33:26.000Z
|
2022-03-14T21:18:57.000Z
|
tests/test_operator/test_parse_comparison.py
|
gruebel/pycep
|
063a17310d79b19ca2c154344854826bcc1a7020
|
[
"Apache-2.0"
] | 14
|
2022-01-16T22:23:51.000Z
|
2022-03-21T04:26:27.000Z
|
tests/test_operator/test_parse_comparison.py
|
gruebel/pycep
|
063a17310d79b19ca2c154344854826bcc1a7020
|
[
"Apache-2.0"
] | null | null | null |
import json
from pathlib import Path
from assertpy import assert_that
from pycep import BicepParser
EXAMPLES_DIR = Path(__file__).parent / "examples/comparison"
BICEP_PARSER = BicepParser()
def _parse_and_compare(example_name: str) -> None:
    """Parse ``<example>/main.bicep`` and compare it to ``<example>/result.json``.

    Shared given/when/then body for every comparison-operator test below —
    the eight tests previously duplicated these five lines verbatim.
    """
    # given
    sub_dir_path = EXAMPLES_DIR / example_name
    file_path = sub_dir_path / "main.bicep"
    expected_result = json.loads((sub_dir_path / "result.json").read_text())
    # when
    result = BICEP_PARSER.parse(file_path=file_path)
    # then
    assert_that(result).is_equal_to(expected_result)


def test_parse_greater_than_or_equals() -> None:
    _parse_and_compare("greater_than_or_equals")


def test_parse_greater_than() -> None:
    _parse_and_compare("greater_than")


def test_parse_less_than_or_equals() -> None:
    _parse_and_compare("less_than_or_equals")


def test_parse_less_than() -> None:
    _parse_and_compare("less_than")


def test_parse_equals() -> None:
    _parse_and_compare("equals")


def test_parse_not_equals() -> None:
    _parse_and_compare("not_equals")


def test_parse_equals_case_insensitive() -> None:
    _parse_and_compare("equals_case_insensitive")


def test_parse_not_equals_case_insensitive() -> None:
    _parse_and_compare("not_equals_case_insensitive")
| 27.026316
| 76
| 0.715677
| 437
| 3,081
| 4.617849
| 0.09611
| 0.08672
| 0.11893
| 0.059465
| 0.925669
| 0.912785
| 0.912785
| 0.912785
| 0.895937
| 0.837463
| 0
| 0
| 0.176891
| 3,081
| 113
| 77
| 27.265487
| 0.795741
| 0.04122
| 0
| 0.592593
| 0
| 0
| 0.107545
| 0.024582
| 0
| 0
| 0
| 0
| 0.166667
| 1
| 0.148148
| false
| 0
| 0.074074
| 0
| 0.222222
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
8fc5ecca3b0a0a6980f52fea3d9e63dedd9b35ef
| 22,960
|
py
|
Python
|
layers/equivariant_linear.py
|
JoshuaMitton/InvariantGraphNetworks
|
f6d8f43c7a053425eee785d11c5de91ac50f367c
|
[
"Apache-2.0"
] | null | null | null |
layers/equivariant_linear.py
|
JoshuaMitton/InvariantGraphNetworks
|
f6d8f43c7a053425eee785d11c5de91ac50f367c
|
[
"Apache-2.0"
] | null | null | null |
layers/equivariant_linear.py
|
JoshuaMitton/InvariantGraphNetworks
|
f6d8f43c7a053425eee785d11c5de91ac50f367c
|
[
"Apache-2.0"
] | null | null | null |
# import tensorflow as tf
import torch
import numpy as np
class equi_2_to_2(torch.nn.Module):
    """Equivariant 2->2 linear layer over N x D x m x m inputs.

    Combines the 15 basis operations of the order-2 equivariant linear space
    with learnable (here frozen: requires_grad=False) coefficients, plus a
    diagonal bias and an all-entries bias.
    """

    def __init__(self, input_depth, output_depth, device):
        super(equi_2_to_2, self).__init__()
        self.basis_dimension = 15
        self.device = device
        # Glorot-style scale sqrt(2 / (D_in + D_out)); built with torch.mul on a
        # tensor scale (a plain Python int has no .type(), and matmul with a
        # scalar is invalid — see the commented-out TF translation below).
        self.coeffs_values = torch.mul(torch.randn(size=(input_depth, output_depth, self.basis_dimension), dtype=torch.float32), torch.sqrt(torch.tensor([2.]) / (input_depth + output_depth)))
        self.coeffs = torch.nn.Parameter(self.coeffs_values, requires_grad=False)
        self.diag_bias = torch.nn.Parameter(torch.zeros((1, output_depth, 1, 1), dtype=torch.float32), requires_grad=False)
        self.all_bias = torch.nn.Parameter(torch.zeros((1, output_depth, 1, 1), dtype=torch.float32), requires_grad=False)

    def ops_2_to_2(self, inputs, dim, normalization='inf', normalization_val=1.0):
        """Return the 15 basis operations for an N x D x m x m input.

        Args:
            inputs: N x D x m x m tensor.
            dim: m as an int tensor (used for normalization).
            normalization: 'inf' divides sum-based ops by m (or m**2); None disables.
            normalization_val: unused; kept for interface compatibility.
        """
        diag_part = torch.diagonal(inputs, dim1=-2, dim2=-1)  # N x D x m
        sum_diag_part = torch.sum(diag_part, dim=2, keepdim=True)  # N x D x 1
        sum_of_rows = torch.sum(inputs, dim=3)  # N x D x m
        sum_of_cols = torch.sum(inputs, dim=2)  # N x D x m
        sum_all = torch.sum(sum_of_rows, dim=2)  # N x D
        # op1 - (1234) - extract diag
        op1 = torch.diag_embed(diag_part)  # N x D x m x m
        # op2 - (1234) + (12)(34) - place sum of diag on diag
        op2 = torch.diag_embed(sum_diag_part.repeat(1, 1, dim))  # N x D x m x m
        # op3 - (1234) + (123)(4) - place sum of row i on diag ii
        op3 = torch.diag_embed(sum_of_rows)  # N x D x m x m
        # op4 - (1234) + (124)(3) - place sum of col i on diag ii
        op4 = torch.diag_embed(sum_of_cols)  # N x D x m x m
        # op5 - (1234) + (124)(3) + (123)(4) + (12)(34) + (12)(3)(4) - place sum of all entries on diag
        op5 = torch.diag_embed(torch.unsqueeze(sum_all, dim=2).repeat(1, 1, dim))  # N x D x m x m
        # op6 - (14)(23) + (13)(24) + (24)(1)(3) + (124)(3) + (1234) - place sum of col i on row i
        op6 = torch.unsqueeze(sum_of_cols, dim=3).repeat(1, 1, 1, dim)  # N x D x m x m
        # op7 - (14)(23) + (23)(1)(4) + (234)(1) + (123)(4) + (1234) - place sum of row i on row i
        op7 = torch.unsqueeze(sum_of_rows, dim=3).repeat(1, 1, 1, dim)  # N x D x m x m
        # op8 - (14)(2)(3) + (134)(2) + (14)(23) + (124)(3) + (1234) - place sum of col i on col i
        op8 = torch.unsqueeze(sum_of_cols, dim=2).repeat(1, 1, dim, 1)  # N x D x m x m
        # op9 - (13)(24) + (13)(2)(4) + (134)(2) + (123)(4) + (1234) - place sum of row i on col i
        op9 = torch.unsqueeze(sum_of_rows, dim=2).repeat(1, 1, dim, 1)  # N x D x m x m
        # op10 - (1234) + (14)(23) - identity
        op10 = inputs  # N x D x m x m
        # op11 - (1234) + (13)(24) - transpose
        op11 = inputs.permute(0, 1, 3, 2)  # N x D x m x m
        # op12 - (1234) + (234)(1) - place ii element in row i
        op12 = torch.unsqueeze(diag_part, dim=3).repeat(1, 1, 1, dim)  # N x D x m x m
        # op13 - (1234) + (134)(2) - place ii element in col i
        op13 = torch.unsqueeze(diag_part, dim=2).repeat(1, 1, dim, 1)  # N x D x m x m
        # op14 - (34)(1)(2) + (234)(1) + (134)(2) + (1234) + (12)(34) - place sum of diag in all entries
        op14 = torch.unsqueeze(sum_diag_part, dim=3).repeat(1, 1, dim, dim)  # N x D x m x m
        # op15 - sum of all ops - place sum of all entries in all entries
        op15 = torch.unsqueeze(torch.unsqueeze(sum_all, dim=2), dim=3).repeat(1, 1, dim, dim)  # N x D x m x m
        if normalization is not None:
            float_dim = dim.type(torch.FloatTensor)
            # BUG FIX: was `normalization is 'inf'` — identity comparison on a
            # string literal only worked via CPython interning and emits a
            # SyntaxWarning on modern Pythons. Use equality.
            if normalization == 'inf':
                op2 = torch.div(op2, float_dim)
                op3 = torch.div(op3, float_dim)
                op4 = torch.div(op4, float_dim)
                op5 = torch.div(op5, float_dim**2)
                op6 = torch.div(op6, float_dim)
                op7 = torch.div(op7, float_dim)
                op8 = torch.div(op8, float_dim)
                op9 = torch.div(op9, float_dim)
                op14 = torch.div(op14, float_dim)
                op15 = torch.div(op15, float_dim**2)
        return [op1, op2, op3, op4, op5, op6, op7, op8, op9, op10, op11, op12, op13, op14, op15]

    def forward(self, inputs, normalization='inf'):
        """Apply the layer: N x D x m x m -> N x S x m x m."""
        m = torch.tensor(inputs.shape[3], dtype=torch.int32, device=self.device)  # extract dimension
        ops_out = self.ops_2_to_2(inputs=inputs, dim=m, normalization=normalization)
        ops_out = torch.stack(ops_out, dim=2)  # N x D x 15 x m x m
        # Contract basis/depth dims; coeffs upcast to double to match the ops.
        output = torch.einsum('dsb,ndbij->nsij', self.coeffs.double(), ops_out)  # N x S x m x m
        # Bias: diag_bias only touches the diagonal; all_bias touches every entry.
        mat_diag_bias = self.diag_bias.expand(-1, -1, inputs.shape[3], inputs.shape[3])
        mat_diag_bias = torch.mul(mat_diag_bias, torch.eye(inputs.shape[3], device=self.device))
        output = output + self.all_bias + mat_diag_bias
        return output
# def equi_2_to_2(name, input_depth, output_depth, inputs, normalization='inf', normalization_val=1.0):
# '''
# :param name: name of layer
# :param input_depth: D
# :param output_depth: S
# :param inputs: N x D x m x m tensor
# :return: output: N x S x m x m tensor
# '''
# basis_dimension = 15
# # initialization values for variables
# coeffs_values = torch.matmul(torch.randn(size=(input_depth, output_depth, basis_dimension), dtype=torch.float32), torch.sqrt(2. / (input_depth + output_depth).type(torch.FloatTensor)))
# # coeffs_values = tf.multiply(tf.random_normal([input_depth, output_depth, basis_dimension], dtype=tf.float32), tf.sqrt(2. / tf.to_float(input_depth + output_depth)))
# #coeffs_values = tf.random_normal([input_depth, output_depth, basis_dimension], dtype=tf.float32)
# # define variables
# coeffs = torch.autograd.Variable(coeffs_values, requires_grad=True)
# # coeffs = tf.get_variable('coeffs', initializer=coeffs_values)
# m = inputs.shape[3].type(torch.IntTensor) # extract dimension
# # m = tf.to_int32(tf.shape(inputs)[3]) # extract dimension
# ops_out = ops_2_to_2(inputs, m, normalization=normalization)
# ops_out = torch.stack(ops_out, dim=2)
# # ops_out = tf.stack(ops_out, axis=2)
# output = torch.einsum('dsb,ndbij->nsij', coeffs, ops_out) # N x S x m x m
# # output = tf.einsum('dsb,ndbij->nsij', coeffs, ops_out) # N x S x m x m
# # bias
# diag_bias = torch.autograd.Variable(torch.zeros((1, output_depth, 1, 1), dtype=torch.float32), requires_grad=True)
# # diag_bias = tf.get_variable('diag_bias', initializer=tf.zeros([1, output_depth, 1, 1], dtype=tf.float32))
# all_bias = torch.autograd.Variable(torch.zeros((1, output_depth, 1, 1), dtype=torch.float32), requires_grad=True)
# # all_bias = tf.get_variable('all_bias', initializer=tf.zeros([1, output_depth, 1, 1], dtype=tf.float32))
# mat_diag_bias = torch.matmul(torch.unsqueeze(torch.unsqueeze(torch.eye(inputs.shape[3].type(torch.IntTensor)), 0), 0), diag_bias)
# # mat_diag_bias = tf.multiply(tf.expand_dims(tf.expand_dims(tf.eye(tf.to_int32(tf.shape(inputs)[3])), 0), 0), diag_bias)
# output = output + all_bias + mat_diag_bias
# return output
def equi_2_to_1(name, input_depth, output_depth, inputs, normalization='inf', normalization_val=1.0):
    '''
    Equivariant 2->1 linear layer (5-element basis).

    :param name: name of layer (unused; kept from the TF interface)
    :param input_depth: D
    :param output_depth: S
    :param inputs: N x D x m x m tensor
    :param normalization: forwarded to ops_2_to_1
    :param normalization_val: unused; kept for interface compatibility
    :return: output: N x S x m tensor
    '''
    basis_dimension = 5
    # BUG FIX: the original called .type(torch.FloatTensor) on a Python int
    # ((input_depth + output_depth)) and used torch.matmul with a scalar —
    # both raise at runtime. Build the Glorot-style scale as a tensor and use
    # torch.mul, matching the working equi_2_to_2.__init__ above.
    coeffs_values = torch.mul(torch.randn(size=(input_depth, output_depth, basis_dimension), dtype=torch.float32), torch.sqrt(torch.tensor([2.]) / (input_depth + output_depth)))
    coeffs = torch.autograd.Variable(coeffs_values, requires_grad=True)
    # BUG FIX: inputs.shape[3] is a Python int (no .type()); wrap it in a
    # tensor like equi_2_to_2.forward does.
    m = torch.tensor(inputs.shape[3], dtype=torch.int32)  # extract dimension
    ops_out = ops_2_to_1(inputs, m, normalization=normalization)
    ops_out = torch.stack(ops_out, dim=2)  # N x D x B x m
    output = torch.einsum('dsb,ndbi->nsi', coeffs, ops_out)  # N x S x m
    # bias: one scalar per output channel, broadcast over N and m
    bias = torch.autograd.Variable(torch.zeros((1, output_depth, 1), dtype=torch.float32), requires_grad=True)
    output = output + bias
    return output
def equi_1_to_2(name, input_depth, output_depth, inputs, normalization='inf', normalization_val=1.0):
    '''
    Equivariant 1->2 linear layer (5-element basis).

    :param name: name of layer (unused; kept from the TF interface)
    :param input_depth: D
    :param output_depth: S
    :param inputs: N x D x m tensor
    :param normalization: forwarded to ops_1_to_2
    :param normalization_val: unused; kept for interface compatibility
    :return: output: N x S x m x m tensor
    '''
    basis_dimension = 5
    # BUG FIX: the original called .type(torch.FloatTensor) on a Python int
    # ((input_depth + output_depth)) and used torch.matmul with a scalar —
    # both raise at runtime. Build the Glorot-style scale as a tensor and use
    # torch.mul, matching the working equi_2_to_2.__init__ above.
    coeffs_values = torch.mul(torch.randn(size=(input_depth, output_depth, basis_dimension), dtype=torch.float32), torch.sqrt(torch.tensor([2.]) / (input_depth + output_depth)))
    coeffs = torch.autograd.Variable(coeffs_values, requires_grad=True)
    # BUG FIX: inputs is rank 3 (N x D x m), so shape[3] raised IndexError; the
    # original TF code (tf.shape(inputs)[2]) confirms index 2 is intended.
    # Also wrap in a tensor (a Python int has no .type()).
    m = torch.tensor(inputs.shape[2], dtype=torch.int32)  # extract dimension
    ops_out = ops_1_to_2(inputs, m, normalization=normalization)
    ops_out = torch.stack(ops_out, dim=2)  # N x D x B x m x m
    output = torch.einsum('dsb,ndbij->nsij', coeffs, ops_out)  # N x S x m x m
    # bias: one scalar per output channel, broadcast over N and both m axes
    bias = torch.autograd.Variable(torch.zeros((1, output_depth, 1, 1), dtype=torch.float32), requires_grad=True)
    output = output + bias
    return output
def equi_1_to_1(name, input_depth, output_depth, inputs, normalization='inf', normalization_val=1.0):
    '''
    Equivariant 1->1 linear layer (2-element basis).

    :param name: name of layer (unused; kept from the TF interface)
    :param input_depth: D
    :param output_depth: S
    :param inputs: N x D x m tensor
    :param normalization: forwarded to ops_1_to_1
    :param normalization_val: unused; kept for interface compatibility
    :return: output: N x S x m tensor
    '''
    basis_dimension = 2
    # BUG FIX: the original called .type(torch.FloatTensor) on a Python int
    # ((input_depth + output_depth)) and used torch.matmul with a scalar —
    # both raise at runtime. Build the Glorot-style scale as a tensor and use
    # torch.mul, matching the working equi_2_to_2.__init__ above.
    coeffs_values = torch.mul(torch.randn(size=(input_depth, output_depth, basis_dimension), dtype=torch.float32), torch.sqrt(torch.tensor([2.]) / (input_depth + output_depth)))
    coeffs = torch.autograd.Variable(coeffs_values, requires_grad=True)
    # BUG FIX: inputs is rank 3 (N x D x m), so shape[3] raised IndexError; the
    # original TF code (tf.shape(inputs)[2]) confirms index 2 is intended.
    # Also wrap in a tensor (a Python int has no .type()).
    m = torch.tensor(inputs.shape[2], dtype=torch.int32)  # extract dimension
    ops_out = ops_1_to_1(inputs, m, normalization=normalization)
    ops_out = torch.stack(ops_out, dim=2)  # N x D x B x m
    output = torch.einsum('dsb,ndbi->nsi', coeffs, ops_out)  # N x S x m
    # bias: one scalar per output channel, broadcast over N and m
    bias = torch.autograd.Variable(torch.zeros((1, output_depth, 1), dtype=torch.float32), requires_grad=True)
    output = output + bias
    return output
def equi_basic(name, input_depth, output_depth, inputs):
    '''
    Basic equivariant layer on 2nd-order tensors, using the 4-element basis
    {identity, column-sum, row-sum, total-sum}.

    :param name: name of layer (unused in this PyTorch port; kept for API
                 compatibility with the original TF variable-scope version)
    :param input_depth: D, number of input channels
    :param output_depth: S, number of output channels
    :param inputs: N x D x m x m tensor
    :return: output: N x S x m x m tensor
    '''
    basis_dimension = 4
    # Glorot-style init: scale standard-normal coefficients by sqrt(2 / (fan_in + fan_out)).
    # The original called torch.matmul with a Python float (TypeError) and
    # .type(torch.FloatTensor) on a plain int (AttributeError).
    scale = (2.0 / (input_depth + output_depth)) ** 0.5
    coeffs = (torch.randn(input_depth, output_depth, basis_dimension, dtype=torch.float32) * scale).requires_grad_()
    # NOTE(review): weights are re-drawn on every call; to be trainable they
    # should live on an nn.Module — kept as-is to preserve the interface.
    m = inputs.shape[3]  # plain int; the original called .type() on it (AttributeError)
    float_dim = float(m)
    # apply the four basis ops
    ops_out = []
    # w1 - identity
    ops_out.append(inputs)
    # w2 - normalized sum over columns, broadcast back to m x m
    sum_of_cols = torch.sum(inputs, dim=2) / float_dim  # N x D x m
    ops_out.append(torch.unsqueeze(sum_of_cols, dim=2).repeat(1, 1, m, 1))  # N x D x m x m
    # w3 - normalized sum over rows, broadcast back to m x m
    sum_of_rows = torch.sum(inputs, dim=3) / float_dim  # N x D x m
    ops_out.append(torch.unsqueeze(sum_of_rows, dim=3).repeat(1, 1, 1, m))  # N x D x m x m
    # w4 - sum of all entries (note: sum_of_rows is already divided by m, so
    # this matches the original's total/m**3 scaling exactly)
    sum_all = torch.sum(sum_of_rows, dim=2) / (float_dim ** 2)  # N x D
    ops_out.append(torch.unsqueeze(torch.unsqueeze(sum_all, dim=2), dim=3).repeat(1, 1, m, m))  # N x D x m x m
    ops_out = torch.stack(ops_out, dim=2)  # N x D x B x m x m
    output = torch.einsum('dsb,ndbij->nsij', coeffs, ops_out)  # N x S x m x m
    # per-output-channel bias, broadcast over batch and both set dimensions
    bias = torch.zeros((1, output_depth, 1, 1), dtype=torch.float32, requires_grad=True)
    output = output + bias
    return output
# def ops_2_to_2(inputs, dim, normalization='inf', normalization_val=1.0): # N x D x m x m
# diag_part = torch.diagonal(inputs) # N x D x m
# sum_diag_part = torch.sum(diag_part, dim=2, keepdim=True) # N x D x 1
# sum_of_rows = torch.sum(inputs, dim=3) # N x D x m
# sum_of_cols = torch.sum(inputs, dim=2) # N x D x m
# sum_all = torch.sum(sum_of_rows, dim=2) # N x D
# # op1 - (1234) - extract diag
# op1 = torch.diagonal(diag_part) # N x D x m x m
# # op2 - (1234) + (12)(34) - place sum of diag on diag
# op2 = torch.diagonal(sum_diag_part.repeat(1, 1, dim)) # N x D x m x m
# # op3 - (1234) + (123)(4) - place sum of row i on diag ii
# op3 = torch.diagonal(sum_of_rows) # N x D x m x m
# # op4 - (1234) + (124)(3) - place sum of col i on diag ii
# op4 = torch.diagonal(sum_of_cols) # N x D x m x m
# # op5 - (1234) + (124)(3) + (123)(4) + (12)(34) + (12)(3)(4) - place sum of all entries on diag
# op5 = torch.diagonal(torch.unsqueeze(sum_all, dim=2).repeat(1, 1, dim)) # N x D x m x m
# # op6 - (14)(23) + (13)(24) + (24)(1)(3) + (124)(3) + (1234) - place sum of col i on row i
# op6 = torch.unsqueeze(sum_of_cols, dim=3).repeat(1, 1, 1, dim) # N x D x m x m
# # op7 - (14)(23) + (23)(1)(4) + (234)(1) + (123)(4) + (1234) - place sum of row i on row i
# op7 = torch.unsqueeze(sum_of_rows, dim=3).repeat(1, 1, 1, dim) # N x D x m x m
# # op8 - (14)(2)(3) + (134)(2) + (14)(23) + (124)(3) + (1234) - place sum of col i on col i
# op8 = torch.unsqueeze(sum_of_cols, dim=2).repeat(1, 1, dim, 1) # N x D x m x m
# # op9 - (13)(24) + (13)(2)(4) + (134)(2) + (123)(4) + (1234) - place sum of row i on col i
# op9 = torch.unsqueeze(sum_of_rows, dim=2).repeat(1, 1, dim, 1) # N x D x m x m
# # op10 - (1234) + (14)(23) - identity
# op10 = inputs # N x D x m x m
# # op11 - (1234) + (13)(24) - transpose
# op11 = inputs.permute(0, 1, 3, 2) # N x D x m x m
# # op12 - (1234) + (234)(1) - place ii element in row i
# op12 = torch.unsqueeze(diag_part, dim=3).repeat(1, 1, 1, dim) # N x D x m x m
# # op13 - (1234) + (134)(2) - place ii element in col i
# op13 = torch.unsqueeze(diag_part, dim=2).repeat(1, 1, dim, 1) # N x D x m x m
# # op14 - (34)(1)(2) + (234)(1) + (134)(2) + (1234) + (12)(34) - place sum of diag in all entries
# op14 = torch.unsqueeze(sum_diag_part, dim=3).repeat(1, 1, dim, dim) # N x D x m x m
# # op15 - sum of all ops - place sum of all entries in all entries
# op15 = torch.unsqueeze(torch.unsqueeze(sum_all, dim=2), dim=3).repeat(1, 1, dim, dim) # N x D x m x m
# if normalization is not None:
# float_dim = dim.type(torch.FloatTensor)
# if normalization is 'inf':
# op2 = torch.div(op2, float_dim)
# op3 = torch.div(op3, float_dim)
# op4 = torch.div(op4, float_dim)
# op5 = torch.div(op5, float_dim**2)
# op6 = torch.div(op6, float_dim)
# op7 = torch.div(op7, float_dim)
# op8 = torch.div(op8, float_dim)
# op9 = torch.div(op9, float_dim)
# op14 = torch.div(op14, float_dim)
# op15 = torch.div(op15, float_dim**2)
# return [op1, op2, op3, op4, op5, op6, op7, op8, op9, op10, op11, op12, op13, op14, op15]
def ops_2_to_1(inputs, dim, normalization='inf', normalization_val=1.0):  # N x D x m x m
    '''
    The 5 linear equivariant ops mapping a 2nd-order tensor to 1st-order tensors.

    Ported from TensorFlow to PyTorch: the sibling equi_* layers in this module
    pass torch tensors, and tf.matrix_diag_part is deprecated.  Also replaces
    the `normalization is 'inf'` identity comparison (SyntaxWarning,
    implementation-dependent) with ==.

    :param inputs: N x D x m x m torch tensor
    :param dim: m, the set size
    :param normalization: 'inf' divides the summed ops by dim (dim**2 for op5)
    :param normalization_val: unused; kept for API compatibility
    :return: list of five N x D x m tensors
    '''
    diag_part = torch.diagonal(inputs, dim1=2, dim2=3)  # N x D x m
    sum_diag_part = torch.sum(diag_part, dim=2, keepdim=True)  # N x D x 1
    sum_of_rows = torch.sum(inputs, dim=3)  # N x D x m
    sum_of_cols = torch.sum(inputs, dim=2)  # N x D x m
    sum_all = torch.sum(inputs, dim=(2, 3))  # N x D
    # op1 - (123) - extract diag
    op1 = diag_part  # N x D x m
    # op2 - (123) + (12)(3) - tile sum of diag part
    op2 = sum_diag_part.repeat(1, 1, dim)  # N x D x m
    # op3 - (123) + (13)(2) - place sum of row i in element i
    op3 = sum_of_rows  # N x D x m
    # op4 - (123) + (23)(1) - place sum of col i in element i
    op4 = sum_of_cols  # N x D x m
    # op5 - (1)(2)(3) + (123) + (12)(3) + (13)(2) + (23)(1) - tile sum of all entries
    op5 = torch.unsqueeze(sum_all, dim=2).repeat(1, 1, dim)  # N x D x m
    if normalization is not None:
        float_dim = float(dim)
        if normalization == 'inf':
            op2 = op2 / float_dim
            op3 = op3 / float_dim
            op4 = op4 / float_dim
            op5 = op5 / float_dim ** 2
    return [op1, op2, op3, op4, op5]
def ops_1_to_2(inputs, dim, normalization='inf', normalization_val=1.0):  # N x D x m
    '''
    The 5 linear equivariant ops mapping a 1st-order tensor to 2nd-order tensors.

    Ported from TensorFlow to PyTorch: equi_2_to_1's sibling layers in this
    module call this with torch tensors, so the tf ops could never run.  Also
    replaces the `normalization is 'inf'` identity comparison (SyntaxWarning,
    implementation-dependent) with ==.

    :param inputs: N x D x m torch tensor
    :param dim: m, the set size
    :param normalization: 'inf' divides op2 and op5 by dim
    :param normalization_val: unused; kept for API compatibility
    :return: list of five N x D x m x m tensors
    '''
    sum_all = torch.sum(inputs, dim=2, keepdim=True)  # N x D x 1
    # op1 - (123) - place on diag
    op1 = torch.diag_embed(inputs)  # N x D x m x m
    # op2 - (123) + (12)(3) - tile sum on diag
    op2 = torch.diag_embed(sum_all.repeat(1, 1, dim))  # N x D x m x m
    # op3 - (123) + (13)(2) - tile element i in row i
    op3 = torch.unsqueeze(inputs, dim=2).repeat(1, 1, dim, 1)  # N x D x m x m
    # op4 - (123) + (23)(1) - tile element i in col i
    op4 = torch.unsqueeze(inputs, dim=3).repeat(1, 1, 1, dim)  # N x D x m x m
    # op5 - (1)(2)(3) + (123) + (12)(3) + (13)(2) + (23)(1) - tile sum of all entries
    op5 = torch.unsqueeze(sum_all, dim=3).repeat(1, 1, dim, dim)  # N x D x m x m
    if normalization is not None:
        float_dim = float(dim)
        if normalization == 'inf':
            op2 = op2 / float_dim
            op5 = op5 / float_dim
    return [op1, op2, op3, op4, op5]
def ops_1_to_1(inputs, dim, normalization='inf', normalization_val=1.0):  # N x D x m
    '''
    The 2 linear equivariant ops mapping a 1st-order tensor to 1st-order tensors.

    Ported from TensorFlow to PyTorch: equi_1_to_1 in this module calls this
    with torch tensors, so the tf ops could never run.  Also replaces the
    `normalization is 'inf'` identity comparison (SyntaxWarning,
    implementation-dependent) with ==.

    :param inputs: N x D x m torch tensor
    :param dim: m, the set size
    :param normalization: 'inf' divides op2 by dim
    :param normalization_val: unused; kept for API compatibility
    :return: list of two N x D x m tensors
    '''
    sum_all = torch.sum(inputs, dim=2, keepdim=True)  # N x D x 1
    # op1 - (12) - identity
    op1 = inputs  # N x D x m
    # op2 - (1)(2) - tile sum of all
    op2 = sum_all.repeat(1, 1, dim)  # N x D x m
    if normalization is not None:
        if normalization == 'inf':
            op2 = op2 / float(dim)
    return [op1, op2]
| 48.644068
| 204
| 0.627134
| 3,908
| 22,960
| 3.544012
| 0.045036
| 0.021661
| 0.018195
| 0.022816
| 0.893141
| 0.866498
| 0.829892
| 0.815307
| 0.803682
| 0.792563
| 0
| 0.058972
| 0.226002
| 22,960
| 471
| 205
| 48.747346
| 0.720387
| 0.572082
| 0
| 0.364865
| 0
| 0
| 0.011404
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.067568
| false
| 0
| 0.013514
| 0
| 0.148649
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
8fcf7fbd8c2fc6e64aa225d23c5bdbdc75aeaf42
| 44
|
py
|
Python
|
je_editor/ui/ui_utils/keyword/__init__.py
|
JE-Chen/je_editor
|
2f18dedb6f0eb27c38668dc53f520739c8d5c6c6
|
[
"MIT"
] | 1
|
2021-12-10T14:57:15.000Z
|
2021-12-10T14:57:15.000Z
|
je_editor/ui/ui_utils/keyword/__init__.py
|
JE-Chen/je_editor
|
2f18dedb6f0eb27c38668dc53f520739c8d5c6c6
|
[
"MIT"
] | null | null | null |
je_editor/ui/ui_utils/keyword/__init__.py
|
JE-Chen/je_editor
|
2f18dedb6f0eb27c38668dc53f520739c8d5c6c6
|
[
"MIT"
] | null | null | null |
from je_editor.ui.ui_utils.keyword import *
| 22
| 43
| 0.818182
| 8
| 44
| 4.25
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.090909
| 44
| 1
| 44
| 44
| 0.85
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
89119cee789369ffa347bd6d8866c60bf754b098
| 11,650
|
py
|
Python
|
maoyan/maoyan/spiders/weibo.py
|
hellending/-requests-selenium-
|
3dca09db77eace30b44174c51846c6e85e68388a
|
[
"Apache-2.0"
] | null | null | null |
maoyan/maoyan/spiders/weibo.py
|
hellending/-requests-selenium-
|
3dca09db77eace30b44174c51846c6e85e68388a
|
[
"Apache-2.0"
] | null | null | null |
maoyan/maoyan/spiders/weibo.py
|
hellending/-requests-selenium-
|
3dca09db77eace30b44174c51846c6e85e68388a
|
[
"Apache-2.0"
] | null | null | null |
from selenium import webdriver
import time,re,requests,csv,os,socket
from lxml import etree
import os
import sys
from selenium.webdriver.remote.webelement import WebElement
socket.setdefaulttimeout(7)
os.environ['REQUESTS_CA_BUNDLE'] = os.path.join(os.path.dirname(sys.argv[0]), 'cacert.pem')
from selenium.webdriver.common.action_chains import ActionChains
options = webdriver.ChromeOptions()
browser = webdriver.Chrome()
browser.maximize_window()
browser.get('https://weibo.com/u/6718757082/home?wvr=5')
print('您将有1分钟时间登陆......')
# time.sleep(7)
# browser.find_element_by_xpath('//*[@id="loginname"]').send_keys('18861560575')
# browser.find_element_by_xpath('//*[@id="pl_login_form"]/div/div[3in]/div[2]/div/put').send_keys('1364350280wsq')
time.sleep(60)
f = open(r'.\1.txt','r',encoding='utf-8-sig')
s = f.readlines()[0]
if os.path.exists(r'.\weibo_data_财经.csv'):
os.remove(r'.\weibo_data_财经.csv')
f = open(r'.\weibo_data_财经.csv', 'w', encoding='utf-8-sig')
csv_writer = csv.writer(f)
csv_writer.writerow(['微博名', '性别', '所在地', '粉丝数', '联系方式', '简介'])
f.flush()
browser.find_element_by_xpath('//*[@id="plc_top"]/div/div/div[2]/input').send_keys(s)
time.sleep(1)
browser.find_element_by_xpath('//*[@id="plc_top"]/div/div/div[2]/a').click()
browser.find_element_by_xpath('//*[@id="pl_feedtop_top"]/div[3]/a').click()
time.sleep(1)
list_history = []
# browser.find_element_by_xpath('/html/body/div[8]/div[2]/div/div[1.txt]/div/dl[2]/dd/input').clear()
# browser.find_element_by_xpath('/html/body/div[8]/div[2]/div/div[1.txt]/div/dl[2]/dd/input').send_keys(s)
browser.find_element_by_xpath('//*[@id="radio05"]').click()
# move = browser.find_element_by_xpath('//*[@id="pl_user_filtertab"]/div[1.txt]/ul/li[2]/span')
# ActionChains(browser).move_to_element(move).perform()
browser.find_element_by_xpath('/html/body/div[7]/div[2]/div/div[2]/a[1]').click()
browser.find_element_by_xpath('/html/body/div[1]/div[2]/ul/li[2]/a').click()
proxies = {'http':'http://153.99.22.113'}
m = 1
while m<=50:
html = browser.execute_script('return document.documentElement.outerHTML')
parse_html = etree.HTML(html)
people_src_list = parse_html.xpath('//div[@class="avator"]/a/@href')
print(people_src_list)
cookies = browser.get_cookies()
url = "http:"
session = requests.session()
cookieJar = requests.cookies.RequestsCookieJar()
for i in cookies:
cookieJar.set(i["name"],i["value"])
session.cookies.update(cookieJar)
for i in people_src_list:
try:
url1 = str(url)+str(i)+"?ishot=1.txt"
print("url1: ",url1)
headers = {'User-Agent':'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/84.0.4147.125 Safari/537.36'}
html = session.get(url=url1,headers=headers,timeout=(5,5)).text
socket.setdefaulttimeout(7)
pattern = re.compile(r"oid']='(.*?)';")
src = pattern.findall(html,re.S)[0]
pattern = re.compile(r'class=\\"username\\">(.*?)<\\/h1>')
result1 = pattern.findall(html,re.S)[0]
print(result1)
#已经收录过的用户不会再被收录
if result1 in list_history:
continue
try:
k = 2
flag = 0
while True:
if(k>=20):
flag = 1
break
pattern = re.compile(r'<strong class=\\"W_f1{}\\">(.*?)<\\/strong>'.format(k))
list = pattern.findall(html, re.S)
if len(list)>=2:
result2 = list[1]
print(result2)
break
k+=2
if flag==1 or int(result2)<1000:
continue
except:
pass
pattern_sex = re.compile(r'\\"icon_bed\\"><a><i class=\\"W_icon icon_pf_(.*?)\\"><\\/i>')
try:
text_sex = pattern_sex.findall(html,re.S)[0]
except:
print('no sex')
continue
if text_sex=='male':
result_sex = '男'
else:
result_sex = '女'
url2 = str(url)+"//weibo.com/"+str(src)+"/about"
pattern1 = re.compile(r"page_id']='(.*?)';")
src1 = pattern1.findall(html,re.S)[0]
html = session.get(url2,timeout=(5,5)).text
socket.setdefaulttimeout(7)
pattern = re.compile('<title>(.*?)</title>')
t = pattern.findall(html,re.S)[0]
if(t=='404错误'):
url2 = str(url)+"//weibo.com/p/"+str(src1)+"/info?mod=pedit_more"
html = session.get(url2,timeout=(5,5)).text
socket.setdefaulttimeout(7)
print("url2: ",url2)
# print(html)
# browser.find_element_by_xpath('//*[@id="pl_user_feedList"]/div[1.txt]/div[1.txt]/a').click()
# windows = browser.window_handles
# time.sleep(5)
# browser.switch_to.window(windows[-1.txt])
# js = 'var q=document.documentElement.scrollTop={}'.format(500)
# browser.execute_script(js)
# html = browser.execute_script('return document.documentElement.outerHTML')
# print(html)
# browser.find_element_by_css_selector("[class='WB_cardmore S_txt1 S_line1 clearfix']").click() #还要泛化
# time.sleep(2)
# js = 'var q=document.documentElement.scrollTop={}'.format(500)
# browser.execute_script(js)
# html = browser.execute_script('return document.documentElement.outerHTML')
#需要一个数据清洗函数和可行的正则表达式
# print(html)
pattern = re.compile(r'<span class=\\"pt_title S_txt2\\">(.*?)<\\/span>.*?<span class=\\"pt_detail\\">(.*?)<\\/span>')
ss = pattern.findall(html,re.S)
result3 = ''
result_location = ''
result_intro = ''
for z in range(len(ss)):
if ('QQ' in ss[z][0]) or ('电话' in ss[z][0]) or ('微信' in ss[z][0]) or ('邮箱' in ss[z][0]):
result3+=str(ss[z][0])+str(ss[z][1])+" "
elif '所在地' in ss[z][0]:
result_location+=str(ss[z][0])+str(ss[z][1])
elif '简介' in ss[z][0]:
result_intro += str(ss[z][0]) + str(ss[z][1])
if result3=='':
result3 = '无'
if result_location=='':
result_location = '无'
if result_intro=='':
result_intro = '无'
print(result3)
# result_intro = ''
# pattern_intro = re.compile(r'<p class=\\"p_txt\\">(.*?)<\\/p>')
# try:
# result_intro = pattern_intro.findall(html,re.S)[0]
# except:
# result_intro = '无'
csv_writer.writerow([result1,result_sex,result_location,result2,result3,result_intro])
f.flush()
list_history.append(result1)
# time.sleep(1.txt)
except:
continue
browser.find_element_by_class_name('next').click()
m+=1
# time.sleep(1)
# if os.path.exists(r'.\weibo_data_金融.csv'):
# os.remove(r'.\weibo_data_金融.csv')
# f = open(r'.\weibo_data_金融.csv', 'w', encoding='utf-8-sig')
# csv_writer = csv.writer(f)
# csv_writer.writerow(['微博名', '性别', '所在地', '粉丝数', '联系方式', '简介'])
# f.flush()
# browser.find_element_by_xpath('//*[@id="pl_feedtop_top"]/div[3]/a').click()
# browser.find_element_by_xpath('/html/body/div[8]/div[2]/div/div[1.txt]/div/dl[2]/dd/input').clear()
# browser.find_element_by_xpath('/html/body/div[8]/div[2]/div/div[1.txt]/div/dl[2]/dd/input').send_keys('金融')
# browser.find_element_by_xpath('/html/body/div[8]/div[2]/div/div[2]/a[1.txt]').click()
# time.sleep(2)
# proxies = {'http':'http://153.99.22.113'}
# m = 1.txt
# while m<=50:
# html = browser.execute_script('return document.documentElement.outerHTML')
# parse_html = etree.HTML(html)
# people_src_list = parse_html.xpath('//div[@class="avator"]/a/@href')
# print(people_src_list)
# cookies = browser.get_cookies()
# url = "http:"
# session = requests.session()
# cookieJar = requests.cookies.RequestsCookieJar()
# for i in cookies:
# cookieJar.set(i["name"],i["value"])
# session.cookies.update(cookieJar)
# for i in people_src_list:
# try:
# url1 = str(url)+str(i)+"?ishot=1.txt"
# print("url1: ",url1)
# headers = {'User-Agent':'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/84.0.4147.125 Safari/537.36'}
# html = session.get(url=url1,headers=headers,timeout=(5,5)).text
# socket.setdefaulttimeout(7)
# pattern = re.compile(r"oid']='(.*?)';")
# src = pattern.findall(html,re.S)[0]
# pattern = re.compile(r'class=\\"username\\">(.*?)<\\/h1>')
# result1 = pattern.findall(html,re.S)[0]
# print(result1)
# #已经收录过的用户不会再被收录
# if result1 in list_history:
# continue
# try:
# k = 2
# flag = 0
# while True:
# if(k>=20):
# flag = 1.txt
# break
# pattern = re.compile(r'<strong class=\\"W_f1{}\\">(.*?)<\\/strong>'.format(k))
# list = pattern.findall(html, re.S)
# if len(list)>=2:
# result2 = list[1.txt]
# print(result2)
# break
# k+=2
# if flag==1.txt or int(result2)<1000 or int(result2)>10000:
# continue
# except:
# pass
# pattern_sex = re.compile(r'\\"icon_bed\\"><a><i class=\\"W_icon icon_pf_(.*?)\\"><\\/i>')
# try:
# text_sex = pattern_sex.findall(html,re.S)[0]
# except:
# print('no sex')
# continue
# if text_sex=='male':
# result_sex = '男'
# else:
# result_sex = '女'
# url2 = str(url)+"//weibo.com/"+str(src)+"/about"
# pattern1 = re.compile(r"page_id']='(.*?)';")
# src1 = pattern1.findall(html,re.S)[0]
# html = session.get(url2,timeout=(5,5)).text
# socket.setdefaulttimeout(7)
# pattern = re.compile('<title>(.*?)</title>')
# t = pattern.findall(html,re.S)[0]
# if(t=='404错误'):
# url2 = str(url)+"//weibo.com/p/"+str(src1)+"/info?mod=pedit_more"
# html = session.get(url2,timeout=(5,5)).text
# socket.setdefaulttimeout(7)
# print("url2: ",url2)
# pattern = re.compile(r'<span class=\\"pt_title S_txt2\\">(.*?)<\\/span>.*?<span class=\\"pt_detail\\">(.*?)<\\/span>')
# ss = pattern.findall(html,re.S)
# result3 = ''
# result_location = ''
# result_intro = ''
# for z in range(len(ss)):
# if ('QQ' in ss[z][0]) or ('电话' in ss[z][0]) or ('微信' in ss[z][0]) or ('邮箱' in ss[z][0]):
# result3+=str(ss[z][0])+str(ss[z][1.txt])+" "
# elif '所在地' in ss[z][0]:
# result_location+=str(ss[z][0])+str(ss[z][1.txt])
# elif '简介' in ss[z][0]:
# result_intro+=str(ss[z][0])+str(ss[z][1.txt])
#
# if result3=='':
# result3 = '无'
# if result_location=='':
# result_location = '无'
# if result_intro=='':
# result_intro = '无'
# print(result3)
# # result_intro = ''
# # pattern_intro = re.compile(r'<p class=\\"p_txt\\">(.*?)<\\/p>')
# # try:
# # result_intro = pattern_intro.findall(html,re.S)[0]
# # except:
# # result_intro = '无'
# csv_writer.writerow([result1,result_sex,result_location,result2,result3,result_intro])
# f.flush()
# list_history.append(result1)
# time.sleep(1.txt)
# except:
# continue
# browser.find_element_by_class_name('next').click()
# m+=1.txt
# time.sleep(1.txt)
print('数据收录完毕。。。。。')
| 42.210145
| 152
| 0.560944
| 1,571
| 11,650
| 4.031827
| 0.16359
| 0.011367
| 0.053994
| 0.059994
| 0.840227
| 0.838333
| 0.811494
| 0.795864
| 0.77155
| 0.756078
| 0
| 0.038839
| 0.239742
| 11,650
| 276
| 153
| 42.210145
| 0.676301
| 0.525837
| 0
| 0.173228
| 0
| 0.015748
| 0.191283
| 0.075619
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.007874
| 0.055118
| 0
| 0.055118
| 0.070866
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
64fd2f9d5b5aa14d75cca139bbf71ba41ccdcdfc
| 39
|
py
|
Python
|
appviews/test.py
|
johnderm/remote
|
54895fe6e6877407fd0b076d37c09f8b6aacfdfa
|
[
"MIT"
] | null | null | null |
appviews/test.py
|
johnderm/remote
|
54895fe6e6877407fd0b076d37c09f8b6aacfdfa
|
[
"MIT"
] | null | null | null |
appviews/test.py
|
johnderm/remote
|
54895fe6e6877407fd0b076d37c09f8b6aacfdfa
|
[
"MIT"
] | null | null | null |
dict = {'1':2, '2':3}
print(dict['1'])
| 19.5
| 22
| 0.461538
| 8
| 39
| 2.25
| 0.625
| 0.555556
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.147059
| 0.128205
| 39
| 2
| 23
| 19.5
| 0.382353
| 0
| 0
| 0
| 0
| 0
| 0.076923
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.5
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 6
|
8f11aa0d60465bb3d29c240abcf7387f57e4bdb9
| 30
|
py
|
Python
|
bin/secret_parser.py
|
fasrc/hubzero-docker
|
911477e328156ddfeb9ab02cbb030276ce8b269b
|
[
"MIT"
] | null | null | null |
bin/secret_parser.py
|
fasrc/hubzero-docker
|
911477e328156ddfeb9ab02cbb030276ce8b269b
|
[
"MIT"
] | null | null | null |
bin/secret_parser.py
|
fasrc/hubzero-docker
|
911477e328156ddfeb9ab02cbb030276ce8b269b
|
[
"MIT"
] | null | null | null |
import os
import configparser
| 10
| 19
| 0.866667
| 4
| 30
| 6.5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.133333
| 30
| 2
| 20
| 15
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
8f4b62f16812fe1ec8afc5c339d540a6d77a1dae
| 108
|
py
|
Python
|
terrascript/kubernetes/__init__.py
|
amlodzianowski/python-terrascript
|
1111affe6cd30d9b8b7bc74ae4e27590f7d4dc49
|
[
"BSD-2-Clause"
] | null | null | null |
terrascript/kubernetes/__init__.py
|
amlodzianowski/python-terrascript
|
1111affe6cd30d9b8b7bc74ae4e27590f7d4dc49
|
[
"BSD-2-Clause"
] | null | null | null |
terrascript/kubernetes/__init__.py
|
amlodzianowski/python-terrascript
|
1111affe6cd30d9b8b7bc74ae4e27590f7d4dc49
|
[
"BSD-2-Clause"
] | null | null | null |
# terrascript/kubernetes/__init__.py
import terrascript
class kubernetes(terrascript.Provider):
pass
| 13.5
| 39
| 0.796296
| 11
| 108
| 7.454545
| 0.727273
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.12963
| 108
| 7
| 40
| 15.428571
| 0.87234
| 0.314815
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
8f6102b2721389aad1346e24e8223c456fb1aeb2
| 33,302
|
py
|
Python
|
test/test_filters.py
|
ecometrica/grandfatherson
|
b166e4e44887960c3066ebd28eecadfae19561e1
|
[
"BSD-3-Clause"
] | 15
|
2015-05-11T11:08:52.000Z
|
2021-04-16T04:03:03.000Z
|
test/test_filters.py
|
ecometrica/grandfatherson
|
b166e4e44887960c3066ebd28eecadfae19561e1
|
[
"BSD-3-Clause"
] | 3
|
2016-04-18T01:09:12.000Z
|
2016-10-18T15:32:30.000Z
|
test/test_filters.py
|
ecometrica/grandfatherson
|
b166e4e44887960c3066ebd28eecadfae19561e1
|
[
"BSD-3-Clause"
] | 4
|
2016-08-05T17:19:06.000Z
|
2020-11-25T05:46:49.000Z
|
from datetime import datetime, date
import unittest
from grandfatherson import (FRIDAY, SATURDAY, SUNDAY)
from grandfatherson.filters import (Seconds, Minutes, Hours, Days, Weeks,
Months, Years, UTC)
def utcdatetime(*args):
return datetime(*args, tzinfo=UTC())
class TestSeconds(unittest.TestCase):
def setUp(self):
self.now = datetime(2000, 1, 1, 0, 0, 1, 1)
self.datetimes = [
datetime(2000, 1, 1, 0, 0, 1, 0),
datetime(2000, 1, 1, 0, 0, 0, 1),
datetime(2000, 1, 1, 0, 0, 0, 0),
datetime(1999, 12, 31, 23, 59, 59, 999999),
datetime(1999, 12, 31, 23, 59, 57, 0),
]
def test_mask(self):
self.assertEqual(
Seconds.mask(datetime(1999, 12, 31, 23, 59, 59, 999999)),
datetime(1999, 12, 31, 23, 59, 59, 0)
)
def test_future(self):
datetimes = [datetime(2010, 1, 15, 0, 0, 0, 0)] # Wikipedia
self.assertEqual(Seconds.filter(datetimes, number=0, now=self.now),
set(datetimes))
self.assertEqual(Seconds.filter(datetimes, number=1, now=self.now),
set(datetimes))
def test_invalid_number(self):
self.assertRaises(ValueError,
Seconds.filter, [], number=-1, now=self.now)
self.assertRaises(ValueError,
Seconds.filter, [], number=0.1, now=self.now)
self.assertRaises(ValueError,
Seconds.filter, [], number='1', now=self.now)
def test_no_input(self):
self.assertEqual(Seconds.filter([], number=1, now=self.now),
set())
def test_no_results(self):
self.assertEqual(Seconds.filter([self.now], number=0, now=self.now),
set())
self.assertEqual(Seconds.filter(self.datetimes, number=0,
now=self.now),
set())
def test_current(self):
self.assertEqual(Seconds.filter(self.datetimes, number=1,
now=self.now),
set([datetime(2000, 1, 1, 0, 0, 1, 0)]))
def test_duplicates(self):
# Ensure we get the oldest per-second datetime when there are
# duplicates: i.e. not datetime(2000, 1, 1, 0, 0, 0, 1)
self.assertEqual(Seconds.filter(self.datetimes, number=2,
now=self.now),
set([datetime(2000, 1, 1, 0, 0, 0, 0),
datetime(2000, 1, 1, 0, 0, 1, 0)]))
def test_microseconds(self):
self.assertEqual(Seconds.filter(self.datetimes, number=3,
now=self.now),
set([datetime(1999, 12, 31, 23, 59, 59, 999999),
datetime(2000, 1, 1, 0, 0, 0, 0),
datetime(2000, 1, 1, 0, 0, 1, 0)]))
def test_before_start(self):
# datetime(1999, 12, 31, 23, 59, 57, 0) is too old to show up
# in the results
self.assertEqual(Seconds.filter(self.datetimes, number=4,
now=self.now),
set([datetime(1999, 12, 31, 23, 59, 59, 999999),
datetime(2000, 1, 1, 0, 0, 0, 0),
datetime(2000, 1, 1, 0, 0, 1, 0)]))
def test_all_input(self):
self.assertEqual(Seconds.filter(self.datetimes, number=5,
now=self.now),
set([datetime(1999, 12, 31, 23, 59, 57, 0),
datetime(1999, 12, 31, 23, 59, 59, 999999),
datetime(2000, 1, 1, 0, 0, 0, 0),
datetime(2000, 1, 1, 0, 0, 1, 0)]))
self.assertEqual(Seconds.filter(self.datetimes, number=6,
now=self.now),
set([datetime(1999, 12, 31, 23, 59, 57, 0),
datetime(1999, 12, 31, 23, 59, 59, 999999),
datetime(2000, 1, 1, 0, 0, 0, 0),
datetime(2000, 1, 1, 0, 0, 1, 0)]))
def test_with_tzinfo(self):
utcnow = utcdatetime(2000, 1, 1, 0, 0, 1, 1)
tzinfo_datetimes = [
utcdatetime(2000, 1, 1, 0, 0, 1, 0),
utcdatetime(2000, 1, 1, 0, 0, 0, 1),
utcdatetime(2000, 1, 1, 0, 0, 0, 0),
utcdatetime(1999, 12, 31, 23, 59, 59, 999999),
utcdatetime(1999, 12, 31, 23, 59, 57, 0),
]
self.assertEqual(Seconds.filter(tzinfo_datetimes, number=5,
now=utcnow),
set([utcdatetime(1999, 12, 31, 23, 59, 57, 0),
utcdatetime(1999, 12, 31, 23, 59, 59, 999999),
utcdatetime(2000, 1, 1, 0, 0, 0, 0),
utcdatetime(2000, 1, 1, 0, 0, 1, 0)]))
self.assertEqual(Seconds.filter(tzinfo_datetimes, number=6,
now=utcnow),
set([utcdatetime(1999, 12, 31, 23, 59, 57, 0),
utcdatetime(1999, 12, 31, 23, 59, 59, 999999),
utcdatetime(2000, 1, 1, 0, 0, 0, 0),
utcdatetime(2000, 1, 1, 0, 0, 1, 0)]))
class TestMinutes(unittest.TestCase):
def setUp(self):
self.now = datetime(2000, 1, 1, 0, 1, 1, 1)
self.datetimes = [
datetime(2000, 1, 1, 0, 1, 0, 0),
datetime(2000, 1, 1, 0, 0, 1, 0),
datetime(2000, 1, 1, 0, 0, 0, 0),
datetime(1999, 12, 31, 23, 59, 59, 999999),
datetime(1999, 12, 31, 23, 57, 0, 0),
]
def test_mask(self):
self.assertEqual(
Minutes.mask(datetime(1999, 12, 31, 23, 59, 59, 999999)),
datetime(1999, 12, 31, 23, 59, 0, 0)
)
def test_future(self):
datetimes = [datetime(2010, 1, 15, 0, 0, 0, 0)] # Wikipedia
self.assertEqual(Minutes.filter(datetimes, number=0, now=self.now),
set(datetimes))
self.assertEqual(Minutes.filter(datetimes, number=1, now=self.now),
set(datetimes))
def test_invalid_number(self):
self.assertRaises(ValueError,
Minutes.filter, [], number=-1, now=self.now)
self.assertRaises(ValueError,
Minutes.filter, [], number=0.1, now=self.now)
self.assertRaises(ValueError,
Minutes.filter, [], number='1', now=self.now)
def test_no_input(self):
self.assertEqual(Minutes.filter([], number=1, now=self.now),
set())
def test_no_results(self):
self.assertEqual(Minutes.filter([self.now], number=0, now=self.now),
set())
self.assertEqual(Minutes.filter(self.datetimes, number=0,
now=self.now),
set())
def test_current(self):
self.assertEqual(Minutes.filter(self.datetimes, number=1,
now=self.now),
set([datetime(2000, 1, 1, 0, 1, 0, 0)]))
def test_duplicates(self):
# Ensure we get the oldest per-minute datetime when there are
# duplicates: i.e. not datetime(2000, 1, 1, 0, 0, 1, 0)
self.assertEqual(Minutes.filter(self.datetimes, number=2,
now=self.now),
set([datetime(2000, 1, 1, 0, 0, 0, 0),
datetime(2000, 1, 1, 0, 1, 0, 0)]))
def test_microseconds(self):
self.assertEqual(Minutes.filter(self.datetimes, number=3,
now=self.now),
set([datetime(1999, 12, 31, 23, 59, 59, 999999),
datetime(2000, 1, 1, 0, 0, 0, 0),
datetime(2000, 1, 1, 0, 1, 0, 0)]))
def test_before_start(self):
# datetime(1999, 12, 31, 23, 57, 0, 0) is too old to show up
# in the results
self.assertEqual(Minutes.filter(self.datetimes, number=4,
now=self.now),
set([datetime(1999, 12, 31, 23, 59, 59, 999999),
datetime(2000, 1, 1, 0, 0, 0, 0),
datetime(2000, 1, 1, 0, 1, 0, 0)]))
def test_all_input(self):
self.assertEqual(Minutes.filter(self.datetimes, number=5,
now=self.now),
set([datetime(1999, 12, 31, 23, 57, 0, 0),
datetime(1999, 12, 31, 23, 59, 59, 999999),
datetime(2000, 1, 1, 0, 0, 0, 0),
datetime(2000, 1, 1, 0, 1, 0, 0)]))
self.assertEqual(Minutes.filter(self.datetimes, number=6,
now=self.now),
set([datetime(1999, 12, 31, 23, 57, 0, 0),
datetime(1999, 12, 31, 23, 59, 59, 999999),
datetime(2000, 1, 1, 0, 0, 0, 0),
datetime(2000, 1, 1, 0, 1, 0, 0)]))
class TestHours(unittest.TestCase):
    """Filtering behaviour of ``Hours``: one representative datetime is
    kept per hour bucket, counting back ``number`` hours from ``now``."""

    def setUp(self):
        self.now = datetime(2000, 1, 1, 1, 1, 1, 1)
        self.datetimes = [
            datetime(2000, 1, 1, 1, 0, 0, 0),
            datetime(2000, 1, 1, 0, 1, 0, 0),
            datetime(2000, 1, 1, 0, 0, 0, 0),
            datetime(1999, 12, 31, 23, 59, 59, 999999),
            datetime(1999, 12, 31, 21, 0, 0, 0),
        ]

    def test_mask(self):
        # Masking zeroes everything below the hour.
        masked = Hours.mask(datetime(1999, 12, 31, 23, 59, 59, 999999))
        self.assertEqual(masked, datetime(1999, 12, 31, 23, 0, 0, 0))

    def test_future(self):
        future = [datetime(2010, 1, 15, 0, 0, 0, 0)]  # Wikipedia
        for number in (0, 1):
            self.assertEqual(
                Hours.filter(future, number=number, now=self.now),
                set(future))

    def test_invalid_number(self):
        # number must be a non-negative integer.
        for bad in (-1, 0.1, '1'):
            self.assertRaises(ValueError,
                              Hours.filter, [], number=bad, now=self.now)

    def test_no_input(self):
        self.assertEqual(Hours.filter([], number=1, now=self.now), set())

    def test_no_results(self):
        self.assertEqual(
            Hours.filter([self.now], number=0, now=self.now), set())
        self.assertEqual(
            Hours.filter(self.datetimes, number=0, now=self.now), set())

    def test_current(self):
        self.assertEqual(
            Hours.filter(self.datetimes, number=1, now=self.now),
            {datetime(2000, 1, 1, 1, 0, 0, 0)})

    def test_duplicates(self):
        # The oldest datetime within each hour wins, so
        # datetime(2000, 1, 1, 0, 1, 0, 0) is dropped.
        self.assertEqual(
            Hours.filter(self.datetimes, number=2, now=self.now),
            {datetime(2000, 1, 1, 0, 0, 0, 0),
             datetime(2000, 1, 1, 1, 0, 0, 0)})

    def test_microseconds(self):
        self.assertEqual(
            Hours.filter(self.datetimes, number=3, now=self.now),
            {datetime(1999, 12, 31, 23, 59, 59, 999999),
             datetime(2000, 1, 1, 0, 0, 0, 0),
             datetime(2000, 1, 1, 1, 0, 0, 0)})

    def test_before_start(self):
        # datetime(1999, 12, 31, 21, 0, 0, 0) predates the window and is
        # excluded from the results.
        self.assertEqual(
            Hours.filter(self.datetimes, number=4, now=self.now),
            {datetime(1999, 12, 31, 23, 59, 59, 999999),
             datetime(2000, 1, 1, 0, 0, 0, 0),
             datetime(2000, 1, 1, 1, 0, 0, 0)})

    def test_all_input(self):
        # Once the window covers every input, a larger number changes nothing.
        everything = {
            datetime(1999, 12, 31, 21, 0, 0, 0),
            datetime(1999, 12, 31, 23, 59, 59, 999999),
            datetime(2000, 1, 1, 0, 0, 0, 0),
            datetime(2000, 1, 1, 1, 0, 0, 0),
        }
        for number in (5, 6):
            self.assertEqual(
                Hours.filter(self.datetimes, number=number, now=self.now),
                everything)
class TestDays(unittest.TestCase):
    """Filtering behaviour of ``Days``: keep the oldest datetime per day,
    counting back ``number`` days from ``now``."""

    def setUp(self):
        self.now = datetime(2000, 1, 1, 1, 1, 1, 1)
        self.datetimes = [
            datetime(2000, 1, 1, 1, 0, 0, 0),
            datetime(2000, 1, 1, 0, 0, 0, 0),
            datetime(1999, 12, 31, 23, 59, 59, 999999),
            datetime(1999, 12, 30, 0, 0, 0, 0),
            datetime(1999, 12, 28, 0, 0, 0, 0),
        ]

    def test_mask(self):
        # Masking truncates the time-of-day to midnight.
        self.assertEqual(
            Days.mask(datetime(1999, 12, 31, 23, 59, 59, 999999)),
            datetime(1999, 12, 31, 0, 0, 0, 0)
        )

    def test_future(self):
        datetimes = [datetime(2010, 1, 15, 0, 0, 0, 0)]  # Wikipedia
        self.assertEqual(Days.filter(datetimes, number=0, now=self.now),
                         set(datetimes))
        self.assertEqual(Days.filter(datetimes, number=1, now=self.now),
                         set(datetimes))

    def test_invalid_number(self):
        # number must be a non-negative integer.
        self.assertRaises(ValueError,
                          Days.filter, [], number=-1, now=self.now)
        self.assertRaises(ValueError,
                          Days.filter, [], number=0.1, now=self.now)
        self.assertRaises(ValueError,
                          Days.filter, [], number='1', now=self.now)

    def test_no_input(self):
        self.assertEqual(Days.filter([], number=1, now=self.now),
                         set())

    def test_no_results(self):
        self.assertEqual(Days.filter([self.now], number=0, now=self.now),
                         set())
        self.assertEqual(Days.filter(self.datetimes, number=0, now=self.now),
                         set())

    def test_current(self):
        self.assertEqual(Days.filter(self.datetimes, number=1, now=self.now),
                         set([datetime(2000, 1, 1, 0, 0, 0, 0)]))

    def test_duplicates(self):
        # Ensure we get the oldest per-day datetime when there are
        # duplicates: i.e. not datetime(2000, 1, 1, 1, 0, 0, 0)
        self.assertEqual(Days.filter(self.datetimes, number=2, now=self.now),
                         set([datetime(1999, 12, 31, 23, 59, 59, 999999),
                              datetime(2000, 1, 1, 0, 0, 0, 0)]))

    def test_before_start(self):
        # datetime(1999, 12, 28, 0, 0, 0, 0) is too old to show up
        # in the results
        self.assertEqual(Days.filter(self.datetimes, number=4, now=self.now),
                         set([datetime(1999, 12, 30, 0, 0, 0, 0),
                              datetime(1999, 12, 31, 23, 59, 59, 999999),
                              datetime(2000, 1, 1, 0, 0, 0, 0)]))

    def test_all_input(self):
        self.assertEqual(Days.filter(self.datetimes, number=5, now=self.now),
                         set([datetime(1999, 12, 28, 0, 0, 0, 0),
                              datetime(1999, 12, 30, 0, 0, 0, 0),
                              datetime(1999, 12, 31, 23, 59, 59, 999999),
                              datetime(2000, 1, 1, 0, 0, 0, 0)]))
        self.assertEqual(Days.filter(self.datetimes, number=6, now=self.now),
                         set([datetime(1999, 12, 28, 0, 0, 0, 0),
                              datetime(1999, 12, 30, 0, 0, 0, 0),
                              datetime(1999, 12, 31, 23, 59, 59, 999999),
                              datetime(2000, 1, 1, 0, 0, 0, 0)]))

    def test_leap_year(self):
        # 2004 is a leap year, because it is divisible by 4
        now = datetime(2004, 3, 1, 0, 0, 0, 0)
        datetimes_2004 = [
            datetime(2004, 3, 1, 0, 0, 0, 0),
            datetime(2004, 2, 29, 0, 0, 0, 0),
            datetime(2004, 2, 28, 0, 0, 0, 0),
            datetime(2004, 2, 27, 0, 0, 0, 0),
        ]
        self.assertEqual(Days.filter(datetimes_2004, number=1, now=now),
                         set([datetime(2004, 3, 1, 0, 0, 0, 0)]))
        self.assertEqual(Days.filter(datetimes_2004, number=2, now=now),
                         set([datetime(2004, 2, 29, 0, 0, 0, 0),
                              datetime(2004, 3, 1, 0, 0, 0, 0)]))
        self.assertEqual(Days.filter(datetimes_2004, number=3, now=now),
                         set([datetime(2004, 2, 28, 0, 0, 0, 0),
                              datetime(2004, 2, 29, 0, 0, 0, 0),
                              datetime(2004, 3, 1, 0, 0, 0, 0)]))

    def test_not_leap_year(self):
        # 1900 was not a leap year: it is divisible by 100 but not by 400
        # (century years are only leap years when divisible by 400).
        now = datetime(1900, 3, 1, 0, 0, 0, 0)
        datetimes_1900 = [
            datetime(1900, 3, 1, 0, 0, 0, 0),
            datetime(1900, 2, 28, 0, 0, 0, 0),
            datetime(1900, 2, 27, 0, 0, 0, 0),
        ]
        self.assertEqual(Days.filter(datetimes_1900, number=1, now=now),
                         set([datetime(1900, 3, 1, 0, 0, 0, 0)]))
        self.assertEqual(Days.filter(datetimes_1900, number=2, now=now),
                         set([datetime(1900, 2, 28, 0, 0, 0, 0),
                              datetime(1900, 3, 1, 0, 0, 0, 0)]))
        self.assertEqual(Days.filter(datetimes_1900, number=3, now=now),
                         set([datetime(1900, 2, 27, 0, 0, 0, 0),
                              datetime(1900, 2, 28, 0, 0, 0, 0),
                              datetime(1900, 3, 1, 0, 0, 0, 0)]))

    def test_with_tzinfo_and_date(self):
        # Timezone-aware datetimes work, and now may be a plain date.
        tzinfo_datetimes = [
            utcdatetime(2000, 1, 1, 1, 0, 0, 0),
            utcdatetime(2000, 1, 1, 0, 0, 0, 0),
            utcdatetime(1999, 12, 31, 23, 59, 59, 999999),
            utcdatetime(1999, 12, 30, 0, 0, 0, 0),
            utcdatetime(1999, 12, 28, 0, 0, 0, 0),
        ]
        today = date(2000, 1, 1)
        self.assertEqual(Days.filter(tzinfo_datetimes, number=5, now=today),
                         set([utcdatetime(1999, 12, 28, 0, 0, 0, 0),
                              utcdatetime(1999, 12, 30, 0, 0, 0, 0),
                              utcdatetime(1999, 12, 31, 23, 59, 59, 999999),
                              utcdatetime(2000, 1, 1, 0, 0, 0, 0)]))

    def test_with_date(self):
        # now may be a plain date instead of a datetime.
        today = date(2000, 1, 1)
        self.assertEqual(Days.filter(self.datetimes, number=5, now=today),
                         set([datetime(1999, 12, 28, 0, 0, 0, 0),
                              datetime(1999, 12, 30, 0, 0, 0, 0),
                              datetime(1999, 12, 31, 23, 59, 59, 999999),
                              datetime(2000, 1, 1, 0, 0, 0, 0)]))
class TestWeeks(unittest.TestCase):
    """Filtering behaviour of ``Weeks``: keep the oldest datetime per week,
    counting back ``number`` weeks from ``now``; the week boundary is
    configurable via ``firstweekday``."""

    def setUp(self):
        # 1 January 2000 is a Saturday
        self.now = datetime(2000, 1, 1, 1, 1, 1, 1)
        self.datetimes = [
            datetime(2000, 1, 1, 1, 0, 0, 0),
            datetime(2000, 1, 1, 0, 0, 0, 0),
            datetime(1999, 12, 31, 23, 59, 59, 999999),
            datetime(1999, 12, 18, 0, 0, 0, 0),
            datetime(1999, 12, 4, 0, 0, 0, 0),
        ]

    def test_mask(self):
        # 31 December 1999 is a Friday.
        dt = datetime(1999, 12, 31, 23, 59, 59, 999999)
        self.assertEqual(dt.weekday(), FRIDAY)
        # Default firstweekday is Saturday
        self.assertEqual(Weeks.mask(dt),
                         Weeks.mask(dt, firstweekday=SATURDAY))
        self.assertEqual(Weeks.mask(dt),
                         datetime(1999, 12, 25, 0, 0, 0, 0))
        # Sunday
        self.assertEqual(Weeks.mask(dt, firstweekday=SUNDAY),
                         datetime(1999, 12, 26, 0, 0, 0, 0))
        # If firstweekday is the same as dt.weekday, then it should return
        # the same day.
        self.assertEqual(Weeks.mask(dt, firstweekday=dt.weekday()),
                         Days.mask(dt))

    def test_future(self):
        datetimes = [datetime(2010, 1, 15, 0, 0, 0, 0)]  # Wikipedia
        self.assertEqual(Weeks.filter(datetimes, number=0, now=self.now),
                         set(datetimes))
        self.assertEqual(Weeks.filter(datetimes, number=1, now=self.now),
                         set(datetimes))

    def test_invalid_number(self):
        # number must be a non-negative integer.
        self.assertRaises(ValueError,
                          Weeks.filter, [], number=-1, now=self.now)
        self.assertRaises(ValueError,
                          Weeks.filter, [], number=0.1, now=self.now)
        self.assertRaises(ValueError,
                          Weeks.filter, [], number='1', now=self.now)

    def test_no_input(self):
        self.assertEqual(Weeks.filter([], number=1, now=self.now),
                         set())

    def test_no_results(self):
        self.assertEqual(Weeks.filter([self.now], number=0, now=self.now),
                         set())
        self.assertEqual(Weeks.filter(self.datetimes, number=0, now=self.now),
                         set())

    def test_current(self):
        self.assertEqual(Weeks.filter(self.datetimes, number=1, now=self.now),
                         set([datetime(2000, 1, 1, 0, 0, 0, 0)]))

    def test_duplicates(self):
        # Ensure we get the oldest per-week datetime when there are
        # duplicates: i.e. not datetime(2000, 1, 1, 1, 0, 0, 0)
        self.assertEqual(Weeks.filter(self.datetimes, number=2, now=self.now),
                         set([datetime(1999, 12, 31, 23, 59, 59, 999999),
                              datetime(2000, 1, 1, 0, 0, 0, 0)]))

    def test_before_start(self):
        # datetime(1999, 12, 4, 0, 0, 0, 0) is too old to show up
        # in the results
        self.assertEqual(Weeks.filter(self.datetimes, number=4, now=self.now),
                         set([datetime(1999, 12, 18, 0, 0, 0, 0),
                              datetime(1999, 12, 31, 23, 59, 59, 999999),
                              datetime(2000, 1, 1, 0, 0, 0, 0)]))

    def test_all_input(self):
        self.assertEqual(Weeks.filter(self.datetimes, number=5, now=self.now),
                         set([datetime(1999, 12, 4, 0, 0, 0, 0),
                              datetime(1999, 12, 18, 0, 0, 0, 0),
                              datetime(1999, 12, 31, 23, 59, 59, 999999),
                              datetime(2000, 1, 1, 0, 0, 0, 0)]))
        self.assertEqual(Weeks.filter(self.datetimes, number=6, now=self.now),
                         set([datetime(1999, 12, 4, 0, 0, 0, 0),
                              datetime(1999, 12, 18, 0, 0, 0, 0),
                              datetime(1999, 12, 31, 23, 59, 59, 999999),
                              datetime(2000, 1, 1, 0, 0, 0, 0)]))

    def test_different_firstweekday(self):
        # A non-default week boundary shifts which datetimes share a week.
        self.assertEqual(
            Weeks.filter(
                self.datetimes, number=3, firstweekday=3, now=self.now
            ),
            set([datetime(1999, 12, 18, 0, 0, 0, 0),
                 datetime(1999, 12, 31, 23, 59, 59, 999999)])
        )
        # (A redundant, unused duplicate filter call was removed here.)
        self.assertEqual(
            Weeks.filter(
                self.datetimes, number=5, firstweekday=3, now=self.now
            ),
            set([datetime(1999, 12, 18, 0, 0, 0, 0),
                 datetime(1999, 12, 4, 0, 0, 0, 0),
                 datetime(1999, 12, 31, 23, 59, 59, 999999)])
        )
class TestMonths(unittest.TestCase):
    """Filtering behaviour of ``Months``: the oldest datetime per calendar
    month is kept, counting back ``number`` months from ``now``."""

    def setUp(self):
        self.now = datetime(2000, 2, 1, 1, 1, 1, 1)
        self.datetimes = [
            datetime(2000, 2, 1, 0, 0, 0, 0),
            datetime(2000, 1, 1, 1, 0, 0, 0),
            datetime(2000, 1, 1, 0, 0, 0, 0),
            datetime(1999, 12, 31, 23, 59, 59, 999999),
            datetime(1999, 10, 1, 0, 0, 0, 0),
        ]

    def test_mask(self):
        # Masking truncates to midnight on the first of the month.
        masked = Months.mask(datetime(1999, 12, 31, 23, 59, 59, 999999))
        self.assertEqual(masked, datetime(1999, 12, 1, 0, 0, 0, 0))

    def test_future(self):
        future = [datetime(2010, 1, 15, 0, 0, 0, 0)]  # Wikipedia
        for number in (0, 1):
            self.assertEqual(
                Months.filter(future, number=number, now=self.now),
                set(future))

    def test_invalid_number(self):
        # number must be a non-negative integer.
        for bad in (-1, 0.1, '1'):
            self.assertRaises(ValueError,
                              Months.filter, [], number=bad, now=self.now)

    def test_no_input(self):
        self.assertEqual(Months.filter([], number=1, now=self.now), set())

    def test_no_results(self):
        self.assertEqual(
            Months.filter([self.now], number=0, now=self.now), set())
        self.assertEqual(
            Months.filter(self.datetimes, number=0, now=self.now), set())

    def test_current(self):
        self.assertEqual(
            Months.filter(self.datetimes, number=1, now=self.now),
            {datetime(2000, 2, 1, 0, 0, 0, 0)})

    def test_duplicates(self):
        # The oldest datetime within each month wins, so
        # datetime(2000, 1, 1, 1, 0, 0, 0) is dropped.
        self.assertEqual(
            Months.filter(self.datetimes, number=2, now=self.now),
            {datetime(2000, 1, 1, 0, 0, 0, 0),
             datetime(2000, 2, 1, 0, 0, 0, 0)})

    def test_new_year(self):
        # The window crosses the year boundary without losing December.
        self.assertEqual(
            Months.filter(self.datetimes, number=3, now=self.now),
            {datetime(1999, 12, 31, 23, 59, 59, 999999),
             datetime(2000, 1, 1, 0, 0, 0, 0),
             datetime(2000, 2, 1, 0, 0, 0, 0)})

    def test_before_start(self):
        # datetime(1999, 10, 1, 0, 0, 0, 0) predates the window and is
        # excluded from the results.
        self.assertEqual(
            Months.filter(self.datetimes, number=4, now=self.now),
            {datetime(1999, 12, 31, 23, 59, 59, 999999),
             datetime(2000, 1, 1, 0, 0, 0, 0),
             datetime(2000, 2, 1, 0, 0, 0, 0)})

    def test_all_input(self):
        # Once the window covers every input, a larger number changes nothing.
        everything = {
            datetime(1999, 10, 1, 0, 0, 0, 0),
            datetime(1999, 12, 31, 23, 59, 59, 999999),
            datetime(2000, 1, 1, 0, 0, 0, 0),
            datetime(2000, 2, 1, 0, 0, 0, 0),
        }
        for number in (5, 6):
            self.assertEqual(
                Months.filter(self.datetimes, number=number, now=self.now),
                everything)

    def test_multiple_years(self):
        now = datetime(2000, 1, 1, 0, 0, 0, 0)
        datetimes = [
            datetime(2000, 1, 1, 0, 0, 0, 0),
            datetime(1999, 12, 1, 0, 0, 0, 0),
            datetime(1999, 1, 1, 0, 0, 0, 0),
            datetime(1998, 12, 1, 0, 0, 0, 0),
            datetime(1997, 12, 1, 0, 0, 0, 0),
        ]
        # 12 months back ignores datetime(1999, 1, 1, 0, 0, 0, 0)
        self.assertEqual(
            Months.filter(datetimes, number=12, now=now),
            {datetime(1999, 12, 1, 0, 0, 0, 0),
             datetime(2000, 1, 1, 0, 0, 0, 0)})
        # But 13 months back gets it
        self.assertEqual(
            Months.filter(datetimes, number=13, now=now),
            {datetime(1999, 1, 1, 0, 0, 0, 0),
             datetime(1999, 12, 1, 0, 0, 0, 0),
             datetime(2000, 1, 1, 0, 0, 0, 0)})
        # 14 months back also picks up datetime(1998, 12, 1, 0, 0, 0, 0) ...
        self.assertEqual(
            Months.filter(datetimes, number=14, now=now),
            {datetime(1998, 12, 1, 0, 0, 0, 0),
             datetime(1999, 1, 1, 0, 0, 0, 0),
             datetime(1999, 12, 1, 0, 0, 0, 0),
             datetime(2000, 1, 1, 0, 0, 0, 0)})
        # ... as does 24 months back
        self.assertEqual(
            Months.filter(datetimes, number=24, now=now),
            {datetime(1998, 12, 1, 0, 0, 0, 0),
             datetime(1999, 1, 1, 0, 0, 0, 0),
             datetime(1999, 12, 1, 0, 0, 0, 0),
             datetime(2000, 1, 1, 0, 0, 0, 0)})
        # 36 months back should get datetime(1997, 12, 1, 0, 0, 0, 0)
        self.assertEqual(
            Months.filter(datetimes, number=36, now=now),
            {datetime(1997, 12, 1, 0, 0, 0, 0),
             datetime(1998, 12, 1, 0, 0, 0, 0),
             datetime(1999, 1, 1, 0, 0, 0, 0),
             datetime(1999, 12, 1, 0, 0, 0, 0),
             datetime(2000, 1, 1, 0, 0, 0, 0)})
class TestYears(unittest.TestCase):
    """Filtering behaviour of ``Years``: keep the oldest datetime per year,
    counting back ``number`` years from ``now``."""

    def setUp(self):
        self.now = datetime(2000, 1, 1, 1, 1, 1, 1)
        self.datetimes = [
            datetime(2000, 1, 1, 1, 0, 0, 0),
            datetime(2000, 1, 1, 0, 0, 0, 0),
            datetime(1999, 12, 31, 23, 59, 59, 999999),
            datetime(1998, 1, 1, 0, 0, 0, 0),
            datetime(1996, 1, 1, 0, 0, 0, 0),
        ]

    def test_mask(self):
        # Masking truncates to midnight on 1 January of the year.
        self.assertEqual(
            Years.mask(datetime(1999, 12, 31, 23, 59, 59, 999999)),
            datetime(1999, 1, 1, 0, 0, 0, 0)
        )

    def test_future(self):
        datetimes = [datetime(2010, 1, 15, 0, 0, 0, 0)]  # Wikipedia
        self.assertEqual(Years.filter(datetimes, number=0, now=self.now),
                         set(datetimes))
        self.assertEqual(Years.filter(datetimes, number=1, now=self.now),
                         set(datetimes))

    def test_invalid_number(self):
        # number must be a non-negative integer.
        self.assertRaises(ValueError,
                          Years.filter, [], number=-1, now=self.now)
        self.assertRaises(ValueError,
                          Years.filter, [], number=0.1, now=self.now)
        self.assertRaises(ValueError,
                          Years.filter, [], number='1', now=self.now)

    def test_no_input(self):
        self.assertEqual(Years.filter([], number=1, now=self.now),
                         set())

    def test_no_results(self):
        self.assertEqual(Years.filter([self.now], number=0, now=self.now),
                         set())
        self.assertEqual(Years.filter(self.datetimes, number=0, now=self.now),
                         set())

    def test_current(self):
        self.assertEqual(Years.filter(self.datetimes, number=1, now=self.now),
                         set([datetime(2000, 1, 1, 0, 0, 0, 0)]))

    def test_duplicates(self):
        # Ensure we get the oldest per-year datetime when there are
        # duplicates: i.e. not datetime(2000, 1, 1, 1, 0, 0, 0)
        self.assertEqual(Years.filter(self.datetimes, number=2, now=self.now),
                         set([datetime(1999, 12, 31, 23, 59, 59, 999999),
                              datetime(2000, 1, 1, 0, 0, 0, 0)]))

    def test_before_start(self):
        # datetime(1996, 1, 1, 0, 0, 0, 0) is too old to show up
        # in the results
        self.assertEqual(Years.filter(self.datetimes, number=4, now=self.now),
                         set([datetime(1998, 1, 1, 0, 0, 0, 0),
                              datetime(1999, 12, 31, 23, 59, 59, 999999),
                              datetime(2000, 1, 1, 0, 0, 0, 0)]))

    def test_all_input(self):
        self.assertEqual(Years.filter(self.datetimes, number=5, now=self.now),
                         set([datetime(1996, 1, 1, 0, 0, 0, 0),
                              datetime(1998, 1, 1, 0, 0, 0, 0),
                              datetime(1999, 12, 31, 23, 59, 59, 999999),
                              datetime(2000, 1, 1, 0, 0, 0, 0)]))
        self.assertEqual(Years.filter(self.datetimes, number=6, now=self.now),
                         set([datetime(1996, 1, 1, 0, 0, 0, 0),
                              datetime(1998, 1, 1, 0, 0, 0, 0),
                              datetime(1999, 12, 31, 23, 59, 59, 999999),
                              datetime(2000, 1, 1, 0, 0, 0, 0)]))
| 44.700671
| 79
| 0.477269
| 4,235
| 33,302
| 3.721606
| 0.034947
| 0.067508
| 0.062433
| 0.038576
| 0.945752
| 0.941755
| 0.910983
| 0.895819
| 0.843919
| 0.823425
| 0
| 0.17277
| 0.38334
| 33,302
| 744
| 80
| 44.760753
| 0.594712
| 0.056213
| 0
| 0.623509
| 0
| 0
| 0.000223
| 0
| 0
| 0
| 0
| 0
| 0.20954
| 1
| 0.139693
| false
| 0
| 0.006814
| 0.001704
| 0.160136
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
8f6145a34883b75ea4a2679936f8b35438d60e18
| 119
|
py
|
Python
|
mugicli/pytail.py
|
mugiseyebrows/mugi-cli
|
4381086d4bd5a781248cd2cc5ef0e1042534942e
|
[
"MIT"
] | null | null | null |
mugicli/pytail.py
|
mugiseyebrows/mugi-cli
|
4381086d4bd5a781248cd2cc5ef0e1042534942e
|
[
"MIT"
] | null | null | null |
mugicli/pytail.py
|
mugiseyebrows/mugi-cli
|
4381086d4bd5a781248cd2cc5ef0e1042534942e
|
[
"MIT"
] | null | null | null |
from . import head_tail_main, T_TAIL
def main() -> None:
    """Console entry point: run the shared head/tail driver with the
    ``T_TAIL`` mode flag (i.e. behave as ``tail``)."""
    head_tail_main(T_TAIL)
if __name__ == "__main__":
    main()
| 17
| 36
| 0.663866
| 18
| 119
| 3.611111
| 0.5
| 0.246154
| 0.369231
| 0.4
| 0.523077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.218487
| 119
| 7
| 37
| 17
| 0.698925
| 0
| 0
| 0
| 0
| 0
| 0.066667
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| true
| 0
| 0.2
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
56ae93dfca80ca3bfdbf61d0e7515d63462cd0a2
| 12,459
|
py
|
Python
|
autogalaxy/analysis/aggregator/aggregator.py
|
jonathanfrawley/PyAutoGalaxy
|
55fb44f22ce5490318378dc31596c887d0d2e29b
|
[
"MIT"
] | null | null | null |
autogalaxy/analysis/aggregator/aggregator.py
|
jonathanfrawley/PyAutoGalaxy
|
55fb44f22ce5490318378dc31596c887d0d2e29b
|
[
"MIT"
] | null | null | null |
autogalaxy/analysis/aggregator/aggregator.py
|
jonathanfrawley/PyAutoGalaxy
|
55fb44f22ce5490318378dc31596c887d0d2e29b
|
[
"MIT"
] | null | null | null |
from autofit.database.model.fit import Fit
import autogalaxy as ag
from typing import Optional
from functools import partial
def plane_gen_from(aggregator):
    """
    Lazily map every result in *aggregator* to a `Plane`.

    Each result is converted via `plane_via_database_from`; because the
    aggregator's `map` yields generators, large result sets are handled
    memory-efficiently.

    Parameters
    ----------
    aggregator : af.Aggregator
        A PyAutoFit aggregator object containing the results of PyAutoGalaxy
        model-fits.
    """
    return aggregator.map(func=plane_via_database_from)
def plane_via_database_from(fit: Fit):
    """
    Build a `Plane` from a single database `Fit` entry.

    The plane is assembled from the maximum log likelihood model's galaxies.
    When a hyper-galaxy image dictionary was stored with the fit, each
    matching galaxy gets its hyper-model image and per-galaxy hyper image
    re-attached before the `Plane` is constructed.

    Parameters
    ----------
    fit : af.SearchOutput
        A PyAutoFit aggregator's SearchOutput object containing the generators
        of the results of PyAutoGalaxy model-fits.
    """
    galaxies = fit.instance.galaxies
    hyper_model_image = fit.value(name="hyper_model_image")
    hyper_image_paths = fit.value(name="hyper_galaxy_image_path_dict")

    if hyper_image_paths is not None:
        path_galaxy_pairs = fit.instance.path_instance_tuples_for_class(
            ag.Galaxy
        )
        for path, galaxy in path_galaxy_pairs:
            if path in hyper_image_paths:
                galaxy.hyper_model_image = hyper_model_image
                galaxy.hyper_galaxy_image = hyper_image_paths[path]

    return ag.Plane(galaxies=galaxies)
def imaging_gen_from(aggregator, settings_imaging: Optional[ag.SettingsImaging] = None):
    """
    Lazily map every result in *aggregator* to an `Imaging` dataset.

    Each result is converted via `imaging_via_database_from`, with the
    optional *settings_imaging* bound in; the aggregator's `map` keeps the
    conversion generator-based and memory-efficient.

    Parameters
    ----------
    aggregator : af.Aggregator
        A PyAutoFit aggregator object containing the results of PyAutoGalaxy
        model-fits.
    settings_imaging
        Optional settings that override those stored with each fit.
    """
    convert = partial(imaging_via_database_from, settings_imaging=settings_imaging)
    return aggregator.map(func=convert)
def imaging_via_database_from(
    fit: Fit, settings_imaging: Optional[ag.SettingsImaging] = None
):
    """
    Returns an `Imaging` object from an aggregator's `SearchOutput` class, which we call an 'agg_obj' to describe
    that it acts as the aggregator object for one result in the `Aggregator`. This uses the aggregator's generator
    outputs such that the function can use the `Aggregator`'s map function to to create a `Imaging` generator.
    The `Imaging` is created following the same method as the PyAutoGalaxy `Search` classes, including using the
    `SettingsImaging` instance output by the Search to load inputs of the `Imaging` (e.g. psf_shape_2d).

    Parameters
    ----------
    fit : af.SearchOutput
        A PyAutoFit aggregator's SearchOutput object containing the generators of the results of PyAutoGalaxy model-fits.
    settings_imaging
        Optional settings that take precedence over those stored with the fit.
    """
    data = fit.value(name="data")
    noise_map = fit.value(name="noise_map")
    psf = fit.value(name="psf")
    # Fall back to the settings stored alongside the fit when none are given.
    settings_imaging = settings_imaging or fit.value(name="settings_dataset")
    imaging = ag.Imaging(
        image=data,
        noise_map=noise_map,
        psf=psf,
        settings=settings_imaging,
        setup_convolver=True,
    )
    # NOTE(review): the return value of apply_settings is discarded here,
    # while interferometer_via_database_from reassigns it — confirm that
    # Imaging.apply_settings mutates in place (otherwise this call is a no-op
    # and should be `imaging = imaging.apply_settings(...)`).
    imaging.apply_settings(settings=settings_imaging)
    return imaging
def fit_imaging_gen_from(
    aggregator,
    settings_imaging: Optional[ag.SettingsImaging] = None,
    settings_pixelization: Optional[ag.SettingsPixelization] = None,
    settings_inversion: Optional[ag.SettingsInversion] = None,
):
    """
    Lazily map every result in *aggregator* to a `FitImaging`.

    Each result is converted via `fit_imaging_via_database_from`, with all
    optional settings bound in; the aggregator's `map` keeps the conversion
    generator-based and memory-efficient.

    Parameters
    ----------
    aggregator : af.Aggregator
        A PyAutoFit aggregator object containing the results of PyAutoGalaxy
        model-fits.
    settings_imaging, settings_pixelization, settings_inversion
        Optional settings that override those stored with each fit.
    """
    convert = partial(
        fit_imaging_via_database_from,
        settings_imaging=settings_imaging,
        settings_pixelization=settings_pixelization,
        settings_inversion=settings_inversion,
    )
    return aggregator.map(func=convert)
def fit_imaging_via_database_from(
    fit: Fit,
    settings_imaging: Optional[ag.SettingsImaging] = None,
    settings_pixelization: Optional[ag.SettingsPixelization] = None,
    settings_inversion: Optional[ag.SettingsInversion] = None,
):
    """
    Build a `FitImaging` from a single database `Fit` entry.

    The imaging dataset and plane are reconstructed via
    `imaging_via_database_from` and `plane_via_database_from`; explicit
    settings arguments take precedence over the values stored with the fit.

    Parameters
    ----------
    fit : af.SearchOutput
        A PyAutoFit aggregator's SearchOutput object containing the generators
        of the results of PyAutoGalaxy model-fits.
    settings_imaging, settings_pixelization, settings_inversion
        Optional settings that override those stored with the fit.
    """
    # Explicit arguments win; otherwise read the stored values.
    settings_pixelization = settings_pixelization or fit.value(
        name="settings_pixelization"
    )
    settings_inversion = settings_inversion or fit.value(name="settings_inversion")

    return ag.FitImaging(
        imaging=imaging_via_database_from(
            fit=fit, settings_imaging=settings_imaging
        ),
        plane=plane_via_database_from(fit=fit),
        settings_pixelization=settings_pixelization,
        settings_inversion=settings_inversion,
    )
def interferometer_gen_from(
    aggregator,
    real_space_mask: Optional[ag.Mask2D] = None,
    settings_interferometer: Optional[ag.SettingsInterferometer] = None,
):
    """
    Lazily map every result in *aggregator* to an `Interferometer` dataset.

    Each result is converted via `interferometer_via_database_from`, with the
    optional mask and settings bound in; the aggregator's `map` keeps the
    conversion generator-based and memory-efficient.

    Parameters
    ----------
    aggregator : af.Aggregator
        A PyAutoFit aggregator object containing the results of PyAutoGalaxy
        model-fits.
    real_space_mask, settings_interferometer
        Optional values that override those stored with each fit.
    """
    convert = partial(
        interferometer_via_database_from,
        real_space_mask=real_space_mask,
        settings_interferometer=settings_interferometer,
    )
    return aggregator.map(func=convert)
def interferometer_via_database_from(
    fit: Fit,
    real_space_mask: Optional[ag.Mask2D] = None,
    settings_interferometer: Optional[ag.SettingsInterferometer] = None,
):
    """
    Build an `Interferometer` dataset from a single database `Fit` entry.

    Visibilities, noise map and uv-wavelengths are read from the database;
    explicit *real_space_mask* / *settings_interferometer* arguments take
    precedence over the values stored with the fit.

    Parameters
    ----------
    fit : af.SearchOutput
        A PyAutoFit aggregator's SearchOutput object containing the generators
        of the results of PyAutoGalaxy model-fits.
    real_space_mask, settings_interferometer
        Optional values that override those stored with the fit.
    """
    # Explicit arguments win; otherwise read the stored values.
    real_space_mask = real_space_mask or fit.value(name="real_space_mask")
    settings_interferometer = settings_interferometer or fit.value(
        name="settings_dataset"
    )

    dataset = ag.Interferometer(
        visibilities=fit.value(name="data"),
        noise_map=fit.value(name="noise_map"),
        uv_wavelengths=fit.value(name="uv_wavelengths"),
        real_space_mask=real_space_mask,
    )
    return dataset.apply_settings(settings=settings_interferometer)
def fit_interferometer_gen_from(
    aggregator,
    real_space_mask: Optional[ag.Mask2D] = None,
    settings_interferometer: Optional[ag.SettingsInterferometer] = None,
    settings_pixelization: Optional[ag.SettingsPixelization] = None,
    settings_inversion: Optional[ag.SettingsInversion] = None,
):
    """
    Returns a generator of `FitInterferometer` objects from an input aggregator, which generates a list of the
    `FitInterferometer` objects for every set of results loaded in the aggregator.

    This is performed by mapping the `fit_interferometer_via_database_from` function with the aggregator, which
    sets up each fit using only generators, ensuring that manipulating the fits of large sets of results is done
    in a memory efficient way.

    (The docstring previously described the per-fit constructor by mistake; it now matches the other `*_gen_from`
    functions in this module.)

    Parameters
    ----------
    aggregator : af.Aggregator
        A PyAutoFit aggregator object containing the results of PyAutoGalaxy model-fits.
    real_space_mask, settings_interferometer, settings_pixelization, settings_inversion
        Optional values that override those stored with each fit.
    """
    func = partial(
        fit_interferometer_via_database_from,
        real_space_mask=real_space_mask,
        settings_interferometer=settings_interferometer,
        settings_pixelization=settings_pixelization,
        settings_inversion=settings_inversion,
    )
    return aggregator.map(func=func)
def fit_interferometer_via_database_from(
    fit: Fit,
    real_space_mask: Optional[ag.Mask2D] = None,
    settings_interferometer: Optional[ag.SettingsInterferometer] = None,
    settings_pixelization: Optional[ag.SettingsPixelization] = None,
    settings_inversion: Optional[ag.SettingsInversion] = None,
):
    """
    Returns a `FitInterferometer` object for one result in the `Aggregator`, reconstructed from a single
    database `Fit` entry.

    The `FitInterferometer` is created following the same method as the PyAutoGalaxy `Search` classes: the
    interferometer dataset and plane are rebuilt via `interferometer_via_database_from` and
    `plane_via_database_from`, and explicit settings arguments take precedence over the values stored with
    the fit.

    (The docstring previously described the generator function by mistake; it now matches this function's
    actual single-fit behaviour.)

    Parameters
    ----------
    fit : af.SearchOutput
        A PyAutoFit aggregator's SearchOutput object containing the generators of the results of PyAutoGalaxy
        model-fits.
    real_space_mask, settings_interferometer, settings_pixelization, settings_inversion
        Optional values that override those stored with the fit.
    """
    settings_pixelization = settings_pixelization or fit.value(
        name="settings_pixelization"
    )
    settings_inversion = settings_inversion or fit.value(name="settings_inversion")
    interferometer = interferometer_via_database_from(
        fit=fit,
        real_space_mask=real_space_mask,
        settings_interferometer=settings_interferometer,
    )
    plane = plane_via_database_from(fit=fit)
    return ag.FitInterferometer(
        interferometer=interferometer,
        plane=plane,
        settings_pixelization=settings_pixelization,
        settings_inversion=settings_inversion,
    )
| 39.678344
| 122
| 0.715948
| 1,536
| 12,459
| 5.65625
| 0.10026
| 0.04489
| 0.020718
| 0.021869
| 0.822053
| 0.809392
| 0.781998
| 0.752417
| 0.724678
| 0.718692
| 0
| 0.000616
| 0.218156
| 12,459
| 313
| 123
| 39.805112
| 0.891284
| 0.489847
| 0
| 0.510949
| 0
| 0
| 0.038117
| 0.012527
| 0
| 0
| 0
| 0
| 0
| 1
| 0.072993
| false
| 0
| 0.029197
| 0
| 0.175182
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
711c1a3246ef968f7abb8d18ff5d2d240c932ce1
| 41
|
py
|
Python
|
tests/cases/stats/tests/__init__.py
|
murphyke/avocado
|
62824bb2673d4cac81a15fee45fad60a8fe7622b
|
[
"BSD-2-Clause"
] | null | null | null |
tests/cases/stats/tests/__init__.py
|
murphyke/avocado
|
62824bb2673d4cac81a15fee45fad60a8fe7622b
|
[
"BSD-2-Clause"
] | null | null | null |
tests/cases/stats/tests/__init__.py
|
murphyke/avocado
|
62824bb2673d4cac81a15fee45fad60a8fe7622b
|
[
"BSD-2-Clause"
] | null | null | null |
from .agg import *
from .kmeans import *
| 13.666667
| 21
| 0.707317
| 6
| 41
| 4.833333
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.195122
| 41
| 2
| 22
| 20.5
| 0.878788
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
713df4a1b27b275c9a6c5cd2afa03244eb89ca57
| 92
|
py
|
Python
|
job/SLURM/Opuntia.py
|
martintb/typyQ
|
889b4ea40c28ee76c452f8b2bc92f042e6be199d
|
[
"MIT"
] | null | null | null |
job/SLURM/Opuntia.py
|
martintb/typyQ
|
889b4ea40c28ee76c452f8b2bc92f042e6be199d
|
[
"MIT"
] | null | null | null |
job/SLURM/Opuntia.py
|
martintb/typyQ
|
889b4ea40c28ee76c452f8b2bc92f042e6be199d
|
[
"MIT"
] | null | null | null |
from SLURM import SLURMJob
class OpuntiaJob(SLURMJob):
    """SLURM job for the Opuntia system; inherits all behaviour unchanged
    from ``SLURMJob``."""
    # no specialization needed!
    pass
| 15.333333
| 28
| 0.782609
| 11
| 92
| 6.545455
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.163043
| 92
| 5
| 29
| 18.4
| 0.935065
| 0.271739
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
859e592373db73053923e4a526f3b54ff1dda6eb
| 42
|
py
|
Python
|
junospyez_ossh_server/__init__.py
|
jeremyschulman/junospyez-ossh-server
|
dd34923a1f98062ed8e08a96e8a842ac5a33f6e6
|
[
"MIT"
] | 3
|
2018-10-19T11:54:12.000Z
|
2019-04-16T01:39:56.000Z
|
junospyez_ossh_server/__init__.py
|
jeremyschulman/junospyez-ossh-server
|
dd34923a1f98062ed8e08a96e8a842ac5a33f6e6
|
[
"MIT"
] | null | null | null |
junospyez_ossh_server/__init__.py
|
jeremyschulman/junospyez-ossh-server
|
dd34923a1f98062ed8e08a96e8a842ac5a33f6e6
|
[
"MIT"
] | null | null | null |
from .ossh_server import OutboundSSHServer
| 42
| 42
| 0.904762
| 5
| 42
| 7.4
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.071429
| 42
| 1
| 42
| 42
| 0.948718
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
a4345cf6a8a198712245d02c2590b89fb288a70f
| 110
|
py
|
Python
|
util/__init__.py
|
Ovakefali13/buerro
|
1476f6e708f95a09a2d73f67ae8aa2cb3bb836af
|
[
"MIT"
] | 2
|
2020-03-26T19:20:31.000Z
|
2020-03-30T13:09:07.000Z
|
util/__init__.py
|
Ovakefali13/buerro
|
1476f6e708f95a09a2d73f67ae8aa2cb3bb836af
|
[
"MIT"
] | 51
|
2020-03-05T09:04:21.000Z
|
2021-12-13T20:34:22.000Z
|
util/__init__.py
|
Ovakefali13/buerro
|
1476f6e708f95a09a2d73f67ae8aa2cb3bb836af
|
[
"MIT"
] | null | null | null |
from .singleton import Singleton
from .to_html import link_to_html, list_to_html, dict_to_html, table_to_html
| 36.666667
| 76
| 0.854545
| 20
| 110
| 4.25
| 0.45
| 0.352941
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1
| 110
| 2
| 77
| 55
| 0.858586
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
f117c40289eed7e849985ed5abe7a8978267612f
| 27
|
py
|
Python
|
model/__init__.py
|
reacher1130/AIGCN
|
8875c172a5fee88276bfcc666f5ad624e6aad937
|
[
"MIT"
] | null | null | null |
model/__init__.py
|
reacher1130/AIGCN
|
8875c172a5fee88276bfcc666f5ad624e6aad937
|
[
"MIT"
] | null | null | null |
model/__init__.py
|
reacher1130/AIGCN
|
8875c172a5fee88276bfcc666f5ad624e6aad937
|
[
"MIT"
] | null | null | null |
from . import util, AIGCN
| 9
| 25
| 0.703704
| 4
| 27
| 4.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.222222
| 27
| 2
| 26
| 13.5
| 0.904762
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
f126c7cbf96925763b7edcb0084c661f69fa147a
| 244
|
py
|
Python
|
photonpy/__init__.py
|
qnano/photonpy
|
9c03a1c9f4c2177c9c6fb3f2f16dfec2306006d4
|
[
"MIT"
] | 5
|
2021-04-29T21:06:05.000Z
|
2022-03-23T03:45:25.000Z
|
photonpy/__init__.py
|
qnano/photonpy
|
9c03a1c9f4c2177c9c6fb3f2f16dfec2306006d4
|
[
"MIT"
] | null | null | null |
photonpy/__init__.py
|
qnano/photonpy
|
9c03a1c9f4c2177c9c6fb3f2f16dfec2306006d4
|
[
"MIT"
] | 1
|
2021-06-18T12:39:28.000Z
|
2021-06-18T12:39:28.000Z
|
from .cpp.context import *
from .cpp.estimator import *
from .cpp.gaussian import Gauss3D_Calibration, GaussianPSFMethods
from .cpp.cspline import *
from .cpp.postprocess import *
from .smlm.dataset import Dataset
from .cpp.spotdetect import *
| 30.5
| 65
| 0.79918
| 32
| 244
| 6.0625
| 0.4375
| 0.216495
| 0.201031
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.004651
| 0.118852
| 244
| 7
| 66
| 34.857143
| 0.897674
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
f1303de7fc5669c0518ffc0b9c4534e1d88cba33
| 271
|
py
|
Python
|
happytransformer/cuda_detect.py
|
swcrazyfan/happy-transformer
|
8e62c90a976d6ee5237e35103aff8a78b84fe7ce
|
[
"Apache-2.0"
] | null | null | null |
happytransformer/cuda_detect.py
|
swcrazyfan/happy-transformer
|
8e62c90a976d6ee5237e35103aff8a78b84fe7ce
|
[
"Apache-2.0"
] | null | null | null |
happytransformer/cuda_detect.py
|
swcrazyfan/happy-transformer
|
8e62c90a976d6ee5237e35103aff8a78b84fe7ce
|
[
"Apache-2.0"
] | null | null | null |
import torch
import torch_xla
import torch_xla.core.xla_model as xm
def detect_cuda_device_number():
return torch.cuda.current_device() if torch.cuda.is_available() else -1
def detect_tpu_device_number():
return xm.xla_device().index if xm.xla_device() else -1
| 27.1
| 75
| 0.782288
| 46
| 271
| 4.326087
| 0.456522
| 0.165829
| 0.140704
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.008439
| 0.125461
| 271
| 9
| 76
| 30.111111
| 0.831224
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.285714
| true
| 0
| 0.428571
| 0.285714
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 6
|
f14ab11af2a1fe0ba0da42082f7fc9ac24452414
| 2,549
|
py
|
Python
|
autoencoders/mnist_old/dataloaders.py
|
dyth/generative_models
|
8f3aed5662d52b28e965cc67e7924415b06d82df
|
[
"MIT"
] | 1
|
2020-01-20T22:17:00.000Z
|
2020-01-20T22:17:00.000Z
|
autoencoders/mnist_old/dataloaders.py
|
dyth/autoencoders
|
8f3aed5662d52b28e965cc67e7924415b06d82df
|
[
"MIT"
] | null | null | null |
autoencoders/mnist_old/dataloaders.py
|
dyth/autoencoders
|
8f3aed5662d52b28e965cc67e7924415b06d82df
|
[
"MIT"
] | 1
|
2020-05-21T16:15:58.000Z
|
2020-05-21T16:15:58.000Z
|
#!/usr/bin/env python
"""
download mnist
"""
import torch.utils.data
from torchvision import datasets, transforms
def get_mnist(path, use_cuda, batch_size, test_batch_size):
'download into folder data if folder does not exist, then create dataloader'
kwargs = {'num_workers': 1, 'pin_memory': True} if use_cuda else {}
t = transforms.Compose([
transforms.ToTensor(),
transforms.Normalize((0.1307,), (0.3081,))
])
train_loader = torch.utils.data.DataLoader(
datasets.MNIST(path, train=True, download=True, transform=t),
batch_size=batch_size, shuffle=True, **kwargs
)
test_loader = torch.utils.data.DataLoader(
datasets.MNIST(path, train=False, download=True, transform=t),
batch_size=test_batch_size, shuffle=True, **kwargs
)
return train_loader, test_loader
def get_2d_mnist(path, use_cuda, batch_size, test_batch_size):
'download into folder data if folder does not exist, then create dataloader'
t = transforms.Compose([
transforms.Resize((28, 28)),
transforms.ToTensor(),
transforms.Normalize((0.1307,), (0.3081,))
])
kwargs = {'num_workers': 1, 'pin_memory': True} if use_cuda else {}
train_loader = torch.utils.data.DataLoader(
datasets.MNIST(path, train=True, download=True, transform=t),
batch_size=batch_size, shuffle=True, **kwargs
)
test_loader = torch.utils.data.DataLoader(
datasets.MNIST(path, train=False, download=True, transform=t),
batch_size=test_batch_size, shuffle=True, **kwargs
)
return train_loader, test_loader
def get_cifar10(path, use_cuda, batch_size, test_batch_size):
'download into folder data if folder does not exist, then create dataloader'
kwargs = {'num_workers': 1, 'pin_memory': True} if use_cuda else {}
t = transforms.Compose([
transforms.ToTensor(),
transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))
])
train_loader = torch.utils.data.DataLoader(
datasets.CIFAR10(path, train=True, download=True, transform=t),
batch_size=batch_size, shuffle=True, **kwargs
)
test_loader = torch.utils.data.DataLoader(
datasets.CIFAR10(path, train=False, download=True, transform=t),
batch_size=test_batch_size, shuffle=True, **kwargs
)
return train_loader, test_loader
if __name__ == '__main__':
use_cuda = torch.cuda.is_available()
path = '../data'
get_mnist(path, use_cuda, 64, 1000)
get_cifar10(path, use_cuda, 64, 1000)
| 33.539474
| 80
| 0.675951
| 338
| 2,549
| 4.902367
| 0.186391
| 0.097767
| 0.059143
| 0.065178
| 0.894387
| 0.85697
| 0.85697
| 0.853953
| 0.820157
| 0.788775
| 0
| 0.029412
| 0.199686
| 2,549
| 75
| 81
| 33.986667
| 0.782843
| 0.102001
| 0
| 0.642857
| 0
| 0
| 0.119713
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.053571
| false
| 0
| 0.035714
| 0
| 0.142857
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
f154bd20abfc1667dec87f83f9975cdb6d927cab
| 167
|
py
|
Python
|
physionet-django/console/admin.py
|
partizaans/physionet-build
|
ed2211c5c6d6584b73d73bf5ce48a554809c448b
|
[
"BSD-3-Clause"
] | null | null | null |
physionet-django/console/admin.py
|
partizaans/physionet-build
|
ed2211c5c6d6584b73d73bf5ce48a554809c448b
|
[
"BSD-3-Clause"
] | null | null | null |
physionet-django/console/admin.py
|
partizaans/physionet-build
|
ed2211c5c6d6584b73d73bf5ce48a554809c448b
|
[
"BSD-3-Clause"
] | null | null | null |
from django.contrib import admin
from physionet import models
# Register your models here.
admin.site.register(models.StaticPage)
admin.site.register(models.Section)
| 23.857143
| 38
| 0.826347
| 23
| 167
| 6
| 0.565217
| 0.130435
| 0.246377
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.095808
| 167
| 6
| 39
| 27.833333
| 0.913907
| 0.155689
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
f180b938eafb28c374af23c6881dcf2638832bdf
| 99
|
py
|
Python
|
tests/test_version.py
|
rchenzheng/datadog-api-client-python
|
2e86ac098c6f0c7fdd90ed218224587c0f8eafef
|
[
"Apache-2.0"
] | 32
|
2021-01-07T15:09:56.000Z
|
2022-01-30T05:49:23.000Z
|
tests/test_version.py
|
rchenzheng/datadog-api-client-python
|
2e86ac098c6f0c7fdd90ed218224587c0f8eafef
|
[
"Apache-2.0"
] | 228
|
2020-09-03T14:03:54.000Z
|
2022-03-31T20:16:12.000Z
|
tests/test_version.py
|
rchenzheng/datadog-api-client-python
|
2e86ac098c6f0c7fdd90ed218224587c0f8eafef
|
[
"Apache-2.0"
] | 12
|
2020-09-15T21:36:03.000Z
|
2022-03-31T17:13:17.000Z
|
def test_version():
from datadog_api_client.version import __version__
assert __version__
| 19.8
| 54
| 0.787879
| 12
| 99
| 5.583333
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.171717
| 99
| 4
| 55
| 24.75
| 0.817073
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
74dbb4df31657bc2d137e3529aa621c7c6255487
| 35
|
py
|
Python
|
OZoptics/__init__.py
|
gregmoille/InstrumentControl
|
4cc8477e36f7c4ad4bf4f54036fdd8dd985b4133
|
[
"MIT"
] | 3
|
2018-05-02T20:14:15.000Z
|
2020-10-18T03:57:09.000Z
|
OZoptics/__init__.py
|
gregmoille/InstrumentControl
|
4cc8477e36f7c4ad4bf4f54036fdd8dd985b4133
|
[
"MIT"
] | 1
|
2019-05-23T15:21:08.000Z
|
2019-05-23T15:21:08.000Z
|
OZoptics/__init__.py
|
gregmoille/InstrumentControl
|
4cc8477e36f7c4ad4bf4f54036fdd8dd985b4133
|
[
"MIT"
] | 2
|
2019-05-16T20:36:25.000Z
|
2020-09-22T18:26:49.000Z
|
from .attenuator import Attenuator
| 17.5
| 34
| 0.857143
| 4
| 35
| 7.5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.114286
| 35
| 1
| 35
| 35
| 0.967742
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
74deaa39454eab679de0c4a7602da700ce5da3ac
| 54
|
py
|
Python
|
freebasics/__init__.py
|
praekeltfoundation/mc-freebasics
|
ad9b2408aa97402a2be6444e619c4533663118fb
|
[
"BSD-2-Clause"
] | null | null | null |
freebasics/__init__.py
|
praekeltfoundation/mc-freebasics
|
ad9b2408aa97402a2be6444e619c4533663118fb
|
[
"BSD-2-Clause"
] | 29
|
2016-02-29T11:53:47.000Z
|
2018-04-05T07:46:15.000Z
|
freebasics/__init__.py
|
praekeltfoundation/mc2-freebasics
|
ad9b2408aa97402a2be6444e619c4533663118fb
|
[
"BSD-2-Clause"
] | null | null | null |
from .celery_app import app as the_celery_app # noqa
| 27
| 53
| 0.796296
| 10
| 54
| 4
| 0.7
| 0.45
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 54
| 1
| 54
| 54
| 0.888889
| 0.074074
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
744169f8bd09fb137430b729dafa2f616b2f1c8c
| 81
|
py
|
Python
|
SampleAIs/Sample_Daddies/__init__.py
|
YSabarad/monopyly
|
0460f2452c83846b6b9e3b234be411e12a86d69c
|
[
"MIT"
] | 4
|
2015-11-04T21:18:40.000Z
|
2020-12-26T21:15:23.000Z
|
SampleAIs/Sample_Daddies/__init__.py
|
YSabarad/monopyly
|
0460f2452c83846b6b9e3b234be411e12a86d69c
|
[
"MIT"
] | 2
|
2021-08-09T18:19:58.000Z
|
2021-08-10T14:44:54.000Z
|
SampleAIs/Sample_Daddies/__init__.py
|
YSabarad/monopyly
|
0460f2452c83846b6b9e3b234be411e12a86d69c
|
[
"MIT"
] | 6
|
2015-08-01T17:54:17.000Z
|
2022-02-28T00:00:21.000Z
|
from .generous_daddy import GenerousDaddyAI
from .mean_daddy import MeanDaddyAI
| 20.25
| 43
| 0.864198
| 10
| 81
| 6.8
| 0.7
| 0.323529
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 81
| 3
| 44
| 27
| 0.944444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
746c9bf4dec7a7e16415c4d33623b7ca8983d45a
| 200
|
py
|
Python
|
exercicios-Python/desaf021.py
|
marcelo-py/Exercicios-Python
|
d654d54821983897dbc377a2d3db97671dd75b5b
|
[
"MIT"
] | null | null | null |
exercicios-Python/desaf021.py
|
marcelo-py/Exercicios-Python
|
d654d54821983897dbc377a2d3db97671dd75b5b
|
[
"MIT"
] | null | null | null |
exercicios-Python/desaf021.py
|
marcelo-py/Exercicios-Python
|
d654d54821983897dbc377a2d3db97671dd75b5b
|
[
"MIT"
] | null | null | null |
#import pygame
#pygame.mixer.init()
#pygame.mixer.music.load('desaf021.mp3')
#pygame.mixer.music.play()
#while pygame.mixer.music.get_busy(): pass
import playsound
playsound.playsound('desaf021.mp3')
| 25
| 42
| 0.775
| 28
| 200
| 5.5
| 0.5
| 0.285714
| 0.311688
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.042328
| 0.055
| 200
| 7
| 43
| 28.571429
| 0.772487
| 0.685
| 0
| 0
| 0
| 0
| 0.206897
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
749cef63cc04b62780b7df39b205b0c77cd8eee3
| 104
|
py
|
Python
|
kwep.py
|
sumitgo/ram22
|
de12d2372d29ad238613163a96fbbc642b0adc90
|
[
"MIT"
] | null | null | null |
kwep.py
|
sumitgo/ram22
|
de12d2372d29ad238613163a96fbbc642b0adc90
|
[
"MIT"
] | null | null | null |
kwep.py
|
sumitgo/ram22
|
de12d2372d29ad238613163a96fbbc642b0adc90
|
[
"MIT"
] | 1
|
2021-06-06T03:15:43.000Z
|
2021-06-06T03:15:43.000Z
|
import os
os.system("chmod 777 /content/xorta/Miners/ethminer/v0.11.0_Nvidia_Optimized/Linux/ethminer")
| 34.666667
| 93
| 0.817308
| 17
| 104
| 4.882353
| 0.882353
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.070707
| 0.048077
| 104
| 2
| 94
| 52
| 0.767677
| 0
| 0
| 0
| 0
| 0
| 0.769231
| 0.673077
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
776c6ba9c381eb598371b95ed753ba05d13083fb
| 4,075
|
py
|
Python
|
modules/loss.py
|
ChenX17/aligntts
|
b9cf6ca5d3f07ad62edf8a7d2f9310d258e46b12
|
[
"MIT"
] | null | null | null |
modules/loss.py
|
ChenX17/aligntts
|
b9cf6ca5d3f07ad62edf8a7d2f9310d258e46b12
|
[
"MIT"
] | null | null | null |
modules/loss.py
|
ChenX17/aligntts
|
b9cf6ca5d3f07ad62edf8a7d2f9310d258e46b12
|
[
"MIT"
] | null | null | null |
'''
Date: 2021-01-23 18:37:19
LastEditors: Xi Chen(chenxi50@lenovo.com)
LastEditTime: 2021-02-02 23:30:55
'''
import torch
import torch.nn as nn
import torch.nn.functional as F
import hparams as hp
from utils.utils import get_mask_from_lengths
import math
class MDNLoss(nn.Module):
def __init__(self):
super(MDNLoss, self).__init__()
def forward(self, mu_sigma, melspec, text_lengths, mel_lengths):
# mu, sigma: B, L, F / melspec: B, F, T
B, L, _ = mu_sigma.size()
T = melspec.size(2)
x = melspec.transpose(1,2).unsqueeze(1) # B, 1, T, F
mu = torch.sigmoid(mu_sigma[:, :, :hp.n_mel_channels].unsqueeze(2)) # B, L, 1, F
log_sigma = mu_sigma[:, :, hp.n_mel_channels:].unsqueeze(2) # B, L, 1, F
exponential = -0.5*torch.sum((x-mu)*(x-mu)/log_sigma.exp()**2, dim=-1) # B, L, T
log_prob_matrix = exponential - (hp.n_mel_channels/2)*torch.log(torch.tensor(2*math.pi)) - 0.5 * log_sigma.sum(dim=-1)
log_alpha = mu_sigma.new_ones(B, L, T)*(-1e30)
log_alpha[:, 0, 0] = log_prob_matrix[:,0, 0]
# import pdb;pdb.set_trace()
# prob_matrix = torch.tensor(2*math.pi).exp()**(-0.5) * torch.exp(-0.5*torch.sum((x-mu)*(x-mu)/log_sigma.exp()**2, dim=-1))
# alpha = mu_sigma.new_ones(B, L, T)*(1e-30)
# alpha[:, 0, 0] = prob_matrix[:,0, 0]
# import pdb;pdb.set_trace()
for t in range(1, T):
prev_step = torch.cat([log_alpha[:, :, t-1:t], F.pad(log_alpha[:, :, t-1:t], (0,0,1,-1), value=-1e30)], dim=-1)
log_alpha[:, :, t] = torch.logsumexp(prev_step+1e-30, dim=-1)+log_prob_matrix[:, :, t]
# prev_step = torch.cat([alpha[:, :, t-1:t], F.pad(alpha[:, :, t-1:t], (0,0,1,-1), value=1e-30)], dim=-1)
# alpha[:, :, t] = torch.sum(prev_step+1e-30, dim=-1)*prob_matrix[:, :, t]
# scaler = torch.unsqueeze(1 / torch.sum(log_alpha[:, :, t], dim=1) + 1e-30, -1)
# log_alpha[:, :, t] = log_alpha[:, :, t] * scaler
alpha_last = log_alpha[torch.arange(B), text_lengths-1, mel_lengths-1]
# alpha_last = torch.log(alpha[torch.arange(B), text_lengths-1, mel_lengths-1])
mdn_loss = -alpha_last.mean()
return mdn_loss, log_prob_matrix
class MDNDNNLoss(nn.Module):
def __init__(self):
super(MDNDNNLoss, self).__init__()
def forward(self, probs, melspec, text_lengths, mel_lengths):
# mu, sigma: B, L, F / melspec: B, F, T
# B, L, _ = mu_sigma.size()
# probs: B, L, T
# import pdb;pdb.set_trace()
B, L, _ = probs.size()
T = melspec.size(2)
# x = melspec.transpose(1,2).unsqueeze(1) # B, 1, T, F
# mu = torch.sigmoid(mu_sigma[:, :, :hp.n_mel_channels].unsqueeze(2)) # B, L, 1, F
# log_sigma = mu_sigma[:, :, hp.n_mel_channels:].unsqueeze(2) # B, L, 1, F
# exponential = -0.5*torch.sum((x-mu)*(x-mu)/log_sigma.exp()**2, dim=-1) # B, L, T
log_prob_matrix = torch.log(probs+1e-30)
log_alpha = probs.new_ones(B, L, T)*(-1e30)
log_alpha[:, 0, 0] = log_prob_matrix[:,0, 0]
for t in range(1, T):
prev_step = torch.cat([log_alpha[:, :, t-1:t], F.pad(log_alpha[:, :, t-1:t], (0,0,1,-1), value=-1e30)], dim=-1)
log_alpha[:, :, t] = torch.logsumexp(prev_step+1e-30, dim=-1)+log_prob_matrix[:, :, t]
# prev_step = torch.cat([alpha[:, :, t-1:t], F.pad(alpha[:, :, t-1:t], (0,0,1,-1), value=1e-30)], dim=-1)
# alpha[:, :, t] = torch.sum(prev_step+1e-30, dim=-1)*prob_matrix[:, :, t]
# scaler = torch.unsqueeze(1 / torch.sum(log_alpha[:, :, t], dim=1) + 1e-30, -1)
# log_alpha[:, :, t] = log_alpha[:, :, t] * scaler
alpha_last = log_alpha[torch.arange(B), text_lengths-1, mel_lengths-1]
# alpha_last = torch.log(alpha[torch.arange(B), text_lengths-1, mel_lengths-1])
mdn_loss = -alpha_last.mean()
return mdn_loss, log_prob_matrix
| 43.351064
| 131
| 0.554847
| 654
| 4,075
| 3.272171
| 0.142202
| 0.074766
| 0.050467
| 0.029907
| 0.819626
| 0.772897
| 0.750467
| 0.750467
| 0.734579
| 0.71028
| 0
| 0.055483
| 0.248098
| 4,075
| 94
| 132
| 43.351064
| 0.64295
| 0.39092
| 0
| 0.45
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.1
| false
| 0
| 0.15
| 0
| 0.35
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
bb0f8c80573b02b52c8bc68ac28062639522bc50
| 156
|
py
|
Python
|
timidy/testing.py
|
meawoppl/tiMIDI
|
809f4dffd932dbd6acf4caacbb83f864648e37b5
|
[
"BSD-2-Clause"
] | null | null | null |
timidy/testing.py
|
meawoppl/tiMIDI
|
809f4dffd932dbd6acf4caacbb83f864648e37b5
|
[
"BSD-2-Clause"
] | null | null | null |
timidy/testing.py
|
meawoppl/tiMIDI
|
809f4dffd932dbd6acf4caacbb83f864648e37b5
|
[
"BSD-2-Clause"
] | null | null | null |
import unittest, sys
import timidi.tests
def test():
return unittest.main(timidi.tests)
if __name__ == "__main__":
sys.exit(0 if test() else 1)
| 14.181818
| 38
| 0.685897
| 23
| 156
| 4.304348
| 0.652174
| 0.222222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.015873
| 0.192308
| 156
| 10
| 39
| 15.6
| 0.769841
| 0
| 0
| 0
| 0
| 0
| 0.051282
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| true
| 0
| 0.333333
| 0.166667
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 6
|
24ed6db97e06104d586102270375326a08ab081c
| 47
|
py
|
Python
|
authlib/client/errors.py
|
moriyoshi/authlib
|
ce8b46b9e64ab799587b0a577cf122c1523c69a6
|
[
"BSD-3-Clause"
] | null | null | null |
authlib/client/errors.py
|
moriyoshi/authlib
|
ce8b46b9e64ab799587b0a577cf122c1523c69a6
|
[
"BSD-3-Clause"
] | 4
|
2021-03-19T08:17:59.000Z
|
2021-06-10T19:34:36.000Z
|
authlib/client/errors.py
|
moriyoshi/authlib
|
ce8b46b9e64ab799587b0a577cf122c1523c69a6
|
[
"BSD-3-Clause"
] | null | null | null |
from authlib.integrations.base_client import *
| 23.5
| 46
| 0.851064
| 6
| 47
| 6.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.085106
| 47
| 1
| 47
| 47
| 0.906977
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
24fa107e0d8aab1457cf2f2ed4f928364d613053
| 28
|
py
|
Python
|
app/main/__init__.py
|
AlchemistPrimus/data_crunchers_knbs
|
b6d5a73bdbfed3f4a99e7047bd0747f3653a7fd2
|
[
"MIT"
] | null | null | null |
app/main/__init__.py
|
AlchemistPrimus/data_crunchers_knbs
|
b6d5a73bdbfed3f4a99e7047bd0747f3653a7fd2
|
[
"MIT"
] | null | null | null |
app/main/__init__.py
|
AlchemistPrimus/data_crunchers_knbs
|
b6d5a73bdbfed3f4a99e7047bd0747f3653a7fd2
|
[
"MIT"
] | null | null | null |
import flask
import pandas
| 7
| 13
| 0.821429
| 4
| 28
| 5.75
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.178571
| 28
| 3
| 14
| 9.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
709373e914611e1ff6ded53aab77829a7df820fa
| 36
|
py
|
Python
|
autoscalingsim/scaling/policiesbuilder/adjustmentplacement/desired_adjustment_calculator/scoring/score/score_impl/__init__.py
|
Remit/autoscaling-simulator
|
091943c0e9eedf9543e9305682a067ab60f56def
|
[
"MIT"
] | 6
|
2021-03-10T16:23:10.000Z
|
2022-01-14T04:57:46.000Z
|
autoscalingsim/scaling/policiesbuilder/adjustmentplacement/desired_adjustment_calculator/scoring/score/score_impl/__init__.py
|
Remit/autoscaling-simulator
|
091943c0e9eedf9543e9305682a067ab60f56def
|
[
"MIT"
] | null | null | null |
autoscalingsim/scaling/policiesbuilder/adjustmentplacement/desired_adjustment_calculator/scoring/score/score_impl/__init__.py
|
Remit/autoscaling-simulator
|
091943c0e9eedf9543e9305682a067ab60f56def
|
[
"MIT"
] | 1
|
2022-01-14T04:57:55.000Z
|
2022-01-14T04:57:55.000Z
|
from .price_score import PriceScore
| 18
| 35
| 0.861111
| 5
| 36
| 6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 36
| 1
| 36
| 36
| 0.9375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
70a5b20648fb2faca3fcea6273bbcce781cc0bab
| 79
|
py
|
Python
|
quantumdl/__init__.py
|
dexterai-lab/quantumdl
|
eee35aca5df5933e20edc81d8b1afb3a54546ccc
|
[
"Apache-2.0"
] | null | null | null |
quantumdl/__init__.py
|
dexterai-lab/quantumdl
|
eee35aca5df5933e20edc81d8b1afb3a54546ccc
|
[
"Apache-2.0"
] | null | null | null |
quantumdl/__init__.py
|
dexterai-lab/quantumdl
|
eee35aca5df5933e20edc81d8b1afb3a54546ccc
|
[
"Apache-2.0"
] | null | null | null |
from quantumdl.models.quantummodel import *
from quantumdl.core.engine import *
| 39.5
| 43
| 0.835443
| 10
| 79
| 6.6
| 0.7
| 0.393939
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.088608
| 79
| 2
| 44
| 39.5
| 0.916667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
560c7a31500666fc1832cff4fb2239a042dbb88d
| 80
|
py
|
Python
|
isitup/util/__init__.py
|
Twi1ightSparkle/matrix
|
255191928c3378e55b929beb7e11c993a1bc7a8a
|
[
"MIT"
] | 2
|
2019-12-04T09:53:17.000Z
|
2020-07-10T23:51:11.000Z
|
isitup/util/__init__.py
|
Twi1ightSparkle/matrix
|
255191928c3378e55b929beb7e11c993a1bc7a8a
|
[
"MIT"
] | 2
|
2020-01-14T17:30:42.000Z
|
2020-04-30T16:55:28.000Z
|
isitup/util/__init__.py
|
Twi1ightSparkle/matrix
|
255191928c3378e55b929beb7e11c993a1bc7a8a
|
[
"MIT"
] | 1
|
2019-12-04T09:55:30.000Z
|
2019-12-04T09:55:30.000Z
|
from . import content
from . import http
from . import matrix
from . import sql
| 16
| 21
| 0.75
| 12
| 80
| 5
| 0.5
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.2
| 80
| 4
| 22
| 20
| 0.9375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
5684ffcd477f4ea36396a5ca9ab5a703ba143c38
| 39
|
py
|
Python
|
envs/__init__.py
|
addy1997/Grid
|
591d7b27a8d98f6e3f9ed68fbb29ab469eb3862a
|
[
"MIT"
] | 21
|
2020-06-27T03:06:23.000Z
|
2022-02-16T23:22:13.000Z
|
envs/__init__.py
|
addy1997/Grid
|
591d7b27a8d98f6e3f9ed68fbb29ab469eb3862a
|
[
"MIT"
] | 2
|
2020-09-23T12:27:33.000Z
|
2020-10-20T06:48:58.000Z
|
envs/__init__.py
|
addy1997/Grid
|
591d7b27a8d98f6e3f9ed68fbb29ab469eb3862a
|
[
"MIT"
] | 3
|
2020-09-22T22:19:24.000Z
|
2021-03-21T14:33:00.000Z
|
from Grid.envs.GridEnvironment import *
| 39
| 39
| 0.846154
| 5
| 39
| 6.6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.076923
| 39
| 1
| 39
| 39
| 0.916667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
569c356598b72f8d7b1a3145e8755a6815c7d761
| 29
|
py
|
Python
|
torchgeometry/losses/__init__.py
|
Wizaron/torchgeometry
|
59a8d25dd811ded6a139d5c0c2442b06f43dc775
|
[
"Apache-2.0"
] | 1
|
2020-08-14T04:09:30.000Z
|
2020-08-14T04:09:30.000Z
|
torchgeometry/losses/__init__.py
|
Wizaron/torchgeometry
|
59a8d25dd811ded6a139d5c0c2442b06f43dc775
|
[
"Apache-2.0"
] | null | null | null |
torchgeometry/losses/__init__.py
|
Wizaron/torchgeometry
|
59a8d25dd811ded6a139d5c0c2442b06f43dc775
|
[
"Apache-2.0"
] | 1
|
2020-05-21T12:35:10.000Z
|
2020-05-21T12:35:10.000Z
|
from .ssim import SSIM, ssim
| 14.5
| 28
| 0.758621
| 5
| 29
| 4.4
| 0.6
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.172414
| 29
| 1
| 29
| 29
| 0.916667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
56a8044b62c3025c4a2c62aea0ff5c1a527f72c3
| 8,753
|
py
|
Python
|
mlhiphy/tests/test_kernel.py
|
ratnania/mlhiphy
|
c75b5c4b5fbc557f77d234df001fe11b10681d7d
|
[
"MIT"
] | 6
|
2018-07-12T09:03:43.000Z
|
2019-10-29T09:50:34.000Z
|
mlhiphy/tests/test_kernel.py
|
ratnania/mlhiphy
|
c75b5c4b5fbc557f77d234df001fe11b10681d7d
|
[
"MIT"
] | null | null | null |
mlhiphy/tests/test_kernel.py
|
ratnania/mlhiphy
|
c75b5c4b5fbc557f77d234df001fe11b10681d7d
|
[
"MIT"
] | 4
|
2018-04-25T06:33:03.000Z
|
2020-03-13T02:25:07.000Z
|
# coding: utf-8
from mlhiphy.calculus import dx, dy, dz
from mlhiphy.calculus import Constant
from mlhiphy.calculus import Unknown
from mlhiphy.kernels import compute_kernel, generic_kernel
from sympy import expand
from sympy import Lambda
from sympy import Function, Derivative
from sympy import symbols
from sympy import exp
from sympy import Tuple
def test_generic_kernel_1d():
x, xi, xj = symbols('x xi xj')
u = Unknown('u')
# ... testing u
assert(generic_kernel(u, u, xi) == Function('u')(xi))
assert(generic_kernel(u, u, xj) == Function('u')(xj))
assert(generic_kernel(u, u, (xi, xj)) == Function('u')(xi, xj))
# ...
# ... testing dx(u)
assert(generic_kernel(dx(u), u, xi) == Derivative(Function('u')(xi), xi))
assert(generic_kernel(dx(u), u, xj) == Derivative(Function('u')(xj), xj))
assert(generic_kernel(dx(u), u, (xi, xj)) == Derivative(Function('u')(xi, xj), xi, xj))
# ...
# ... testing dx(dx(u))
assert(generic_kernel(dx(dx(u)), u, xi) == Derivative(Function('u')(xi), xi, xi))
assert(generic_kernel(dx(dx(u)), u, xj) == Derivative(Function('u')(xj), xj, xj))
assert(generic_kernel(dx(dx(u)), u, (xi, xj)) == Derivative(Function('u')(xi, xj), xi, xi, xj, xj))
# ...
def test_generic_kernel_2d():
x, xi, xj = symbols('x xi xj')
y, yi, yj = symbols('y yi yj')
X = Tuple(x,y)
Xi = Tuple(xi,yi)
Xj = Tuple(xj,yj)
u = Unknown('u')
# ... testing u
assert(generic_kernel(u, u, xi) == Function('u')(xi))
assert(generic_kernel(u, u, xj) == Function('u')(xj))
assert(generic_kernel(u, u, (xi, xj)) == Function('u')(xi, xj))
# ...
# ... testing dx(u)
assert(generic_kernel(dx(u), u, Xi) ==
Derivative(Function('u')(*Xi), xi))
assert(generic_kernel(dx(u), u, Xj) ==
Derivative(Function('u')(*Xj), xj))
assert(generic_kernel(dx(u), u, (Xi, Xj)) ==
Derivative(Function('u')(*Xi, *Xj), xi, xj))
# ...
# ... testing dy(u)
assert(generic_kernel(dy(u), u, Xi) ==
Derivative(Function('u')(*Xi), yi))
assert(generic_kernel(dy(u), u, Xj) ==
Derivative(Function('u')(*Xj), yj))
assert(generic_kernel(dy(u), u, (Xi, Xj)) ==
Derivative(Function('u')(*Xi, *Xj), yi, yj))
# ...
# ... testing dx(dx(u))
assert(generic_kernel(dx(dx(u)), u, Xi) ==
Derivative(Function('u')(*Xi), xi, xi))
assert(generic_kernel(dx(dx(u)), u, Xj) ==
Derivative(Function('u')(*Xj), xj, xj))
assert(generic_kernel(dx(dx(u)), u, (Xi, Xj)) ==
Derivative(Function('u')(*Xi, *Xj), xi, xi, xj, xj))
# ...
def test_generic_kernel_3d():
x, xi, xj = symbols('x xi xj')
y, yi, yj = symbols('y yi yj')
z, zi, zj = symbols('z zi zj')
X = Tuple(x,y,z)
Xi = Tuple(xi,yi,zi)
Xj = Tuple(xj,yj,zj)
u = Unknown('u')
# ... testing u
assert(generic_kernel(u, u, xi) == Function('u')(xi))
assert(generic_kernel(u, u, xj) == Function('u')(xj))
assert(generic_kernel(u, u, (xi, xj)) == Function('u')(xi, xj))
# ...
# ... testing dx(u)
assert(generic_kernel(dx(u), u, Xi) ==
Derivative(Function('u')(*Xi), xi))
assert(generic_kernel(dx(u), u, Xj) ==
Derivative(Function('u')(*Xj), xj))
assert(generic_kernel(dx(u), u, (Xi, Xj)) ==
Derivative(Function('u')(*Xi, *Xj), xi, xj))
# ...
# ... testing dy(u)
assert(generic_kernel(dy(u), u, Xi) ==
Derivative(Function('u')(*Xi), yi))
assert(generic_kernel(dy(u), u, Xj) ==
Derivative(Function('u')(*Xj), yj))
assert(generic_kernel(dy(u), u, (Xi, Xj)) ==
Derivative(Function('u')(*Xi, *Xj), yi, yj))
# ...
# ... testing dz(u)
assert(generic_kernel(dz(u), u, Xi) ==
Derivative(Function('u')(*Xi), zi))
assert(generic_kernel(dz(u), u, Xj) ==
Derivative(Function('u')(*Xj), zj))
assert(generic_kernel(dz(u), u, (Xi, Xj)) ==
Derivative(Function('u')(*Xi, *Xj), zi, zj))
# ...
# ... testing dx(dx(u))
assert(generic_kernel(dx(dx(u)), u, Xi) ==
Derivative(Function('u')(*Xi), xi, xi))
assert(generic_kernel(dx(dx(u)), u, Xj) ==
Derivative(Function('u')(*Xj), xj, xj))
assert(generic_kernel(dx(dx(u)), u, (Xi, Xj)) ==
Derivative(Function('u')(*Xi, *Xj), xi, xi, xj, xj))
# ...
def test_1d():
    """Smoke test: expand the generic kernel of a 1D operator in (xi, xj).

    Only prints the expanded kernel; no assertions are made.
    """
    x, xi, xj = symbols('x xi xj')
    unknown = Unknown('u')
    alpha = Constant('alpha')
    beta = Constant('beta')
    mu = Constant('mu')
    theta = Constant('theta')
    # Operator: mu*u + u' + u''  (constants alpha/beta/theta are kept
    # available for experimenting with alternative operators).
    operator = mu * unknown + dx(unknown) + dx(dx(unknown))
    kernel = generic_kernel(operator, unknown, (xi, xj))
    print('> generic_kernel := ', expand(kernel))
def test_2d():
    """Smoke test: expand the generic kernel of a 2D operator in (Xi, Xj).

    Only prints the expanded kernel; no assertions are made.
    """
    x, xi, xj = symbols('x xi xj')
    y, yi, yj = symbols('y yi yj')
    X = Tuple(x, y)
    Xi = Tuple(xi, yi)
    Xj = Tuple(xj, yj)
    unknown = Unknown('u')
    alpha = Constant('alpha')
    beta = Constant('beta')
    mu = Constant('mu')
    nu = Constant('nu')
    zeta = Constant('zeta')
    theta = Constant('theta')
    # Operator: mu*u + alpha*u_x + beta*u_xx + nu*u_yy + zeta*u_xy
    operator = (mu * unknown
                + alpha * dx(unknown)
                + beta * dx(dx(unknown))
                + nu * dy(dy(unknown))
                + zeta * dx(dy(unknown)))
    kernel = generic_kernel(operator, unknown, (Xi, Xj))
    print('> generic_kernel := ', expand(kernel))
def test_3d():
    """Smoke test: expand the generic kernel of a 3D operator in (Xi, Xj).

    Only prints the expanded kernel; no assertions are made.
    """
    x, xi, xj = symbols('x xi xj')
    y, yi, yj = symbols('y yi yj')
    z, zi, zj = symbols('z zi zj')
    X = Tuple(x, y, z)
    Xi = Tuple(xi, yi, zi)
    Xj = Tuple(xj, yj, zj)
    unknown = Unknown('u')
    alpha = Constant('alpha')
    beta = Constant('beta')
    mu = Constant('mu')
    nu = Constant('nu')
    theta = Constant('theta')
    # Operator: mu*u + alpha*u_x + beta*u_yz + nu*u_xz
    operator = (mu * unknown
                + alpha * dx(unknown)
                + beta * dy(dz(unknown))
                + nu * dx(dz(unknown)))
    kernel = generic_kernel(operator, unknown, (Xi, Xj))
    print('> generic_kernel := ', expand(kernel))
def test_est_2dkernel():
    """Example from Harsha: 2D RBF kernel pushed through an operator.

    Prints the generic kernel for ``phi*u + u_x + u_yy`` and the derived
    kernels kuf, kfu and kff computed from a squared-exponential kuu.
    """
    x, xi, xj = symbols('x xi xj')
    y, yi, yj = symbols('y yi yj')
    X = Tuple(x, y)
    Xi = Tuple(xi, yi)
    Xj = Tuple(xj, yj)
    unknown = Unknown('u')
    phi = Constant('phi')
    theta = Constant('theta')
    operator = phi * unknown + dx(unknown) + dy(dy(unknown))
    print('> generic_kernel := ',
          expand(generic_kernel(operator, unknown, (Xi, Xj))))
    print('')
    # Squared-exponential covariance between the two evaluation points.
    kuu = theta * exp(-0.5*((xi - xj)**2 + (yi - yj)**2))
    kuf = compute_kernel(operator, kuu, Xi)
    kfu = compute_kernel(operator, kuu, Xj)
    kff = compute_kernel(operator, kuu, (Xi, Xj))
    print('> kuf := ', kuf)
    print('> kfu := ', kfu)
    print('> kff := ', kff)
#############################################
if __name__ == '__main__':
    # Run the whole suite when executed as a script (no test runner needed).
    test_generic_kernel_1d()
    test_generic_kernel_2d()
    test_generic_kernel_3d()
    test_1d()
    test_2d()
    test_3d()
    test_est_2dkernel()
| 29.079734
| 103
| 0.529761
| 1,278
| 8,753
| 3.545383
| 0.050078
| 0.180755
| 0.15096
| 0.083425
| 0.878614
| 0.878614
| 0.870227
| 0.847936
| 0.845288
| 0.833149
| 0
| 0.004718
| 0.249286
| 8,753
| 300
| 104
| 29.176667
| 0.684827
| 0.260825
| 0
| 0.655629
| 0
| 0
| 0.050481
| 0
| 0
| 0
| 0
| 0
| 0.238411
| 1
| 0.046358
| false
| 0
| 0.066225
| 0
| 0.112583
| 0.05298
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
3b0d0bb6e3b33d892075a83da45c07e4f07692ba
| 395
|
py
|
Python
|
odoo_social_security/models/__init__.py
|
joytao-zhu/odooExtModel
|
8608aaeae7a8c86d53b68ce26b7b308f779c3dd8
|
[
"Apache-2.0"
] | 2
|
2019-12-06T04:47:49.000Z
|
2021-05-16T15:42:25.000Z
|
odoo_social_security/models/__init__.py
|
niulinlnc/odooExtModel
|
8608aaeae7a8c86d53b68ce26b7b308f779c3dd8
|
[
"Apache-2.0"
] | null | null | null |
odoo_social_security/models/__init__.py
|
niulinlnc/odooExtModel
|
8608aaeae7a8c86d53b68ce26b7b308f779c3dd8
|
[
"Apache-2.0"
] | 6
|
2020-03-01T08:16:07.000Z
|
2021-11-05T05:48:53.000Z
|
# -*- coding: utf-8 -*-
###################################################################################
# Copyright (C) 2019 SuXueFeng GNU
###################################################################################
from . import insured_scheme
from . import insured_scheme_emp
from . import insured_monthly_statement
from . import employee_month_report
from . import res_company
| 35.909091
| 83
| 0.443038
| 31
| 395
| 5.387097
| 0.645161
| 0.299401
| 0.305389
| 0.275449
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.013966
| 0.093671
| 395
| 10
| 84
| 39.5
| 0.452514
| 0.144304
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
3b237eb6d785b4f28b781db904ae69ac649a136f
| 120
|
py
|
Python
|
deepxde/icbcs/__init__.py
|
blutjens/deepxde
|
19855bb7790d0a9a327d4cc6921e6bc7a3c4c8cd
|
[
"Apache-2.0"
] | null | null | null |
deepxde/icbcs/__init__.py
|
blutjens/deepxde
|
19855bb7790d0a9a327d4cc6921e6bc7a3c4c8cd
|
[
"Apache-2.0"
] | 1
|
2022-03-02T03:50:18.000Z
|
2022-03-02T03:50:18.000Z
|
deepxde/icbcs/__init__.py
|
ZongrenZou/deepxde
|
f74b9a48165eee1984f4b43dec05f1129f77200f
|
[
"Apache-2.0"
] | null | null | null |
"""Initial conditions and boundary conditions."""
from .boundary_conditions import *
from .initial_conditions import *
| 24
| 49
| 0.791667
| 13
| 120
| 7.153846
| 0.461538
| 0.365591
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.116667
| 120
| 4
| 50
| 30
| 0.877358
| 0.358333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
3b84db58bb7604f83ae7b2ab94268207206a50bd
| 189
|
py
|
Python
|
Chapter01/19_iterator_example.py
|
add54/ADMIN_SYS_PYTHON
|
5a6d9705537c8663c8f7b0f45d29ccc87b6096e7
|
[
"MIT"
] | 116
|
2018-12-21T01:05:47.000Z
|
2022-03-23T21:41:41.000Z
|
Chapter01/19_iterator_example.py
|
add54/ADMIN_SYS_PYTHON
|
5a6d9705537c8663c8f7b0f45d29ccc87b6096e7
|
[
"MIT"
] | 2
|
2021-03-31T19:36:19.000Z
|
2021-06-10T22:29:26.000Z
|
Chapter01/19_iterator_example.py
|
add54/ADMIN_SYS_PYTHON
|
5a6d9705537c8663c8f7b0f45d29ccc87b6096e7
|
[
"MIT"
] | 147
|
2018-12-19T14:10:32.000Z
|
2022-03-20T11:03:20.000Z
|
# Demonstrate the iterator protocol on a list.
numbers = [10, 20, 30, 40]
numbers_iter = iter(numbers)

# next(it) and it.__next__() are equivalent ways to advance an iterator.
print(next(numbers_iter))
print(next(numbers_iter))
print(numbers_iter.__next__())
print(numbers_iter.__next__())

# All four elements are consumed, so the iterator is exhausted.  A bare
# next() here raised an unhandled StopIteration and crashed the script;
# catch it to demonstrate exhaustion without aborting.
try:
    next(numbers_iter)
except StopIteration:
    print("iterator exhausted")
| 23.625
| 30
| 0.772487
| 28
| 189
| 4.714286
| 0.285714
| 0.5
| 0.340909
| 0.30303
| 0.340909
| 0
| 0
| 0
| 0
| 0
| 0
| 0.045714
| 0.074074
| 189
| 7
| 31
| 27
| 0.708571
| 0
| 0
| 0.571429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.571429
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 6
|
8e797a9d3cf98d80cb13babcd73adff248c720b4
| 47
|
py
|
Python
|
tests/extension/src/test_project/sub.py
|
OriolAbril/sphinx-codeautolink
|
31660847baae16a5b2b7c6bfbb2cfd394791361a
|
[
"MIT"
] | 21
|
2021-09-13T15:53:32.000Z
|
2022-03-24T15:27:36.000Z
|
tests/extension/src/test_project/sub.py
|
OriolAbril/sphinx-codeautolink
|
31660847baae16a5b2b7c6bfbb2cfd394791361a
|
[
"MIT"
] | 89
|
2021-09-19T21:42:38.000Z
|
2022-03-25T09:09:39.000Z
|
tests/extension/src/test_project/sub.py
|
OriolAbril/sphinx-codeautolink
|
31660847baae16a5b2b7c6bfbb2cfd394791361a
|
[
"MIT"
] | 5
|
2021-10-14T03:08:39.000Z
|
2022-02-11T10:50:19.000Z
|
def subfoo():
    """No-op function living in a submodule; returns None."""
    return None
| 15.666667
| 32
| 0.595745
| 5
| 47
| 5.6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.191489
| 47
| 2
| 33
| 23.5
| 0.736842
| 0.468085
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| true
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 1
| 0
|
0
| 6
|
8e9c3960b847a6a0f304d1f30f9a6c7f5728ff69
| 153
|
py
|
Python
|
orbital/constants.py
|
getsentry/sentry-orbital
|
849ea623acaa09efd001c1fe95ee8d72ea9b1355
|
[
"Apache-2.0"
] | 6
|
2016-05-12T18:33:22.000Z
|
2020-12-16T15:48:34.000Z
|
orbital/constants.py
|
getsentry/sentry-orbital
|
849ea623acaa09efd001c1fe95ee8d72ea9b1355
|
[
"Apache-2.0"
] | 1
|
2019-10-23T06:35:48.000Z
|
2019-10-23T06:58:56.000Z
|
orbital/constants.py
|
getsentry/sentry-orbital
|
849ea623acaa09efd001c1fe95ee8d72ea9b1355
|
[
"Apache-2.0"
] | 1
|
2021-03-02T11:16:53.000Z
|
2021-03-02T11:16:53.000Z
|
from __future__ import absolute_import
from django.conf import settings

# UDP endpoint ("host:port") the orbital client sends to.  Read from the
# Django settings if present, otherwise default to localhost:5556.
ORBITAL_UDP_SERVER = getattr(settings, 'ORBITAL_UDP_SERVER', '127.0.0.1:5556')
| 25.5
| 78
| 0.810458
| 23
| 153
| 5
| 0.652174
| 0.26087
| 0.313043
| 0.417391
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.072464
| 0.098039
| 153
| 5
| 79
| 30.6
| 0.76087
| 0
| 0
| 0
| 0
| 0
| 0.20915
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
8ea11d382b357a5f3839336f76b007bba8878107
| 42
|
py
|
Python
|
react/__init__.py
|
Stift007/react.py
|
e0640cba48debdd745f9ee55fdc073c1927641f2
|
[
"MIT"
] | null | null | null |
react/__init__.py
|
Stift007/react.py
|
e0640cba48debdd745f9ee55fdc073c1927641f2
|
[
"MIT"
] | null | null | null |
react/__init__.py
|
Stift007/react.py
|
e0640cba48debdd745f9ee55fdc073c1927641f2
|
[
"MIT"
] | null | null | null |
from .app import *
from .globals import *
| 14
| 22
| 0.714286
| 6
| 42
| 5
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.190476
| 42
| 2
| 23
| 21
| 0.882353
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
8ecb48f1ca63f88945e39e134f2bc47984ce80f8
| 29
|
py
|
Python
|
fixipy/__init__.py
|
Jaydabi/pyfix
|
289e9d2e6e541b2b92efc4009d400238683b478e
|
[
"MIT"
] | null | null | null |
fixipy/__init__.py
|
Jaydabi/pyfix
|
289e9d2e6e541b2b92efc4009d400238683b478e
|
[
"MIT"
] | null | null | null |
fixipy/__init__.py
|
Jaydabi/pyfix
|
289e9d2e6e541b2b92efc4009d400238683b478e
|
[
"MIT"
] | null | null | null |
from .Message import Message
| 14.5
| 28
| 0.827586
| 4
| 29
| 6
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.137931
| 29
| 1
| 29
| 29
| 0.96
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
8ecf29ded5ed5dc435c4e78d5cbeed5cbe353d10
| 87
|
py
|
Python
|
resetpwd/models.py
|
catbei2020/ad-password-self-service
|
ffbe96425ddc56145181482ae25742f9e5e23fab
|
[
"Apache-2.0"
] | 1
|
2021-04-15T08:50:06.000Z
|
2021-04-15T08:50:06.000Z
|
resetpwd/models.py
|
catbei2020/ad-password-self-service
|
ffbe96425ddc56145181482ae25742f9e5e23fab
|
[
"Apache-2.0"
] | null | null | null |
resetpwd/models.py
|
catbei2020/ad-password-self-service
|
ffbe96425ddc56145181482ae25742f9e5e23fab
|
[
"Apache-2.0"
] | null | null | null |
from django.db import models
from django import forms
from django.contrib import auth
| 17.4
| 31
| 0.827586
| 14
| 87
| 5.142857
| 0.571429
| 0.416667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.149425
| 87
| 4
| 32
| 21.75
| 0.972973
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
d92429496b3242685cd81c7c6893105e638aac38
| 151
|
py
|
Python
|
app/stories/admin.py
|
Sherba/AdventureReader
|
011ead234118ab13ac1fbe969c00ef91e03d46f5
|
[
"MIT"
] | null | null | null |
app/stories/admin.py
|
Sherba/AdventureReader
|
011ead234118ab13ac1fbe969c00ef91e03d46f5
|
[
"MIT"
] | 7
|
2020-07-16T22:24:54.000Z
|
2022-03-12T00:41:13.000Z
|
app/stories/admin.py
|
Sherba/AdventureReader
|
011ead234118ab13ac1fbe969c00ef91e03d46f5
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from .models import Genre, Node, Post

# Expose the story models in the Django admin with default ModelAdmin options.
admin.site.register(Genre)
admin.site.register(Node)
admin.site.register(Post)
| 21.571429
| 37
| 0.801325
| 23
| 151
| 5.26087
| 0.478261
| 0.223141
| 0.421488
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.092715
| 151
| 6
| 38
| 25.166667
| 0.883212
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.4
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
d97c6c40c7faab8dc919021fba8744516dbbb201
| 4,655
|
py
|
Python
|
test/test_2_garage_compact_parking.py
|
jlarkin21/parking-garage-python
|
0a9188dc8da3bf8dc6f9534d9b1289b61608fc0f
|
[
"Apache-2.0"
] | null | null | null |
test/test_2_garage_compact_parking.py
|
jlarkin21/parking-garage-python
|
0a9188dc8da3bf8dc6f9534d9b1289b61608fc0f
|
[
"Apache-2.0"
] | null | null | null |
test/test_2_garage_compact_parking.py
|
jlarkin21/parking-garage-python
|
0a9188dc8da3bf8dc6f9534d9b1289b61608fc0f
|
[
"Apache-2.0"
] | null | null | null |
from typing import List
from garage.garage import Garage
from garage.parking_level import ParkingLevel
from garage.parking_space import ParkingSpace
from garage.vehicle import Vehicle
from garage.vehicle_type import VehicleType
from test.utils import TestHelpers
def test_standard_cars_are_rejected_from_compact_parking_space():
    """A garage made only of compact spaces must reject every standard car."""
    # Three levels, two compact spaces each.
    spaces = [ParkingSpace(compact=True) for _ in range(6)]
    levels = [ParkingLevel(spaces=spaces[i:i + 2]) for i in (0, 2, 4)]
    garage = Garage(levels=levels)

    kinds = [
        VehicleType.Compact,
        VehicleType.Car,
        VehicleType.Car,
        VehicleType.Compact,
        VehicleType.Car,
        VehicleType.Car,
    ]
    vehicles = [Vehicle(vehicle_type=kind) for kind in kinds]

    # Every Car (indices 1, 2, 4, 5) should bounce; the Compacts fit.
    expected_vehicles_rejected: List[Vehicle] = [
        vehicles[1],
        vehicles[2],
        vehicles[4],
        vehicles[5],
    ]

    actual_vehicles_rejected = garage.add_vehicles(vehicles)

    TestHelpers.assert_expected_vehicles_are_rejected(
        actual=actual_vehicles_rejected, expected=expected_vehicles_rejected
    )
def test_trucks_are_rejected_from_compact_parking_space():
    """A garage made only of compact spaces must reject every truck."""
    # Three levels, two compact spaces each.
    spaces = [ParkingSpace(compact=True) for _ in range(6)]
    levels = [ParkingLevel(spaces=spaces[i:i + 2]) for i in (0, 2, 4)]
    garage = Garage(levels=levels)

    kinds = [
        VehicleType.Compact,
        VehicleType.Truck,
        VehicleType.Truck,
        VehicleType.Truck,
        VehicleType.Compact,
        VehicleType.Truck,
    ]
    vehicles = [Vehicle(vehicle_type=kind) for kind in kinds]

    # Every Truck (indices 1, 2, 3, 5) should bounce; the Compacts fit.
    expected_vehicles_rejected: List[Vehicle] = [
        vehicles[1],
        vehicles[2],
        vehicles[3],
        vehicles[5],
    ]

    actual_vehicles_rejected = garage.add_vehicles(vehicles)

    TestHelpers.assert_expected_vehicles_are_rejected(
        actual=actual_vehicles_rejected, expected=expected_vehicles_rejected
    )
def test_compact_vehicles_are_prioritized_into_compact_parking_space():
    """Compact vehicles should be steered into compact spaces ahead of
    standard vehicles, per the expected level layout asserted below."""
    # Levels mix compact (a, d) and standard (b, c, e, f) spaces.
    parking_space_a = ParkingSpace(compact=True)
    parking_space_b = ParkingSpace()
    parking_space_c = ParkingSpace()
    parking_space_d = ParkingSpace(compact=True)
    parking_space_e = ParkingSpace()
    parking_space_f = ParkingSpace()
    parking_level_1 = ParkingLevel(spaces=[parking_space_a, parking_space_b])
    parking_level_2 = ParkingLevel(spaces=[parking_space_c, parking_space_d])
    parking_level_3 = ParkingLevel(spaces=[parking_space_e, parking_space_f])
    garage = Garage(levels=[parking_level_1, parking_level_2, parking_level_3])
    vehicle_1 = Vehicle(vehicle_type=VehicleType.Car)
    vehicle_2 = Vehicle(vehicle_type=VehicleType.Compact)
    vehicle_3 = Vehicle(vehicle_type=VehicleType.Compact)
    vehicle_4 = Vehicle(vehicle_type=VehicleType.Truck)
    vehicle_5 = Vehicle(vehicle_type=VehicleType.Compact)
    vehicle_6 = Vehicle(vehicle_type=VehicleType.Car)
    # Expected placement per level after adding all six vehicles.
    expected_vehicles_on_level_1: List[Vehicle] = [vehicle_2, vehicle_1]
    expected_vehicles_on_level_2: List[Vehicle] = [vehicle_4, vehicle_3]
    expected_vehicles_on_level_3: List[Vehicle] = [vehicle_5, vehicle_6]
    garage.add_vehicles(
        [vehicle_1, vehicle_2, vehicle_3, vehicle_4, vehicle_5, vehicle_6]
    )
    TestHelpers.assert_expected_vehicles_on_levels(
        levels=garage.levels,
        expected_vehicles=[
            expected_vehicles_on_level_1,
            expected_vehicles_on_level_2,
            expected_vehicles_on_level_3,
        ],
    )
| 38.155738
| 79
| 0.766917
| 591
| 4,655
| 5.604061
| 0.081218
| 0.144928
| 0.097826
| 0.157609
| 0.869867
| 0.836655
| 0.741546
| 0.741244
| 0.741244
| 0.72343
| 0
| 0.018839
| 0.156176
| 4,655
| 121
| 80
| 38.471074
| 0.824338
| 0
| 0
| 0.53125
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.03125
| 1
| 0.03125
| false
| 0
| 0.072917
| 0
| 0.104167
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
d98c8843e16ed96cadd68b517440caca1f2989d3
| 7,780
|
py
|
Python
|
tests/test_mixed.py
|
rentbrella/janus
|
d7970f8b76bcac2e087067ca4575ac845e481874
|
[
"Apache-2.0"
] | null | null | null |
tests/test_mixed.py
|
rentbrella/janus
|
d7970f8b76bcac2e087067ca4575ac845e481874
|
[
"Apache-2.0"
] | null | null | null |
tests/test_mixed.py
|
rentbrella/janus
|
d7970f8b76bcac2e087067ca4575ac845e481874
|
[
"Apache-2.0"
] | null | null | null |
import asyncio
import contextlib
import sys
import threading
import pytest
import janus
class TestMixedMode:
    """Tests exercising janus.Queue from both its sync and async sides,
    including close/wait_closed semantics and cross-thread hand-off."""

    @pytest.mark.skipif(
        sys.version_info < (3, 7),
        reason="forbidding implicit loop creation works on "
        "Python 3.7 or higher only",
    )
    def test_ctor_noloop(self):
        # Constructing a queue outside a running loop must fail.
        with pytest.raises(RuntimeError):
            janus.Queue()

    @pytest.mark.asyncio
    async def test_maxsize(self):
        q = janus.Queue(5)
        assert 5 == q.maxsize

    @pytest.mark.asyncio
    async def test_maxsize_named_param(self):
        q = janus.Queue(maxsize=7)
        assert 7 == q.maxsize

    @pytest.mark.asyncio
    async def test_maxsize_default(self):
        q = janus.Queue()
        assert 0 == q.maxsize

    @pytest.mark.asyncio
    async def test_unfinished(self):
        # unfinished_tasks is shared: the sync and async views must agree
        # after put / get / task_done.
        q = janus.Queue()
        assert q.sync_q.unfinished_tasks == 0
        assert q.async_q.unfinished_tasks == 0
        q.sync_q.put(1)
        assert q.sync_q.unfinished_tasks == 1
        assert q.async_q.unfinished_tasks == 1
        q.sync_q.get()
        # get() alone does not finish the task; task_done() does.
        assert q.sync_q.unfinished_tasks == 1
        assert q.async_q.unfinished_tasks == 1
        q.sync_q.task_done()
        assert q.sync_q.unfinished_tasks == 0
        assert q.async_q.unfinished_tasks == 0
        q.close()
        await q.wait_closed()

    @pytest.mark.asyncio
    async def test_sync_put_async_get(self):
        # Producer on a worker thread (sync side), consumer on the loop.
        loop = janus.current_loop()
        q = janus.Queue()

        def threaded():
            for i in range(5):
                q.sync_q.put(i)

        async def go():
            f = loop.run_in_executor(None, threaded)
            for i in range(5):
                val = await q.async_q.get()
                assert val == i
            assert q.async_q.empty()
            await f

        # Repeat to verify the queue is reusable across rounds.
        for i in range(3):
            await go()
        q.close()
        await q.wait_closed()

    @pytest.mark.asyncio
    async def test_sync_put_async_join(self):
        loop = janus.current_loop()
        q = janus.Queue()
        for i in range(5):
            q.sync_q.put(i)

        async def do_work():
            await asyncio.sleep(1)
            while True:
                await q.async_q.get()
                q.async_q.task_done()

        task = loop.create_task(do_work())

        async def wait_for_empty_queue():
            # join() returns once every item has been task_done()'d.
            await q.async_q.join()
            task.cancel()

        await wait_for_empty_queue()
        q.close()
        await q.wait_closed()

    @pytest.mark.asyncio
    async def test_async_put_sync_get(self):
        # Producer on the loop (async side), consumer on a worker thread.
        loop = janus.current_loop()
        q = janus.Queue()

        def threaded():
            for i in range(5):
                val = q.sync_q.get()
                assert val == i

        async def go():
            f = loop.run_in_executor(None, threaded)
            for i in range(5):
                await q.async_q.put(i)
            await f
            assert q.async_q.empty()

        for i in range(3):
            await go()
        q.close()
        await q.wait_closed()

    @pytest.mark.asyncio
    async def test_sync_join_async_done(self):
        # Thread puts then blocks in sync join; the loop drains and marks
        # each item done, which must release the join.
        loop = janus.current_loop()
        q = janus.Queue()

        def threaded():
            for i in range(5):
                q.sync_q.put(i)
            q.sync_q.join()

        async def go():
            f = loop.run_in_executor(None, threaded)
            for i in range(5):
                val = await q.async_q.get()
                assert val == i
                q.async_q.task_done()
            assert q.async_q.empty()
            await f

        for i in range(3):
            await go()
        q.close()
        await q.wait_closed()

    @pytest.mark.asyncio
    async def test_async_join_async_done(self):
        # Loop puts then awaits async join; the thread consumes and marks
        # each item done, which must release the join.
        loop = janus.current_loop()
        q = janus.Queue()

        def threaded():
            for i in range(5):
                val = q.sync_q.get()
                assert val == i
                q.sync_q.task_done()

        async def go():
            f = loop.run_in_executor(None, threaded)
            for i in range(5):
                await q.async_q.put(i)
            await q.async_q.join()
            await f
            assert q.async_q.empty()

        for i in range(3):
            await go()
        q.close()
        await q.wait_closed()

    @pytest.mark.asyncio
    async def test_wait_without_closing(self):
        # wait_closed() before close() is an error.
        q = janus.Queue()
        with pytest.raises(RuntimeError):
            await q.wait_closed()
        q.close()
        await q.wait_closed()

    @pytest.mark.asyncio
    async def test_modifying_forbidden_after_closing(self):
        # Every mutating operation on either side must raise once closed.
        q = janus.Queue()
        q.close()
        with pytest.raises(RuntimeError):
            q.sync_q.put(5)
        with pytest.raises(RuntimeError):
            q.sync_q.get()
        with pytest.raises(RuntimeError):
            q.sync_q.task_done()
        with pytest.raises(RuntimeError):
            await q.async_q.put(5)
        with pytest.raises(RuntimeError):
            q.async_q.put_nowait(5)
        with pytest.raises(RuntimeError):
            q.async_q.get_nowait()
        with pytest.raises(RuntimeError):
            await q.sync_q.task_done()
        await q.wait_closed()

    @pytest.mark.asyncio
    async def test_double_closing(self):
        # close() must be idempotent.
        q = janus.Queue()
        q.close()
        q.close()
        await q.wait_closed()

    @pytest.mark.asyncio
    async def test_closed(self):
        # The closed flag is mirrored on the queue and both views.
        q = janus.Queue()
        assert not q.closed
        assert not q.async_q.closed
        assert not q.sync_q.closed
        q.close()
        assert q.closed
        assert q.async_q.closed
        assert q.sync_q.closed

    @pytest.mark.asyncio
    async def test_async_join_after_closing(self):
        q = janus.Queue()
        q.close()
        with pytest.raises(RuntimeError), contextlib.suppress(asyncio.TimeoutError):
            await asyncio.wait_for(q.async_q.join(), timeout=0.1)
        await q.wait_closed()

    @pytest.mark.asyncio
    async def test_close_after_async_join(self):
        # Closing while a join is pending must wake the joiner with an error.
        q = janus.Queue()
        q.sync_q.put(1)
        task = asyncio.ensure_future(q.async_q.join())
        await asyncio.sleep(0.1)  # ensure tasks are blocking
        q.close()
        with pytest.raises(RuntimeError), contextlib.suppress(asyncio.TimeoutError):
            await asyncio.wait_for(task, timeout=0.1)
        await q.wait_closed()

    @pytest.mark.asyncio
    async def test_sync_join_after_closing(self):
        # A sync join attempted after close must raise; the exception is
        # ferried from the worker thread back to the loop via a Future.
        q = janus.Queue()
        q.sync_q.put(1)
        q.close()
        loop = asyncio.get_event_loop()
        fut = asyncio.Future()

        def sync_join():
            try:
                q.sync_q.join()
            except Exception as exc:
                loop.call_soon_threadsafe(fut.set_exception, exc)

        thr = threading.Thread(target=sync_join, daemon=True)
        thr.start()
        with pytest.raises(RuntimeError), contextlib.suppress(asyncio.TimeoutError):
            await asyncio.wait_for(fut, timeout=0.1)
        await q.wait_closed()

    @pytest.mark.asyncio
    async def test_close_after_sync_join(self):
        # Same as above but close() happens while the thread is already
        # blocked inside the sync join.
        q = janus.Queue()
        q.sync_q.put(1)
        loop = asyncio.get_event_loop()
        fut = asyncio.Future()

        def sync_join():
            try:
                q.sync_q.join()
            except Exception as exc:
                loop.call_soon_threadsafe(fut.set_exception, exc)

        thr = threading.Thread(target=sync_join, daemon=True)
        thr.start()
        thr.join(0.1)  # ensure tasks are blocking
        q.close()
        with pytest.raises(RuntimeError), contextlib.suppress(asyncio.TimeoutError):
            await asyncio.wait_for(fut, timeout=0.1)
        await q.wait_closed()
| 25.25974
| 84
| 0.559512
| 1,014
| 7,780
| 4.119329
| 0.106509
| 0.029926
| 0.035911
| 0.089538
| 0.843668
| 0.775916
| 0.751017
| 0.725162
| 0.678717
| 0.64113
| 0
| 0.009718
| 0.338689
| 7,780
| 307
| 85
| 25.34202
| 0.802138
| 0.006555
| 0
| 0.74026
| 0
| 0
| 0.008801
| 0
| 0
| 0
| 0
| 0
| 0.108225
| 1
| 0.030303
| false
| 0
| 0.025974
| 0
| 0.060606
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
7975240dd6c18db90f391221458c7bba4562b77b
| 142
|
py
|
Python
|
myfuc/__init__.py
|
kamomehz/waveletCodingCNN
|
50c7db9d986039ded38999b7e4f4265e2250fb90
|
[
"MIT"
] | null | null | null |
myfuc/__init__.py
|
kamomehz/waveletCodingCNN
|
50c7db9d986039ded38999b7e4f4265e2250fb90
|
[
"MIT"
] | null | null | null |
myfuc/__init__.py
|
kamomehz/waveletCodingCNN
|
50c7db9d986039ded38999b7e4f4265e2250fb90
|
[
"MIT"
] | null | null | null |
from .makeData import *
from .makeImg import *
from .makeNet import *
from .makeWT import *
from .makePlot import *
from .trainScript import *
| 23.666667
| 26
| 0.753521
| 18
| 142
| 5.944444
| 0.444444
| 0.46729
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.161972
| 142
| 6
| 26
| 23.666667
| 0.89916
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
8de6c776d17618f240864436759791cdd91db626
| 203
|
py
|
Python
|
test/input/099.py
|
EliRibble/pyfmt
|
e84a5531a7c06703eddd9dbc2072b0c8deae8c57
|
[
"MIT"
] | null | null | null |
test/input/099.py
|
EliRibble/pyfmt
|
e84a5531a7c06703eddd9dbc2072b0c8deae8c57
|
[
"MIT"
] | null | null | null |
test/input/099.py
|
EliRibble/pyfmt
|
e84a5531a7c06703eddd9dbc2072b0c8deae8c57
|
[
"MIT"
] | null | null | null |
def some_really_long_function_name(i):
    """Return ``i`` raised to the power of itself."""
    return i ** i


# Build the triples first, then print them in one call.
triples = [
    (
        some_really_long_function_name(n),
        some_really_long_function_name(n + 1),
        some_really_long_function_name(n + 3),
    )
    for n in range(10)
]
print(triples)
| 22.555556
| 38
| 0.763547
| 36
| 203
| 3.861111
| 0.416667
| 0.28777
| 0.402878
| 0.633094
| 0.776978
| 0.776978
| 0
| 0
| 0
| 0
| 0
| 0.022346
| 0.118227
| 203
| 8
| 39
| 25.375
| 0.75419
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.142857
| false
| 0
| 0
| 0.142857
| 0.285714
| 0.142857
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 6
|
5c2ebb255cd2f5d10a6fd9205b3b761afdfe2f05
| 44
|
py
|
Python
|
keras_dgl/__init__.py
|
michael-cowan/keras-deep-graph-learning
|
36854d374df931d063ada1c7ea3a5a2d67d3a8e4
|
[
"MIT"
] | null | null | null |
keras_dgl/__init__.py
|
michael-cowan/keras-deep-graph-learning
|
36854d374df931d063ada1c7ea3a5a2d67d3a8e4
|
[
"MIT"
] | null | null | null |
keras_dgl/__init__.py
|
michael-cowan/keras-deep-graph-learning
|
36854d374df931d063ada1c7ea3a5a2d67d3a8e4
|
[
"MIT"
] | null | null | null |
from keras_dgl._version import __version__
| 14.666667
| 42
| 0.863636
| 6
| 44
| 5.333333
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.113636
| 44
| 2
| 43
| 22
| 0.820513
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
30b5622b2a596ba0e2f6849f492624448263a86b
| 7,615
|
py
|
Python
|
conans/test/functional/toolchains/cmake/test_cmake_toolchain_xcode_flags.py
|
Mu-L/conan
|
7c24ec4bbd6e8c16cdcd879403aae742689bc36a
|
[
"MIT"
] | 1
|
2019-11-04T17:23:09.000Z
|
2019-11-04T17:23:09.000Z
|
conans/test/functional/toolchains/cmake/test_cmake_toolchain_xcode_flags.py
|
Mu-L/conan
|
7c24ec4bbd6e8c16cdcd879403aae742689bc36a
|
[
"MIT"
] | 1
|
2020-11-05T16:16:49.000Z
|
2020-11-05T16:16:49.000Z
|
conans/test/functional/toolchains/cmake/test_cmake_toolchain_xcode_flags.py
|
Mattlk13/conan
|
005fc53485557b0a570bb71670f2ca9c66082165
|
[
"MIT"
] | null | null | null |
import textwrap
import platform
import os
import pytest
from conans.test.utils.tools import TestClient
def _add_message_status_flags(client):
    """Append message(STATUS ...) lines for the Conan C/CXX flag variables
    to the client's CMakeLists.txt so the flags show up in build output."""
    cmakelists_path = os.path.join(client.current_folder, "CMakeLists.txt")
    status_lines = (
        'message(STATUS "CONAN_C_FLAGS: ${CONAN_C_FLAGS}")\n',
        'message(STATUS "CONAN_CXX_FLAGS: ${CONAN_CXX_FLAGS}")\n',
    )
    with open(cmakelists_path, "a") as cmakelists_file:
        for line in status_lines:
            cmakelists_file.write(line)
@pytest.mark.skipif(platform.system() != "Darwin", reason="Only OSX")
@pytest.mark.parametrize("op_system,os_version,sdk,arch", [
    ("watchOS", "8.1", "watchos", "armv7k"),
    ("tvOS", "13.2", "appletvos", "armv8")
])
def test_cmake_apple_bitcode_arc_and_visibility_flags_enabled(op_system, os_version, sdk, arch):
    """With bitcode/ARC/visibility all enabled in [conf], the generated
    conan_toolchain.cmake must define the corresponding Xcode attributes
    and the flags must be appended to CONAN_C_FLAGS / CONAN_CXX_FLAGS."""
    # Host profile targeting the parametrized Apple OS/SDK/arch.
    profile = textwrap.dedent("""
include(default)
[settings]
os={}
os.version={}
os.sdk={}
arch={}
[conf]
tools.apple:enable_bitcode=True
tools.apple:enable_arc=True
tools.apple:enable_visibility=True
""".format(op_system, os_version, sdk, arch))
    client = TestClient(path_with_spaces=False)
    client.save({"host": profile}, clean_first=True)
    client.run("new hello/0.1 --template=cmake_lib")
    # Instrument CMakeLists.txt so the flags are echoed during the build.
    _add_message_status_flags(client)
    client.run("install . --profile:build=default --profile:host=host")
    toolchain = client.load(os.path.join("build", "generators", "conan_toolchain.cmake"))
    # bitcode
    assert 'set(CMAKE_XCODE_ATTRIBUTE_ENABLE_BITCODE "YES")' in toolchain
    assert 'set(CMAKE_XCODE_ATTRIBUTE_BITCODE_GENERATION_MODE "bitcode")' in toolchain
    assert 'set(BITCODE "-fembed-bitcode")' in toolchain
    # arc
    assert 'set(FOBJC_ARC "-fobjc-arc")' in toolchain
    assert 'set(CMAKE_XCODE_ATTRIBUTE_CLANG_ENABLE_OBJC_ARC "YES")' in toolchain
    # visibility
    assert 'set(CMAKE_XCODE_ATTRIBUTE_GCC_SYMBOLS_PRIVATE_EXTERN "NO")' in toolchain
    assert 'set(VISIBILITY "-fvisibility=default")' in toolchain
    client.run("create . --profile:build=default --profile:host=host -tf None")
    # flags echoed by the instrumented CMakeLists.txt
    assert "-- CONAN_C_FLAGS: -fembed-bitcode -fobjc-arc" in client.out
    assert "-- CONAN_CXX_FLAGS: -fembed-bitcode -fvisibility=default -fobjc-arc" in client.out
    assert "[100%] Built target hello" in client.out
@pytest.mark.skipif(platform.system() != "Darwin", reason="Only OSX")
@pytest.mark.parametrize("op_system,os_version,sdk,arch", [
    ("watchOS", "8.1", "watchos", "armv7k"),
    ("tvOS", "13.2", "appletvos", "armv8")
])
def test_cmake_apple_bitcode_arc_and_visibility_flags_enabled_and_xcode_generator(op_system, os_version, sdk, arch):
    """
    Testing when all the Bitcode, ARC and Visibility are enabled, and Xcode as generator.
    Note: When using CMake and Xcode as generator, the C/CXX flags do not need to be appended.
    """
    # Host profile targeting the parametrized Apple OS/SDK/arch.
    profile = textwrap.dedent("""
include(default)
[settings]
os={}
os.version={}
os.sdk={}
arch={}
[conf]
tools.apple:enable_bitcode=True
tools.apple:enable_arc=True
tools.apple:enable_visibility=True
""".format(op_system, os_version, sdk, arch))
    client = TestClient(path_with_spaces=False)
    client.save({"host": profile}, clean_first=True)
    client.run("new hello/0.1 --template=cmake_lib")
    # Instrument CMakeLists.txt so the flags are echoed during the build.
    _add_message_status_flags(client)
    client.run("create . --profile:build=default --profile:host=host -tf None "
               "-c tools.cmake.cmaketoolchain:generator=Xcode")
    assert "** BUILD SUCCEEDED **" in client.out
    # flags are not appended when Xcode generator is used
    for line in str(client.out).splitlines():
        if "CONAN_C_FLAGS:" in line:
            assert "-- CONAN_C_FLAGS:" == line.strip()
        if "CONAN_CXX_FLAGS:" in line:
            assert "-- CONAN_CXX_FLAGS: -stdlib=libc++" == line.strip()
            break
@pytest.mark.skipif(platform.system() != "Darwin", reason="Only OSX")
@pytest.mark.parametrize("op_system,os_version,sdk,arch", [
    ("watchOS", "8.1", "watchos", "armv7k"),
    ("tvOS", "13.2", "appletvos", "armv8")
])
def test_cmake_apple_bitcode_arc_and_visibility_flags_disabled(op_system, os_version, sdk, arch):
    """With bitcode, ARC and visibility explicitly disabled, the toolchain must carry
    the "off" variants and omit the bitcode generation settings entirely."""
    profile = textwrap.dedent("""
        include(default)
        [settings]
        os={}
        os.version={}
        os.sdk={}
        arch={}
        [conf]
        tools.apple:enable_bitcode=False
        tools.apple:enable_arc=False
        tools.apple:enable_visibility=False
        """.format(op_system, os_version, sdk, arch))
    client = TestClient(path_with_spaces=False)
    client.save({"host": profile}, clean_first=True)
    client.run("new hello/0.1 --template=cmake_lib")
    _add_message_status_flags(client)
    client.run("install . --profile:build=default --profile:host=host")
    toolchain = client.load(os.path.join("build", "generators", "conan_toolchain.cmake"))
    present = (
        # bitcode off
        'set(CMAKE_XCODE_ATTRIBUTE_ENABLE_BITCODE "NO")',
        # arc off
        'set(FOBJC_ARC "-fno-objc-arc")',
        'set(CMAKE_XCODE_ATTRIBUTE_CLANG_ENABLE_OBJC_ARC "NO")',
        # visibility off
        'set(CMAKE_XCODE_ATTRIBUTE_GCC_SYMBOLS_PRIVATE_EXTERN "YES")',
        'set(VISIBILITY "-fvisibility=hidden -fvisibility-inlines-hidden")',
    )
    absent = (
        'set(CMAKE_XCODE_ATTRIBUTE_BITCODE_GENERATION_MODE "bitcode")',
        'set(BITCODE "-fembed-bitcode")',
    )
    for snippet in present:
        assert snippet in toolchain
    for snippet in absent:
        assert snippet not in toolchain
    client.run("create . --profile:build=default --profile:host=host -tf None")
    # flags appended to the compiler invocations
    assert "-- CONAN_C_FLAGS: -fno-objc-arc" in client.out
    assert "-- CONAN_CXX_FLAGS: -fvisibility=hidden -fvisibility-inlines-hidden -fno-objc-arc" in client.out
    assert "[100%] Built target hello" in client.out
@pytest.mark.skipif(platform.system() != "Darwin", reason="Only OSX")
@pytest.mark.parametrize("op_system,os_version,sdk,arch", [
    ("watchOS", "8.1", "watchos", "armv7k"),
    ("tvOS", "13.2", "appletvos", "armv8")
])
def test_cmake_apple_bitcode_arc_and_visibility_flags_are_none(op_system, os_version, sdk, arch):
    """
    When none of the bitcode/ARC/visibility configurations are defined, the
    toolchain must not contain any related setting and no flags are appended.
    """
    profile = textwrap.dedent("""
        include(default)
        [settings]
        os={}
        os.version={}
        os.sdk={}
        arch={}
        """.format(op_system, os_version, sdk, arch))
    client = TestClient(path_with_spaces=False)
    client.save({"host": profile}, clean_first=True)
    client.run("new hello/0.1 --template=cmake_lib")
    _add_message_status_flags(client)
    client.run("install . --profile:build=default --profile:host=host")
    toolchain = client.load(os.path.join("build", "generators", "conan_toolchain.cmake"))
    absent = (
        # bitcode
        'set(CMAKE_XCODE_ATTRIBUTE_ENABLE_BITCODE "NO")',
        'set(CMAKE_XCODE_ATTRIBUTE_BITCODE_GENERATION_MODE "bitcode")',
        'set(BITCODE "-fembed-bitcode")',
        # arc
        'set(FOBJC_ARC "-',
        'set(CMAKE_XCODE_ATTRIBUTE_CLANG_ENABLE_OBJC_ARC',
        # visibility
        'set(CMAKE_XCODE_ATTRIBUTE_GCC_SYMBOLS_PRIVATE_EXTERN',
        'set(VISIBILITY "-',
    )
    for snippet in absent:
        assert snippet not in toolchain
    client.run("create . --profile:build=default --profile:host=host -tf None")
    # no flags are appended
    for flag in ("-fembed-bitcode", "-fno-objc-arc", "-fobjc-arc", "-fvisibility"):
        assert flag not in client.out
    assert "[100%] Built target hello" in client.out
| 41.612022
| 116
| 0.686014
| 994
| 7,615
| 5.050302
| 0.151911
| 0.037649
| 0.023904
| 0.040637
| 0.824104
| 0.787649
| 0.754582
| 0.725299
| 0.710956
| 0.710956
| 0
| 0.007183
| 0.177282
| 7,615
| 182
| 117
| 41.840659
| 0.794094
| 0.056468
| 0
| 0.65493
| 0
| 0
| 0.482628
| 0.185766
| 0
| 0
| 0
| 0
| 0.225352
| 1
| 0.035211
| false
| 0
| 0.035211
| 0
| 0.070423
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
30c82617c258f8506e1b850d84c5f2fb41fb2b3b
| 19,963
|
py
|
Python
|
src/jgikbase/test/idmapping/core/user_lookup_test.py
|
jgi-kbase/IDMappingService
|
9d9f01662c4b09ac873174b7119d62828965e116
|
[
"MIT"
] | null | null | null |
src/jgikbase/test/idmapping/core/user_lookup_test.py
|
jgi-kbase/IDMappingService
|
9d9f01662c4b09ac873174b7119d62828965e116
|
[
"MIT"
] | 118
|
2018-07-13T18:43:07.000Z
|
2019-11-13T02:52:48.000Z
|
src/jgikbase/test/idmapping/core/user_lookup_test.py
|
jgi-kbase/IDMappingService
|
9d9f01662c4b09ac873174b7119d62828965e116
|
[
"MIT"
] | 1
|
2018-07-02T17:56:57.000Z
|
2018-07-02T17:56:57.000Z
|
from unittest.mock import create_autospec
from jgikbase.idmapping.storage.id_mapping_storage import IDMappingStorage
from jgikbase.idmapping.core.user_lookup import LocalUserLookup, UserLookupSet, UserLookup
from jgikbase.idmapping.core.user import AuthsourceID, User, Username
from jgikbase.idmapping.core.tokens import Token, HashedToken
from jgikbase.test.idmapping.test_utils import assert_exception_correct
from pytest import raises
from jgikbase.test.idmapping.core.tokens_test import is_base64
import time
from jgikbase.idmapping.core.errors import NoSuchAuthsourceError
def test_set_init_fail():
    """The UserLookupSet constructor rejects None and sets containing None."""
    lookup = create_autospec(UserLookup, spec_set=True, instance=True)
    fail_set_init(None, TypeError('user_lookup cannot be None'))
    fail_set_init({lookup, None}, TypeError('None item in user_lookup'))
def fail_set_init(handlers, expected):
    """Expect UserLookupSet(handlers) to raise an exception matching `expected`."""
    with raises(Exception) as excinfo:
        UserLookupSet(handlers)
    assert_exception_correct(excinfo.value, expected)
def test_set_get_user_default_cache_ttl():
    """get_user() results are cached for the default 300 second TTL."""
    lookup = create_autospec(UserLookup, spec_set=True, instance=True)
    clock = create_autospec(time.time, spec_set=True)
    lookup.get_authsource_id.return_value = AuthsourceID('as')
    lookup_set = UserLookupSet({lookup}, clock)
    check_set_get_user_default_cache_ttl(lookup_set, lookup, clock, [0, 299, 300, 301])
def test_set_get_user_default_cache_ttl_set_ttl():
    """A user-supplied cache TTL overrides the 300s default for get_user()."""
    for ttl, times in ((100, [0, 99, 100, 101]), (500, [0, 499, 500, 501])):
        check_set_get_user_default_cache_ttl_set_ttl(ttl, times)
def check_set_get_user_default_cache_ttl_set_ttl(ttl, timervals):
    """Build a lookup set with cache_user_expiration=ttl and run the TTL cycle check."""
    lookup = create_autospec(UserLookup, spec_set=True, instance=True)
    clock = create_autospec(time.time, spec_set=True)
    lookup.get_authsource_id.return_value = AuthsourceID('as')
    lookup_set = UserLookupSet({lookup}, clock, cache_user_expiration=ttl)
    check_set_get_user_default_cache_ttl(lookup_set, lookup, clock, timervals)
def check_set_get_user_default_cache_ttl(hset, handler, timer, timervals):
    """Drive hset.get_user() through a miss / hit / expiry / re-cache cycle.

    timervals: four mocked clock readings — [initial miss, just inside the TTL,
    at/after expiry, just after re-caching]. The handler must be queried exactly
    twice: on the first miss and on the post-expiry miss.
    """
    # handler returns (user, is_admin, epoch_expiry, rel_expiry); no custom TTLs here
    handler.get_user.return_value = (User(AuthsourceID('as'), Username('u')), False, None, None)
    timer.return_value = timervals[0]
    # user will not be in cache
    assert hset.get_user(AuthsourceID('as'), Token('t')) == \
        (User(AuthsourceID('as'), Username('u')), False)
    # user is now cached
    handler.get_user.return_value = None  # should cause error if called from now on
    timer.return_value = timervals[1]  # just below default cache time
    assert hset.get_user(AuthsourceID('as'), Token('t')) == \
        (User(AuthsourceID('as'), Username('u')), False)
    # now expire the user; admin flag flips to True so a fresh fetch is observable
    handler.get_user.return_value = (User(AuthsourceID('as'), Username('u')), True, None, None)
    timer.return_value = timervals[2]
    assert hset.get_user(AuthsourceID('as'), Token('t')) == \
        (User(AuthsourceID('as'), Username('u')), True)
    # get the user again, should be cached.
    handler.get_user.return_value = None  # should cause error if called from now on
    timer.return_value = timervals[3]
    assert hset.get_user(AuthsourceID('as'), Token('t')) == \
        (User(AuthsourceID('as'), Username('u')), True)
    # exactly two real lookups happened
    assert handler.get_user.call_args_list == [((Token('t'),), {}), ((Token('t'),), {})]
def test_set_get_user_cache_max_count():
    """The get_user() cache evicts the least-recently-used entry beyond cache_max_size.

    With cache_max_size=2, adding a third user must evict the first; re-fetching
    user 1 then requires a fourth handler call while users 2 and 3 stay cached.
    """
    # testing the default of 10k is just silly, not going to bother.
    handler = create_autospec(UserLookup, spec_set=True, instance=True)
    timer = create_autospec(time.time, spec_set=True)
    handler.get_authsource_id.return_value = AuthsourceID('as')
    hset = UserLookupSet(set([handler]), timer, cache_max_size=2)
    # add user 1
    handler.get_user.return_value = (User(AuthsourceID('as'), Username('u1')), False, None, None)
    timer.return_value = 0
    assert hset.get_user(AuthsourceID('as'), Token('t1')) == \
        (User(AuthsourceID('as'), Username('u1')), False)
    # add user 2
    handler.get_user.return_value = (User(AuthsourceID('as'), Username('u2')), True, None, None)
    timer.return_value = 1
    assert hset.get_user(AuthsourceID('as'), Token('t2')) == \
        (User(AuthsourceID('as'), Username('u2')), True)
    # add user 3, user 1 should now be evicted from the cache
    handler.get_user.return_value = (User(AuthsourceID('as'), Username('u3')), False, None, None)
    timer.return_value = 2
    assert hset.get_user(AuthsourceID('as'), Token('t3')) == \
        (User(AuthsourceID('as'), Username('u3')), False)
    # should only need a handler call for user 1 at this point
    handler.get_user.return_value = (User(AuthsourceID('as'), Username('u1')), True, None, None)
    timer.return_value = 3
    # get the 3 users. Get user 1 last otherwise it'll evict user 2 from the cache
    assert hset.get_user(AuthsourceID('as'), Token('t2')) == \
        (User(AuthsourceID('as'), Username('u2')), True)
    assert hset.get_user(AuthsourceID('as'), Token('t3')) == \
        (User(AuthsourceID('as'), Username('u3')), False)
    assert hset.get_user(AuthsourceID('as'), Token('t1')) == \
        (User(AuthsourceID('as'), Username('u1')), True)
    # check that the calls to get_user are as expected:
    assert handler.get_user.call_args_list == [((Token('t1'),), {}),
                                               ((Token('t2'),), {}),
                                               ((Token('t3'),), {}),
                                               ((Token('t1'),), {})]
def test_set_get_user_rel_ttl():
    """A handler-supplied relative TTL alone controls get_user() cache expiry."""
    check_set_get_user_handler_ttl(epoch=None, rel=3, timervals=[100, 102, 103])
def test_set_get_user_epoch_ttl():
    """A handler-supplied absolute (epoch) TTL alone controls get_user() cache expiry."""
    check_set_get_user_handler_ttl(epoch=1003, rel=None, timervals=[1000, 1002, 1003])
def test_set_get_user_epoch_lt_rel_ttl():
    """Both TTLs provided and the epoch TTL is the closer one — it must win."""
    check_set_get_user_handler_ttl(epoch=1003, rel=6, timervals=[1000, 1002, 1003])
def test_set_get_user_rel_lt_epoch_ttl():
    """Both TTLs provided and the relative TTL is the closer one — it must win."""
    check_set_get_user_handler_ttl(epoch=1007, rel=4, timervals=[1000, 1003, 1004])
def check_set_get_user_handler_ttl(epoch, rel, timervals):
    """Check that a handler-supplied TTL (absolute `epoch` and/or relative `rel`)
    governs get_user() caching.

    timervals: [initial miss, still cached, expired]. The handler must be
    queried exactly twice.
    """
    handler = create_autospec(UserLookup, spec_set=True, instance=True)
    timer = create_autospec(time.time, spec_set=True)
    handler.get_authsource_id.return_value = AuthsourceID('as')
    hset = UserLookupSet(set([handler]), timer)
    handler.get_user.return_value = (User(AuthsourceID('as'), Username('u1')), False, epoch, rel)
    timer.return_value = timervals[0]
    # cache user for X secs
    assert hset.get_user(AuthsourceID('as'), Token('t')) == \
        (User(AuthsourceID('as'), Username('u1')), False)
    # force an error if the handler is called
    handler.get_user.return_value = None
    timer.return_value = timervals[1]
    assert hset.get_user(AuthsourceID('as'), Token('t')) == \
        (User(AuthsourceID('as'), Username('u1')), False)
    # expect handler call at Y sec; admin flag flips so the fresh fetch is observable
    handler.get_user.return_value = (User(AuthsourceID('as'), Username('u1')), True, epoch, rel)
    timer.return_value = timervals[2]
    assert hset.get_user(AuthsourceID('as'), Token('t')) == \
        (User(AuthsourceID('as'), Username('u1')), True)
    # check correct number of calls to get_user
    assert handler.get_user.call_args_list == [((Token('t'),), {}), ((Token('t'),), {})]
def test_set_get_user_fail_None_input():
    """get_user() rejects a None authsource id and a None token."""
    lookup_set = UserLookupSet(set())
    cases = (
        (None, Token('t'), TypeError('authsource_id cannot be None')),
        (AuthsourceID('a'), None, TypeError('token cannot be None')),
    )
    for asid, token, err in cases:
        fail_set_get_user(lookup_set, asid, token, err)
def test_set_get_user_no_authsource():
    """get_user() fails when no registered handler matches the authsource."""
    lookup = create_autospec(UserLookup, spec_set=True, instance=True)
    lookup.get_authsource_id.return_value = AuthsourceID('as')
    lookup_set = UserLookupSet({lookup})
    fail_set_get_user(lookup_set, AuthsourceID('bs'), Token('t'),
                      NoSuchAuthsourceError('bs'))
def fail_set_get_user(hset, authsource_id, token, expected):
    """Expect hset.get_user(authsource_id, token) to raise `expected`."""
    with raises(Exception) as excinfo:
        hset.get_user(authsource_id, token)
    assert_exception_correct(excinfo.value, expected)
def test_set_is_valid_user_default_cache_ttl():
    """is_valid_user() results are cached for the default 3600 second TTL."""
    lookup = create_autospec(UserLookup, spec_set=True, instance=True)
    clock = create_autospec(time.time, spec_set=True)
    lookup.get_authsource_id.return_value = AuthsourceID('as')
    lookup_set = UserLookupSet({lookup}, clock)
    check_set_is_valid_user_default_cache_ttl(lookup_set, lookup, clock, [0, 3599, 3600, 3601])
def test_set_is_valid_user_default_cache_ttl_set_ttl():
    """A user-supplied cache TTL overrides the 3600s default for is_valid_user()."""
    for ttl, times in ((100, [0, 99, 100, 101]), (10000, [0, 9999, 10000, 10001])):
        check_set_is_valid_user_default_cache_ttl_set_ttl(ttl, times)
def check_set_is_valid_user_default_cache_ttl_set_ttl(ttl, timervals):
    """Build a lookup set with cache_is_valid_expiration=ttl and run the TTL cycle check."""
    lookup = create_autospec(UserLookup, spec_set=True, instance=True)
    clock = create_autospec(time.time, spec_set=True)
    lookup.get_authsource_id.return_value = AuthsourceID('as')
    lookup_set = UserLookupSet({lookup}, clock, cache_is_valid_expiration=ttl)
    check_set_is_valid_user_default_cache_ttl(lookup_set, lookup, clock, timervals)
def check_set_is_valid_user_default_cache_ttl(hset, handler, timer, timervals):
    """Drive hset.is_valid_user() through a miss / hit / expiry / re-cache cycle.

    timervals: four mocked clock readings — [initial miss, just inside the TTL,
    at/after expiry, just after re-caching]. The handler must be queried exactly
    twice: on the first miss and on the post-expiry miss.
    """
    # handler returns (valid, epoch_expiry, rel_expiry); no custom TTLs here
    handler.is_valid_user.return_value = (True, None, None)
    timer.return_value = timervals[0]
    # user will not be in cache
    assert hset.is_valid_user(User(AuthsourceID('as'), Username('u'))) is True
    # user is now cached
    handler.is_valid_user.return_value = None  # should cause error if called from now on
    timer.return_value = timervals[1]  # just below default cache time
    assert hset.is_valid_user(User(AuthsourceID('as'), Username('u'))) is True
    # now expire the user
    handler.is_valid_user.return_value = (True, None, None)
    timer.return_value = timervals[2]
    assert hset.is_valid_user(User(AuthsourceID('as'), Username('u'))) is True
    # get the user again, should be cached
    handler.is_valid_user.return_value = None  # should cause error if called from now on
    timer.return_value = timervals[3]
    assert hset.is_valid_user(User(AuthsourceID('as'), Username('u'))) is True
    # exactly two real lookups happened
    assert handler.is_valid_user.call_args_list == [((Username('u'),), {}), ((Username('u'),), {})]
def test_set_is_valid_user_invalid_user():
    """An invalid (False) result from the handler must not be cached.

    The second call at t=10 — well inside the default 3600s TTL — must still
    reach the handler, so call_args_list shows two calls.
    """
    # invalid users shouldn't get cached.
    handler = create_autospec(UserLookup, spec_set=True, instance=True)
    timer = create_autospec(time.time, spec_set=True)
    handler.get_authsource_id.return_value = AuthsourceID('as')
    hset = UserLookupSet(set([handler]), timer)
    handler.is_valid_user.return_value = (False, None, None)
    timer.return_value = 0
    # user will not be in cache
    assert hset.is_valid_user(User(AuthsourceID('as'), Username('u'))) is False
    # would normally expect a cache time of 3600s, but should not be cached here.
    timer.return_value = 10
    assert hset.is_valid_user(User(AuthsourceID('as'), Username('u'))) is False
    assert handler.is_valid_user.call_args_list == [((Username('u'),), {}), ((Username('u'),), {})]
def test_set_is_valid_user_cache_max_count():
    """The is_valid_user() cache evicts the least-recently-used entry beyond cache_max_size.

    With cache_max_size=2, adding a third user must evict the first; re-checking
    user 1 then requires a fourth handler call while users 2 and 3 stay cached.
    """
    # testing the default of 10k is just silly, not going to bother.
    handler = create_autospec(UserLookup, spec_set=True, instance=True)
    timer = create_autospec(time.time, spec_set=True)
    handler.get_authsource_id.return_value = AuthsourceID('as')
    hset = UserLookupSet(set([handler]), timer, cache_max_size=2)
    # add user 1
    handler.is_valid_user.return_value = (True, None, None)
    timer.return_value = 0
    assert hset.is_valid_user(User(AuthsourceID('as'), Username('u1'))) is True
    # add user 2. Don't need another return value for is_valid_user, has to be True to cache
    timer.return_value = 1
    assert hset.is_valid_user(User(AuthsourceID('as'), Username('u2'))) is True
    # add user 3, user 1 should now be evicted from the cache
    timer.return_value = 2
    assert hset.is_valid_user(User(AuthsourceID('as'), Username('u3'))) is True
    # force an assert fail if is_valid_user is called early:
    handler.is_valid_user.return_value = (False, None, None)
    timer.return_value = 3
    # get the 3 users. Get user 1 last otherwise it'll evict user 2 from the cache
    assert hset.is_valid_user(User(AuthsourceID('as'), Username('u2'))) is True
    assert hset.is_valid_user(User(AuthsourceID('as'), Username('u3'))) is True
    # get user 1
    handler.is_valid_user.return_value = (True, None, None)
    assert hset.is_valid_user(User(AuthsourceID('as'), Username('u1'))) is True
    # check that the calls to is_valid_user are as expected:
    assert handler.is_valid_user.call_args_list == [((Username('u1'),), {}),
                                                    ((Username('u2'),), {}),
                                                    ((Username('u3'),), {}),
                                                    ((Username('u1'),), {})]
def test_set_is_valid_user_rel_ttl():
    """A handler-supplied relative TTL alone controls is_valid_user() cache expiry."""
    check_set_is_valid_user_handler_ttl(epoch=None, rel=3, timervals=[100, 102, 103])
def test_set_is_valid_user_epoch_ttl():
    """A handler-supplied absolute (epoch) TTL alone controls is_valid_user() expiry."""
    check_set_is_valid_user_handler_ttl(epoch=1003, rel=None, timervals=[1000, 1002, 1003])
def test_set_is_valid_user_epoch_lt_rel_ttl():
    """Both TTLs provided and the epoch TTL is the closer one — it must win."""
    check_set_is_valid_user_handler_ttl(epoch=1003, rel=6, timervals=[1000, 1002, 1003])
def test_set_is_valid_user_rel_lt_epoch_ttl():
    """Both TTLs provided and the relative TTL is the closer one — it must win."""
    check_set_is_valid_user_handler_ttl(epoch=1007, rel=4, timervals=[1000, 1003, 1004])
def check_set_is_valid_user_handler_ttl(epoch, rel, timervals):
    """Check that a handler-supplied TTL (absolute `epoch` and/or relative `rel`)
    governs is_valid_user() caching.

    timervals: [initial miss, still cached, expired]. The handler must be
    queried exactly twice.
    """
    handler = create_autospec(UserLookup, spec_set=True, instance=True)
    timer = create_autospec(time.time, spec_set=True)
    handler.get_authsource_id.return_value = AuthsourceID('as')
    hset = UserLookupSet(set([handler]), timer)
    handler.is_valid_user.return_value = (True, epoch, rel)
    timer.return_value = timervals[0]
    # cache user for X secs
    assert hset.is_valid_user(User(AuthsourceID('as'), Username('u1'))) is True
    # force an error if the handler is called
    handler.is_valid_user.return_value = None
    timer.return_value = timervals[1]
    assert hset.is_valid_user(User(AuthsourceID('as'), Username('u1'))) is True
    # expect handler call at Y sec
    handler.is_valid_user.return_value = (True, epoch, rel)
    timer.return_value = timervals[2]
    assert hset.is_valid_user(User(AuthsourceID('as'), Username('u1'))) is True
    # check correct number of calls to is_valid_user
    assert handler.is_valid_user.call_args_list == [((Username('u1'),), {}),
                                                    ((Username('u1'),), {})]
def test_set_is_valid_user_None_inputs():
    """is_valid_user() rejects a None user."""
    lookup_set = UserLookupSet(set())
    fail_set_is_valid_user(lookup_set, None, TypeError('user cannot be None'))
def test_set_is_valid_user_no_authsource():
    """is_valid_user() fails when no registered handler matches the authsource."""
    lookup = create_autospec(UserLookup, spec_set=True, instance=True)
    lookup.get_authsource_id.return_value = AuthsourceID('as')
    lookup_set = UserLookupSet({lookup})
    fail_set_is_valid_user(lookup_set, User(AuthsourceID('bs'), Username('n')),
                           NoSuchAuthsourceError('bs'))
def fail_set_is_valid_user(hset, user, expected):
    """Expect hset.is_valid_user(user) to raise `expected`."""
    with raises(Exception) as excinfo:
        hset.is_valid_user(user)
    assert_exception_correct(excinfo.value, expected)
def test_local_init_fail():
    """LocalUserLookup requires a storage instance."""
    with raises(Exception) as excinfo:
        LocalUserLookup(None)
    assert_exception_correct(excinfo.value, TypeError('storage cannot be None'))
def test_local_get_authsource():
    """The local lookup always reports the 'local' authsource."""
    storage = create_autospec(IDMappingStorage, spec_set=True, instance=True)
    lookup = LocalUserLookup(storage)
    assert lookup.get_authsource_id() == AuthsourceID('local')
def test_local_get_user_admin():
    """get_user() reports both admin and non-admin users correctly."""
    for isadmin in (True, False):
        check_local_get_user_admin(isadmin)
def check_local_get_user_admin(isadmin):
    """Check get_user() returns the stored user with the given admin flag and a
    300s relative cache TTL, and that the token is hashed before hitting storage.
    """
    storage = create_autospec(IDMappingStorage, spec_set=True, instance=True)
    storage.get_user.return_value = (Username('bar'), isadmin)
    assert LocalUserLookup(storage).get_user(Token('foo')) == \
        (User(AuthsourceID('local'), Username('bar')), isadmin, None, 300)
    # presumably the SHA-256 hex digest of the token string 'foo' — TODO confirm
    # against the Token.get_hashed_token implementation
    thash = '2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae'
    assert storage.get_user.call_args_list == [((HashedToken(thash),), {})]
def test_local_get_user_fail():
    """get_user() rejects a None token."""
    storage = create_autospec(IDMappingStorage, spec_set=True, instance=True)
    with raises(Exception) as excinfo:
        LocalUserLookup(storage).get_user(None)
    assert_exception_correct(excinfo.value, TypeError('token cannot be None'))
def test_local_is_valid_user():
    """is_valid_user() passes storage.user_exists through with a 3600s relative TTL."""
    storage = create_autospec(IDMappingStorage, spec_set=True, instance=True)
    storage.user_exists.return_value = True
    lookup = LocalUserLookup(storage)
    assert lookup.is_valid_user(Username('foo')) == (True, None, 3600)
    storage.user_exists.return_value = False
    assert lookup.is_valid_user(Username('bar')) == (False, None, 3600)
    assert storage.user_exists.call_args_list == [
        ((Username('foo'),), {}),
        ((Username('bar'),), {})]
def test_local_is_valid_user_fail():
    """is_valid_user() rejects a None username."""
    storage = create_autospec(IDMappingStorage, spec_set=True, instance=True)
    with raises(Exception) as excinfo:
        LocalUserLookup(storage).is_valid_user(None)
    assert_exception_correct(excinfo.value, TypeError('username cannot be None'))
def test_local_create_user():
    """create_user() generates a 28-character base64 token and stores its hash."""
    storage = create_autospec(IDMappingStorage, spec_set=True, instance=True)
    t = LocalUserLookup(storage).create_user(Username('foo'))
    assert is_base64(t.token) is True
    # Fixed: was `len(t.token) is 28` — identity comparison on an int only works
    # because of CPython's small-int cache and raises SyntaxWarning on 3.8+.
    assert len(t.token) == 28
    assert storage.create_local_user.call_args_list == \
        [((Username('foo'), t.get_hashed_token()), {})]
def test_local_create_user_fail():
    """create_user() rejects a None username."""
    storage = create_autospec(IDMappingStorage, spec_set=True, instance=True)
    with raises(Exception) as excinfo:
        LocalUserLookup(storage).create_user(None)
    assert_exception_correct(excinfo.value, TypeError('username cannot be None'))
def test_local_new_token():
    """new_token() generates a 28-character base64 token and stores its hash."""
    storage = create_autospec(IDMappingStorage, spec_set=True, instance=True)
    t = LocalUserLookup(storage).new_token(Username('bar'))
    assert is_base64(t.token) is True
    # Fixed: was `len(t.token) is 28` — identity comparison on an int only works
    # because of CPython's small-int cache and raises SyntaxWarning on 3.8+.
    assert len(t.token) == 28
    assert storage.update_local_user_token.call_args_list == \
        [((Username('bar'), t.get_hashed_token()), {})]
def test_local_new_token_fail():
    """new_token() rejects a None username."""
    storage = create_autospec(IDMappingStorage, spec_set=True, instance=True)
    with raises(Exception) as excinfo:
        LocalUserLookup(storage).new_token(None)
    assert_exception_correct(excinfo.value, TypeError('username cannot be None'))
def test_local_set_user_as_admin():
    """set_user_as_admin() forwards the username/flag pair to storage."""
    storage = create_autospec(IDMappingStorage, spec_set=True, instance=True)
    for name, flag in (('n', True), ('r', False)):
        LocalUserLookup(storage).set_user_as_admin(Username(name), flag)
    assert storage.set_local_user_as_admin.call_args_list == [
        ((Username('n'), True), {}),
        ((Username('r'), False), {})]
def test_local_set_user_as_admin_fail():
    """set_user_as_admin() rejects a None username."""
    storage = create_autospec(IDMappingStorage, spec_set=True, instance=True)
    with raises(Exception) as excinfo:
        LocalUserLookup(storage).set_user_as_admin(None, True)
    assert_exception_correct(excinfo.value, TypeError('username cannot be None'))
def test_local_get_users():
    """get_users() returns the storage's username->admin mapping unchanged."""
    storage = create_autospec(IDMappingStorage, spec_set=True, instance=True)
    expected = {Username('foo'): False, Username('bar'): True}
    storage.get_users.return_value = dict(expected)
    assert LocalUserLookup(storage).get_users() == expected
    assert storage.get_users.call_args_list == [((), {})]
| 38.390385
| 99
| 0.699194
| 2,737
| 19,963
| 4.83449
| 0.074169
| 0.035444
| 0.052373
| 0.070738
| 0.857391
| 0.810837
| 0.777131
| 0.741838
| 0.711533
| 0.662258
| 0
| 0.021615
| 0.177278
| 19,963
| 519
| 100
| 38.464355
| 0.784036
| 0.101788
| 0
| 0.540268
| 0
| 0
| 0.03428
| 0.003579
| 0
| 0
| 0
| 0
| 0.201342
| 1
| 0.144295
| false
| 0
| 0.033557
| 0
| 0.177852
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
30ca96ad6ea6a5db40e993b333bee549a62513b3
| 35
|
py
|
Python
|
sqlalchemy_ag_grid/__init__.py
|
ytkj/sqlalchemy-ag-grid
|
792675dae3821915a67be8dc89a6519755b1d530
|
[
"MIT"
] | 5
|
2019-10-17T10:33:00.000Z
|
2021-11-18T18:07:48.000Z
|
sqlalchemy_ag_grid/__init__.py
|
ytkj/sqlalchemy-ag-grid
|
792675dae3821915a67be8dc89a6519755b1d530
|
[
"MIT"
] | 3
|
2019-06-11T12:54:14.000Z
|
2022-03-27T13:59:40.000Z
|
sqlalchemy_ag_grid/__init__.py
|
ytkj/sqlalchemy-ag-grid
|
792675dae3821915a67be8dc89a6519755b1d530
|
[
"MIT"
] | 1
|
2021-11-18T18:08:01.000Z
|
2021-11-18T18:08:01.000Z
|
from .query import SortFilterQuery
| 17.5
| 34
| 0.857143
| 4
| 35
| 7.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.114286
| 35
| 1
| 35
| 35
| 0.967742
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
eb55dc2601987978ce5d49985dfac62aa8d4ae2c
| 99
|
py
|
Python
|
Exercise 09/exercise_code/util/__init__.py
|
CornellLenard/Deep-Learning-Course-Exercises
|
db32f2b9ab93a50580e93e9dd83be1db7c4c4a19
|
[
"MIT"
] | null | null | null |
Exercise 09/exercise_code/util/__init__.py
|
CornellLenard/Deep-Learning-Course-Exercises
|
db32f2b9ab93a50580e93e9dd83be1db7c4c4a19
|
[
"MIT"
] | null | null | null |
Exercise 09/exercise_code/util/__init__.py
|
CornellLenard/Deep-Learning-Course-Exercises
|
db32f2b9ab93a50580e93e9dd83be1db7c4c4a19
|
[
"MIT"
] | null | null | null |
"""Util functions"""
from .vis_utils import show_all_keypoints
from .save_model import save_model
| 19.8
| 41
| 0.808081
| 15
| 99
| 5
| 0.733333
| 0.24
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 99
| 4
| 42
| 24.75
| 0.852273
| 0.141414
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
ebbc58bde641f04d93c949d524f2ee643a87716c
| 172
|
py
|
Python
|
tests/conftest.py
|
gsvolt/cle_parcel_lookup
|
5dbd7a6527428ab66403db0bea19c26053728e62
|
[
"Unlicense"
] | 1
|
2020-10-12T12:24:48.000Z
|
2020-10-12T12:24:48.000Z
|
tests/conftest.py
|
gsvolt/cle_parcel_lookup
|
5dbd7a6527428ab66403db0bea19c26053728e62
|
[
"Unlicense"
] | 2
|
2020-10-12T12:09:27.000Z
|
2020-10-12T16:45:09.000Z
|
tests/conftest.py
|
gsvolt/cle_parcel_lookup
|
5dbd7a6527428ab66403db0bea19c26053728e62
|
[
"Unlicense"
] | null | null | null |
import pytest
from cle_parcel_lookup import create_app
@pytest.fixture
def app():
    """Provide a fresh application instance (via create_app) for each test."""
    return create_app()
@pytest.fixture
def client(app):
    """Provide a test client bound to the `app` fixture's application."""
    return app.test_client()
| 13.230769
| 40
| 0.75
| 25
| 172
| 4.96
| 0.52
| 0.145161
| 0.241935
| 0.354839
| 0.403226
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.162791
| 172
| 12
| 41
| 14.333333
| 0.861111
| 0
| 0
| 0.25
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.25
| 0.25
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.