hexsha string | size int64 | ext string | lang string | max_stars_repo_path string | max_stars_repo_name string | max_stars_repo_head_hexsha string | max_stars_repo_licenses list | max_stars_count int64 | max_stars_repo_stars_event_min_datetime string | max_stars_repo_stars_event_max_datetime string | max_issues_repo_path string | max_issues_repo_name string | max_issues_repo_head_hexsha string | max_issues_repo_licenses list | max_issues_count int64 | max_issues_repo_issues_event_min_datetime string | max_issues_repo_issues_event_max_datetime string | max_forks_repo_path string | max_forks_repo_name string | max_forks_repo_head_hexsha string | max_forks_repo_licenses list | max_forks_count int64 | max_forks_repo_forks_event_min_datetime string | max_forks_repo_forks_event_max_datetime string | content string | avg_line_length float64 | max_line_length int64 | alphanum_fraction float64 | qsc_code_num_words_quality_signal int64 | qsc_code_num_chars_quality_signal float64 | qsc_code_mean_word_length_quality_signal float64 | qsc_code_frac_words_unique_quality_signal float64 | qsc_code_frac_chars_top_2grams_quality_signal float64 | qsc_code_frac_chars_top_3grams_quality_signal float64 | qsc_code_frac_chars_top_4grams_quality_signal float64 | qsc_code_frac_chars_dupe_5grams_quality_signal float64 | qsc_code_frac_chars_dupe_6grams_quality_signal float64 | qsc_code_frac_chars_dupe_7grams_quality_signal float64 | qsc_code_frac_chars_dupe_8grams_quality_signal float64 | qsc_code_frac_chars_dupe_9grams_quality_signal float64 | qsc_code_frac_chars_dupe_10grams_quality_signal float64 | qsc_code_frac_chars_replacement_symbols_quality_signal float64 | qsc_code_frac_chars_digital_quality_signal float64 | qsc_code_frac_chars_whitespace_quality_signal float64 | qsc_code_size_file_byte_quality_signal float64 | qsc_code_num_lines_quality_signal float64 | qsc_code_num_chars_line_max_quality_signal float64 | qsc_code_num_chars_line_mean_quality_signal float64 | qsc_code_frac_chars_alphabet_quality_signal float64 | qsc_code_frac_chars_comments_quality_signal float64 | qsc_code_cate_xml_start_quality_signal float64 | qsc_code_frac_lines_dupe_lines_quality_signal float64 | qsc_code_cate_autogen_quality_signal float64 | qsc_code_frac_lines_long_string_quality_signal float64 | qsc_code_frac_chars_string_length_quality_signal float64 | qsc_code_frac_chars_long_word_length_quality_signal float64 | qsc_code_frac_lines_string_concat_quality_signal float64 | qsc_code_cate_encoded_data_quality_signal float64 | qsc_code_frac_chars_hex_words_quality_signal float64 | qsc_code_frac_lines_prompt_comments_quality_signal float64 | qsc_code_frac_lines_assert_quality_signal float64 | qsc_codepython_cate_ast_quality_signal float64 | qsc_codepython_frac_lines_func_ratio_quality_signal float64 | qsc_codepython_cate_var_zero_quality_signal bool | qsc_codepython_frac_lines_pass_quality_signal float64 | qsc_codepython_frac_lines_import_quality_signal float64 | qsc_codepython_frac_lines_simplefunc_quality_signal float64 | qsc_codepython_score_lines_no_logic_quality_signal float64 | qsc_codepython_frac_lines_print_quality_signal float64 | qsc_code_num_words int64 | qsc_code_num_chars int64 | qsc_code_mean_word_length int64 | qsc_code_frac_words_unique null | qsc_code_frac_chars_top_2grams int64 | qsc_code_frac_chars_top_3grams int64 | qsc_code_frac_chars_top_4grams int64 | qsc_code_frac_chars_dupe_5grams int64 | qsc_code_frac_chars_dupe_6grams int64 | qsc_code_frac_chars_dupe_7grams int64 | qsc_code_frac_chars_dupe_8grams int64 | 
qsc_code_frac_chars_dupe_9grams int64 | qsc_code_frac_chars_dupe_10grams int64 | qsc_code_frac_chars_replacement_symbols int64 | qsc_code_frac_chars_digital int64 | qsc_code_frac_chars_whitespace int64 | qsc_code_size_file_byte int64 | qsc_code_num_lines int64 | qsc_code_num_chars_line_max int64 | qsc_code_num_chars_line_mean int64 | qsc_code_frac_chars_alphabet int64 | qsc_code_frac_chars_comments int64 | qsc_code_cate_xml_start int64 | qsc_code_frac_lines_dupe_lines int64 | qsc_code_cate_autogen int64 | qsc_code_frac_lines_long_string int64 | qsc_code_frac_chars_string_length int64 | qsc_code_frac_chars_long_word_length int64 | qsc_code_frac_lines_string_concat null | qsc_code_cate_encoded_data int64 | qsc_code_frac_chars_hex_words int64 | qsc_code_frac_lines_prompt_comments int64 | qsc_code_frac_lines_assert int64 | qsc_codepython_cate_ast int64 | qsc_codepython_frac_lines_func_ratio int64 | qsc_codepython_cate_var_zero int64 | qsc_codepython_frac_lines_pass int64 | qsc_codepython_frac_lines_import int64 | qsc_codepython_frac_lines_simplefunc int64 | qsc_codepython_score_lines_no_logic int64 | qsc_codepython_frac_lines_print int64 | effective string | hits int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
534843a13bac167037ca6701e9e5332c6dec3235 | 2,986 | py | Python | src/vanilla_pytorch/prune_model.py | f2010126/LTH_Master | 709472e7e7962fbf3a56a620c536fb03d359734f | [
"MIT"
] | null | null | null | src/vanilla_pytorch/prune_model.py | f2010126/LTH_Master | 709472e7e7962fbf3a56a620c536fb03d359734f | [
"MIT"
] | 1 | 2021-06-30T13:35:32.000Z | 2021-06-30T13:35:32.000Z | src/vanilla_pytorch/prune_model.py | f2010126/LTH_Master | 709472e7e7962fbf3a56a620c536fb03d359734f | [
"MIT"
] | 1 | 2021-06-30T13:22:15.000Z | 2021-06-30T13:22:15.000Z | import torch.nn.utils.prune as prune
import torch
from src.vanilla_pytorch.utils import count_rem_weights
from src.vanilla_pytorch.models.linearnets import LeNet, init_weights
from src.vanilla_pytorch.models.resnets import Resnets
def remove_pruning(model):
for i, (name, module) in enumerate(model.named_modules()):
# name and val
if any([isinstance(module, cl) for cl in [torch.nn.Conv2d, torch.nn.Linear]]):
prune.remove(module, 'weight')
def get_masks(model, prune_amts=None):
"""
    Prune the lowest p% weights by magnitude per layer.
    :param model: model to prune
    :param prune_amts: dictionary of per-layer-type prune rates
                       (defaults to 0.2 for every layer type, as per the paper)
    :return: the created masks; the model has served its purpose.
"""
# TODO: Adjust pruning with output layer
if prune_amts is None: # ie dict is empty, use the default prune rate = 0.2
prune_amts = {"linear": 0.2, "conv": 0.2, "last": 0.2}
for i, (name, module) in enumerate(model.named_modules()):
# prune 20% of connections in all 2D-conv layers
if isinstance(module, torch.nn.Conv2d):
module = prune.l1_unstructured(module, name='weight', amount=prune_amts['conv'])
# prune 20% of connections in all linear layers
elif isinstance(module, torch.nn.Linear):
module = prune.l1_unstructured(module, name='weight', amount=prune_amts['linear'])
masks = list(model.named_buffers())
remove_pruning(model)
return masks
def update_apply_masks(model, masks):
# doesn't seem to be needed.
# for key, val in masks.items():
# print(f"key {key}")
# layer = getattr(model, key.split('.')[0])
# layer.weight_mask = val
for name, module in model.named_modules():
if any([isinstance(module, cl) for cl in [torch.nn.Conv2d, torch.nn.Linear]]):
module = prune.custom_from_mask(module, name='weight', mask=masks[name + ".weight_mask"])
# remove_pruning(model)
return model
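# Minimal usage sketch (illustrative; the model and prune amounts are assumptions):
#   masks = get_masks(model)                        # prune once and collect the masks
#   model.apply(init_weights)                       # re-initialise the remaining weights
#   model = update_apply_masks(model, dict(masks))  # re-apply the winning-ticket masks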
def prune_random(model, prune_amts=None):
if prune_amts is None: # ie dict is empty, use the default prune rate =0.2
prune_amts = {"linear": 0.2, "conv": 0.2, "last": 0.2}
for name, module in model.named_modules():
# prune 20% of connections in all 2D-conv layers
if isinstance(module, torch.nn.Conv2d):
module = prune.random_unstructured(module, name='weight', amount=prune_amts['conv'])
# prune 20% of connections in all linear layers
elif isinstance(module, torch.nn.Linear):
module = prune.random_unstructured(module, name='weight', amount=prune_amts['linear'])
remove_pruning(model)
if __name__ == '__main__':
net = Resnets(in_channels=3)
net.apply(init_weights)
prune_rate = 0.8
prune_custom = {"linear": 0.2, "conv": 0.2, "last": 0.1}
for i in range(3):
masks = get_masks(net, prune_amts=prune_custom)
print(f"Count zero : {count_rem_weights(net)}")
| 38.779221 | 101 | 0.662425 | 432 | 2,986 | 4.449074 | 0.259259 | 0.056191 | 0.041623 | 0.041623 | 0.547347 | 0.541623 | 0.506244 | 0.483351 | 0.467222 | 0.433923 | 0 | 0.019239 | 0.216678 | 2,986 | 76 | 102 | 39.289474 | 0.80248 | 0.244809 | 0 | 0.380952 | 0 | 0 | 0.070136 | 0.01086 | 0 | 0 | 0 | 0.013158 | 0 | 1 | 0.095238 | false | 0 | 0.119048 | 0 | 0.261905 | 0.02381 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
53487b0b2e562895d1a372a23c376324cd33f385 | 3,484 | py | Python | tensorflow_federated/python/research/utils/checkpoint_utils_test.py | mcognetta/federated | fa0c1a00b5d77768bc2f38f503f3ef1a65693945 | [
"Apache-2.0"
] | null | null | null | tensorflow_federated/python/research/utils/checkpoint_utils_test.py | mcognetta/federated | fa0c1a00b5d77768bc2f38f503f3ef1a65693945 | [
"Apache-2.0"
] | null | null | null | tensorflow_federated/python/research/utils/checkpoint_utils_test.py | mcognetta/federated | fa0c1a00b5d77768bc2f38f503f3ef1a65693945 | [
"Apache-2.0"
] | null | null | null | # Lint as: python3
# Copyright 2019, The TensorFlow Federated Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for ServerState save."""
import functools
import os
import attr
import tensorflow as tf
import tensorflow_federated as tff
from tensorflow_federated.python.examples.mnist import models
from tensorflow_federated.python.research.utils import checkpoint_utils
@attr.s(cmp=False, frozen=False)
class Obj(object):
"""Container for all state that need to be stored in the checkpoint.
Attributes:
model: A ModelWeights structure, containing Tensors or Variables.
optimizer_state: A list of Tensors or Variables, in the order returned by
optimizer.variables().
round_num: Training round_num.
"""
model = attr.ib()
optimizer_state = attr.ib()
round_num = attr.ib()
@classmethod
def from_anon_tuple(cls, anon_tuple, round_num):
# TODO(b/130724878): These conversions should not be needed.
return cls(
model=anon_tuple.model._asdict(recursive=True),
optimizer_state=list(anon_tuple.optimizer_state),
round_num=round_num)
class SavedStateTest(tf.test.TestCase):
def test_save_and_load(self):
server_optimizer_fn = functools.partial(
tf.keras.optimizers.SGD, learning_rate=0.1, momentum=0.9)
iterative_process = tff.learning.build_federated_averaging_process(
models.model_fn, server_optimizer_fn=server_optimizer_fn)
server_state = iterative_process.initialize()
# TODO(b/130724878): These conversions should not be needed.
obj = Obj.from_anon_tuple(server_state, 1)
export_dir = os.path.join(self.get_temp_dir(), 'ckpt_1')
checkpoint_utils.save(obj, export_dir)
loaded_obj = checkpoint_utils.load(export_dir, obj)
self.assertAllClose(tf.nest.flatten(obj), tf.nest.flatten(loaded_obj))
def test_load_latest_state(self):
server_optimizer_fn = functools.partial(
tf.keras.optimizers.SGD, learning_rate=0.1, momentum=0.9)
iterative_process = tff.learning.build_federated_averaging_process(
models.model_fn, server_optimizer_fn=server_optimizer_fn)
server_state = iterative_process.initialize()
# TODO(b/130724878): These conversions should not be needed.
obj_1 = Obj.from_anon_tuple(server_state, 1)
export_dir = os.path.join(self.get_temp_dir(), 'ckpt_1')
checkpoint_utils.save(obj_1, export_dir)
# TODO(b/130724878): These conversions should not be needed.
obj_2 = Obj.from_anon_tuple(server_state, 2)
export_dir = os.path.join(self.get_temp_dir(), 'ckpt_2')
checkpoint_utils.save(obj_2, export_dir)
export_dir = checkpoint_utils.latest_checkpoint(self.get_temp_dir())
loaded_obj = checkpoint_utils.load(export_dir, obj_1)
self.assertEqual(os.path.join(self.get_temp_dir(), 'ckpt_2'), export_dir)
self.assertAllClose(tf.nest.flatten(obj_2), tf.nest.flatten(loaded_obj))
if __name__ == '__main__':
tf.compat.v1.enable_v2_behavior()
tf.test.main()
| 35.55102 | 77 | 0.751148 | 505 | 3,484 | 4.956436 | 0.336634 | 0.035957 | 0.040751 | 0.027966 | 0.443068 | 0.425489 | 0.387535 | 0.387535 | 0.387535 | 0.322413 | 0 | 0.023074 | 0.154133 | 3,484 | 97 | 78 | 35.917526 | 0.826264 | 0.326349 | 0 | 0.244898 | 0 | 0 | 0.013877 | 0 | 0 | 0 | 0 | 0.010309 | 0.061224 | 1 | 0.061224 | false | 0 | 0.142857 | 0.020408 | 0.326531 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
534883bea976b0a78d54a9c4ba718667cfc4884f | 2,923 | py | Python | website/models/user.py | alexli0707/pyforum | 4f5ea4a0b07e094e24410ae699016590b9c20d59 | [
"Apache-2.0"
] | 4 | 2016-10-13T02:03:55.000Z | 2017-04-05T03:21:46.000Z | website/models/user.py | alexli0707/pyforum | 4f5ea4a0b07e094e24410ae699016590b9c20d59 | [
"Apache-2.0"
] | null | null | null | website/models/user.py | alexli0707/pyforum | 4f5ea4a0b07e094e24410ae699016590b9c20d59 | [
"Apache-2.0"
] | 1 | 2019-01-01T09:36:28.000Z | 2019-01-01T09:36:28.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import peewee
from flask import current_app,abort
from flask.ext.login import AnonymousUserMixin, UserMixin
from itsdangerous import TimedJSONWebSignatureSerializer as Serializer
from peewee import Model, IntegerField, CharField,PrimaryKeyField
from website.app import db_wrapper, login_manager
from website.http.main_exception import MainException
from werkzeug.security import check_password_hash,generate_password_hash
class User(UserMixin, db_wrapper.Model):
id = PrimaryKeyField()
email = CharField(index=True)
username = CharField(index=True)
password_hash = CharField()
role_id = IntegerField()
confirmed = IntegerField()
class Meta:
db_table = 'users'
def register(self,email,password,username):
user = User(email=email, username=username, password_hash=generate_password_hash(password))
try:
user.save()
except peewee.IntegrityError as err:
print(err.args)
if err.args[0] == 1062:
if 'ix_users_email' in err.args[1]:
raise MainException.DUPLICATE_EMAIL
if 'ix_users_username' in err.args[1]:
raise MainException.DUPLICATE_USERNAME
return user
def verify_password(self, password):
return check_password_hash(self.password_hash, password)
def generate_confirmation_token(self, expiration=3600):
"""生成验证邮箱的token"""
s = Serializer(current_app.config['SECRET_KEY'], expiration)
return s.dumps({'confirm': self.id})
def confirm(self, token):
"""验证邮箱"""
s = Serializer(current_app.config['SECRET_KEY'])
try:
data = s.loads(token)
print(data)
except:
return False
if data.get('confirm') != self.id:
return False
        # Verification succeeded; persist to the database
self.confirmed = True
self.save()
return True
def generate_reset_token(self, expiration=3600):
"""生成重置密码的token"""
s = Serializer(current_app.config['SECRET_KEY'], expiration)
return s.dumps({'reset': self.id})
def reset_password(self, token, new_password):
"""重置密码"""
s = Serializer(current_app.config['SECRET_KEY'])
try:
data = s.loads(token)
except:
return False
if data.get('reset') != self.id:
return False
        # Token verified; hash and persist the new password
        self.password_hash = generate_password_hash(new_password)
self.save()
return True
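# Example confirmation flow (sketch; assumes a saved User instance):
#   token = user.generate_confirmation_token()
#   user.confirm(token)   # returns True and marks the user as confirmed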
"""
Anonymous user.
"""
class AnonymousUser(AnonymousUserMixin):
def can(self, permissions):
return False
def is_administrator(self):
return False
login_manager.anonymous_user = AnonymousUser
@login_manager.user_loader
def load_user(user_id):
user = User.get(User.id == int(user_id))
if not user:
abort(404)
else:
return user
| 27.317757 | 99 | 0.63599 | 332 | 2,923 | 5.454819 | 0.322289 | 0.046383 | 0.039757 | 0.046383 | 0.261734 | 0.226394 | 0.197681 | 0.123689 | 0.123689 | 0.123689 | 0 | 0.008858 | 0.266165 | 2,923 | 106 | 100 | 27.575472 | 0.835431 | 0.034211 | 0 | 0.315068 | 0 | 0 | 0.03592 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.123288 | false | 0.109589 | 0.109589 | 0.041096 | 0.534247 | 0.027397 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
534aebd1f9c4e46d72dc93169bc74d5b8daf04ea | 2,088 | py | Python | nexula/nexula_utility/utility_extract_func.py | haryoa/nexula | cc3b5a9b8dd8294bdc47150a1971cb49c4dde225 | [
"MIT"
] | 3 | 2020-05-06T08:53:22.000Z | 2020-09-24T07:45:38.000Z | nexula/nexula_utility/utility_extract_func.py | haryoa/nexula | cc3b5a9b8dd8294bdc47150a1971cb49c4dde225 | [
"MIT"
] | null | null | null | nexula/nexula_utility/utility_extract_func.py | haryoa/nexula | cc3b5a9b8dd8294bdc47150a1971cb49c4dde225 | [
"MIT"
] | null | null | null | from nexula.nexula_utility.utility_import_var import import_class
class NexusFunctionModuleExtractor():
"""
    Used for constructing the data preprocessing and feature representation pipeline
"""
def __init__(self, module_class_list, args_dict, **kwargs):
"""
        Instantiate the class objects of the pipeline
Parameters
----------
module_class_list
args_dict
kwargs
"""
# self.list_of_cls = self._search_module_function(module_class_list)
self.list_of_cls = module_class_list
if 'logger' in kwargs:
self.logger = kwargs['logger']
self.logger.debug(args_dict) if 'logger' in self.__dict__ else None
self.args_init = [arg['init'] for arg in args_dict]
self.args_call = [arg['call'] for arg in args_dict]
self._construct_object()
# Extract call
def _construct_object(self):
"""
Instantiate object of all pipeline
"""
import logging
logger = logging.getLogger('nexula')
logger.debug(self.list_of_cls)
new_list_of_cls = []
for i, cls in enumerate(self.list_of_cls): # REFACTOR
logger.debug(cls)
new_list_of_cls.append(cls(**self.args_init[i]))
self.list_of_cls = new_list_of_cls
def _search_module_function(self, module_function_list):
"""
Search the module in the library
Parameters
----------
module_function_list
Returns
-------
"""
list_of_cls = []
for module, function in module_function_list:
# TODO Raise exception if empty
list_of_cls.append(import_class(function, module))
return list_of_cls
def __call__(self, x, y, *args, **kwargs):
"""
        Call the pipeline by invoking each object's __call__ method
Returns
-------
"""
for i,cls in enumerate(self.list_of_cls):
current_args = self.args_call[i]
x, y = cls(x, y, **kwargs, **current_args)
return x, y
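# Intended use (sketch; the step classes and argument dicts are hypothetical):
#   steps = [LowercaseStep, TfidfStep]
#   args = [{'init': {}, 'call': {}}, {'init': {}, 'call': {}}]
#   pipeline = NexusFunctionModuleExtractor(steps, args)
#   x, y = pipeline(x, y)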
| 29.408451 | 77 | 0.594828 | 249 | 2,088 | 4.654618 | 0.253012 | 0.062123 | 0.093184 | 0.067299 | 0.194133 | 0.181191 | 0.096635 | 0.096635 | 0.053494 | 0 | 0 | 0 | 0.306034 | 2,088 | 70 | 78 | 29.828571 | 0.799862 | 0.230364 | 0 | 0.068966 | 0 | 0 | 0.02289 | 0 | 0 | 0 | 0 | 0.014286 | 0 | 1 | 0.137931 | false | 0 | 0.103448 | 0 | 0.344828 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
5355dedf12aa8e15115b8c77564d80f57eb0ec2a | 1,577 | py | Python | set-env.py | sajaldebnath/vrops-custom-group-creation | e3c821336832445e93706ad29afe216867660123 | [
"MIT"
] | 1 | 2017-08-14T07:51:42.000Z | 2017-08-14T07:51:42.000Z | set-env.py | sajaldebnath/vrops-custom-group-creation | e3c821336832445e93706ad29afe216867660123 | [
"MIT"
] | null | null | null | set-env.py | sajaldebnath/vrops-custom-group-creation | e3c821336832445e93706ad29afe216867660123 | [
"MIT"
] | null | null | null | # !/usr/bin python
"""
#
# set-env - a small python program to setup the configuration environment for data-push.py
# data-push.py contains the python program to push attribute values to vROps
# Author Sajal Debnath <sdebnath@vmware.com>
#
"""
# Importing the required modules
import json
import base64
import os,sys
# Getting the absolute path from where the script is being run
def get_script_path():
return os.path.dirname(os.path.realpath(sys.argv[0]))
# Getting the inputs from user
def get_the_inputs():
adapterkind = raw_input("Please enter Adapter Kind: ")
resourceKind = raw_input("Please enter Resource Kind: ")
servername = raw_input("Enter enter Server IP/FQDN: ")
serveruid = raw_input("Please enter user id: ")
serverpasswd = raw_input("Please enter vRops password: ")
encryptedvar = base64.b64encode(serverpasswd)
data = {}
data["adapterKind"] = adapterkind
data["resourceKind"] = resourceKind
serverdetails = {}
serverdetails["name"] = servername
serverdetails["userid"] = serveruid
serverdetails["password"] = encryptedvar
data["server"] = serverdetails
return data
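# The resulting env.json has roughly this shape (values are illustrative):
# {
#   "adapterKind": "...",
#   "resourceKind": "...",
#   "server": {"name": "...", "userid": "...", "password": "<base64 string>"}
# }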
# Getting the path where env.json file should be kept
path = get_script_path()
fullpath = path+"/"+"env.json"
# Getting the data for the env.json file
final_data = get_the_inputs()
# Saving the data to env.json file
with open(fullpath, 'w') as outfile:
json.dump(final_data, outfile, sort_keys = True, indent = 2, separators=(',', ':'), ensure_ascii=False) | 28.672727 | 107 | 0.689918 | 202 | 1,577 | 5.30198 | 0.480198 | 0.037348 | 0.052288 | 0.070962 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.006385 | 0.205453 | 1,577 | 55 | 107 | 28.672727 | 0.848364 | 0.304375 | 0 | 0 | 0 | 0 | 0.187379 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.076923 | false | 0.115385 | 0.115385 | 0.038462 | 0.269231 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
535b6a1790a4b33142e1922aac85ef30e05ce452 | 1,487 | gyp | Python | binding.gyp | terrorizer1980/fs-admin | e21216161c56def4ca76a3ef4e71844e2ba26074 | [
"MIT"
] | 25 | 2017-10-14T22:54:00.000Z | 2022-02-28T16:45:44.000Z | binding.gyp | icecream17/fs-admin | e21216161c56def4ca76a3ef4e71844e2ba26074 | [
"MIT"
] | 46 | 2019-02-22T15:17:32.000Z | 2022-03-15T16:04:38.000Z | binding.gyp | icecream17/fs-admin | e21216161c56def4ca76a3ef4e71844e2ba26074 | [
"MIT"
] | 19 | 2018-01-04T00:52:17.000Z | 2022-02-05T17:18:17.000Z | {
'target_defaults': {
'win_delay_load_hook': 'false',
'conditions': [
['OS=="win"', {
'msvs_disabled_warnings': [
4530, # C++ exception handler used, but unwind semantics are not enabled
4506, # no definition for inline function
],
}],
],
},
'targets': [
{
'target_name': 'fs_admin',
'defines': [
"NAPI_VERSION=<(napi_build_version)",
],
'cflags!': [ '-fno-exceptions' ],
'cflags_cc!': [ '-fno-exceptions' ],
'xcode_settings': { 'GCC_ENABLE_CPP_EXCEPTIONS': 'YES',
'CLANG_CXX_LIBRARY': 'libc++',
'MACOSX_DEPLOYMENT_TARGET': '10.7',
},
'msvs_settings': {
'VCCLCompilerTool': { 'ExceptionHandling': 1 },
},
'sources': [
'src/main.cc',
],
'include_dirs': [
'<!(node -p "require(\'node-addon-api\').include_dir")',
],
'conditions': [
['OS=="win"', {
'sources': [
'src/fs-admin-win.cc',
],
'libraries': [
'-lole32.lib',
'-lshell32.lib',
],
}],
['OS=="mac"', {
'sources': [
'src/fs-admin-darwin.cc',
],
'libraries': [
'$(SDKROOT)/System/Library/Frameworks/Security.framework',
],
}],
['OS=="linux"', {
'sources': [
'src/fs-admin-linux.cc',
],
}],
],
}
]
}
| 24.377049 | 83 | 0.438467 | 120 | 1,487 | 5.241667 | 0.675 | 0.044515 | 0.057234 | 0.081081 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.017076 | 0.369872 | 1,487 | 60 | 84 | 24.783333 | 0.654216 | 0.065905 | 0 | 0.416667 | 0 | 0 | 0.445887 | 0.146465 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
535e920c95d9b042b1a45ee54769faf051d34c56 | 1,013 | py | Python | app/domains/users/views.py | Geo-Gabriel/eccomerce_nestle_mongodb | 97bf5dbdc7bee20a9ca2f7cad98afc6e8f11bd3e | [
"MIT"
] | 3 | 2020-06-21T15:51:25.000Z | 2021-01-24T21:19:27.000Z | app/domains/users/views.py | Geo-Gabriel/eccomerce_nestle_mongodb | 97bf5dbdc7bee20a9ca2f7cad98afc6e8f11bd3e | [
"MIT"
] | null | null | null | app/domains/users/views.py | Geo-Gabriel/eccomerce_nestle_mongodb | 97bf5dbdc7bee20a9ca2f7cad98afc6e8f11bd3e | [
"MIT"
] | null | null | null | from flask import Blueprint, request, jsonify
from app.domains.users.actions import get_all_users, insert_user, get_user_by_id, update_user, delete_user
app_users = Blueprint('app.users', __name__)
@app_users.route('/users', methods=['GET'])
def get_users():
return jsonify([user.serialize() for user in get_all_users()]), 200
@app_users.route('/users/<id>', methods=["GET"])
def get_by_id(id: str):
user = get_user_by_id(id_user=id)
return jsonify(user.serialize()), 200
@app_users.route('/users', methods=["POST"])
def post_user():
payload = request.get_json()
user = insert_user(payload)
return jsonify(user.serialize()), 201
@app_users.route('/users/<id>', methods=["PUT"])
def update(id: str):
payload = request.get_json()
user = update_user(id_user=id, data=payload)
return jsonify(user.serialize()), 200
@app_users.route('/users/<id>', methods=["DELETE"])
def delete(id: str):
delete_user(id_user=id)
return jsonify({"message": "user deleted"}), 200
| 27.378378 | 106 | 0.698914 | 149 | 1,013 | 4.516779 | 0.241611 | 0.08321 | 0.096582 | 0.13373 | 0.506686 | 0.237741 | 0.197623 | 0.139673 | 0.139673 | 0 | 0 | 0.017084 | 0.133268 | 1,013 | 36 | 107 | 28.138889 | 0.749431 | 0 | 0 | 0.166667 | 0 | 0 | 0.090819 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.208333 | false | 0 | 0.083333 | 0.041667 | 0.5 | 0.083333 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
536501345147bcbb0b1035da0ccdac716533b14a | 2,557 | py | Python | wired_version/mcs_wired.py | Harri-Renney/Mind_Control_Synth | 5a892a81a3f37444ef154f29a62d44fa1476bfbd | [
"MIT"
] | 1 | 2020-12-20T09:53:20.000Z | 2020-12-20T09:53:20.000Z | wired_version/mcs_wired.py | Harri-Renney/Mind_Control_Synth | 5a892a81a3f37444ef154f29a62d44fa1476bfbd | [
"MIT"
] | null | null | null | wired_version/mcs_wired.py | Harri-Renney/Mind_Control_Synth | 5a892a81a3f37444ef154f29a62d44fa1476bfbd | [
"MIT"
] | null | null | null | import time
import mido
from pinaps.piNapsController import PiNapsController
from NeuroParser import NeuroParser
"""
Equation of motion used to modify vibrato.
"""
def positionStep(pos, vel, acc):
return pos + vel * 2 + (1/2) * acc * 4
def velocityStep(vel, acc):
return acc * 2 + vel
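# Both helpers integrate simple kinematics with a fixed timestep dt = 2:
# positionStep computes pos + v*dt + 0.5*a*dt**2 (hence the constants 2 and 4),
# and velocityStep computes vel + a*dt.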
CTRL_LFO_PITCH = 26
CTRL_LFO_RATE = 29
MIDI_MESSAGE_PERIOD = 1
vibratoPos = 0
vibratoVel = 0
vibratoAcc = 4
def parserUpdateVibrato(packet):
global vibratoPos
global vibratoVel
global vibratoAcc
if(packet.code == NeuroParser.DataPacket.kPoorQuality):
print("Poor quality: " + str(packet.poorQuality))
if(packet.code == NeuroParser.DataPacket.kAttention):
print("Attention: " + str(packet.attention))
        ##Change in vibratoStrength depending on attention values##
##@ToDo - Change to include more momentum build up etc##
if(packet.attention > 50):
vibratoPos = positionStep(vibratoPos, vibratoVel, vibratoAcc)
vibratoVel = velocityStep(vibratoVel, vibratoAcc)
vibratoPos = 100 if vibratoPos > 100 else vibratoPos
vibratoPos = 0 if vibratoPos < 0 else vibratoPos
else:
vibratoPos = positionStep(vibratoPos, vibratoVel, -vibratoAcc)
vibratoVel = velocityStep(vibratoVel, -vibratoAcc)
vibratoPos = 100 if vibratoPos > 100 else vibratoPos
vibratoPos = 0 if vibratoPos < 0 else vibratoPos
def main():
#Init USB:MIDI interface.
#print(mido.get_output_names()) #Used to originally find correct serial port.
port = mido.open_output('USB Midi:USB Midi MIDI 1 20:0')
msgModulate = mido.Message('control_change', control=CTRL_LFO_PITCH, value=100)
port.send(msgModulate)
#Init Pinaps.
pinapsController = PiNapsController()
pinapsController.defaultInitialise()
pinapsController.deactivateAllLEDs()
aParser = NeuroParser()
#Parse all available Pinaps EEG data. Calculate vibrato value and send as MIDI message.
while True:
data = pinapsController.readEEGSensor()
aParser.parse(data, parserUpdateVibrato)
print("Message vibrato strength: ", vibratoPos)
msgModulate = mido.Message('control_change', control=CTRL_LFO_RATE, value=vibratoPos)
port.send(msgModulate)
#Sleep for defined message period.
time.sleep(MIDI_MESSAGE_PERIOD)
if __name__ == '__main__':
main() | 35.027397 | 134 | 0.658193 | 268 | 2,557 | 6.186567 | 0.395522 | 0.033173 | 0.014475 | 0.027744 | 0.308806 | 0.268999 | 0.268999 | 0.268999 | 0.209891 | 0.209891 | 0 | 0.020116 | 0.261244 | 2,557 | 73 | 135 | 35.027397 | 0.857597 | 0.152522 | 0 | 0.122449 | 0 | 0 | 0.057115 | 0 | 0 | 0 | 0 | 0.013699 | 0 | 1 | 0.081633 | false | 0 | 0.081633 | 0.040816 | 0.204082 | 0.061224 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
7258cd5e14cfcac3370c20a51efc82ed53ffd2ed | 26,052 | py | Python | functest/tests/unit/odl/test_odl.py | hashnfv/hashnfv-functest | ff34df7ec7be6cd5fcf0f7557b393bd5d6266047 | [
"Apache-2.0"
] | null | null | null | functest/tests/unit/odl/test_odl.py | hashnfv/hashnfv-functest | ff34df7ec7be6cd5fcf0f7557b393bd5d6266047 | [
"Apache-2.0"
] | null | null | null | functest/tests/unit/odl/test_odl.py | hashnfv/hashnfv-functest | ff34df7ec7be6cd5fcf0f7557b393bd5d6266047 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
# Copyright (c) 2016 Orange and others.
#
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Apache License, Version 2.0
# which accompanies this distribution, and is available at
# http://www.apache.org/licenses/LICENSE-2.0
"""Define the classes required to fully cover odl."""
import errno
import logging
import os
import unittest
from keystoneauth1.exceptions import auth_plugins
import mock
from robot.errors import DataError, RobotError
from robot.result import model
from robot.utils.robottime import timestamp_to_secs
import six
from six.moves import urllib
from functest.core import testcase
from functest.opnfv_tests.sdn.odl import odl
__author__ = "Cedric Ollivier <cedric.ollivier@orange.com>"
class ODLVisitorTesting(unittest.TestCase):
"""The class testing ODLResultVisitor."""
# pylint: disable=missing-docstring
def setUp(self):
self.visitor = odl.ODLResultVisitor()
def test_empty(self):
self.assertFalse(self.visitor.get_data())
def test_ok(self):
data = {'name': 'foo',
'parent': 'bar',
'status': 'PASS',
'starttime': "20161216 16:00:00.000",
'endtime': "20161216 16:00:01.000",
'elapsedtime': 1000,
'text': 'Hello, World!',
'critical': True}
test = model.TestCase(
name=data['name'], status=data['status'], message=data['text'],
starttime=data['starttime'], endtime=data['endtime'])
test.parent = mock.Mock()
config = {'name': data['parent'],
'criticality.test_is_critical.return_value': data[
'critical']}
test.parent.configure_mock(**config)
self.visitor.visit_test(test)
self.assertEqual(self.visitor.get_data(), [data])
class ODLTesting(unittest.TestCase):
"""The super class which testing classes could inherit."""
# pylint: disable=missing-docstring
logging.disable(logging.CRITICAL)
_keystone_ip = "127.0.0.1"
_neutron_url = "http://127.0.0.2:9696"
_sdn_controller_ip = "127.0.0.3"
_os_auth_url = "http://{}:5000/v3".format(_keystone_ip)
_os_projectname = "admin"
_os_username = "admin"
_os_password = "admin"
_odl_webport = "8080"
_odl_restconfport = "8181"
_odl_username = "admin"
_odl_password = "admin"
_os_userdomainname = 'Default'
_os_projectdomainname = 'Default'
def setUp(self):
for var in ("INSTALLER_TYPE", "SDN_CONTROLLER", "SDN_CONTROLLER_IP"):
if var in os.environ:
del os.environ[var]
os.environ["OS_AUTH_URL"] = self._os_auth_url
os.environ["OS_USERNAME"] = self._os_username
os.environ["OS_USER_DOMAIN_NAME"] = self._os_userdomainname
os.environ["OS_PASSWORD"] = self._os_password
os.environ["OS_PROJECT_NAME"] = self._os_projectname
os.environ["OS_PROJECT_DOMAIN_NAME"] = self._os_projectdomainname
os.environ["OS_PASSWORD"] = self._os_password
self.test = odl.ODLTests(case_name='odl', project_name='functest')
self.defaultargs = {'odlusername': self._odl_username,
'odlpassword': self._odl_password,
'neutronurl': "http://{}:9696".format(
self._keystone_ip),
'osauthurl': self._os_auth_url,
'osusername': self._os_username,
'osuserdomainname': self._os_userdomainname,
'osprojectname': self._os_projectname,
'osprojectdomainname': self._os_projectdomainname,
'ospassword': self._os_password,
'odlip': self._keystone_ip,
'odlwebport': self._odl_webport,
'odlrestconfport': self._odl_restconfport,
'pushtodb': False}
class ODLParseResultTesting(ODLTesting):
"""The class testing ODLTests.parse_results()."""
# pylint: disable=missing-docstring
_config = {'name': 'dummy', 'starttime': '20161216 16:00:00.000',
'endtime': '20161216 16:00:01.000'}
@mock.patch('robot.api.ExecutionResult', side_effect=DataError)
def test_raises_exc(self, mock_method):
with self.assertRaises(DataError):
self.test.parse_results()
mock_method.assert_called_once_with(
os.path.join(odl.ODLTests.res_dir, 'output.xml'))
def _test_result(self, config, result):
suite = mock.Mock()
suite.configure_mock(**config)
with mock.patch('robot.api.ExecutionResult',
return_value=mock.Mock(suite=suite)):
self.test.parse_results()
self.assertEqual(self.test.result, result)
self.assertEqual(self.test.start_time,
timestamp_to_secs(config['starttime']))
self.assertEqual(self.test.stop_time,
timestamp_to_secs(config['endtime']))
self.assertEqual(self.test.details,
{'description': config['name'], 'tests': []})
def test_null_passed(self):
self._config.update({'statistics.critical.passed': 0,
'statistics.critical.total': 20})
self._test_result(self._config, 0)
def test_no_test(self):
self._config.update({'statistics.critical.passed': 20,
'statistics.critical.total': 0})
self._test_result(self._config, 0)
def test_half_success(self):
self._config.update({'statistics.critical.passed': 10,
'statistics.critical.total': 20})
self._test_result(self._config, 50)
def test_success(self):
self._config.update({'statistics.critical.passed': 20,
'statistics.critical.total': 20})
self._test_result(self._config, 100)
class ODLRobotTesting(ODLTesting):
"""The class testing ODLTests.set_robotframework_vars()."""
# pylint: disable=missing-docstring
@mock.patch('fileinput.input', side_effect=Exception())
def test_set_vars_ko(self, mock_method):
self.assertFalse(self.test.set_robotframework_vars())
mock_method.assert_called_once_with(
os.path.join(odl.ODLTests.odl_test_repo,
'csit/variables/Variables.robot'), inplace=True)
@mock.patch('fileinput.input', return_value=[])
def test_set_vars_empty(self, mock_method):
self.assertTrue(self.test.set_robotframework_vars())
mock_method.assert_called_once_with(
os.path.join(odl.ODLTests.odl_test_repo,
'csit/variables/Variables.robot'), inplace=True)
@mock.patch('sys.stdout', new_callable=six.StringIO)
def _test_set_vars(self, msg1, msg2, *args):
line = mock.MagicMock()
line.__iter__.return_value = [msg1]
with mock.patch('fileinput.input', return_value=line) as mock_method:
self.assertTrue(self.test.set_robotframework_vars())
mock_method.assert_called_once_with(
os.path.join(odl.ODLTests.odl_test_repo,
'csit/variables/Variables.robot'), inplace=True)
self.assertEqual(args[0].getvalue(), "{}\n".format(msg2))
def test_set_vars_auth_default(self):
self._test_set_vars(
"@{AUTH} ",
"@{AUTH} admin admin")
def test_set_vars_auth1(self):
self._test_set_vars(
"@{AUTH1} foo bar",
"@{AUTH1} foo bar")
@mock.patch('sys.stdout', new_callable=six.StringIO)
def test_set_vars_auth_foo(self, *args):
line = mock.MagicMock()
line.__iter__.return_value = ["@{AUTH} "]
with mock.patch('fileinput.input', return_value=line) as mock_method:
self.assertTrue(self.test.set_robotframework_vars('foo', 'bar'))
mock_method.assert_called_once_with(
os.path.join(odl.ODLTests.odl_test_repo,
'csit/variables/Variables.robot'), inplace=True)
self.assertEqual(
args[0].getvalue(),
"@{AUTH} foo bar\n")
class ODLMainTesting(ODLTesting):
"""The class testing ODLTests.run_suites()."""
# pylint: disable=missing-docstring
def _get_run_suites_kwargs(self, key=None):
kwargs = {'odlusername': self._odl_username,
'odlpassword': self._odl_password,
'neutronurl': self._neutron_url,
'osauthurl': self._os_auth_url,
'osusername': self._os_username,
'osuserdomainname': self._os_userdomainname,
'osprojectname': self._os_projectname,
'osprojectdomainname': self._os_projectdomainname,
'ospassword': self._os_password,
'odlip': self._sdn_controller_ip,
'odlwebport': self._odl_webport,
'odlrestconfport': self._odl_restconfport}
if key:
del kwargs[key]
return kwargs
def _test_run_suites(self, status, *args):
kwargs = self._get_run_suites_kwargs()
self.assertEqual(self.test.run_suites(**kwargs), status)
if len(args) > 0:
args[0].assert_called_once_with(
odl.ODLTests.res_dir)
if len(args) > 1:
variable = [
'KEYSTONEURL:{}://{}'.format(
urllib.parse.urlparse(self._os_auth_url).scheme,
urllib.parse.urlparse(self._os_auth_url).netloc),
'NEUTRONURL:{}'.format(self._neutron_url),
'OS_AUTH_URL:"{}"'.format(self._os_auth_url),
'OSUSERNAME:"{}"'.format(self._os_username),
'OSUSERDOMAINNAME:"{}"'.format(self._os_userdomainname),
'OSTENANTNAME:"{}"'.format(self._os_projectname),
'OSPROJECTDOMAINNAME:"{}"'.format(self._os_projectdomainname),
'OSPASSWORD:"{}"'.format(self._os_password),
'ODL_SYSTEM_IP:{}'.format(self._sdn_controller_ip),
'PORT:{}'.format(self._odl_webport),
'RESTCONFPORT:{}'.format(self._odl_restconfport)]
args[1].assert_called_once_with(
odl.ODLTests.basic_suite_dir,
odl.ODLTests.neutron_suite_dir,
log='NONE',
output=os.path.join(odl.ODLTests.res_dir, 'output.xml'),
report='NONE',
stdout=mock.ANY,
variable=variable)
if len(args) > 2:
args[2].assert_called_with(
os.path.join(odl.ODLTests.res_dir, 'stdout.txt'))
def _test_no_keyword(self, key):
kwargs = self._get_run_suites_kwargs(key)
self.assertEqual(self.test.run_suites(**kwargs),
testcase.TestCase.EX_RUN_ERROR)
def test_no_odlusername(self):
self._test_no_keyword('odlusername')
def test_no_odlpassword(self):
self._test_no_keyword('odlpassword')
def test_no_neutronurl(self):
self._test_no_keyword('neutronurl')
def test_no_osauthurl(self):
self._test_no_keyword('osauthurl')
def test_no_osusername(self):
self._test_no_keyword('osusername')
def test_no_osprojectname(self):
self._test_no_keyword('osprojectname')
def test_no_ospassword(self):
self._test_no_keyword('ospassword')
def test_no_odlip(self):
self._test_no_keyword('odlip')
def test_no_odlwebport(self):
self._test_no_keyword('odlwebport')
def test_no_odlrestconfport(self):
self._test_no_keyword('odlrestconfport')
def test_set_vars_ko(self):
with mock.patch.object(self.test, 'set_robotframework_vars',
return_value=False) as mock_object:
self._test_run_suites(testcase.TestCase.EX_RUN_ERROR)
mock_object.assert_called_once_with(
self._odl_username, self._odl_password)
@mock.patch('os.makedirs', side_effect=Exception)
def test_makedirs_exc(self, mock_method):
with mock.patch.object(self.test, 'set_robotframework_vars',
return_value=True), \
self.assertRaises(Exception):
self._test_run_suites(testcase.TestCase.EX_RUN_ERROR,
mock_method)
@mock.patch('os.makedirs', side_effect=OSError)
def test_makedirs_oserror(self, mock_method):
with mock.patch.object(self.test, 'set_robotframework_vars',
return_value=True):
self._test_run_suites(testcase.TestCase.EX_RUN_ERROR,
mock_method)
@mock.patch('robot.run', side_effect=RobotError)
@mock.patch('os.makedirs')
def test_run_ko(self, *args):
with mock.patch.object(self.test, 'set_robotframework_vars',
return_value=True), \
self.assertRaises(RobotError):
self._test_run_suites(testcase.TestCase.EX_RUN_ERROR, *args)
@mock.patch('robot.run')
@mock.patch('os.makedirs')
def test_parse_results_ko(self, *args):
with mock.patch.object(self.test, 'set_robotframework_vars',
return_value=True), \
mock.patch.object(self.test, 'parse_results',
side_effect=RobotError):
self._test_run_suites(testcase.TestCase.EX_RUN_ERROR, *args)
@mock.patch('robot.run')
@mock.patch('os.makedirs')
def test_ok(self, *args):
with mock.patch.object(self.test, 'set_robotframework_vars',
return_value=True), \
mock.patch.object(self.test, 'parse_results'):
self._test_run_suites(testcase.TestCase.EX_OK, *args)
@mock.patch('robot.run')
@mock.patch('os.makedirs', side_effect=OSError(errno.EEXIST, ''))
def test_makedirs_oserror17(self, *args):
with mock.patch.object(self.test, 'set_robotframework_vars',
return_value=True), \
mock.patch.object(self.test, 'parse_results'):
self._test_run_suites(testcase.TestCase.EX_OK, *args)
@mock.patch('robot.run', return_value=1)
@mock.patch('os.makedirs')
def test_testcases_in_failure(self, *args):
with mock.patch.object(self.test, 'set_robotframework_vars',
return_value=True), \
mock.patch.object(self.test, 'parse_results'):
self._test_run_suites(testcase.TestCase.EX_OK, *args)
class ODLRunTesting(ODLTesting):
"""The class testing ODLTests.run()."""
# pylint: disable=missing-docstring
def _test_no_env_var(self, var):
with mock.patch('functest.utils.openstack_utils.get_endpoint',
return_value=ODLTesting._neutron_url):
del os.environ[var]
self.assertEqual(self.test.run(),
testcase.TestCase.EX_RUN_ERROR)
def _test_run(self, status=testcase.TestCase.EX_OK,
exception=None, **kwargs):
odlip = kwargs['odlip'] if 'odlip' in kwargs else '127.0.0.3'
odlwebport = kwargs['odlwebport'] if 'odlwebport' in kwargs else '8080'
odlrestconfport = (kwargs['odlrestconfport']
if 'odlrestconfport' in kwargs else '8181')
with mock.patch('functest.utils.openstack_utils.get_endpoint',
return_value=ODLTesting._neutron_url):
if exception:
self.test.run_suites = mock.Mock(side_effect=exception)
else:
self.test.run_suites = mock.Mock(return_value=status)
self.assertEqual(self.test.run(), status)
self.test.run_suites.assert_called_once_with(
odl.ODLTests.default_suites,
neutronurl=self._neutron_url,
odlip=odlip, odlpassword=self._odl_password,
odlrestconfport=odlrestconfport,
odlusername=self._odl_username, odlwebport=odlwebport,
osauthurl=self._os_auth_url,
ospassword=self._os_password,
osprojectname=self._os_projectname,
osusername=self._os_username,
osprojectdomainname=self._os_projectdomainname,
osuserdomainname=self._os_userdomainname)
def _test_multiple_suites(self, suites,
status=testcase.TestCase.EX_OK, **kwargs):
odlip = kwargs['odlip'] if 'odlip' in kwargs else '127.0.0.3'
odlwebport = kwargs['odlwebport'] if 'odlwebport' in kwargs else '8080'
odlrestconfport = (kwargs['odlrestconfport']
if 'odlrestconfport' in kwargs else '8181')
with mock.patch('functest.utils.openstack_utils.get_endpoint',
return_value=ODLTesting._neutron_url):
self.test.run_suites = mock.Mock(return_value=status)
self.assertEqual(self.test.run(suites=suites), status)
self.test.run_suites.assert_called_once_with(
suites,
neutronurl=self._neutron_url,
odlip=odlip, odlpassword=self._odl_password,
odlrestconfport=odlrestconfport,
odlusername=self._odl_username, odlwebport=odlwebport,
osauthurl=self._os_auth_url,
ospassword=self._os_password,
osprojectname=self._os_projectname,
osusername=self._os_username,
osprojectdomainname=self._os_projectdomainname,
osuserdomainname=self._os_userdomainname)
def test_exc(self):
with mock.patch('functest.utils.openstack_utils.get_endpoint',
side_effect=auth_plugins.MissingAuthPlugin()):
self.assertEqual(self.test.run(),
testcase.TestCase.EX_RUN_ERROR)
def test_no_os_auth_url(self):
self._test_no_env_var("OS_AUTH_URL")
def test_no_os_username(self):
self._test_no_env_var("OS_USERNAME")
def test_no_os_password(self):
self._test_no_env_var("OS_PASSWORD")
def test_no_os__name(self):
self._test_no_env_var("OS_PROJECT_NAME")
def test_run_suites_false(self):
os.environ["SDN_CONTROLLER_IP"] = self._sdn_controller_ip
self._test_run(testcase.TestCase.EX_RUN_ERROR,
odlip=self._sdn_controller_ip,
odlwebport=self._odl_webport)
def test_run_suites_exc(self):
with self.assertRaises(Exception):
os.environ["SDN_CONTROLLER_IP"] = self._sdn_controller_ip
self._test_run(status=testcase.TestCase.EX_RUN_ERROR,
exception=Exception(),
odlip=self._sdn_controller_ip,
odlwebport=self._odl_webport)
def test_no_sdn_controller_ip(self):
with mock.patch('functest.utils.openstack_utils.get_endpoint',
return_value=ODLTesting._neutron_url):
self.assertEqual(self.test.run(),
testcase.TestCase.EX_RUN_ERROR)
def test_without_installer_type(self):
os.environ["SDN_CONTROLLER_IP"] = self._sdn_controller_ip
self._test_run(testcase.TestCase.EX_OK,
odlip=self._sdn_controller_ip,
odlwebport=self._odl_webport)
def test_suites(self):
os.environ["SDN_CONTROLLER_IP"] = self._sdn_controller_ip
self._test_multiple_suites(
[odl.ODLTests.basic_suite_dir],
testcase.TestCase.EX_OK,
odlip=self._sdn_controller_ip,
odlwebport=self._odl_webport)
def test_fuel(self):
os.environ["INSTALLER_TYPE"] = "fuel"
self._test_run(testcase.TestCase.EX_OK,
odlip=urllib.parse.urlparse(self._neutron_url).hostname,
odlwebport='8181',
odlrestconfport='8282')
def test_apex_no_controller_ip(self):
with mock.patch('functest.utils.openstack_utils.get_endpoint',
return_value=ODLTesting._neutron_url):
os.environ["INSTALLER_TYPE"] = "apex"
self.assertEqual(self.test.run(),
testcase.TestCase.EX_RUN_ERROR)
def test_apex(self):
os.environ["SDN_CONTROLLER_IP"] = self._sdn_controller_ip
os.environ["INSTALLER_TYPE"] = "apex"
self._test_run(testcase.TestCase.EX_OK,
odlip=self._sdn_controller_ip, odlwebport='8081',
odlrestconfport='8081')
def test_netvirt_no_controller_ip(self):
with mock.patch('functest.utils.openstack_utils.get_endpoint',
return_value=ODLTesting._neutron_url):
os.environ["INSTALLER_TYPE"] = "netvirt"
self.assertEqual(self.test.run(),
testcase.TestCase.EX_RUN_ERROR)
def test_netvirt(self):
os.environ["SDN_CONTROLLER_IP"] = self._sdn_controller_ip
os.environ["INSTALLER_TYPE"] = "netvirt"
self._test_run(testcase.TestCase.EX_OK,
odlip=self._sdn_controller_ip, odlwebport='8081',
odlrestconfport='8081')
def test_joid_no_controller_ip(self):
with mock.patch('functest.utils.openstack_utils.get_endpoint',
return_value=ODLTesting._neutron_url):
os.environ["INSTALLER_TYPE"] = "joid"
self.assertEqual(self.test.run(),
testcase.TestCase.EX_RUN_ERROR)
def test_joid(self):
os.environ["SDN_CONTROLLER"] = self._sdn_controller_ip
os.environ["INSTALLER_TYPE"] = "joid"
self._test_run(testcase.TestCase.EX_OK,
odlip=self._sdn_controller_ip, odlwebport='8080')
def test_compass(self):
os.environ["INSTALLER_TYPE"] = "compass"
self._test_run(testcase.TestCase.EX_OK,
odlip=urllib.parse.urlparse(self._neutron_url).hostname,
odlrestconfport='8080')
def test_daisy_no_controller_ip(self):
with mock.patch('functest.utils.openstack_utils.get_endpoint',
return_value=ODLTesting._neutron_url):
os.environ["INSTALLER_TYPE"] = "daisy"
self.assertEqual(self.test.run(),
testcase.TestCase.EX_RUN_ERROR)
def test_daisy(self):
os.environ["SDN_CONTROLLER_IP"] = self._sdn_controller_ip
os.environ["INSTALLER_TYPE"] = "daisy"
self._test_run(testcase.TestCase.EX_OK,
odlip=self._sdn_controller_ip, odlwebport='8181',
odlrestconfport='8087')
class ODLArgParserTesting(ODLTesting):
"""The class testing ODLParser."""
# pylint: disable=missing-docstring
def setUp(self):
self.parser = odl.ODLParser()
super(ODLArgParserTesting, self).setUp()
def test_default(self):
self.assertEqual(self.parser.parse_args(), self.defaultargs)
def test_basic(self):
self.defaultargs['neutronurl'] = self._neutron_url
self.defaultargs['odlip'] = self._sdn_controller_ip
self.assertEqual(
self.parser.parse_args(
["--neutronurl={}".format(self._neutron_url),
"--odlip={}".format(self._sdn_controller_ip)]),
self.defaultargs)
@mock.patch('sys.stderr', new_callable=six.StringIO)
def test_fail(self, mock_method):
self.defaultargs['foo'] = 'bar'
with self.assertRaises(SystemExit):
self.parser.parse_args(["--foo=bar"])
self.assertTrue(mock_method.getvalue().startswith("usage:"))
def _test_arg(self, arg, value):
self.defaultargs[arg] = value
self.assertEqual(
self.parser.parse_args(["--{}={}".format(arg, value)]),
self.defaultargs)
def test_odlusername(self):
self._test_arg('odlusername', 'foo')
def test_odlpassword(self):
self._test_arg('odlpassword', 'foo')
def test_osauthurl(self):
self._test_arg('osauthurl', 'http://127.0.0.4:5000/v2')
def test_neutronurl(self):
self._test_arg('neutronurl', 'http://127.0.0.4:9696')
def test_osusername(self):
self._test_arg('osusername', 'foo')
def test_osuserdomainname(self):
self._test_arg('osuserdomainname', 'domain')
def test_osprojectname(self):
self._test_arg('osprojectname', 'foo')
def test_osprojectdomainname(self):
self._test_arg('osprojectdomainname', 'domain')
def test_ospassword(self):
self._test_arg('ospassword', 'foo')
def test_odlip(self):
self._test_arg('odlip', '127.0.0.4')
def test_odlwebport(self):
self._test_arg('odlwebport', '80')
def test_odlrestconfport(self):
self._test_arg('odlrestconfport', '80')
def test_pushtodb(self):
self.defaultargs['pushtodb'] = True
self.assertEqual(self.parser.parse_args(["--{}".format('pushtodb')]),
self.defaultargs)
def test_multiple_args(self):
self.defaultargs['neutronurl'] = self._neutron_url
self.defaultargs['odlip'] = self._sdn_controller_ip
self.assertEqual(
self.parser.parse_args(
["--neutronurl={}".format(self._neutron_url),
"--odlip={}".format(self._sdn_controller_ip)]),
self.defaultargs)
if __name__ == "__main__":
logging.disable(logging.CRITICAL)
unittest.main(verbosity=2)
| 40.642746 | 79 | 0.609857 | 2,858 | 26,052 | 5.252624 | 0.108467 | 0.047429 | 0.024181 | 0.027844 | 0.63769 | 0.575873 | 0.543832 | 0.513056 | 0.48741 | 0.451639 | 0 | 0.013743 | 0.276601 | 26,052 | 640 | 80 | 40.70625 | 0.782819 | 0.033395 | 0 | 0.411067 | 0 | 0 | 0.139924 | 0.04302 | 0 | 0 | 0 | 0 | 0.088933 | 1 | 0.156126 | false | 0.063241 | 0.025692 | 0 | 0.225296 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
7263c0e12b1f9385bffd20a482055a91cac00beb | 996 | py | Python | backend/server/server/wsgi.py | Stinger101/my_uno_ml_service | 47d19f6e5e19e73c465b7ddca889324c9bd5862f | [
"MIT"
] | null | null | null | backend/server/server/wsgi.py | Stinger101/my_uno_ml_service | 47d19f6e5e19e73c465b7ddca889324c9bd5862f | [
"MIT"
] | null | null | null | backend/server/server/wsgi.py | Stinger101/my_uno_ml_service | 47d19f6e5e19e73c465b7ddca889324c9bd5862f | [
"MIT"
] | null | null | null | """
WSGI config for server project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'server.settings')
application = get_wsgi_application()
import inspect
from apps.ml.registry import MLRegistry
from apps.ml.income_classifier.random_forest import RandomForestClassifier
try:
registry = MLRegistry()
rf = RandomForestClassifier()
registry.add_algorithm(endpoint_name="income_classifier",algorithm_object=rf,algorithm_name="random forest", algorithm_status="production", algorithm_version="0.0.1",owner="Piotr",algorithm_description="Random forest with simple pre and post processing",algorithm_code=inspect.getsource(RandomForestClassifier))
except Exception as e:
print ("Error while loading algorithm to the registry",str(e))
| 33.2 | 315 | 0.800201 | 131 | 996 | 5.954198 | 0.618321 | 0.046154 | 0.046154 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.005599 | 0.103414 | 996 | 29 | 316 | 34.344828 | 0.867861 | 0.212851 | 0 | 0 | 0 | 0 | 0.233247 | 0.028351 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.384615 | 0 | 0.384615 | 0.076923 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 |
7265b89cf3b023b36a24bc0d387a352f1ee8492b | 1,881 | py | Python | models/toolscontext/errorhandler.py | vinirossa/password_generator_test | dd2f43540c6f58ff9217320c21b246c0be3fc55f | [
"MIT"
] | 2 | 2021-09-10T00:11:00.000Z | 2021-09-10T02:47:54.000Z | models/toolscontext/errorhandler.py | vinirossa/password_generator_test | dd2f43540c6f58ff9217320c21b246c0be3fc55f | [
"MIT"
] | null | null | null | models/toolscontext/errorhandler.py | vinirossa/password_generator_test | dd2f43540c6f58ff9217320c21b246c0be3fc55f | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
""" Module Name
Description...
"""
__author__ = "Vinícius Pereira"
__copyright__ = "Copyright 2021, Vinícius Pereira"
__credits__ = ["Vinícius Pereira","etc."]
__date__ = "2021/04/12"
__license__ = "GPL"
__version__ = "1.0.0"
__pythonversion__ = "3.9.1"
__maintainer__ = "Vinícius Pereira"
__contact__ = "viniciuspsb@gmail.com"
__status__ = "Development"
import sys, os
import logging
import inspect
import datetime
STD_LOG_FORMAT = ("%(asctime)s - %(levelname)s - %(name)s - %(filename)s - %(funcName)s() - ln.%(lineno)d"
" - %(message)s")
def file_logger(filename: str,
level:int = logging.DEBUG,
format: str = STD_LOG_FORMAT):
logger = logging.getLogger(__name__)
logger.setLevel(level)
formatter = logging.Formatter(format)
file_handler = logging.FileHandler(filename)
file_handler.setLevel(level)
file_handler.setFormatter(formatter)
logger.addHandler(file_handler)
return logger
def prompt_logger(error):
caller = inspect.getframeinfo(inspect.stack()[1][0])
error_log = {"error_type": error.__class__.__name__,
"error_info": error.__doc__,
"error_line": error.__traceback__.tb_lineno,
"error_file": os.path.basename(caller.filename),
"error_time": datetime.datetime.now(),
"error_details": str(error).capitalize()}
print("----- ERROR -----")
print("Type:",error_log["error_type"])
print("Info:",error_log["error_info"])
print("Line:",error_log["error_line"])
print("File:",error_log["error_file"])
print("Time:",error_log["error_time"])
print("Details:",error_log["error_details"])
return error_log
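# Typical use of prompt_logger (sketch):
#   try:
#       risky_operation()
#   except Exception as exc:
#       prompt_logger(exc)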
def error_box():
pass
def sql_logger():
pass
if __name__ == "__main__":
pass | 24.115385 | 106 | 0.640617 | 214 | 1,881 | 5.158879 | 0.429907 | 0.057971 | 0.082428 | 0.030797 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.014765 | 0.207868 | 1,881 | 78 | 107 | 24.115385 | 0.726175 | 0.037746 | 0 | 0.061224 | 0 | 0.020408 | 0.235392 | 0.011686 | 0 | 0 | 0 | 0 | 0 | 1 | 0.081633 | false | 0.061224 | 0.081633 | 0 | 0.204082 | 0.142857 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
726cd8989837300a91d84b0ca0157304eb9a9398 | 821 | py | Python | src/metrics.py | dmitryrubtsov/Recommender-systems | 9debd7b1c2d67ebc508263a483c81da57521dea0 | [
"MIT"
] | null | null | null | src/metrics.py | dmitryrubtsov/Recommender-systems | 9debd7b1c2d67ebc508263a483c81da57521dea0 | [
"MIT"
] | null | null | null | src/metrics.py | dmitryrubtsov/Recommender-systems | 9debd7b1c2d67ebc508263a483c81da57521dea0 | [
"MIT"
] | 1 | 2021-09-11T09:12:34.000Z | 2021-09-11T09:12:34.000Z | import pandas as pd
import numpy as np
import swifter
def money_precision_at_k(y_pred: pd.Series, y_true: pd.Series, item_price, k=5):
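    """Money precision@k (descriptive sketch of the assumed inputs).

    y_pred and y_true are Series of per-user item-id lists, and item_price
    exposes a ``price`` lookup by item id. Users with fewer than k relevant
    items are dropped; the metric is the share of the top-k recommended
    items' total price that falls on items the user actually bought,
    averaged over the remaining users.
    """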
y_pred = y_pred.swifter.progress_bar(False).apply(pd.Series)
user_filter = ~(y_true.swifter.progress_bar(False).apply(len) < k)
y_pred = y_pred.loc[user_filter]
y_true = y_true.loc[user_filter]
prices_recommended = y_pred.swifter.progress_bar(False).applymap(lambda item: item_price.price.get(item))
flags = y_pred.loc[:, :k - 1].swifter.progress_bar(False) \
.apply(lambda row: np.isin(np.array(row), y_true.get(row.name)), axis=1) \
.swifter.progress_bar(False).apply(pd.Series)
metric = (
(flags * prices_recommended.loc[:, :k - 1]).sum(axis=1) / prices_recommended.loc[:, :k - 1].sum(axis=1)
).mean()
return metric
| 34.208333 | 111 | 0.685749 | 132 | 821 | 4.05303 | 0.325758 | 0.065421 | 0.168224 | 0.214953 | 0.416822 | 0.364486 | 0.246729 | 0.11215 | 0 | 0 | 0 | 0.010189 | 0.163216 | 821 | 23 | 112 | 35.695652 | 0.768559 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.0625 | false | 0 | 0.1875 | 0 | 0.3125 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
727ca773247407a0b44c8ae5a52c27e130f63397 | 6,165 | py | Python | sqlmat/utils.py | haobtc/sqlmat | c6b6ef966ba01173b6a485afb932ed438c35b211 | [
"MIT"
] | null | null | null | sqlmat/utils.py | haobtc/sqlmat | c6b6ef966ba01173b6a485afb932ed438c35b211 | [
"MIT"
] | null | null | null | sqlmat/utils.py | haobtc/sqlmat | c6b6ef966ba01173b6a485afb932ed438c35b211 | [
"MIT"
] | null | null | null |
from typing import Tuple, List, Optional
import json
import sys
import os
import shlex
import asyncio
import argparse
import logging
import tempfile
from urllib.parse import urlparse

logger = logging.getLogger(__name__)


def find_sqlmat_json() -> Optional[dict]:
    json_path = os.getenv('SQLMAT_JSON_PATH')
    if json_path:
        with open(json_path) as f:
            cfg = json.load(f)
            return cfg

    # iterate through the current dir up to the root dir "/" to find a
    # .sqlmat.json
    workdir = os.path.abspath(os.getcwd())
    while workdir:
        json_path = os.path.join(workdir, '.sqlmat.json')
        if os.path.exists(json_path):
            with open(json_path) as f:
                cfg = json.load(f)
                return cfg
        parentdir = os.path.abspath(os.path.join(workdir, '..'))
        if parentdir == workdir:
            break
        workdir = parentdir
    logger.warning('fail to find .sqlmat.json')
    return None
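
# A sketch of the .sqlmat.json layout this loader expects, inferred from the
# lookup `cfg['databases'][args.db]['dsn']` in find_dsn below (anything beyond
# those fields is an assumption):
#
#     {
#       "databases": {
#         "default": {"dsn": "postgres://user:pass@127.0.0.1:5432/mydb"}
#       }
#     }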

def find_dsn(prog: str, desc: str) -> Tuple[str, List[str]]:
    parser = argparse.ArgumentParser(
        prog=prog,
        description=desc)
    parser.add_argument('-d', '--dsn',
                        type=str,
                        help='postgresql dsn')
    parser.add_argument('-g', '--db',
                        type=str,
                        default='default',
                        help='postgresql db instance defined in .sqlmat.json')
    parser.add_argument('callee_args',
                        type=str,
                        nargs='*',
                        help='command line arguments of callee programs')

    # from arguments
    args = parser.parse_args()
    if args.dsn:
        return args.dsn, args.callee_args

    # find dsn from ./.sqlmat.json
    cfg = find_sqlmat_json()
    if cfg:
        dsn = cfg['databases'][args.db]['dsn']
        assert isinstance(dsn, str)
        return dsn, args.callee_args

    # default dsn using username
    user = os.getenv('USER', '')
    default_dsn = f'postgres://{user}@127.0.0.1:5432/{args.db}'
    logger.warning('no postgres dsn specified, use %s instead', default_dsn)
    return default_dsn, args.callee_args


def joinargs(callee_args: List[str]) -> str:
    if hasattr(shlex, 'join'):
        return shlex.join(callee_args)
    else:
        return ' '.join(shlex.quote(a) for a in callee_args)
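
# Example of the shell quoting joinargs provides (values illustrative):
#
#     joinargs(['-c', 'select 1;'])   ->   "-c 'select 1;'"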

# run psql client
async def run_shell(dsn: str, callee_args: List[str]) -> None:
    p = urlparse(dsn)
    username = p.username or ''
    password = p.password or ''
    dbname = p.path[1:]
    hostname = p.hostname
    port = p.port or 5432
    # Pass the password via a temporary .pgpass file instead of the command line.
    temp_pgpass = tempfile.NamedTemporaryFile(mode='w')
    print(
        '{}:{}:{}:{}:{}'.format(hostname, port, dbname, username, password),
        file=temp_pgpass,
        flush=True)
    os.environ['PGPASSFILE'] = temp_pgpass.name
    command = 'psql -h{} -p{} -U{} {} {}'.format(hostname, port, username, joinargs(callee_args), dbname)
    proc = await asyncio.create_subprocess_shell(command)
    await proc.communicate()


def cl_run_shell() -> None:
    dsn, callee_args = find_dsn('sqlmat-shell', 'run psql client shell')
    loop = asyncio.get_event_loop()
    loop.run_until_complete(run_shell(dsn, callee_args))


# run dbdump
async def run_dbdump(dsn: str, callee_args: List[str]) -> None:
    p = urlparse(dsn)
    username = p.username or ''
    password = p.password or ''
    dbname = p.path[1:]
    hostname = p.hostname
    port = p.port or 5432
    temp_pgpass = tempfile.NamedTemporaryFile(mode='w')
    print(
        '{}:{}:{}:{}:{}'.format(hostname, port, dbname, username, password),
        file=temp_pgpass,
        flush=True)
    os.environ['PGPASSFILE'] = temp_pgpass.name
    command = 'pg_dump -h{} -p{} -U{} {} {}'.format(hostname, port, username, joinargs(callee_args), dbname)
    proc = await asyncio.create_subprocess_shell(command)
    await proc.communicate()


def cl_run_dbdump() -> None:
    dsn, callee_args = find_dsn('sqlmat-dump', 'dump database')
    loop = asyncio.get_event_loop()
    loop.run_until_complete(run_dbdump(dsn, callee_args))


# generate alembic migrations
def gen_migrate(dsn: str) -> None:
    init_data = ALEMBIC_INIT.replace('{{dsn}}', dsn)
    with open('alembic.ini', 'w') as f:
        f.write(init_data)


def cl_gen_migrate() -> None:
    dsn, callee_args = find_dsn('sqlmat-genmigrate', 'generate alembic migration')
    gen_migrate(dsn)
    print('Wrote alembic.ini')
ALEMBIC_INIT = '''\
# A generic, single database configuration.
[alembic]
# path to migration scripts
script_location = migrations
# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s
# timezone to use when rendering the date
# within the migration file as well as the filename.
# string value is passed to dateutil.tz.gettz()
# leave blank for localtime
# timezone =
# max length of characters to apply to the
# "slug" field
#truncate_slug_length = 40
# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false
# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false
# version location specification; this defaults
# to migrations/versions. When using multiple version
# directories, initial revisions must be specified with --version-path
# version_locations = %(here)s/bar %(here)s/bat migrations/versions
# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8
#sqlalchemy.url = driver://user:pass@localhost/dbname
sqlalchemy.url = {{dsn}}
# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARN
handlers = console
qualname =
[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
'''
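
# The cl_* helpers above are written as command-line entry points; a typical
# (hypothetical) setup.py wiring for them would look like:
#
#     entry_points={
#         'console_scripts': [
#             'sqlmat-shell=sqlmat.utils:cl_run_shell',
#             'sqlmat-dump=sqlmat.utils:cl_run_dbdump',
#             'sqlmat-genmigrate=sqlmat.utils:cl_gen_migrate',
#         ],
#     }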
| 28.541667 | 108 | 0.65515 | 801 | 6,165 | 4.928839 | 0.319601 | 0.040527 | 0.016464 | 0.017224 | 0.270517 | 0.270517 | 0.270517 | 0.24772 | 0.24772 | 0.24772 | 0 | 0.005225 | 0.223844 | 6,165 | 215 | 109 | 28.674419 | 0.819854 | 0.032928 | 0 | 0.254438 | 0 | 0 | 0.372081 | 0.020326 | 0 | 0 | 0 | 0 | 0.005917 | 1 | 0.04142 | false | 0.071006 | 0.059172 | 0 | 0.147929 | 0.017751 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
72825a6195e36bed57090b1163e99522e513ffd4 | 2,410 | py | Python | eggs/ZConfig-3.0.4-py2.7.egg/ZConfig/tests/test_cookbook.py | salayhin/talkofacta | 8b5a14245dd467bb1fda75423074c4840bd69fb7 | [
"MIT"
] | null | null | null | eggs/ZConfig-3.0.4-py2.7.egg/ZConfig/tests/test_cookbook.py | salayhin/talkofacta | 8b5a14245dd467bb1fda75423074c4840bd69fb7 | [
"MIT"
] | null | null | null | eggs/ZConfig-3.0.4-py2.7.egg/ZConfig/tests/test_cookbook.py | salayhin/talkofacta | 8b5a14245dd467bb1fda75423074c4840bd69fb7 | [
"MIT"
] | null | null | null |
##############################################################################
#
# Copyright (c) 2004 Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""Tests of examples from the online cookbook, so we don't break them
down the road. Unless we really mean to.

The ZConfig Cookbook is available online at:

    http://dev.zope.org/Zope3/ZConfig

"""

import ZConfig.tests.support
import unittest


def basic_key_mapping_password_to_passwd(key):
    # Lower-case the key since that's what basic-key does:
    key = key.lower()
    # Now map password to passwd:
    if key == "password":
        key = "passwd"
    return key
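
# For example (values illustrative): "PASSWORD" -> "passwd", "UserID" -> "userid".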

def user_info_conversion(section):
    return section


class CookbookTestCase(ZConfig.tests.support.TestHelper, unittest.TestCase):

    def test_rewriting_key_names(self):
        schema = self.load_schema_text("""
            <schema prefix='%s'>
              <sectiontype name='userinfo' datatype='.user_info_conversion'
                           keytype='.basic_key_mapping_password_to_passwd'>
                <key name='userid' datatype='integer'/>
                <key name='username' datatype='identifier'/>
                <key name='password'/>
              </sectiontype>
              <section type='userinfo' name='*' attribute='userinfo'/>
            </schema>
            """ % __name__)
        config = self.load_config_text(schema, """\
            <userinfo>
              USERID 42
              USERNAME foouser
              PASSWORD yeah-right
            </userinfo>
            """)
        self.assertEqual(config.userinfo.userid, 42)
        self.assertEqual(config.userinfo.username, "foouser")
        self.assertEqual(config.userinfo.passwd, "yeah-right")
        self.assertTrue(not hasattr(config.userinfo, "password"))


def test_suite():
    return unittest.makeSuite(CookbookTestCase)


if __name__ == "__main__":
    unittest.main(defaultTest="test_suite")
| 33.943662 | 78 | 0.612448 | 265 | 2,410 | 5.437736 | 0.498113 | 0.038862 | 0.03331 | 0.060375 | 0.047189 | 0.047189 | 0.047189 | 0 | 0 | 0 | 0 | 0.00592 | 0.229046 | 2,410 | 70 | 79 | 34.428571 | 0.769645 | 0.30332 | 0 | 0 | 0 | 0 | 0.454424 | 0.083167 | 0 | 0 | 0 | 0 | 0.108108 | 1 | 0.108108 | false | 0.216216 | 0.054054 | 0.054054 | 0.27027 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
72831f9ef5d36065feb1e5d281d84dbd15c6710a | 1,840 | py | Python | main.py | ezhkovskii/instagrapi-rest | a3570f279ef0973856b92e433b117e0be0d4c713 | [
"MIT"
] | null | null | null | main.py | ezhkovskii/instagrapi-rest | a3570f279ef0973856b92e433b117e0be0d4c713 | [
"MIT"
] | null | null | null | main.py | ezhkovskii/instagrapi-rest | a3570f279ef0973856b92e433b117e0be0d4c713 | [
"MIT"
] | null | null | null |
import pkg_resources
from fastapi import FastAPI
from fastapi.openapi.utils import get_openapi
from starlette.responses import RedirectResponse, JSONResponse

from routers import auth, media, video, photo, user, igtv, clip, album, story, hashtag, direct

app = FastAPI()
app.include_router(auth.router)
app.include_router(media.router)
app.include_router(video.router)
app.include_router(photo.router)
app.include_router(user.router)
app.include_router(igtv.router)
app.include_router(clip.router)
app.include_router(album.router)
app.include_router(story.router)
app.include_router(hashtag.router)
app.include_router(direct.router)


@app.get("/", tags=["system"], summary="Redirect to /docs")
async def root():
    """Redirect to /docs"""
    return RedirectResponse(url="/docs")


@app.get("/version", tags=["system"], summary="Get dependency versions")
async def version():
    """Get dependency versions"""
    versions = {}
    for name in ('instagrapi', ):
        item = pkg_resources.require(name)
        if item:
            versions[name] = item[0].version
    return versions


@app.exception_handler(Exception)
async def handle_exception(request, exc: Exception):
    # Return a JSON 500 with the exception type and message instead of an HTML page.
    return JSONResponse({
        "detail": str(exc),
        "exc_type": str(type(exc).__name__)
    }, status_code=500)


def custom_openapi():
    if app.openapi_schema:
        return app.openapi_schema
    # for route in app.routes:
    #     body_field = getattr(route, 'body_field', None)
    #     if body_field:
    #         body_field.type_.__name__ = 'name'
    openapi_schema = get_openapi(
        title="instagrapi-rest",
        version="1.0.0",
        description="RESTful API Service for instagrapi",
        routes=app.routes,
    )
    app.openapi_schema = openapi_schema
    return app.openapi_schema


app.openapi = custom_openapi
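
# A minimal way to serve this app locally (assuming uvicorn is installed; the
# host/port values are illustrative):
#
#     uvicorn main:app --host 0.0.0.0 --port 8000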
| 28.307692 | 94 | 0.7 | 232 | 1,840 | 5.37931 | 0.323276 | 0.088141 | 0.141026 | 0.176282 | 0.05609 | 0.05609 | 0 | 0 | 0 | 0 | 0 | 0.004639 | 0.179891 | 1,840 | 64 | 95 | 28.75 | 0.822399 | 0.075 | 0 | 0.043478 | 0 | 0 | 0.088073 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.021739 | false | 0 | 0.108696 | 0 | 0.23913 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
7286c123e2e1e4752621f27aef91caae14dc1664 | 727 | py | Python | ex039.py | vinisantos7/PythonExercicios | bc8f38e03a606d6b0216632a93affeab0792e534 | [
"MIT"
] | 2 | 2021-11-04T21:09:11.000Z | 2021-11-08T09:42:10.000Z | ex039.py | vinisantos7/PythonExercicios | bc8f38e03a606d6b0216632a93affeab0792e534 | [
"MIT"
] | null | null | null | ex039.py | vinisantos7/PythonExercicios | bc8f38e03a606d6b0216632a93affeab0792e534 | [
"MIT"
] | null | null | null |
print("@" * 30)
print("Alistamento - Serviço Militar")
print("@" * 30)
from datetime import date

ano_nasc = int(input("Digite seu ano de nascimento: "))
ano_atual = date.today().year
idade = ano_atual - ano_nasc
print(f"Quem nasceu em {ano_nasc} tem {idade} anos em {ano_atual}")
if idade == 18:
    print("É a hora de se alistar no serviço militar, IMEDIATAMENTE!")
elif idade < 18:
    saldo = 18 - idade
    print(f"Ainda falta {saldo} anos para o seu alistamento!")
    ano = ano_atual + saldo
    print(f"Seu alistamento será em {ano}")
else:  # idade > 18
    saldo = idade - 18
    print(f"Já passou {saldo} anos do tempo para o seu alistamento!")
    ano = ano_atual - saldo
    print(f"O seu alistamento foi em {ano}")
 | 30.291667 | 70 | 0.674003 | 115 | 727 | 4.191304 | 0.426087 | 0.082988 | 0.093361 | 0.078838 | 0.170124 | 0.170124 | 0.170124 | 0.170124 | 0.170124 | 0.170124 | 0 | 0.02418 | 0.203576 | 727 | 24 | 71 | 30.291667 | 0.80829 | 0 | 0 | 0.095238 | 0 | 0 | 0.462912 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.047619 | 0.047619 | 0 | 0.047619 | 0.428571 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 |
728800a60a85756e4875a7047b803fa961c8c2d3 | 25,990 | py | Python | test/python/spl/tk17/opt/.__splpy/packages/streamsx/topology/tester.py | Jaimie-Jin1/streamsx.topology | 6f316ec8e9ed1349c6f061d9bb7d03deb87e3d08 | [
"Apache-2.0"
] | 31 | 2015-06-24T06:21:14.000Z | 2020-08-28T21:45:50.000Z | test/python/spl/tk17/opt/.__splpy/packages/streamsx/topology/tester.py | Jaimie-Jin1/streamsx.topology | 6f316ec8e9ed1349c6f061d9bb7d03deb87e3d08 | [
"Apache-2.0"
] | 1,203 | 2015-06-15T02:11:49.000Z | 2021-03-22T09:47:54.000Z | test/python/spl/tk17/opt/.__splpy/packages/streamsx/topology/tester.py | Jaimie-Jin1/streamsx.topology | 6f316ec8e9ed1349c6f061d9bb7d03deb87e3d08 | [
"Apache-2.0"
] | 53 | 2015-05-28T21:14:16.000Z | 2021-12-23T12:58:59.000Z |
# coding=utf-8
# Licensed Materials - Property of IBM
# Copyright IBM Corp. 2017
"""Testing support for streaming applications.

Allows testing of a streaming application by creating conditions
on streams that are expected to become valid during the processing.
`Tester` is designed to be used with Python's `unittest` module.

A complete application may be tested or fragments of it, for example a sub-graph can be tested
in isolation that takes input data and scores it using a model.

Supports execution of the application on
:py:const:`~streamsx.topology.context.ContextTypes.STREAMING_ANALYTICS_SERVICE`,
:py:const:`~streamsx.topology.context.ContextTypes.DISTRIBUTED`
or :py:const:`~streamsx.topology.context.ContextTypes.STANDALONE`.

A :py:class:`Tester` instance is created and associated with the :py:class:`Topology` to be tested.
Conditions are then created against streams, such as a stream must receive 10 tuples using
:py:meth:`~Tester.tuple_count`.

Here is a simple example that tests a filter correctly only passes tuples with values greater than 5::

    import unittest
    from streamsx.topology.topology import Topology
    from streamsx.topology.tester import Tester

    class TestSimpleFilter(unittest.TestCase):

        def setUp(self):
            # Sets self.test_ctxtype and self.test_config
            Tester.setup_streaming_analytics(self)

        def test_filter(self):
            # Declare the application to be tested
            topology = Topology()
            s = topology.source([5, 7, 2, 4, 9, 3, 8])
            s = s.filter(lambda x : x > 5)

            # Create tester and assign conditions
            tester = Tester(topology)
            tester.contents(s, [7, 9, 8])

            # Submit the application for test
            # If it fails an AssertionError will be raised.
            tester.test(self.test_ctxtype, self.test_config)

A stream may have any number of conditions and any number of streams may be tested.

A :py:meth:`~Tester.local_check` is supported where a method of the
unittest class is executed once the job becomes healthy. This performs
checks from the context of the Python unittest class, such as
checking external effects of the application or using the REST api to
monitor the application.

.. warning::
    Python 3.5 and Streaming Analytics service or IBM Streams 4.2 or later is required when using `Tester`.
"""

import streamsx.ec as ec
import streamsx.topology.context as stc
import os
import unittest
import logging
import collections
import threading
from streamsx.rest import StreamsConnection
from streamsx.rest import StreamingAnalyticsConnection
from streamsx.topology.context import ConfigParams
import time

import streamsx.topology.tester_runtime as sttrt

_logger = logging.getLogger('streamsx.topology.test')

class Tester(object):
    """Testing support for a Topology.

    Allows testing of a Topology by creating conditions against the contents
    of its streams.

    Conditions may be added to a topology at any time before submission.
    If a topology is submitted directly to a context then the graph
    is not modified. This allows testing code to be inserted while
    the topology is being built, but not acted upon unless the topology
    is submitted in test mode.

    If a topology is submitted through the test method then the topology
    may be modified to include operations to ensure the conditions are met.

    .. warning::
        For future compatibility applications under test should not include intended failures that cause
        a processing element to stop or restart. Thus, currently testing is against expected application behavior.

    Args:
        topology: Topology to be tested.
    """
    def __init__(self, topology):
        self.topology = topology
        topology.tester = self
        self._conditions = {}
        self.local_check = None
    @staticmethod
    def setup_standalone(test):
        """
        Set up a unittest.TestCase to run tests using IBM Streams standalone mode.

        Requires a local IBM Streams install defined by the STREAMS_INSTALL
        environment variable. If STREAMS_INSTALL is not set, then the
        test is skipped.

        Two attributes are set in the test case:

        * test_ctxtype - Context type the test will be run in.
        * test_config - Test configuration.

        Args:
            test(unittest.TestCase): Test case to be set up to run tests using Tester

        Returns: None
        """
        if not 'STREAMS_INSTALL' in os.environ:
            raise unittest.SkipTest("Skipped due to no local IBM Streams install")
        test.test_ctxtype = stc.ContextTypes.STANDALONE
        test.test_config = {}

    @staticmethod
    def setup_distributed(test):
        """
        Set up a unittest.TestCase to run tests using IBM Streams distributed mode.

        Requires a local IBM Streams install defined by the STREAMS_INSTALL
        environment variable. If STREAMS_INSTALL is not set then the
        test is skipped.

        The Streams instance to use is defined by the environment variables:

        * STREAMS_ZKCONNECT - Zookeeper connection string
        * STREAMS_DOMAIN_ID - Domain identifier
        * STREAMS_INSTANCE_ID - Instance identifier

        Two attributes are set in the test case:

        * test_ctxtype - Context type the test will be run in.
        * test_config - Test configuration.

        Args:
            test(unittest.TestCase): Test case to be set up to run tests using Tester

        Returns: None
        """
        if not 'STREAMS_INSTALL' in os.environ:
            raise unittest.SkipTest("Skipped due to no local IBM Streams install")
        if not 'STREAMS_INSTANCE_ID' in os.environ:
            raise unittest.SkipTest("Skipped due to STREAMS_INSTANCE_ID environment variable not set")
        if not 'STREAMS_DOMAIN_ID' in os.environ:
            raise unittest.SkipTest("Skipped due to STREAMS_DOMAIN_ID environment variable not set")

        test.username = os.getenv("STREAMS_USERNAME", "streamsadmin")
        test.password = os.getenv("STREAMS_PASSWORD", "passw0rd")

        test.test_ctxtype = stc.ContextTypes.DISTRIBUTED
        test.test_config = {}

    @staticmethod
    def setup_streaming_analytics(test, service_name=None, force_remote_build=False):
        """
        Set up a unittest.TestCase to run tests using Streaming Analytics service on IBM Bluemix cloud platform.

        The service to use is defined by:

        * VCAP_SERVICES environment variable containing `streaming_analytics` entries.
        * service_name which defaults to the value of STREAMING_ANALYTICS_SERVICE_NAME environment variable.

        If VCAP_SERVICES is not set or a service name is not defined, then the test is skipped.

        Two attributes are set in the test case:

        * test_ctxtype - Context type the test will be run in.
        * test_config - Test configuration.

        Args:
            test(unittest.TestCase): Test case to be set up to run tests using Tester
            service_name(str): Name of Streaming Analytics service to use. Must exist as an
                entry in the VCAP services. Defaults to value of STREAMING_ANALYTICS_SERVICE_NAME environment variable.

        Returns: None
        """
        if not 'VCAP_SERVICES' in os.environ:
            raise unittest.SkipTest("Skipped due to VCAP_SERVICES environment variable not set")

        test.test_ctxtype = stc.ContextTypes.STREAMING_ANALYTICS_SERVICE
        if service_name is None:
            service_name = os.environ.get('STREAMING_ANALYTICS_SERVICE_NAME', None)
        if service_name is None:
            raise unittest.SkipTest("Skipped due to no service name supplied")
        test.test_config = {'topology.service.name': service_name}
        if force_remote_build:
            test.test_config['topology.forceRemoteBuild'] = True
    def add_condition(self, stream, condition):
        """Add a condition to a stream.

        Conditions are normally added through :py:meth:`tuple_count`, :py:meth:`contents` or :py:meth:`tuple_check`.

        This allows an additional conditions that are implementations of :py:class:`Condition`.

        Args:
            stream(Stream): Stream to be tested.
            condition(Condition): Arbitrary condition.

        Returns:
            Stream: stream
        """
        self._conditions[condition.name] = (stream, condition)
        return stream

    def tuple_count(self, stream, count, exact=True):
        """Test that a stream contains a number of tuples.

        If `exact` is `True`, then condition becomes valid when `count`
        tuples are seen on `stream` during the test. Subsequently if additional
        tuples are seen on `stream` then the condition fails and can never
        become valid.

        If `exact` is `False`, then the condition becomes valid once `count`
        tuples are seen on `stream` and remains valid regardless of
        any additional tuples.

        Args:
            stream(Stream): Stream to be tested.
            count(int): Number of tuples expected.
            exact(bool): `True` if the stream must contain exactly `count`
                tuples, `False` if the stream must contain at least `count` tuples.

        Returns:
            Stream: stream
        """
        _logger.debug("Adding tuple count (%d) condition to stream %s.", count, stream)
        if exact:
            name = "ExactCount" + str(len(self._conditions))
            cond = sttrt._TupleExactCount(count, name)
            cond._desc = "{0} stream expects tuple count equal to {1}.".format(stream.name, count)
        else:
            name = "AtLeastCount" + str(len(self._conditions))
            cond = sttrt._TupleAtLeastCount(count, name)
            cond._desc = "'{0}' stream expects tuple count of at least {1}.".format(stream.name, count)
        return self.add_condition(stream, cond)

    def contents(self, stream, expected, ordered=True):
        """Test that a stream contains the expected tuples.

        Args:
            stream(Stream): Stream to be tested.
            expected(list): Sequence of expected tuples.
            ordered(bool): True if the ordering of received tuples must match expected.

        Returns:
            Stream: stream
        """
        name = "StreamContents" + str(len(self._conditions))
        if ordered:
            cond = sttrt._StreamContents(expected, name)
            cond._desc = "'{0}' stream expects tuple ordered contents: {1}.".format(stream.name, expected)
        else:
            cond = sttrt._UnorderedStreamContents(expected, name)
            cond._desc = "'{0}' stream expects tuple unordered contents: {1}.".format(stream.name, expected)
        return self.add_condition(stream, cond)
    def tuple_check(self, stream, checker):
        """Check each tuple on a stream.

        For each tuple ``t`` on `stream` ``checker(t)`` is called.

        If the return evaluates to `False` then the condition fails.
        Once the condition fails it can never become valid.
        Otherwise the condition becomes or remains valid. The first
        tuple on the stream makes the condition valid if the checker
        callable evaluates to `True`.

        The condition can be combined with :py:meth:`tuple_count` with
        ``exact=False`` to test a stream map or filter with random input data.

        An example of combining `tuple_count` and `tuple_check` to test a filter followed
        by a map is working correctly across a random set of values::

            def rands():
                r = random.Random()
                while True:
                    yield r.random()

            class TestFilterMap(unittest.TestCase):
                # Set up omitted

                def test_filter(self):
                    # Declare the application to be tested
                    topology = Topology()
                    r = topology.source(rands())
                    r = r.filter(lambda x : x > 0.7)
                    r = r.map(lambda x : x + 0.2)

                    # Create tester and assign conditions
                    tester = Tester(topology)
                    # Ensure at least 1000 tuples pass through the filter.
                    tester.tuple_count(r, 1000, exact=False)
                    tester.tuple_check(r, lambda x : x > 0.9)

                    # Submit the application for test
                    # If it fails an AssertionError will be raised.
                    tester.test(self.test_ctxtype, self.test_config)

        Args:
            stream(Stream): Stream to be tested.
            checker(callable): Callable that must evaluate to True for each tuple.
        """
        name = "TupleCheck" + str(len(self._conditions))
        cond = sttrt._TupleCheck(checker, name)
        return self.add_condition(stream, cond)
    def local_check(self, callable):
        """Perform local check while the application is being tested.

        A call to `callable` is made after the application under test is submitted and becomes healthy.
        The check is in the context of the Python runtime executing the unittest case,
        typically the callable is a method of the test case.

        The application remains running until all the conditions are met
        and `callable` returns. If `callable` raises an error, typically
        through an assertion method from `unittest` then the test will fail.

        Used for testing side effects of the application, typically with `STREAMING_ANALYTICS_SERVICE`
        or `DISTRIBUTED`. The callable may also use the REST api for context types that support
        it to dynamically monitor the running application.

        The callable can use `submission_result` and `streams_connection` attributes from :py:class:`Tester` instance
        to interact with the job or the running Streams instance.

        Simple example of checking the job is healthy::

            import unittest
            from streamsx.topology.topology import Topology
            from streamsx.topology.tester import Tester

            class TestLocalCheckExample(unittest.TestCase):
                def setUp(self):
                    Tester.setup_distributed(self)

                def test_job_is_healthy(self):
                    topology = Topology()
                    s = topology.source(['Hello', 'World'])

                    self.tester = Tester(topology)
                    self.tester.tuple_count(s, 2)

                    # Add the local check
                    self.tester.local_check = self.local_checks

                    # Run the test
                    self.tester.test(self.test_ctxtype, self.test_config)

                def local_checks(self):
                    job = self.tester.submission_result.job
                    self.assertEqual('healthy', job.health)

        .. warning::
            A local check must not cancel the job (application under test).

        Args:
            callable: Callable object.
        """
        self.local_check = callable
    def test(self, ctxtype, config=None, assert_on_fail=True, username=None, password=None):
        """Test the topology.

        Submits the topology for testing and verifies the test conditions are met and the job remained healthy through its execution.

        The submitted application (job) is monitored for the test conditions and
        will be canceled when all the conditions are valid or at least one failed.
        In addition if a local check was specified using :py:meth:`local_check` then
        that callable must complete before the job is cancelled.

        The test passes if all conditions became valid and the local check callable (if present) completed without
        raising an error.

        The test fails if the job is unhealthy, any condition fails or the local check callable (if present) raised an exception.

        Args:
            ctxtype(str): Context type for submission.
            config: Configuration for submission.
            assert_on_fail(bool): True to raise an assertion if the test fails, False to return the passed status.
            username(str): username for distributed tests
            password(str): password for distributed tests

        Attributes:
            submission_result: Result of the application submission from :py:func:`~streamsx.topology.context.submit`.
            streams_connection(StreamsConnection): Connection object that can be used to interact with the REST API of
                the Streaming Analytics service or instance.

        Returns:
            bool: `True` if test passed, `False` if test failed if `assert_on_fail` is `False`.
        """
        # Add the conditions into the graph as sink operators
        _logger.debug("Adding conditions to topology %s.", self.topology.name)
        for ct in self._conditions.values():
            condition = ct[1]
            stream = ct[0]
            stream.for_each(condition, name=condition.name)

        if config is None:
            config = {}

        _logger.debug("Starting test topology %s context %s.", self.topology.name, ctxtype)

        if stc.ContextTypes.STANDALONE == ctxtype:
            passed = self._standalone_test(config)
        elif stc.ContextTypes.DISTRIBUTED == ctxtype:
            passed = self._distributed_test(config, username, password)
        elif stc.ContextTypes.STREAMING_ANALYTICS_SERVICE == ctxtype or stc.ContextTypes.ANALYTICS_SERVICE == ctxtype:
            passed = self._streaming_analytics_test(ctxtype, config)
        else:
            raise NotImplementedError("Tester context type not implemented:", ctxtype)

        if 'conditions' in self.result:
            for cn, cnr in self.result['conditions'].items():
                c = self._conditions[cn][1]
                cdesc = cn
                if hasattr(c, '_desc'):
                    cdesc = c._desc
                if 'Fail' == cnr:
                    _logger.error("Condition: %s : %s", cnr, cdesc)
                elif 'NotValid' == cnr:
                    _logger.warning("Condition: %s : %s", cnr, cdesc)
                elif 'Valid' == cnr:
                    _logger.info("Condition: %s : %s", cnr, cdesc)

        if assert_on_fail:
            assert passed, "Test failed for topology: " + self.topology.name

        if passed:
            _logger.info("Test topology %s passed for context:%s", self.topology.name, ctxtype)
        else:
            _logger.error("Test topology %s failed for context:%s", self.topology.name, ctxtype)
        return passed
    def _standalone_test(self, config):
        """ Test using STANDALONE.
        Success is solely indicated by the process completing and returning zero.
        """
        sr = stc.submit(stc.ContextTypes.STANDALONE, self.topology, config)
        self.submission_result = sr
        self.result = {'passed': sr['return_code'], 'submission_result': sr}
        return sr['return_code'] == 0

    def _distributed_test(self, config, username, password):
        self.streams_connection = config.get(ConfigParams.STREAMS_CONNECTION)
        if self.streams_connection is None:
            # Supply a default StreamsConnection object with SSL verification disabled, because the default
            # streams server is not shipped with a valid SSL certificate
            self.streams_connection = StreamsConnection(username, password)
            self.streams_connection.session.verify = False
            config[ConfigParams.STREAMS_CONNECTION] = self.streams_connection
        sjr = stc.submit(stc.ContextTypes.DISTRIBUTED, self.topology, config)
        self.submission_result = sjr
        if sjr['return_code'] != 0:
            _logger.error("Failed to submit job to distributed instance.")
            return False
        return self._distributed_wait_for_result()

    def _streaming_analytics_test(self, ctxtype, config):
        sjr = stc.submit(ctxtype, self.topology, config)
        self.submission_result = sjr
        self.streams_connection = config.get(ConfigParams.STREAMS_CONNECTION)
        if self.streams_connection is None:
            vcap_services = config.get(ConfigParams.VCAP_SERVICES)
            service_name = config.get(ConfigParams.SERVICE_NAME)
            self.streams_connection = StreamingAnalyticsConnection(vcap_services, service_name)
        if sjr['return_code'] != 0:
            _logger.error("Failed to submit job to Streaming Analytics instance")
            return False
        return self._distributed_wait_for_result()

    def _distributed_wait_for_result(self):
        cc = _ConditionChecker(self, self.streams_connection, self.submission_result)
        # Wait for the job to be healthy before calling the local check.
        if cc._wait_for_healthy():
            self._start_local_check()
            self.result = cc._complete()
            if self.local_check is not None:
                self._local_thread.join()
        else:
            self.result = cc._end(False, _ConditionChecker._UNHEALTHY)
        self.result['submission_result'] = self.submission_result
        cc._canceljob(self.result)
        if self.local_check_exception is not None:
            raise self.local_check_exception
        return self.result['passed']

    def _start_local_check(self):
        self.local_check_exception = None
        if self.local_check is None:
            return
        self._local_thread = threading.Thread(target=self._call_local_check)
        self._local_thread.start()

    def _call_local_check(self):
        try:
            self.local_check_value = self.local_check()
        except Exception as e:
            self.local_check_value = None
            self.local_check_exception = e

#######################################
# Internal functions
#######################################


def _result_to_dict(passed, t):
    result = {}
    result['passed'] = passed
    result['valid'] = t[0]
    result['fail'] = t[1]
    result['progress'] = t[2]
    result['conditions'] = t[3]
    return result

class _ConditionChecker(object):
    _UNHEALTHY = (False, False, False, None)

    def __init__(self, tester, sc, sjr):
        self.tester = tester
        self._sc = sc
        self._sjr = sjr
        self._instance_id = sjr['instanceId']
        self._job_id = sjr['jobId']
        self._sequences = {}
        for cn in tester._conditions:
            self._sequences[cn] = -1
        self.delay = 0.5
        self.timeout = 10.0
        self.waits = 0
        self.additional_checks = 2
        self.job = self._find_job()

    # Wait for job to be healthy. Returns True
    # if the job became healthy, False if not.
    def _wait_for_healthy(self):
        while (self.waits * self.delay) < self.timeout:
            if self.__check_job_health():
                self.waits = 0
                return True
            time.sleep(self.delay)
            self.waits += 1
        return False

    def _complete(self):
        while (self.waits * self.delay) < self.timeout:
            check = self.__check_once()
            if check[1]:
                return self._end(False, check)
            if check[0]:
                if self.additional_checks == 0:
                    return self._end(True, check)
                self.additional_checks -= 1
                continue
            if check[2]:
                self.waits = 0
            else:
                self.waits += 1
            time.sleep(self.delay)
        return self._end(False, check)

    def _end(self, passed, check):
        result = _result_to_dict(passed, check)
        return result

    def _canceljob(self, result):
        if self.job is not None:
            self.job.cancel(force=not result['passed'])

    def __check_once(self):
        if not self.__check_job_health():
            return _ConditionChecker._UNHEALTHY
        cms = self._get_job_metrics()
        valid = True
        progress = True
        fail = False
        condition_states = {}
        for cn in self._sequences:
            condition_states[cn] = 'NotValid'
            seq_mn = sttrt.Condition._mn('seq', cn)
            # If the metrics are missing then the operator
            # is probably still starting up, cannot be valid.
            if not seq_mn in cms:
                valid = False
                continue
            seq_m = cms[seq_mn]
            if seq_m.value == self._sequences[cn]:
                progress = False
            else:
                self._sequences[cn] = seq_m.value

            fail_mn = sttrt.Condition._mn('fail', cn)
            if not fail_mn in cms:
                valid = False
                continue

            fail_m = cms[fail_mn]
            if fail_m.value != 0:
                fail = True
                condition_states[cn] = 'Fail'
                continue

            valid_mn = sttrt.Condition._mn('valid', cn)
            if not valid_mn in cms:
                valid = False
                continue

            valid_m = cms[valid_mn]
            if valid_m.value == 0:
                valid = False
            else:
                condition_states[cn] = 'Valid'

        return (valid, fail, progress, condition_states)

    def __check_job_health(self):
        self.job.refresh()
        return self.job.health == 'healthy'

    def _find_job(self):
        instance = self._sc.get_instance(id=self._instance_id)
        return instance.get_job(id=self._job_id)

    def _get_job_metrics(self):
        """Fetch all the condition metrics for a job.
        We refetch the metrics each time to ensure that we don't miss
        any being added, e.g. if an operator is slow to start.
        """
        cms = {}
        for op in self.job.get_operators():
            metrics = op.get_metrics(name=sttrt.Condition._METRIC_PREFIX + '*')
            for m in metrics:
                cms[m.name] = m
        return cms
| 39.740061 | 133 | 0.634052 | 3,201 | 25,990 | 5.036239 | 0.147454 | 0.016128 | 0.017059 | 0.005583 | 0.283791 | 0.233608 | 0.190001 | 0.168042 | 0.138205 | 0.128094 | 0 | 0.004119 | 0.29015 | 25,990 | 653 | 134 | 39.800919 | 0.869695 | 0.460908 | 0 | 0.208029 | 0 | 0 | 0.112797 | 0.008028 | 0 | 0 | 0 | 0 | 0.010949 | 1 | 0.094891 | false | 0.065693 | 0.043796 | 0 | 0.237226 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
729a80929d56b3febe54ea0a4bcd62e2fff44b08 | 6,519 | py | Python | Beheer/tests.py | RamonvdW/nhb-apps | 5a9f840bfe066cd964174515c06b806a7b170c69 | [
"BSD-3-Clause-Clear"
] | 1 | 2021-12-22T13:11:12.000Z | 2021-12-22T13:11:12.000Z | Beheer/tests.py | RamonvdW/nhb-apps | 5a9f840bfe066cd964174515c06b806a7b170c69 | [
"BSD-3-Clause-Clear"
] | 9 | 2020-10-28T07:07:05.000Z | 2021-06-28T20:05:37.000Z | Beheer/tests.py | RamonvdW/nhb-apps | 5a9f840bfe066cd964174515c06b806a7b170c69 | [
"BSD-3-Clause-Clear"
] | null | null | null |
# -*- coding: utf-8 -*-

# Copyright (c) 2020-2021 Ramon van der Winkel.
# All rights reserved.
# Licensed under BSD-3-Clause-Clear. See LICENSE file for details.

from django.conf import settings
from django.test import TestCase
from django.urls import reverse
from TestHelpers.e2ehelpers import E2EHelpers


# update with this command:
# for x in `./manage.py show_urls --settings=nhbapps.settings_dev | rev | cut -d'/' -f2- | rev | grep '/beheer/'`; do echo "'$x/',"; done | grep -vE ':object_id>/|/add/|/autocomplete/'

BEHEER_PAGINAS = (
    '/beheer/Account/account/',
    '/beheer/Account/accountemail/',
    '/beheer/BasisTypen/boogtype/',
    '/beheer/BasisTypen/indivwedstrijdklasse/',
    '/beheer/BasisTypen/kalenderwedstrijdklasse/',
    '/beheer/BasisTypen/leeftijdsklasse/',
    '/beheer/BasisTypen/teamtype/',
    '/beheer/BasisTypen/teamwedstrijdklasse/',
    '/beheer/Competitie/competitie/',
    '/beheer/Competitie/competitieklasse/',
    '/beheer/Competitie/competitiemutatie/',
    '/beheer/Competitie/deelcompetitie/',
    '/beheer/Competitie/deelcompetitieklasselimiet/',
    '/beheer/Competitie/deelcompetitieronde/',
    '/beheer/Competitie/kampioenschapschutterboog/',
    '/beheer/Competitie/regiocompetitierondeteam/',
    '/beheer/Competitie/regiocompetitieschutterboog/',
    '/beheer/Competitie/regiocompetitieteam/',
    '/beheer/Competitie/regiocompetitieteampoule/',
    '/beheer/Functie/functie/',
    '/beheer/Functie/verklaringhanterenpersoonsgegevens/',
    '/beheer/HistComp/histcompetitie/',
    '/beheer/HistComp/histcompetitieindividueel/',
    '/beheer/HistComp/histcompetitieteam/',
    '/beheer/Kalender/kalenderwedstrijd/',
    '/beheer/Kalender/kalenderwedstrijddeeluitslag/',
    '/beheer/Kalender/kalenderwedstrijdsessie/',
    '/beheer/Logboek/logboekregel/',
    '/beheer/Mailer/mailqueue/',
    '/beheer/NhbStructuur/nhbcluster/',
    '/beheer/NhbStructuur/nhbrayon/',
    '/beheer/NhbStructuur/nhbregio/',
    '/beheer/NhbStructuur/nhbvereniging/',
    '/beheer/NhbStructuur/speelsterkte/',
    '/beheer/Overig/sitefeedback/',
    '/beheer/Overig/sitetijdelijkeurl/',
    '/beheer/Records/besteindivrecords/',
    '/beheer/Records/indivrecord/',
    '/beheer/Score/score/',
    '/beheer/Score/scorehist/',
    '/beheer/Sporter/sporter/',
    '/beheer/Sporter/sporterboog/',
    '/beheer/Sporter/sportervoorkeuren/',
    '/beheer/Taken/taak/',
    '/beheer/Wedstrijden/competitiewedstrijd/',
    '/beheer/Wedstrijden/competitiewedstrijdenplan/',
    '/beheer/Wedstrijden/competitiewedstrijduitslag/',
    '/beheer/Wedstrijden/wedstrijdlocatie/',
    '/beheer/auth/group/',
    '/beheer/jsi18n/',
    '/beheer/login/',
    '/beheer/logout/',
    '/beheer/password_change/',
)

class TestBeheer(E2EHelpers, TestCase):
    """ unit tests for the Beheer (admin) application """

    def setUp(self):
        """ initialization of the test case """
        self.account_admin = self.e2e_create_account_admin()

    def test_login(self):
        # check that the admin login has been replaced by a redirect to our own login
        url = reverse('admin:login')        # internal url
        self.assertEqual(url, '/beheer/login/')

        self.e2e_logout()
        with self.assert_max_queries(20):
            resp = self.client.get('/beheer/login/', follow=True)
        self.assertEqual(resp.redirect_chain[-1], ('/account/login/', 302))

        with self.assert_max_queries(20):
            resp = self.client.get('/beheer/login/?next=/records/', follow=True)
        self.assertEqual(resp.redirect_chain[-1], ('/account/login/?next=/records/', 302))

        self.e2e_assert_other_http_commands_not_supported('/beheer/login/')

    def test_index(self):
        # before 2FA verification has been done
        self.e2e_login(self.account_admin)

        # redirect to the switch-role page
        with self.assert_max_queries(20):
            resp = self.client.get('/beheer/', follow=True)
        self.assertEqual(resp.redirect_chain[-1], ('/functie/otp-controle/?next=/beheer/', 302))

        self.e2e_assert_other_http_commands_not_supported('/beheer/')

        # after 2FA verification
        self.e2e_login_and_pass_otp(self.account_admin)

        with self.assert_max_queries(20):
            resp = self.client.get('/beheer/', follow=True)
        self.assertTrue(len(resp.redirect_chain) == 0)
        self.assertEqual(resp.status_code, 200)     # 200 = OK
        self.assertContains(resp, '<title>Websitebeheer | Django-websitebeheer</title>')

        # needlessly going through the admin login to a post-authentication page
        with self.assert_max_queries(20):
            resp = self.client.get('/beheer/login/?next=/records/', follow=True)
        self.assertEqual(resp.redirect_chain[-1], ('/records/', 302))

        # needlessly going through the admin login without a post-authentication page
        with self.assert_max_queries(20):
            resp = self.client.get('/beheer/login/', follow=True)
        self.assertEqual(resp.redirect_chain[-1], ('/plein/', 302))

    def test_logout(self):
        # check that the admin logout has been replaced by a redirect to our own logout
        url = reverse('admin:logout')       # internal url
        self.assertEqual(url, '/beheer/logout/')

        self.e2e_login_and_pass_otp(self.account_admin)
        with self.assert_max_queries(20):
            resp = self.client.get('/beheer/logout/', follow=True)
        self.assertEqual(resp.redirect_chain[-1], ('/account/logout/', 302))

    def test_pw_change(self):
        url = reverse('admin:password_change')
        self.assertEqual(url, '/beheer/password_change/')

        self.e2e_login_and_pass_otp(self.account_admin)
        with self.assert_max_queries(20):
            resp = self.client.get(url, follow=True)
        self.assertEqual(resp.status_code, 200)     # 200 = OK
        self.assertContains(resp, 'Nieuw wachtwoord')
        self.assertEqual(resp.redirect_chain[-1], ('/account/nieuw-wachtwoord/', 302))

    def test_queries(self):
        # check that all admin pages work
        settings.DEBUG = True
        self.e2e_login_and_pass_otp(self.account_admin)

        for url in BEHEER_PAGINAS:
            with self.assert_max_queries(20):
                self.client.get(url)

            with self.assert_max_queries(20):
                self.client.get(url + 'add/')

            with self.assert_max_queries(20):
                self.client.get(url + '1/change/')
        # for

        settings.DEBUG = False

# end of file
| 39.509091 | 185 | 0.670655 | 710 | 6,519 | 6.038028 | 0.302817 | 0.041987 | 0.035923 | 0.04362 | 0.36739 | 0.365057 | 0.349195 | 0.339865 | 0.3331 | 0.312573 | 0 | 0.017219 | 0.189293 | 6,519 | 164 | 186 | 39.75 | 0.793945 | 0.140206 | 0 | 0.194915 | 0 | 0 | 0.397273 | 0.337341 | 0 | 0 | 0 | 0 | 0.237288 | 1 | 0.050847 | false | 0.059322 | 0.033898 | 0 | 0.09322 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
72a50676744b1429bd199408d3b9fb6111481c1b | 322 | py | Python | desktop/core/ext-py/PyYAML-3.12/tests/lib3/test_all.py | kokosing/hue | 2307f5379a35aae9be871e836432e6f45138b3d9 | [
"Apache-2.0"
] | 5,079 | 2015-01-01T03:39:46.000Z | 2022-03-31T07:38:22.000Z | desktop/core/ext-py/PyYAML-3.12/tests/lib3/test_all.py | zks888/hue | 93a8c370713e70b216c428caa2f75185ef809deb | [
"Apache-2.0"
] | 1,623 | 2015-01-01T08:06:24.000Z | 2022-03-30T19:48:52.000Z | desktop/core/ext-py/PyYAML-3.12/tests/lib3/test_all.py | zks888/hue | 93a8c370713e70b216c428caa2f75185ef809deb | [
"Apache-2.0"
] | 2,033 | 2015-01-04T07:18:02.000Z | 2022-03-28T19:55:47.000Z |
import sys, yaml, test_appliance


def main(args=None):
    collections = []
    import test_yaml
    collections.append(test_yaml)
    if yaml.__with_libyaml__:
        import test_yaml_ext
        collections.append(test_yaml_ext)
    return test_appliance.run(collections, args)


if __name__ == '__main__':
    main()
| 20.125 | 48 | 0.698758 | 40 | 322 | 5.1 | 0.45 | 0.156863 | 0.137255 | 0.245098 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.214286 | 322 | 15 | 49 | 21.466667 | 0.806324 | 0 | 0 | 0 | 0 | 0 | 0.025 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.090909 | false | 0 | 0.272727 | 0 | 0.454545 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
72b29fc480b17250901de0de9a0fdd2643e31525 | 369 | py | Python | auth0_client/menu/datafiles/scripts/get_active_user_count.py | rubelw/auth0_client | 51e68239babcf7c40e40491d1aaa3f8547a67f63 | [
"MIT"
] | 2 | 2020-10-08T21:42:56.000Z | 2021-03-21T08:17:52.000Z | auth0_client/menu/datafiles/scripts/get_active_user_count.py | rubelw/auth0_client | 51e68239babcf7c40e40491d1aaa3f8547a67f63 | [
"MIT"
] | null | null | null | auth0_client/menu/datafiles/scripts/get_active_user_count.py | rubelw/auth0_client | 51e68239babcf7c40e40491d1aaa3f8547a67f63 | [
"MIT"
] | null | null | null |
#!/usr/bin/env python

import sys  # sys.exit() is used in the except clause below
import json

from auth0_client.Auth0Client import Auth0Client
from auth0_client.menu.menu_helper.common import *
from auth0_client.menu.menu_helper.pretty import *

try:
    users = {}
    client = Auth0Client(auth_config())
    results = client.active_users()
    print(pretty(results))
except (KeyboardInterrupt, SystemExit):
    sys.exit()
| 19.421053 | 50 | 0.742547 | 46 | 369 | 5.804348 | 0.565217 | 0.101124 | 0.168539 | 0.142322 | 0.217228 | 0.217228 | 0 | 0 | 0 | 0 | 0 | 0.019231 | 0.154472 | 369 | 18 | 51 | 20.5 | 0.836538 | 0.054201 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.363636 | 0 | 0.363636 | 0.090909 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 |
72bd4a8996d5c4753f1f31aee9a880c97885b93a | 254 | py | Python | examples/single_message.py | Inrixia/pyais | b50fd4d75c687d71b3c70ee939ac9112cfec991e | [
"MIT"
] | 51 | 2019-10-07T11:26:56.000Z | 2022-03-16T10:45:15.000Z | examples/single_message.py | KingKongOne/pyais | ddee5cc4eb8f01f494c82f7b14bdd55aa393af47 | [
"MIT"
] | 57 | 2019-10-14T07:50:00.000Z | 2022-03-28T06:52:27.000Z | examples/single_message.py | KingKongOne/pyais | ddee5cc4eb8f01f494c82f7b14bdd55aa393af47 | [
"MIT"
] | 31 | 2019-10-13T17:17:56.000Z | 2022-03-26T16:46:54.000Z |
from pyais.messages import NMEAMessage
message = NMEAMessage(b"!AIVDM,1,1,,B,15M67FC000G?ufbE`FepT@3n00Sa,0*5C")
print(message.decode())
# or
message = NMEAMessage.from_string("!AIVDM,1,1,,B,15M67FC000G?ufbE`FepT@3n00Sa,0*5C")
print(message.decode())
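
# Hedged note: decode() returns a parsed message object; for this sample (an AIS
# type 1 position report) it carries fields such as the MMSI and position. The
# exact attribute names depend on the installed pyais version.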
| 25.4 | 84 | 0.755906 | 39 | 254 | 4.897436 | 0.487179 | 0.188482 | 0.073298 | 0.08377 | 0.565445 | 0.565445 | 0.565445 | 0.565445 | 0.565445 | 0.565445 | 0 | 0.118143 | 0.066929 | 254 | 9 | 85 | 28.222222 | 0.687764 | 0.007874 | 0 | 0.4 | 0 | 0 | 0.376 | 0.376 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.2 | 0 | 0.2 | 0.4 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
72c1a420d34dd573dce6d90546ddf3cb21473656 | 2,660 | py | Python | tests/bugs/core_4318_test.py | FirebirdSQL/firebird-qa | 96af2def7f905a06f178e2a80a2c8be4a4b44782 | [
"MIT"
] | 1 | 2022-02-05T11:37:13.000Z | 2022-02-05T11:37:13.000Z | tests/bugs/core_4318_test.py | FirebirdSQL/firebird-qa | 96af2def7f905a06f178e2a80a2c8be4a4b44782 | [
"MIT"
] | 1 | 2021-09-03T11:47:00.000Z | 2021-09-03T12:42:10.000Z | tests/bugs/core_4318_test.py | FirebirdSQL/firebird-qa | 96af2def7f905a06f178e2a80a2c8be4a4b44782 | [
"MIT"
] | 1 | 2021-06-30T14:14:16.000Z | 2021-06-30T14:14:16.000Z |
#coding:utf-8
#
# id:           bugs.core_4318
# title:        Regression: Predicates involving PSQL variables/parameters are not pushed inside the aggregation
# description:
# tracker_id:   CORE-4318
# min_versions: ['3.0']
# versions:     3.0
# qmid:         None

import pytest
from firebird.qa import db_factory, isql_act, Action

# version: 3.0
# resources: None

substitutions_1 = []

init_script_1 = """
    recreate table t2 (
        id integer not null,
        t1_id integer
    );
    commit;

    recreate table t1 (
        id integer not null
    );
    commit;

    set term ^;
    execute block
    as
        declare variable i integer = 0;
    begin
        while (i < 1000) do begin
            i = i + 1;
            insert into t2(id, t1_id) values(:i, mod(:i, 10));
            merge into t1 using (
                select mod(:i, 10) as f from rdb$database
            ) src on t1.id = src.f
            when not matched then
                insert (id) values(src.f);
        end -- while (i < 1000) do begin
    end^
    set term ;^
    commit;

    alter table t1 add constraint pk_t1 primary key (id);
    alter table t2 add constraint pk_t2 primary key (id);
    alter table t2 add constraint fk_t2_ref_t1 foreign key (t1_id) references t1(id);
    commit;
"""

db_1 = db_factory(page_size=4096, sql_dialect=3, init=init_script_1)

test_script_1 = """
    set explain on;
    set planonly;
    set term ^;
    execute block
    returns (
        s integer
    )
    as
        declare variable v integer = 1;
    begin
        with t as (
            select t1_id as t1_id, sum(id) as s
            from t2
            group by 1
        )
        select s
        from t
        where t1_id = :v
        into :s;
        suspend;
    end
    ^
    set term ;^

    -- In 3.0.0.30837 plan was:
    -- Select Expression
    --     -> Singularity Check
    --         -> Filter
    --             -> Aggregate
    --                 -> Table "T T2" Access By ID
    --                     -> Index "FK_T2_REF_T1" Scan
    -- (i.e. there was NO "Filter" between "Aggregate" and "Table "T T2" Access By ID")
"""

act_1 = isql_act('db_1', test_script_1, substitutions=substitutions_1)

expected_stdout_1 = """
    Select Expression
        -> Singularity Check
            -> Filter
                -> Aggregate
                    -> Filter
                        -> Table "T2" as "T T2" Access By ID
                            -> Index "FK_T2_REF_T1" Range Scan (full match)
"""


@pytest.mark.version('>=3.0')
def test_1(act_1: Action):
    act_1.expected_stdout = expected_stdout_1
    act_1.execute()
    assert act_1.clean_stdout == act_1.clean_expected_stdout
| 22.931034 | 112 | 0.557895 | 357 | 2,660 | 4.008403 | 0.37535 | 0.025157 | 0.014675 | 0.018868 | 0.194969 | 0.171209 | 0.089448 | 0.089448 | 0.037736 | 0.037736 | 0 | 0.052359 | 0.346617 | 2,660 | 115 | 113 | 23.130435 | 0.771001 | 0.104887 | 0 | 0.25 | 0 | 0.011905 | 0.790629 | 0 | 0 | 0 | 0 | 0 | 0.011905 | 1 | 0.011905 | false | 0 | 0.02381 | 0 | 0.035714 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
72c23dc2d109c0b3025a3d48b3833415e7515ab1 | 1,686 | py | Python | GHOST.py | RadicalAjay/Ghost_data | b151b0b92d27c3b8454e28d4f037eafb587d7b23 | [
"MIT"
] | 1 | 2020-06-13T11:29:17.000Z | 2020-06-13T11:29:17.000Z | GHOST.py | RadicalAjay/Ghost_data | b151b0b92d27c3b8454e28d4f037eafb587d7b23 | [
"MIT"
] | null | null | null | GHOST.py | RadicalAjay/Ghost_data | b151b0b92d27c3b8454e28d4f037eafb587d7b23 | [
"MIT"
] | null | null | null |
#! /usr/bin/python3
# Description: Data_Ghost, concealing data into spaces and tabs making it imperceptible to human eyes.
# Author: Ajay Dyavathi
# Github: Radical Ajay


class Ghost():
    def __init__(self, file_name, output_format='txt'):
        ''' Converts ascii text to spaces and tabs '''
        self.file_name = file_name
        self.output_format = output_format

    def ascii2bin(self, asc):
        ''' Converting ascii to binary '''
        return ''.join('{:08b}'.format(ord(i)) for i in asc)

    def bin2ascii(self, bid):
        ''' Converting binary to ascii '''
        return ''.join(chr(int(bid[i:i + 8], 2)) for i in range(0, len(bid), 8))

    def ghost(self, filename):
        ''' Ghosting data: converting it to spaces and tabs '''
        with open(filename, 'w') as out_f:
            with open(self.file_name, 'r') as in_f:
                for in_data in in_f.readlines():
                    bin_data = self.ascii2bin(in_data)
                    out_data = bin_data.replace('1', '\t')
                    out_data = out_data.replace('0', ' ')
                    out_f.write(out_data)

    def unghost(self, in_filename, out_filename):
        ''' Unghosting data: converting back from spaces and tabs to human-readable text '''
        with open(out_filename, 'w') as out_f:
            with open(in_filename, 'r') as in_f:
                for line in in_f.readlines():
                    line = line.replace('\t', '1')
                    line = line.replace(' ', '0')
                    out_f.write(self.bin2ascii(line))
# USAGE:
# ghoster = Ghost('data.txt')
# ghoster.ghost('ghosted.txt')
# ghoster.unghost('ghosted.txt', 'unghosted.txt')
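
# A runnable version of the usage above (the file names are illustrative;
# data.txt is assumed to exist):
if __name__ == '__main__':
    ghoster = Ghost('data.txt')
    ghoster.ghost('ghosted.txt')       # encode data.txt into tabs/spaces
    ghoster.unghost('ghosted.txt', 'unghosted.txt')  # recover the original text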
| 33.72 | 102 | 0.577699 | 224 | 1,686 | 4.205357 | 0.352679 | 0.038217 | 0.055202 | 0.031847 | 0.104034 | 0.048832 | 0.048832 | 0 | 0 | 0 | 0 | 0.012542 | 0.290629 | 1,686 | 49 | 103 | 34.408163 | 0.775084 | 0.293594 | 0 | 0 | 0 | 0 | 0.019948 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.217391 | false | 0 | 0 | 0 | 0.347826 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
72c98748f08c6f90f0d9a63c5a27d1f4d96b3af8 | 1,685 | py | Python | tests/_site/myauth/models.py | ahmetdaglarbas/e-commerce | ff190244ccd422b4e08d7672f50709edcbb6ebba | [
"BSD-3-Clause"
] | 2 | 2015-12-11T00:19:15.000Z | 2021-11-14T19:44:42.000Z | tests/_site/myauth/models.py | ahmetdaglarbas/e-commerce | ff190244ccd422b4e08d7672f50709edcbb6ebba | [
"BSD-3-Clause"
] | null | null | null | tests/_site/myauth/models.py | ahmetdaglarbas/e-commerce | ff190244ccd422b4e08d7672f50709edcbb6ebba | [
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
# Code will only work with Django >= 1.5. See tests/config.py
import re

from django.utils.translation import ugettext_lazy as _
from django.db import models
from django.core import validators
from django.contrib.auth.models import BaseUserManager

from oscar.apps.customer.abstract_models import AbstractUser


class CustomUserManager(BaseUserManager):

    def create_user(self, username, email, password):
        """
        Creates and saves a User with the given email and password.
        """
        if not email:
            raise ValueError('Users must have an email address')

        user = self.model(
            email=CustomUserManager.normalize_email(email),
            username=username,
            is_active=True,
        )
        user.set_password(password)
        user.save(using=self._db)
        return user

    def create_superuser(self, username, email, password):
        u = self.create_user(username, email, password=password)
        u.is_admin = True
        u.is_staff = True
        u.save(using=self._db)
        return u


class User(AbstractUser):
    """
    Custom user based on Oscar's AbstractUser
    """
    username = models.CharField(
        _('username'), max_length=30, unique=True,
        help_text=_('Required. 30 characters or fewer. Letters, numbers and '
                    '@/./+/-/_ characters'),
        validators=[
            validators.RegexValidator(re.compile(r'^[\w.@+-]+$'), _('Enter a valid username.'), 'invalid')
        ])
    extra_field = models.CharField(
        _('Nobody needs me'), max_length=5, blank=True)

    objects = CustomUserManager()

    class Meta:
        app_label = 'myauth'
| 29.051724 | 105 | 0.636795 | 196 | 1,685 | 5.357143 | 0.540816 | 0.038095 | 0.06 | 0.047619 | 0.04 | 0 | 0 | 0 | 0 | 0 | 0 | 0.00638 | 0.255786 | 1,685 | 57 | 106 | 29.561404 | 0.830941 | 0.109199 | 0 | 0 | 0 | 0 | 0.12115 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.055556 | false | 0.111111 | 0.166667 | 0 | 0.444444 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
72cf53ccf7f23461f4563c9f0a973dec0115aebc | 2,235 | py | Python | libhustpass/login.py | naivekun/libhustpass | d8d487e3af996898e4a7b21b924fbf0fc4fbe419 | [
"WTFPL"
] | 26 | 2020-02-18T14:30:30.000Z | 2021-11-30T02:50:37.000Z | libhustpass/login.py | ingdex/libhustpass | d8d487e3af996898e4a7b21b924fbf0fc4fbe419 | [
"WTFPL"
] | 3 | 2020-05-01T20:26:42.000Z | 2020-12-30T07:03:10.000Z | libhustpass/login.py | ingdex/libhustpass | d8d487e3af996898e4a7b21b924fbf0fc4fbe419 | [
"WTFPL"
] | 6 | 2020-02-18T14:33:39.000Z | 2022-01-28T11:09:25.000Z |
import libhustpass.sbDes as sbDes
import libhustpass.captcha as captcha
import requests
import re
import random


def toWideChar(data):
    """Expand a string to UTF-16BE-style byte pairs, zero-padded to 8-byte blocks."""
    data_bytes = bytes(data, encoding="utf-8")
    ret = []
    for i in data_bytes:
        ret.extend([0, i])
    while len(ret) % 8 != 0:
        ret.append(0)
    return ret


def Enc(data, first_key, second_key, third_key):
    """Encrypt data by applying DES in ECB mode block by block with each of the
    three keys in turn, returning the result as an upper-case hex string."""
    data_bytes = toWideChar(data)
    key1_bytes = toWideChar(first_key)
    key2_bytes = toWideChar(second_key)
    key3_bytes = toWideChar(third_key)
    ret_ = []
    i = 0
    while i < len(data_bytes):
        tmp = data_bytes[i:i + 8]
        x = 0
        y = 0
        z = 0
        while x < len(key1_bytes):
            enc1_ = sbDes.des(key1_bytes[x:x + 8], sbDes.ECB)
            tmp = list(enc1_.encrypt(tmp))
            x += 8
        while y < len(key2_bytes):
            enc2_ = sbDes.des(key2_bytes[y:y + 8], sbDes.ECB)
            tmp = list(enc2_.encrypt(tmp))
            y += 8
        while z < len(key3_bytes):
            enc3_ = sbDes.des(key3_bytes[z:z + 8], sbDes.ECB)
            tmp = list(enc3_.encrypt(tmp))
            z += 8
        ret_.extend(tmp)
        i += 8
    ret = ""
    for i in ret_:
        ret += "%02X" % i
    return ret


def login(username, password, url):
    r = requests.session()
    login_html = r.get(url)
    # Fetch the captcha image; the random query parameter defeats caching.
    captcha_content = r.get("https://pass.hust.edu.cn/cas/code?" + str(random.random()), stream=True)
    captcha_content.raw.decode_content = True
    # Scrape the one-time nonce and the form action from the login page.
    nonce = re.search(
        '<input type="hidden" id="lt" name="lt" value="(.*)" />', login_html.text
    ).group(1)
    action = re.search(
        '<form id="loginForm" action="(.*)" method="post">', login_html.text
    ).group(1)
    post_params = {
        "code": captcha.deCaptcha(captcha_content.raw),
        "rsa": Enc(username + password + nonce, "1", "2", "3"),
        "ul": len(username),
        "pl": len(password),
        "lt": nonce,
        "execution": "e1s1",
        "_eventId": "submit",
    }
    # On success the CAS server answers with a redirect whose Location carries the ticket.
    redirect_html = r.post(
        "https://pass.hust.edu.cn" + action, data=post_params, allow_redirects=False
    )
    try:
        return redirect_html.headers["Location"]
    except:
        raise Exception("login failed")
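
# A minimal usage sketch (the credentials and service URL are illustrative):
#
#     location = login('U2020xxxxx', 'password',
#                      'https://pass.hust.edu.cn/cas/login?service=...')
#     print(location)  # redirect URL containing the CAS ticket on success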
| 28.653846 | 99 | 0.561521 | 295 | 2,235 | 4.111864 | 0.355932 | 0.037098 | 0.022259 | 0.029678 | 0.117065 | 0 | 0 | 0 | 0 | 0 | 0 | 0.025933 | 0.292617 | 2,235 | 77 | 100 | 29.025974 | 0.741303 | 0 | 0 | 0.057143 | 0 | 0 | 0.104251 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.042857 | false | 0.1 | 0.071429 | 0 | 0.157143 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
72e1010bc4f2ebd173a6efd489e56ee4ea6793c8 | 1,228 | py | Python | problems/p009.py | davisschenk/project-euler-python | 1375412e6c8199ab02250bd67223c758d4df1725 | [
"MIT"
] | null | null | null | problems/p009.py | davisschenk/project-euler-python | 1375412e6c8199ab02250bd67223c758d4df1725 | [
"MIT"
] | null | null | null | problems/p009.py | davisschenk/project-euler-python | 1375412e6c8199ab02250bd67223c758d4df1725 | [
"MIT"
] | 2 | 2020-10-08T23:35:03.000Z | 2020-10-09T00:28:36.000Z | from math import ceil, sqrt
from problem import Problem
from utils.math import gcd
class PythagoreanTriplet(Problem, name="Special Pythagorean triplet", expected=31875000):
@Problem.solution()
def brute_force(self, ts=1000):
for a in range(3, round((ts - 3) / 2)):
for b in range(a + 1, round((ts - 1 - a) / 2)):
c = ts - a - b
if c * c == a * a + b * b:
return a * b * c
@Problem.solution()
def parametrisation(self, ts=1000):
s2 = ts / 2
mlimit = ceil(sqrt(s2)) - 1
for m in range(2, mlimit):
if s2 % m == 0:
sm = s2 / m
while sm % 2 == 0:
sm /= 2
if m % 2 == 1:
k = m + 2
else:
k = m + 1
while k < 2 * m and k <= sm:
if sm % k == 0 and gcd(k, m) == 1:
d = s2 / (k * m)
n = k - m
a = d * (m * m - n * n)
b = 2 * d * m * n
c = d * (m * m + n * n)
return a * b * c
k += 2
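    # Worked example of the parametrisation above for ts=1000 (s2=500): the
    # first hit is m=4 (500 % 4 == 0, sm=125); m is even, so k starts at 5;
    # 125 % 5 == 0 and gcd(5, 4) == 1, giving d = 500 / (5 * 4) = 25 and
    # n = k - m = 1. Then a = d*(m*m - n*n) = 375, b = 2*d*m*n = 200,
    # c = d*(m*m + n*n) = 425, and a*b*c = 31875000, the expected answer.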
| 29.238095 | 89 | 0.35342 | 157 | 1,228 | 2.757962 | 0.299363 | 0.023095 | 0.083141 | 0.04157 | 0.023095 | 0 | 0 | 0 | 0 | 0 | 0 | 0.075044 | 0.533388 | 1,228 | 41 | 90 | 29.95122 | 0.680628 | 0 | 0 | 0.121212 | 0 | 0 | 0.021987 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.060606 | false | 0 | 0.090909 | 0 | 0.242424 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
72ea2c27713d0d21a3c0d65d78528e65b46ecc6c | 61,742 | py | Python | baseCli.py | eym55/mango-client-python | 2cb1ce77d785343c24ecba913eaa9693c3db1181 | [
"MIT"
] | null | null | null | baseCli.py | eym55/mango-client-python | 2cb1ce77d785343c24ecba913eaa9693c3db1181 | [
"MIT"
] | null | null | null | baseCli.py | eym55/mango-client-python | 2cb1ce77d785343c24ecba913eaa9693c3db1181 | [
"MIT"
] | null | null | null | import abc
import datetime
import enum
import logging
import time
import typing
import asyncio
import Layout as layouts
from decimal import Decimal
from pyserum.market import Market
from pyserum.open_orders_account import OpenOrdersAccount
from solana.account import Account
from solana.publickey import PublicKey
from solana.rpc.commitment import Single
from solana.rpc.types import MemcmpOpts, TokenAccountOpts, RPCMethod, RPCResponse
from spl.token.client import Token as SplToken
from spl.token.constants import TOKEN_PROGRAM_ID
from Constants import NUM_MARKETS, NUM_TOKENS, SOL_DECIMALS, SYSTEM_PROGRAM_ADDRESS, MAX_RATE,OPTIMAL_RATE,OPTIMAL_UTIL
from Context import Context
from Decoder import decode_binary, encode_binary, encode_key
class Version(enum.Enum):
UNSPECIFIED = 0
V1 = 1
V2 = 2
V3 = 3
V4 = 4
V5 = 5
class InstructionType(enum.IntEnum):
InitMangoGroup = 0
InitMarginAccount = 1
Deposit = 2
Withdraw = 3
Borrow = 4
SettleBorrow = 5
Liquidate = 6
DepositSrm = 7
WithdrawSrm = 8
PlaceOrder = 9
SettleFunds = 10
CancelOrder = 11
CancelOrderByClientId = 12
ChangeBorrowLimit = 13
PlaceAndSettle = 14
ForceCancelOrders = 15
PartialLiquidate = 16
def __str__(self):
return self.name
class AccountInfo:
def __init__(self, address: PublicKey, executable: bool, lamports: Decimal, owner: PublicKey, rent_epoch: Decimal, data: bytes):
self.logger: logging.Logger = logging.getLogger(self.__class__.__name__)
self.address: PublicKey = address
self.executable: bool = executable
self.lamports: Decimal = lamports
self.owner: PublicKey = owner
self.rent_epoch: Decimal = rent_epoch
self.data: bytes = data
def encoded_data(self) -> typing.List:
return encode_binary(self.data)
def __str__(self) -> str:
return f"""« AccountInfo [{self.address}]:
Owner: {self.owner}
Executable: {self.executable}
Lamports: {self.lamports}
Rent Epoch: {self.rent_epoch}
»"""
def __repr__(self) -> str:
return f"{self}"
@staticmethod
    def load(context: Context, address: PublicKey) -> typing.Optional["AccountInfo"]:
response: RPCResponse = context.client.get_account_info(address)
result = context.unwrap_or_raise_exception(response)
if result["value"] is None:
return None
return AccountInfo._from_response_values(result["value"], address)
@staticmethod
    def load_multiple(context: Context, addresses: typing.List[PublicKey]) -> typing.List["AccountInfo"]:
        address_strings = list(map(PublicKey.__str__, addresses))
        # getMultipleAccounts batches all addresses into a single RPC request.
        response = context.client._provider.make_request(RPCMethod("getMultipleAccounts"), address_strings)
response_value_list = zip(response["result"]["value"], addresses)
return list(map(lambda pair: AccountInfo._from_response_values(pair[0], pair[1]), response_value_list))
@staticmethod
def _from_response_values(response_values: typing.Dict[str, typing.Any], address: PublicKey) -> "AccountInfo":
executable = bool(response_values["executable"])
lamports = Decimal(response_values["lamports"])
owner = PublicKey(response_values["owner"])
rent_epoch = Decimal(response_values["rentEpoch"])
data = decode_binary(response_values["data"])
return AccountInfo(address, executable, lamports, owner, rent_epoch, data)
@staticmethod
def from_response(response: RPCResponse, address: PublicKey) -> "AccountInfo":
return AccountInfo._from_response_values(response["result"]["value"], address)
class AddressableAccount(metaclass=abc.ABCMeta):
def __init__(self, account_info: AccountInfo):
self.logger: logging.Logger = logging.getLogger(self.__class__.__name__)
self.account_info = account_info
@property
def address(self) -> PublicKey:
return self.account_info.address
def __repr__(self) -> str:
return f"{self}"
class SerumAccountFlags:
def __init__(self, version: Version, initialized: bool, market: bool, open_orders: bool,
request_queue: bool, event_queue: bool, bids: bool, asks: bool, disabled: bool):
self.logger: logging.Logger = logging.getLogger(self.__class__.__name__)
self.version: Version = version
self.initialized = initialized
self.market = market
self.open_orders = open_orders
self.request_queue = request_queue
self.event_queue = event_queue
self.bids = bids
self.asks = asks
self.disabled = disabled
@staticmethod
def from_layout(layout: layouts.SERUM_ACCOUNT_FLAGS) -> "SerumAccountFlags":
return SerumAccountFlags(Version.UNSPECIFIED, layout.initialized, layout.market,
layout.open_orders, layout.request_queue, layout.event_queue,
layout.bids, layout.asks, layout.disabled)
def __str__(self) -> str:
flags: typing.List[typing.Optional[str]] = []
flags += ["initialized" if self.initialized else None]
flags += ["market" if self.market else None]
flags += ["open_orders" if self.open_orders else None]
flags += ["request_queue" if self.request_queue else None]
flags += ["event_queue" if self.event_queue else None]
flags += ["bids" if self.bids else None]
flags += ["asks" if self.asks else None]
flags += ["disabled" if self.disabled else None]
flag_text = " | ".join(flag for flag in flags if flag is not None) or "None"
return f"« SerumAccountFlags: {flag_text} »"
def __repr__(self) -> str:
return f"{self}"
class MangoAccountFlags:
def __init__(self, version: Version, initialized: bool, group: bool, margin_account: bool, srm_account: bool):
self.logger: logging.Logger = logging.getLogger(self.__class__.__name__)
self.version: Version = version
self.initialized = initialized
self.group = group
self.margin_account = margin_account
self.srm_account = srm_account
@staticmethod
def from_layout(layout: layouts.MANGO_ACCOUNT_FLAGS) -> "MangoAccountFlags":
return MangoAccountFlags(Version.UNSPECIFIED, layout.initialized, layout.group, layout.margin_account,
layout.srm_account)
def __str__(self) -> str:
flags: typing.List[typing.Optional[str]] = []
flags += ["initialized" if self.initialized else None]
flags += ["group" if self.group else None]
flags += ["margin_account" if self.margin_account else None]
flags += ["srm_account" if self.srm_account else None]
flag_text = " | ".join(flag for flag in flags if flag is not None) or "None"
return f"« MangoAccountFlags: {flag_text} »"
def __repr__(self) -> str:
return f"{self}"
class Index:
def __init__(self, version: Version, last_update: datetime.datetime, borrow: Decimal, deposit: Decimal):
self.logger: logging.Logger = logging.getLogger(self.__class__.__name__)
self.version: Version = version
self.last_update: datetime.datetime = last_update
self.borrow: Decimal = borrow
self.deposit: Decimal = deposit
@staticmethod
def from_layout(layout: layouts.INDEX, decimals: Decimal) -> "Index":
borrow = layout.borrow / Decimal(10 ** decimals)
deposit = layout.deposit / Decimal(10 ** decimals)
return Index(Version.UNSPECIFIED, layout.last_update, borrow, deposit)
def __str__(self) -> str:
return f"« Index: Borrow: {self.borrow:,.8f}, Deposit: {self.deposit:,.8f} [last update: {self.last_update}] »"
def __repr__(self) -> str:
return f"{self}"
class AggregatorConfig:
def __init__(self, version: Version, description: str, decimals: Decimal, restart_delay: Decimal,
max_submissions: Decimal, min_submissions: Decimal, reward_amount: Decimal,
reward_token_account: PublicKey):
self.logger: logging.Logger = logging.getLogger(self.__class__.__name__)
self.version: Version = version
self.description: str = description
self.decimals: Decimal = decimals
self.restart_delay: Decimal = restart_delay
self.max_submissions: Decimal = max_submissions
self.min_submissions: Decimal = min_submissions
self.reward_amount: Decimal = reward_amount
self.reward_token_account: PublicKey = reward_token_account
@staticmethod
def from_layout(layout: layouts.AGGREGATOR_CONFIG) -> "AggregatorConfig":
return AggregatorConfig(Version.UNSPECIFIED, layout.description, layout.decimals,
layout.restart_delay, layout.max_submissions, layout.min_submissions,
layout.reward_amount, layout.reward_token_account)
def __str__(self) -> str:
return f"« AggregatorConfig: '{self.description}', Decimals: {self.decimals} [restart delay: {self.restart_delay}], Max: {self.max_submissions}, Min: {self.min_submissions}, Reward: {self.reward_amount}, Reward Account: {self.reward_token_account} »"
def __repr__(self) -> str:
return f"{self}"
class Round:
def __init__(self, version: Version, id: Decimal, created_at: datetime.datetime, updated_at: datetime.datetime):
self.logger: logging.Logger = logging.getLogger(self.__class__.__name__)
self.version: Version = version
self.id: Decimal = id
self.created_at: datetime.datetime = created_at
self.updated_at: datetime.datetime = updated_at
@staticmethod
def from_layout(layout: layouts.ROUND) -> "Round":
return Round(Version.UNSPECIFIED, layout.id, layout.created_at, layout.updated_at)
def __str__(self) -> str:
return f"« Round[{self.id}], Created: {self.updated_at}, Updated: {self.updated_at} »"
def __repr__(self) -> str:
return f"{self}"
class Answer:
def __init__(self, version: Version, round_id: Decimal, median: Decimal, created_at: datetime.datetime, updated_at: datetime.datetime):
self.logger: logging.Logger = logging.getLogger(self.__class__.__name__)
self.version: Version = version
self.round_id: Decimal = round_id
self.median: Decimal = median
self.created_at: datetime.datetime = created_at
self.updated_at: datetime.datetime = updated_at
@staticmethod
def from_layout(layout: layouts.ANSWER) -> "Answer":
return Answer(Version.UNSPECIFIED, layout.round_id, layout.median, layout.created_at, layout.updated_at)
def __str__(self) -> str:
return f"« Answer: Round[{self.round_id}], Median: {self.median:,.8f}, Created: {self.updated_at}, Updated: {self.updated_at} »"
def __repr__(self) -> str:
return f"{self}"
class Aggregator(AddressableAccount):
def __init__(self, account_info: AccountInfo, version: Version, config: AggregatorConfig,
initialized: bool, name: str, owner: PublicKey, round_: Round,
round_submissions: PublicKey, answer: Answer, answer_submissions: PublicKey):
super().__init__(account_info)
self.version: Version = version
self.config: AggregatorConfig = config
self.initialized: bool = initialized
self.name: str = name
self.owner: PublicKey = owner
self.round: Round = round_
self.round_submissions: PublicKey = round_submissions
self.answer: Answer = answer
self.answer_submissions: PublicKey = answer_submissions
@property
def price(self) -> Decimal:
return self.answer.median / (10 ** self.config.decimals)
@staticmethod
def from_layout(layout: layouts.AGGREGATOR, account_info: AccountInfo, name: str) -> "Aggregator":
config = AggregatorConfig.from_layout(layout.config)
initialized = bool(layout.initialized)
round_ = Round.from_layout(layout.round)
answer = Answer.from_layout(layout.answer)
return Aggregator(account_info, Version.UNSPECIFIED, config, initialized, name, layout.owner,
round_, layout.round_submissions, answer, layout.answer_submissions)
@staticmethod
def parse(context: Context, account_info: AccountInfo) -> "Aggregator":
data = account_info.data
if len(data) != layouts.AGGREGATOR.sizeof():
raise Exception(f"Data length ({len(data)}) does not match expected size ({layouts.AGGREGATOR.sizeof()})")
name = context.lookup_oracle_name(account_info.address)
layout = layouts.AGGREGATOR.parse(data)
return Aggregator.from_layout(layout, account_info, name)
@staticmethod
def load(context: Context, account_address: PublicKey):
account_info = AccountInfo.load(context, account_address)
if account_info is None:
raise Exception(f"Aggregator account not found at address '{account_address}'")
return Aggregator.parse(context, account_info)
def __str__(self) -> str:
return f"""
« Aggregator '{self.name}' [{self.version}]:
Config: {self.config}
Initialized: {self.initialized}
Owner: {self.owner}
Round: {self.round}
Round Submissions: {self.round_submissions}
Answer: {self.answer}
Answer Submissions: {self.answer_submissions}
»
"""
class Token:
def __init__(self, name: str, mint: PublicKey, decimals: Decimal):
self.logger: logging.Logger = logging.getLogger(self.__class__.__name__)
self.name: str = name.upper()
self.mint: PublicKey = mint
self.decimals: Decimal = decimals
def round(self, value: Decimal) -> Decimal:
rounded = round(value, int(self.decimals))
return Decimal(rounded)
def name_matches(self, name: str) -> bool:
return self.name.upper() == name.upper()
@staticmethod
def find_by_name(values: typing.List["Token"], name: str) -> "Token":
found = [value for value in values if value.name_matches(name)]
if len(found) == 0:
raise Exception(f"Token '{name}' not found in token values: {values}")
if len(found) > 1:
raise Exception(f"Token '{name}' matched multiple tokens in values: {values}")
return found[0]
@staticmethod
def find_by_mint(values: typing.List["Token"], mint: PublicKey) -> "Token":
found = [value for value in values if value.mint == mint]
if len(found) == 0:
raise Exception(f"Token '{mint}' not found in token values: {values}")
if len(found) > 1:
raise Exception(f"Token '{mint}' matched multiple tokens in values: {values}")
return found[0]
    # Tokens are equal if they have the same mint address.
def __eq__(self, other):
if hasattr(other, 'mint'):
return self.mint == other.mint
return False
def __str__(self) -> str:
return f"« Token '{self.name}' [{self.mint} ({self.decimals} decimals)] »"
def __repr__(self) -> str:
return f"{self}"
SolToken = Token("SOL", SYSTEM_PROGRAM_ADDRESS, SOL_DECIMALS)
class TokenLookup:
@staticmethod
def find_by_name(context: Context, name: str) -> Token:
if SolToken.name_matches(name):
return SolToken
mint = context.lookup_token_address(name)
if mint is None:
raise Exception(f"Could not find token with name '{name}'.")
return Token(name, mint, Decimal(6))
@staticmethod
def find_by_mint(context: Context, mint: PublicKey) -> Token:
if SolToken.mint == mint:
return SolToken
name = context.lookup_token_name(mint)
if name is None:
raise Exception(f"Could not find token with mint '{mint}'.")
return Token(name, mint, Decimal(6))
class BasketToken:
def __init__(self, token: Token, vault: PublicKey, index: Index):
self.logger: logging.Logger = logging.getLogger(self.__class__.__name__)
self.token: Token = token
self.vault: PublicKey = vault
self.index: Index = index
@staticmethod
def find_by_name(values: typing.List["BasketToken"], name: str) -> "BasketToken":
found = [value for value in values if value.token.name_matches(name)]
if len(found) == 0:
raise Exception(f"Token '{name}' not found in token values: {values}")
if len(found) > 1:
raise Exception(f"Token '{name}' matched multiple tokens in values: {values}")
return found[0]
@staticmethod
def find_by_mint(values: typing.List["BasketToken"], mint: PublicKey) -> "BasketToken":
found = [value for value in values if value.token.mint == mint]
if len(found) == 0:
raise Exception(f"Token '{mint}' not found in token values: {values}")
if len(found) > 1:
raise Exception(f"Token '{mint}' matched multiple tokens in values: {values}")
return found[0]
@staticmethod
def find_by_token(values: typing.List["BasketToken"], token: Token) -> "BasketToken":
return BasketToken.find_by_mint(values, token.mint)
# BasketTokens are equal if they have the same underlying token.
def __eq__(self, other):
if hasattr(other, 'token'):
return self.token == other.token
return False
def __str__(self) -> str:
return f"""« BasketToken [{self.token}]:
Vault: {self.vault}
Index: {self.index}
»"""
def __repr__(self) -> str:
return f"{self}"
class TokenValue:
def __init__(self, token: Token, value: Decimal):
self.token = token
self.value = value
@staticmethod
    def fetch_total_value_or_none(context: Context, account_public_key: PublicKey, token: Token) -> typing.Optional["TokenValue"]:
        opts = TokenAccountOpts(mint=token.mint)
        token_accounts_response = context.client.get_token_accounts_by_owner(account_public_key, opts, commitment=context.commitment)
token_accounts = token_accounts_response["result"]["value"]
if len(token_accounts) == 0:
return None
total_value = Decimal(0)
for token_account in token_accounts:
            result = context.client.get_token_account_balance(token_account["pubkey"], commitment=context.commitment)
value = Decimal(result["result"]["value"]["amount"])
decimal_places = result["result"]["value"]["decimals"]
divisor = Decimal(10 ** decimal_places)
total_value += value / divisor
return TokenValue(token, total_value)
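    # Worked example of the scaling above: two token accounts holding raw
    # amounts 1_500_000 and 250_000 of a 6-decimal mint contribute 1.5 and
    # 0.25 respectively, so the reported total is Decimal('1.75').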
@staticmethod
def fetch_total_value(context: Context, account_public_key: PublicKey, token: Token) -> "TokenValue":
value = TokenValue.fetch_total_value_or_none(context, account_public_key, token)
if value is None:
return TokenValue(token, Decimal(0))
return value
@staticmethod
def report(reporter: typing.Callable[[str], None], values: typing.List["TokenValue"]) -> None:
for value in values:
reporter(f"{value.value:>18,.8f} {value.token.name}")
@staticmethod
def find_by_name(values: typing.List["TokenValue"], name: str) -> "TokenValue":
found = [value for value in values if value.token.name_matches(name)]
if len(found) == 0:
raise Exception(f"Token '{name}' not found in token values: {values}")
if len(found) > 1:
raise Exception(f"Token '{name}' matched multiple tokens in values: {values}")
return found[0]
@staticmethod
def find_by_mint(values: typing.List["TokenValue"], mint: PublicKey) -> "TokenValue":
found = [value for value in values if value.token.mint == mint]
if len(found) == 0:
raise Exception(f"Token '{mint}' not found in token values: {values}")
if len(found) > 1:
raise Exception(f"Token '{mint}' matched multiple tokens in values: {values}")
return found[0]
@staticmethod
def find_by_token(values: typing.List["TokenValue"], token: Token) -> "TokenValue":
return TokenValue.find_by_mint(values, token.mint)
@staticmethod
def changes(before: typing.List["TokenValue"], after: typing.List["TokenValue"]) -> typing.List["TokenValue"]:
changes: typing.List[TokenValue] = []
for before_balance in before:
after_balance = TokenValue.find_by_token(after, before_balance.token)
result = TokenValue(before_balance.token, after_balance.value - before_balance.value)
changes += [result]
return changes
def __str__(self) -> str:
return f"« TokenValue: {self.value:>18,.8f} {self.token.name} »"
def __repr__(self) -> str:
return f"{self}"
class OwnedTokenValue:
def __init__(self, owner: PublicKey, token_value: TokenValue):
self.owner = owner
self.token_value = token_value
@staticmethod
def find_by_owner(values: typing.List["OwnedTokenValue"], owner: PublicKey) -> "OwnedTokenValue":
found = [value for value in values if value.owner == owner]
if len(found) == 0:
raise Exception(f"Owner '{owner}' not found in: {values}")
if len(found) > 1:
raise Exception(f"Owner '{owner}' matched multiple tokens in: {values}")
return found[0]
@staticmethod
def changes(before: typing.List["OwnedTokenValue"], after: typing.List["OwnedTokenValue"]) -> typing.List["OwnedTokenValue"]:
changes: typing.List[OwnedTokenValue] = []
for before_value in before:
after_value = OwnedTokenValue.find_by_owner(after, before_value.owner)
token_value = TokenValue(before_value.token_value.token, after_value.token_value.value - before_value.token_value.value)
result = OwnedTokenValue(before_value.owner, token_value)
changes += [result]
return changes
def __str__(self) -> str:
return f"[{self.owner}]: {self.token_value}"
def __repr__(self) -> str:
return f"{self}"
class MarketMetadata:
def __init__(self, name: str, address: PublicKey, base: BasketToken, quote: BasketToken,
spot: PublicKey, oracle: PublicKey, decimals: Decimal):
self.logger: logging.Logger = logging.getLogger(self.__class__.__name__)
self.name: str = name
self.address: PublicKey = address
self.base: BasketToken = base
self.quote: BasketToken = quote
self.spot: PublicKey = spot
self.oracle: PublicKey = oracle
self.decimals: Decimal = decimals
self._market = None
    def fetch_market(self, context: Context) -> Market:
        if self._market is None:
            self._market = Market.load(context.client, self.spot)
return self._market
def __str__(self) -> str:
return f"""« Market '{self.name}' [{self.spot}]:
Base: {self.base}
Quote: {self.quote}
Oracle: {self.oracle} ({self.decimals} decimals)
»"""
def __repr__(self) -> str:
return f"{self}"
class Group(AddressableAccount):
def __init__(self, account_info: AccountInfo, version: Version, context: Context,
account_flags: MangoAccountFlags, basket_tokens: typing.List[BasketToken],
markets: typing.List[MarketMetadata],
signer_nonce: Decimal, signer_key: PublicKey, dex_program_id: PublicKey,
total_deposits: typing.List[Decimal], total_borrows: typing.List[Decimal],
maint_coll_ratio: Decimal, init_coll_ratio: Decimal, srm_vault: PublicKey,
admin: PublicKey, borrow_limits: typing.List[Decimal]):
super().__init__(account_info)
self.version: Version = version
self.context: Context = context
self.account_flags: MangoAccountFlags = account_flags
self.basket_tokens: typing.List[BasketToken] = basket_tokens
self.markets: typing.List[MarketMetadata] = markets
self.signer_nonce: Decimal = signer_nonce
self.signer_key: PublicKey = signer_key
self.dex_program_id: PublicKey = dex_program_id
self.total_deposits: typing.List[Decimal] = total_deposits
self.total_borrows: typing.List[Decimal] = total_borrows
self.maint_coll_ratio: Decimal = maint_coll_ratio
self.init_coll_ratio: Decimal = init_coll_ratio
self.srm_vault: PublicKey = srm_vault
self.admin: PublicKey = admin
self.borrow_limits: typing.List[Decimal] = borrow_limits
        self.mint_decimals: typing.List[Decimal] = [basket_token.token.decimals for basket_token in basket_tokens]
@property
def shared_quote_token(self) -> BasketToken:
return self.basket_tokens[-1]
@staticmethod
def from_layout(layout: layouts.GROUP, context: Context, account_info: AccountInfo) -> "Group":
account_flags = MangoAccountFlags.from_layout(layout.account_flags)
indexes = list(map(lambda pair: Index.from_layout(pair[0], pair[1]), zip(layout.indexes, layout.mint_decimals)))
basket_tokens: typing.List[BasketToken] = []
for index in range(NUM_TOKENS):
token_address = layout.tokens[index]
token_name = context.lookup_token_name(token_address)
if token_name is None:
raise Exception(f"Could not find token with mint '{token_address}' in Group.")
token = Token(token_name, token_address, layout.mint_decimals[index])
basket_token = BasketToken(token, layout.vaults[index], indexes[index])
basket_tokens += [basket_token]
markets: typing.List[MarketMetadata] = []
for index in range(NUM_MARKETS):
market_address = layout.spot_markets[index]
market_name = context.lookup_market_name(market_address)
base_name, quote_name = market_name.split("/")
base_token = BasketToken.find_by_name(basket_tokens, base_name)
quote_token = BasketToken.find_by_name(basket_tokens, quote_name)
market = MarketMetadata(market_name, market_address, base_token, quote_token,
layout.spot_markets[index],
layout.oracles[index],
layout.oracle_decimals[index])
markets += [market]
maint_coll_ratio = layout.maint_coll_ratio.quantize(Decimal('.01'))
init_coll_ratio = layout.init_coll_ratio.quantize(Decimal('.01'))
return Group(account_info, Version.UNSPECIFIED, context, account_flags, basket_tokens, markets,
layout.signer_nonce, layout.signer_key, layout.dex_program_id, layout.total_deposits,
layout.total_borrows, maint_coll_ratio, init_coll_ratio, layout.srm_vault,
layout.admin, layout.borrow_limits)
@staticmethod
def parse(context: Context, account_info: AccountInfo) -> "Group":
data = account_info.data
if len(data) != layouts.GROUP.sizeof():
raise Exception(f"Data length ({len(data)}) does not match expected size ({layouts.GROUP.sizeof()})")
layout = layouts.GROUP.parse(data)
return Group.from_layout(layout, context, account_info)
@staticmethod
def load(context: Context):
account_info = AccountInfo.load(context, context.group_id)
if account_info is None:
raise Exception(f"Group account not found at address '{context.group_id}'")
return Group.parse(context, account_info)
    # TODO: test this method.
def get_deposit_rate(self,token_index: int):
borrow_rate = self.get_borrow_rate(token_index)
total_borrows = self.get_ui_total_borrow(token_index)
total_deposits = self.get_ui_total_deposit(token_index)
if total_deposits == 0 and total_borrows == 0: return 0
elif total_deposits == 0: return MAX_RATE
utilization = total_borrows / total_deposits
return utilization * borrow_rate
    # TODO: test this method.
def get_borrow_rate(self,token_index: int):
total_borrows = self.get_ui_total_borrow(token_index)
total_deposits = self.get_ui_total_deposit(token_index)
if total_deposits == 0 and total_borrows == 0: return 0
if total_deposits <= total_borrows : return MAX_RATE
utilization = total_borrows / total_deposits
if utilization > OPTIMAL_UTIL:
extra_util = utilization - OPTIMAL_UTIL
slope = (MAX_RATE - OPTIMAL_RATE) / (1 - OPTIMAL_UTIL)
return OPTIMAL_RATE + slope * extra_util
else:
slope = OPTIMAL_RATE / OPTIMAL_UTIL
return slope * utilization
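    # Worked example of the kinked rate model above, assuming for illustration
    # OPTIMAL_UTIL = 0.7, OPTIMAL_RATE = 0.10 and MAX_RATE = 1.5 (the real
    # values come from Constants):
    #   utilization 0.35 (below the kink): rate = (0.10 / 0.7) * 0.35 = 0.05
    #   utilization 0.90 (above the kink): rate = 0.10 + ((1.5 - 0.10) / (1 - 0.7)) * (0.90 - 0.70) ~= 1.03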
def get_token_index(self, token: Token) -> int:
for index, existing in enumerate(self.basket_tokens):
if existing.token == token:
return index
return -1
def get_prices(self) -> typing.List[TokenValue]:
started_at = time.time()
# Note: we can just load the oracle data in a simpler way, with:
# oracles = map(lambda market: Aggregator.load(self.context, market.oracle), self.markets)
# but that makes a network request for every oracle. We can reduce that to just one request
# if we use AccountInfo.load_multiple() and parse the data ourselves.
#
# This seems to halve the time this function takes.
oracle_addresses = list([market.oracle for market in self.markets])
oracle_account_infos = AccountInfo.load_multiple(self.context, oracle_addresses)
oracles = map(lambda oracle_account_info: Aggregator.parse(self.context, oracle_account_info),
oracle_account_infos)
prices = list(map(lambda oracle: oracle.price, oracles)) + [Decimal(1)]
token_prices = []
for index, price in enumerate(prices):
token_prices += [TokenValue(self.basket_tokens[index].token, price)]
time_taken = time.time() - started_at
self.logger.info(f"Faster fetching prices complete. Time taken: {time_taken:.2f} seconds.")
return token_prices
def fetch_balances(self, root_address: PublicKey) -> typing.List[TokenValue]:
balances: typing.List[TokenValue] = []
sol_balance = self.context.fetch_sol_balance(root_address)
balances += [TokenValue(SolToken, sol_balance)]
for basket_token in self.basket_tokens:
balance = TokenValue.fetch_total_value(self.context, root_address, basket_token.token)
balances += [balance]
return balances
    def native_to_ui(self, amount, decimals) -> Decimal:
        return amount / (10 ** decimals)
    def ui_to_native(self, amount, decimals) -> Decimal:
        return amount * (10 ** decimals)
    def get_ui_total_deposit(self, token_index: int) -> Decimal:
        # Deposits are stored natively; scale by the deposit index, then convert to UI units.
        return self.native_to_ui(self.total_deposits[token_index] * self.basket_tokens[token_index].index.deposit, self.mint_decimals[token_index])
    def get_ui_total_borrow(self, token_index: int) -> Decimal:
        return self.native_to_ui(self.total_borrows[token_index] * self.basket_tokens[token_index].index.borrow, self.mint_decimals[token_index])
def __str__(self) -> str:
total_deposits = "\n ".join(map(str, self.total_deposits))
total_borrows = "\n ".join(map(str, self.total_borrows))
borrow_limits = "\n ".join(map(str, self.borrow_limits))
return f"""
« Group [{self.version}] {self.address}:
Flags: {self.account_flags}
Tokens:
{self.basket_tokens}
Markets:
{self.markets}
DEX Program ID: « {self.dex_program_id} »
SRM Vault: « {self.srm_vault} »
Admin: « {self.admin} »
Signer Nonce: {self.signer_nonce}
Signer Key: « {self.signer_key} »
Initial Collateral Ratio: {self.init_coll_ratio}
Maintenance Collateral Ratio: {self.maint_coll_ratio}
Total Deposits:
{total_deposits}
Total Borrows:
{total_borrows}
Borrow Limits:
{borrow_limits}
»
"""
class TokenAccount(AddressableAccount):
def __init__(self, account_info: AccountInfo, version: Version, mint: PublicKey, owner: PublicKey, amount: Decimal):
super().__init__(account_info)
self.version: Version = version
self.mint: PublicKey = mint
self.owner: PublicKey = owner
self.amount: Decimal = amount
@staticmethod
def create(context: Context, account: Account, token: Token):
        # SplToken's constructor is synchronous, so it is called without await.
        spl_token = SplToken(context.client, token.mint, TOKEN_PROGRAM_ID, account)
owner = account.public_key()
new_account_address = spl_token.create_account(owner)
return TokenAccount.load(context, new_account_address)
@staticmethod
    def fetch_all_for_owner_and_token(context: Context, owner_public_key: PublicKey, token: Token) -> typing.List["TokenAccount"]:
        opts = TokenAccountOpts(mint=token.mint)
        token_accounts_response = context.client.get_token_accounts_by_owner(owner_public_key, opts, commitment=context.commitment)
all_accounts: typing.List[TokenAccount] = []
for token_account_response in token_accounts_response["result"]["value"]:
account_info = AccountInfo._from_response_values(token_account_response["account"], PublicKey(token_account_response["pubkey"]))
token_account = TokenAccount.parse(account_info)
all_accounts += [token_account]
return all_accounts
@staticmethod
def fetch_largest_for_owner_and_token(context: Context, owner_public_key: PublicKey, token: Token) -> typing.Optional["TokenAccount"]:
all_accounts = TokenAccount.fetch_all_for_owner_and_token(context, owner_public_key, token)
largest_account: typing.Optional[TokenAccount] = None
for token_account in all_accounts:
if largest_account is None or token_account.amount > largest_account.amount:
largest_account = token_account
return largest_account
@staticmethod
def fetch_or_create_largest_for_owner_and_token(context: Context, account: Account, token: Token) -> "TokenAccount":
all_accounts = TokenAccount.fetch_all_for_owner_and_token(context, account.public_key(), token)
largest_account: typing.Optional[TokenAccount] = None
for token_account in all_accounts:
if largest_account is None or token_account.amount > largest_account.amount:
largest_account = token_account
if largest_account is None:
return TokenAccount.create(context, account, token)
return largest_account
@staticmethod
def from_layout(layout: layouts.TOKEN_ACCOUNT, account_info: AccountInfo) -> "TokenAccount":
return TokenAccount(account_info, Version.UNSPECIFIED, layout.mint, layout.owner, layout.amount)
@staticmethod
def parse(account_info: AccountInfo) -> "TokenAccount":
data = account_info.data
if len(data) != layouts.TOKEN_ACCOUNT.sizeof():
raise Exception(f"Data length ({len(data)}) does not match expected size ({layouts.TOKEN_ACCOUNT.sizeof()})")
layout = layouts.TOKEN_ACCOUNT.parse(data)
return TokenAccount.from_layout(layout, account_info)
@staticmethod
def load(context: Context, address: PublicKey) -> typing.Optional["TokenAccount"]:
account_info = AccountInfo.load(context, address)
if account_info is None or (len(account_info.data) != layouts.TOKEN_ACCOUNT.sizeof()):
return None
return TokenAccount.parse(account_info)
def __str__(self) -> str:
return f"« Token: Mint: {self.mint}, Owner: {self.owner}, Amount: {self.amount} »"
class OpenOrders(AddressableAccount):
def __init__(self, account_info: AccountInfo, version: Version, program_id: PublicKey,
account_flags: SerumAccountFlags, market: PublicKey, owner: PublicKey,
base_token_free: Decimal, base_token_total: Decimal, quote_token_free: Decimal,
quote_token_total: Decimal, free_slot_bits: Decimal, is_bid_bits: Decimal,
orders: typing.List[Decimal], client_ids: typing.List[Decimal],
referrer_rebate_accrued: Decimal):
super().__init__(account_info)
self.version: Version = version
self.program_id: PublicKey = program_id
self.account_flags: SerumAccountFlags = account_flags
self.market: PublicKey = market
self.owner: PublicKey = owner
self.base_token_free: Decimal = base_token_free
self.base_token_total: Decimal = base_token_total
self.quote_token_free: Decimal = quote_token_free
self.quote_token_total: Decimal = quote_token_total
self.free_slot_bits: Decimal = free_slot_bits
self.is_bid_bits: Decimal = is_bid_bits
self.orders: typing.List[Decimal] = orders
self.client_ids: typing.List[Decimal] = client_ids
self.referrer_rebate_accrued: Decimal = referrer_rebate_accrued
# Sometimes pyserum wants to take its own OpenOrdersAccount as a parameter (e.g. in settle_funds())
def to_pyserum(self) -> OpenOrdersAccount:
return OpenOrdersAccount.from_bytes(self.address, self.account_info.data)
@staticmethod
def from_layout(layout: layouts.OPEN_ORDERS, account_info: AccountInfo,
base_decimals: Decimal, quote_decimals: Decimal) -> "OpenOrders":
account_flags = SerumAccountFlags.from_layout(layout.account_flags)
program_id = account_info.owner
base_divisor = 10 ** base_decimals
quote_divisor = 10 ** quote_decimals
base_token_free: Decimal = layout.base_token_free / base_divisor
base_token_total: Decimal = layout.base_token_total / base_divisor
quote_token_free: Decimal = layout.quote_token_free / quote_divisor
quote_token_total: Decimal = layout.quote_token_total / quote_divisor
nonzero_orders: typing.List[Decimal] = list([order for order in layout.orders if order != 0])
nonzero_client_ids: typing.List[Decimal] = list([client_id for client_id in layout.client_ids if client_id != 0])
return OpenOrders(account_info, Version.UNSPECIFIED, program_id, account_flags, layout.market,
layout.owner, base_token_free, base_token_total, quote_token_free, quote_token_total,
layout.free_slot_bits, layout.is_bid_bits, nonzero_orders, nonzero_client_ids,
layout.referrer_rebate_accrued)
@staticmethod
def parse(account_info: AccountInfo, base_decimals: Decimal, quote_decimals: Decimal) -> "OpenOrders":
data = account_info.data
if len(data) != layouts.OPEN_ORDERS.sizeof():
raise Exception(f"Data length ({len(data)}) does not match expected size ({layouts.OPEN_ORDERS.sizeof()})")
layout = layouts.OPEN_ORDERS.parse(data)
return OpenOrders.from_layout(layout, account_info, base_decimals, quote_decimals)
@staticmethod
    def load_raw_open_orders_account_infos(context: Context, group: Group) -> typing.Dict[str, AccountInfo]:
filters = [
MemcmpOpts(
offset=layouts.SERUM_ACCOUNT_FLAGS.sizeof() + 37,
bytes=encode_key(group.signer_key)
)
]
        response = context.client.get_program_accounts(group.dex_program_id, data_size=layouts.OPEN_ORDERS.sizeof(), memcmp_opts=filters, commitment=Single, encoding="base64")
account_infos = list(map(lambda pair: AccountInfo._from_response_values(pair[0], pair[1]), [(result["account"], PublicKey(result["pubkey"])) for result in response["result"]]))
account_infos_by_address = {key: value for key, value in [(str(account_info.address), account_info) for account_info in account_infos]}
return account_infos_by_address
@staticmethod
def load(context: Context, address: PublicKey, base_decimals: Decimal, quote_decimals: Decimal) -> "OpenOrders":
open_orders_account = AccountInfo.load(context, address)
if open_orders_account is None:
raise Exception(f"OpenOrders account not found at address '{address}'")
return OpenOrders.parse(open_orders_account, base_decimals, quote_decimals)
@staticmethod
    def load_for_market_and_owner(context: Context, market: PublicKey, owner: PublicKey, program_id: PublicKey, base_decimals: Decimal, quote_decimals: Decimal):
filters = [
MemcmpOpts(
offset=layouts.SERUM_ACCOUNT_FLAGS.sizeof() + 5,
bytes=encode_key(market)
),
MemcmpOpts(
offset=layouts.SERUM_ACCOUNT_FLAGS.sizeof() + 37,
bytes=encode_key(owner)
)
]
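        # Offset note (assuming the standard Serum account layout): accounts
        # begin with a 5-byte "serum" padding prefix followed by the 8-byte
        # account flags, so sizeof(flags) + 5 = 13 lands on the 32-byte
        # `market` key and sizeof(flags) + 37 = 45 lands on `owner`.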
        response = context.client.get_program_accounts(context.dex_program_id, data_size=layouts.OPEN_ORDERS.sizeof(), memcmp_opts=filters, commitment=Single, encoding="base64")
accounts = list(map(lambda pair: AccountInfo._from_response_values(pair[0], pair[1]), [(result["account"], PublicKey(result["pubkey"])) for result in response["result"]]))
return list(map(lambda acc: OpenOrders.parse(acc, base_decimals, quote_decimals), accounts))
def __str__(self) -> str:
orders = ", ".join(map(str, self.orders)) or "None"
client_ids = ", ".join(map(str, self.client_ids)) or "None"
return f"""« OpenOrders:
Flags: {self.account_flags}
Program ID: {self.program_id}
Address: {self.address}
Market: {self.market}
Owner: {self.owner}
Base Token: {self.base_token_free:,.8f} of {self.base_token_total:,.8f}
Quote Token: {self.quote_token_free:,.8f} of {self.quote_token_total:,.8f}
Referrer Rebate Accrued: {self.referrer_rebate_accrued}
Orders:
{orders}
Client IDs:
{client_ids}
»"""
class BalanceSheet:
def __init__(self, token: Token, liabilities: Decimal, settled_assets: Decimal, unsettled_assets: Decimal):
self.logger: logging.Logger = logging.getLogger(self.__class__.__name__)
self.token: Token = token
self.liabilities: Decimal = liabilities
self.settled_assets: Decimal = settled_assets
self.unsettled_assets: Decimal = unsettled_assets
@property
def assets(self) -> Decimal:
return self.settled_assets + self.unsettled_assets
@property
def value(self) -> Decimal:
return self.assets - self.liabilities
@property
def collateral_ratio(self) -> Decimal:
if self.liabilities == Decimal(0):
return Decimal(0)
return self.assets / self.liabilities
def __str__(self) -> str:
name = "«Unspecified»"
if self.token is not None:
name = self.token.name
return f"""« BalanceSheet [{name}]:
Assets : {self.assets:>18,.8f}
Settled Assets : {self.settled_assets:>18,.8f}
Unsettled Assets : {self.unsettled_assets:>18,.8f}
Liabilities : {self.liabilities:>18,.8f}
Value : {self.value:>18,.8f}
Collateral Ratio : {self.collateral_ratio:>18,.2%}
»
"""
def __repr__(self) -> str:
return f"{self}"
class MarginAccount(AddressableAccount):
def __init__(self, account_info: AccountInfo, version: Version, account_flags: MangoAccountFlags,
mango_group: PublicKey, owner: PublicKey, deposits: typing.List[Decimal],
borrows: typing.List[Decimal], open_orders: typing.List[PublicKey]):
super().__init__(account_info)
self.version: Version = version
self.account_flags: MangoAccountFlags = account_flags
self.mango_group: PublicKey = mango_group
self.owner: PublicKey = owner
self.deposits: typing.List[Decimal] = deposits
self.borrows: typing.List[Decimal] = borrows
self.open_orders: typing.List[PublicKey] = open_orders
self.open_orders_accounts: typing.List[typing.Optional[OpenOrders]] = [None] * NUM_MARKETS
@staticmethod
def from_layout(layout: layouts.MARGIN_ACCOUNT, account_info: AccountInfo) -> "MarginAccount":
account_flags: MangoAccountFlags = MangoAccountFlags.from_layout(layout.account_flags)
deposits: typing.List[Decimal] = []
for index, deposit in enumerate(layout.deposits):
deposits += [deposit]
borrows: typing.List[Decimal] = []
for index, borrow in enumerate(layout.borrows):
borrows += [borrow]
return MarginAccount(account_info, Version.UNSPECIFIED, account_flags, layout.mango_group,
layout.owner, deposits, borrows, list(layout.open_orders))
@staticmethod
def parse(account_info: AccountInfo) -> "MarginAccount":
data = account_info.data
if len(data) != layouts.MARGIN_ACCOUNT.sizeof():
raise Exception(f"Data length ({len(data)}) does not match expected size ({layouts.MARGIN_ACCOUNT.sizeof()})")
layout = layouts.MARGIN_ACCOUNT.parse(data)
return MarginAccount.from_layout(layout, account_info)
@staticmethod
def load(context: Context, margin_account_address: PublicKey, group: typing.Optional[Group] = None) -> "MarginAccount":
account_info = AccountInfo.load(context, margin_account_address)
if account_info is None:
raise Exception(f"MarginAccount account not found at address '{margin_account_address}'")
margin_account = MarginAccount.parse(account_info)
if group is None:
group = Group.load(context)
margin_account.load_open_orders_accounts(context, group)
return margin_account
@staticmethod
def load_all_for_group(context: Context, program_id: PublicKey, group: Group) -> typing.List["MarginAccount"]:
filters = [
MemcmpOpts(
offset=layouts.MANGO_ACCOUNT_FLAGS.sizeof(), # mango_group is just after the MangoAccountFlags, which is the first entry
bytes=encode_key(group.address)
)
]
response = context.client.get_program_accounts(program_id, data_size=layouts.MARGIN_ACCOUNT.sizeof(), memcmp_opts=filters, commitment=Single, encoding="base64")
margin_accounts = []
for margin_account_data in response["result"]:
address = PublicKey(margin_account_data["pubkey"])
account = AccountInfo._from_response_values(margin_account_data["account"], address)
margin_account = MarginAccount.parse(account)
margin_accounts += [margin_account]
return margin_accounts
@staticmethod
def load_all_for_group_with_open_orders(context: Context, program_id: PublicKey, group: Group) -> typing.List["MarginAccount"]:
        margin_accounts = MarginAccount.load_all_for_group(context, program_id, group)
open_orders = OpenOrders.load_raw_open_orders_account_infos(context, group)
for margin_account in margin_accounts:
margin_account.install_open_orders_accounts(group, open_orders)
return margin_accounts
@staticmethod
def load_all_for_owner(context: Context, owner: PublicKey, group: typing.Optional[Group] = None) -> typing.List["MarginAccount"]:
if group is None:
group = Group.load(context)
mango_group_offset = layouts.MANGO_ACCOUNT_FLAGS.sizeof() # mango_group is just after the MangoAccountFlags, which is the first entry.
owner_offset = mango_group_offset + 32 # owner is just after mango_group in the layout, and it's a PublicKey which is 32 bytes.
filters = [
MemcmpOpts(
offset=mango_group_offset,
bytes=encode_key(group.address)
),
MemcmpOpts(
offset=owner_offset,
bytes=encode_key(owner)
)
]
response = context.client.get_program_accounts(context.program_id, data_size=layouts.MARGIN_ACCOUNT.sizeof(), memcmp_opts=filters, commitment=Single, encoding="base64")
margin_accounts = []
for margin_account_data in response["result"]:
address = PublicKey(margin_account_data["pubkey"])
account = AccountInfo._from_response_values(margin_account_data["account"], address)
margin_account = MarginAccount.parse(account)
margin_account.load_open_orders_accounts(context, group)
margin_accounts += [margin_account]
return margin_accounts
@classmethod
def load_all_ripe(cls, context: Context) -> typing.List["MarginAccount"]:
logger: logging.Logger = logging.getLogger(cls.__name__)
started_at = time.time()
group = Group.load(context)
margin_accounts = MarginAccount.load_all_for_group_with_open_orders(context, context.program_id, group)
logger.info(f"Fetched {len(margin_accounts)} margin accounts to process.")
prices = group.get_prices()
nonzero: typing.List[MarginAccountMetadata] = []
for margin_account in margin_accounts:
balance_sheet = margin_account.get_balance_sheet_totals(group, prices)
if balance_sheet.collateral_ratio > 0:
balances = margin_account.get_intrinsic_balances(group)
nonzero += [MarginAccountMetadata(margin_account, balance_sheet, balances)]
logger.info(f"Of those {len(margin_accounts)}, {len(nonzero)} have a nonzero collateral ratio.")
ripe_metadata = filter(lambda mam: mam.balance_sheet.collateral_ratio <= group.init_coll_ratio, nonzero)
ripe_accounts = list(map(lambda mam: mam.margin_account, ripe_metadata))
logger.info(f"Of those {len(nonzero)}, {len(ripe_accounts)} are ripe 🥭.")
time_taken = time.time() - started_at
logger.info(f"Loading ripe 🥭 accounts complete. Time taken: {time_taken:.2f} seconds.")
return ripe_accounts
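    # Hedged usage sketch (assumes a configured Context such as the
    # default_context used in the demo block at the bottom of this module):
    #   for margin_account in MarginAccount.load_all_ripe(context):
    #       print(margin_account.address)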
def load_open_orders_accounts(self, context: Context, group: Group) -> None:
for index, oo in enumerate(self.open_orders):
key = oo
if key != SYSTEM_PROGRAM_ADDRESS:
self.open_orders_accounts[index] = OpenOrders.load(context, key, group.basket_tokens[index].token.decimals, group.shared_quote_token.token.decimals)
def install_open_orders_accounts(self, group: Group, all_open_orders_by_address: typing.Dict[str, AccountInfo]) -> None:
for index, oo in enumerate(self.open_orders):
key = str(oo)
if key in all_open_orders_by_address:
open_orders_account_info = all_open_orders_by_address[key]
open_orders = OpenOrders.parse(open_orders_account_info,
group.basket_tokens[index].token.decimals,
group.shared_quote_token.token.decimals)
self.open_orders_accounts[index] = open_orders
def get_intrinsic_balance_sheets(self, group: Group) -> typing.List[BalanceSheet]:
settled_assets: typing.List[Decimal] = [Decimal(0)] * NUM_TOKENS
liabilities: typing.List[Decimal] = [Decimal(0)] * NUM_TOKENS
for index in range(NUM_TOKENS):
settled_assets[index] = group.basket_tokens[index].index.deposit * self.deposits[index]
liabilities[index] = group.basket_tokens[index].index.borrow * self.borrows[index]
unsettled_assets: typing.List[Decimal] = [Decimal(0)] * NUM_TOKENS
for index in range(NUM_MARKETS):
open_orders_account = self.open_orders_accounts[index]
if open_orders_account is not None:
unsettled_assets[index] += open_orders_account.base_token_total
unsettled_assets[NUM_TOKENS - 1] += open_orders_account.quote_token_total
balance_sheets: typing.List[BalanceSheet] = []
for index in range(NUM_TOKENS):
balance_sheets += [BalanceSheet(group.basket_tokens[index].token, liabilities[index],
settled_assets[index], unsettled_assets[index])]
return balance_sheets
def get_priced_balance_sheets(self, group: Group, prices: typing.List[TokenValue]) -> typing.List[BalanceSheet]:
priced: typing.List[BalanceSheet] = []
balance_sheets = self.get_intrinsic_balance_sheets(group)
for balance_sheet in balance_sheets:
price = TokenValue.find_by_token(prices, balance_sheet.token)
liabilities = balance_sheet.liabilities * price.value
settled_assets = balance_sheet.settled_assets * price.value
unsettled_assets = balance_sheet.unsettled_assets * price.value
priced += [BalanceSheet(
price.token,
price.token.round(liabilities),
price.token.round(settled_assets),
price.token.round(unsettled_assets)
)]
return priced
def get_balance_sheet_totals(self, group: Group, prices: typing.List[TokenValue]) -> BalanceSheet:
liabilities = Decimal(0)
settled_assets = Decimal(0)
unsettled_assets = Decimal(0)
balance_sheets = self.get_priced_balance_sheets(group, prices)
for balance_sheet in balance_sheets:
if balance_sheet is not None:
liabilities += balance_sheet.liabilities
settled_assets += balance_sheet.settled_assets
unsettled_assets += balance_sheet.unsettled_assets
# A BalanceSheet must have a token - it's a pain to make it a typing.Optional[Token].
# So in this one case, we produce a 'fake' token whose symbol is a summary of all token
# symbols that went into it.
#
# If this becomes more painful than typing.Optional[Token], we can go with making
# Token optional.
summary_name = "-".join([bal.token.name for bal in balance_sheets])
summary_token = Token(summary_name, SYSTEM_PROGRAM_ADDRESS, Decimal(0))
return BalanceSheet(summary_token, liabilities, settled_assets, unsettled_assets)
def get_intrinsic_balances(self, group: Group) -> typing.List[TokenValue]:
balance_sheets = self.get_intrinsic_balance_sheets(group)
balances: typing.List[TokenValue] = []
for index, balance_sheet in enumerate(balance_sheets):
if balance_sheet.token is None:
raise Exception(f"Intrinsic balance sheet with index [{index}] has no token.")
balances += [TokenValue(balance_sheet.token, balance_sheet.value)]
return balances
def __str__(self) -> str:
deposits = ", ".join([f"{item:,.8f}" for item in self.deposits])
borrows = ", ".join([f"{item:,.8f}" for item in self.borrows])
if all(oo is None for oo in self.open_orders_accounts):
open_orders = f"{self.open_orders}"
else:
open_orders_unindented = f"{self.open_orders_accounts}"
open_orders = open_orders_unindented.replace("\n", "\n ")
return f"""« MarginAccount: {self.address}
Flags: {self.account_flags}
Owner: {self.owner}
Mango Group: {self.mango_group}
Deposits: [{deposits}]
Borrows: [{borrows}]
Mango Open Orders: {open_orders}
»"""
class MarginAccountMetadata:
def __init__(self, margin_account: MarginAccount, balance_sheet: BalanceSheet, balances: typing.List[TokenValue]):
self.logger: logging.Logger = logging.getLogger(self.__class__.__name__)
self.margin_account = margin_account
self.balance_sheet = balance_sheet
self.balances = balances
@property
def assets(self):
return self.balance_sheet.assets
@property
def liabilities(self):
return self.balance_sheet.liabilities
@property
def collateral_ratio(self):
return self.balance_sheet.collateral_ratio
class LiquidationEvent:
def __init__(self, timestamp: datetime.datetime, signature: str, wallet_address: PublicKey, margin_account_address: PublicKey, balances_before: typing.List[TokenValue], balances_after: typing.List[TokenValue]):
self.timestamp = timestamp
self.signature = signature
self.wallet_address = wallet_address
self.margin_account_address = margin_account_address
self.balances_before = balances_before
self.balances_after = balances_after
def __str__(self) -> str:
changes = TokenValue.changes(self.balances_before, self.balances_after)
changes_text = "\n ".join([f"{change.value:>15,.8f} {change.token.name}" for change in changes])
return f"""« 🥭 Liqudation Event 💧 at {self.timestamp}
📇 Signature: {self.signature}
👛 Wallet: {self.wallet_address}
💳 Margin Account: {self.margin_account_address}
💸 Changes:
{changes_text}
»"""
def __repr__(self) -> str:
return f"{self}"
def _notebook_tests():
log_level = logging.getLogger().level
try:
logging.getLogger().setLevel(logging.CRITICAL)
from Constants import SYSTEM_PROGRAM_ADDRESS
from Context import default_context
balances_before = [
TokenValue(TokenLookup.find_by_name(default_context, "ETH"), Decimal(1)),
TokenValue(TokenLookup.find_by_name(default_context, "BTC"), Decimal("0.1")),
TokenValue(TokenLookup.find_by_name(default_context, "USDT"), Decimal(1000))
]
balances_after = [
TokenValue(TokenLookup.find_by_name(default_context, "ETH"), Decimal(1)),
TokenValue(TokenLookup.find_by_name(default_context, "BTC"), Decimal("0.05")),
TokenValue(TokenLookup.find_by_name(default_context, "USDT"), Decimal(2000))
]
timestamp = datetime.datetime(2021, 5, 17, 12, 20, 56)
event = LiquidationEvent(timestamp, "signature", SYSTEM_PROGRAM_ADDRESS, SYSTEM_PROGRAM_ADDRESS,
balances_before, balances_after)
assert(str(event) == """« 🥭 Liqudation Event 💧 at 2021-05-17 12:20:56
📇 Signature: signature
👛 Wallet: 11111111111111111111111111111111
💳 Margin Account: 11111111111111111111111111111111
💸 Changes:
0.00000000 ETH
-0.05000000 BTC
1,000.00000000 USDT
»""")
finally:
logging.getLogger().setLevel(log_level)
_notebook_tests()
del _notebook_tests
if __name__ == "__main__":
logging.getLogger().setLevel(logging.INFO)
import base64
from Constants import SYSTEM_PROGRAM_ADDRESS
from Context import default_context
# Just use any public key here
fake_public_key = SYSTEM_PROGRAM_ADDRESS
encoded = "AwAAAAAAAACCaOmpoURMK6XHelGTaFawcuQ/78/15LAemWI8jrt3SRKLy2R9i60eclDjuDS8+p/ZhvTUd9G7uQVOYCsR6+BhmqGCiO6EPYP2PQkf/VRTvw7JjXvIjPFJy06QR1Cq1WfTonHl0OjCkyEf60SD07+MFJu5pVWNFGGEO/8AiAYfduaKdnFTaZEHPcK5Eq72WWHeHg2yIbBF09kyeOhlCJwOoG8O5SgpPV8QOA64ZNV4aKroFfADg6kEy/wWCdp3fv0O4GJgAAAAAPH6Ud6jtjwAAQAAAAAAAADiDkkCi9UOAAEAAAAAAAAADuBiYAAAAACNS5bSy7soAAEAAAAAAAAACMvgO+2jCwABAAAAAAAAAA7gYmAAAAAAZFeDUBNVhwABAAAAAAAAABtRNytozC8AAQAAAAAAAABIBGiCcyaEZdNhrTyeqUY692vOzzPdHaxAxguht3JQGlkzjtd05dX9LENHkl2z1XvUbTNKZlweypNRetmH0lmQ9VYQAHqylxZVK65gEg85g27YuSyvOBZAjJyRmYU9KdCO1D+4ehdPu9dQB1yI1uh75wShdAaFn2o4qrMYwq3SQQEAAAAAAAAAAiH1PPJKAuh6oGiE35aGhUQhFi/bxgKOudpFv8HEHNCFDy1uAqR6+CTQmradxC1wyyjL+iSft+5XudJWwSdi7wvphsxb96x7Obj/AgAAAAAKlV4LL5ow6r9LMhIAAAAADvsOtqcVFmChDPzPnwAAAE33lx1h8hPFD04AAAAAAAA8YRV3Oa309B2wGwAAAAAA+yPBZRlZz7b605n+AQAAAACgmZmZmZkZAQAAAAAAAAAAMDMzMzMzMwEAAAAAAAAA25D1XcAtRzSuuyx3U+X7aE9vM1EJySU9KprgL0LMJ/vat9+SEEUZuga7O5tTUrcMDYWDg+LYaAWhSQiN2fYk7aCGAQAAAAAAgIQeAAAAAAAA8gUqAQAAAAYGBgICAAAA"
decoded = base64.b64decode(encoded)
group_account_info = AccountInfo(fake_public_key, False, Decimal(0), fake_public_key, Decimal(0), decoded)
group = Group.parse(default_context, group_account_info)
print("\n\nThis is hard-coded, not live information!")
print(group)
print(TokenLookup.find_by_name(default_context, "ETH"))
print(TokenLookup.find_by_name(default_context, "BTC"))
# USDT
print(TokenLookup.find_by_mint(default_context, PublicKey("Es9vMFrzaCERmJfrF4H2FYD4KCoNkY11McCe8BenwNYB")))
single_account_info = AccountInfo.load(default_context, default_context.dex_program_id)
print("DEX account info", single_account_info)
multiple_account_info = AccountInfo.load_multiple(default_context, [default_context.program_id, default_context.dex_program_id])
print("Mango program and DEX account info", multiple_account_info)
balances_before = [
TokenValue(TokenLookup.find_by_name(default_context, "ETH"), Decimal(1)),
TokenValue(TokenLookup.find_by_name(default_context, "BTC"), Decimal("0.1")),
TokenValue(TokenLookup.find_by_name(default_context, "USDT"), Decimal(1000))
]
balances_after = [
TokenValue(TokenLookup.find_by_name(default_context, "ETH"), Decimal(1)),
TokenValue(TokenLookup.find_by_name(default_context, "BTC"), Decimal("0.05")),
TokenValue(TokenLookup.find_by_name(default_context, "USDT"), Decimal(2000))
]
timestamp = datetime.datetime(2021, 5, 17, 12, 20, 56)
event = LiquidationEvent(timestamp, "signature", SYSTEM_PROGRAM_ADDRESS, SYSTEM_PROGRAM_ADDRESS,
balances_before, balances_after)
print(event) | 45.498895 | 1,008 | 0.678064 | 7,149 | 61,742 | 5.620506 | 0.068961 | 0.018666 | 0.008735 | 0.009407 | 0.457878 | 0.378935 | 0.318758 | 0.283418 | 0.243623 | 0.223439 | 0 | 0.00993 | 0.223624 | 61,742 | 1,357 | 1,009 | 45.498895 | 0.827016 | 0.020942 | 0 | 0.323689 | 0 | 0.005425 | 0.134013 | 0.033164 | 0 | 0 | 0 | 0.000737 | 0.000904 | 1 | 0.120253 | false | 0 | 0.022604 | 0.047016 | 0.299277 | 0.007233 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
72ee1cbe6083bf017bca4e5b6925555840bc1de4 | 1,288 | py | Python | openstack/tests/unit/metric/v1/test_capabilities.py | teresa-ho/stx-openstacksdk | 7d723da3ffe9861e6e9abcaeadc1991689f782c5 | [
"Apache-2.0"
] | 43 | 2018-12-19T08:39:15.000Z | 2021-07-21T02:45:43.000Z | openstack/tests/unit/metric/v1/test_capabilities.py | teresa-ho/stx-openstacksdk | 7d723da3ffe9861e6e9abcaeadc1991689f782c5 | [
"Apache-2.0"
] | 11 | 2019-03-17T13:28:56.000Z | 2020-09-23T23:57:50.000Z | openstack/tests/unit/metric/v1/test_capabilities.py | teresa-ho/stx-openstacksdk | 7d723da3ffe9861e6e9abcaeadc1991689f782c5 | [
"Apache-2.0"
] | 47 | 2018-12-19T05:14:25.000Z | 2022-03-19T15:28:30.000Z | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import testtools
from openstack.metric.v1 import capabilities
BODY = {
'aggregation_methods': ['mean', 'max', 'avg'],
}
class TestCapabilities(testtools.TestCase):
def test_basic(self):
sot = capabilities.Capabilities()
self.assertEqual('/capabilities', sot.base_path)
self.assertEqual('metric', sot.service.service_type)
self.assertFalse(sot.allow_create)
self.assertTrue(sot.allow_get)
self.assertFalse(sot.allow_update)
self.assertFalse(sot.allow_delete)
self.assertFalse(sot.allow_list)
def test_make_it(self):
sot = capabilities.Capabilities(**BODY)
self.assertEqual(BODY['aggregation_methods'],
sot.aggregation_methods)
| 34.810811 | 75 | 0.714286 | 166 | 1,288 | 5.463855 | 0.572289 | 0.066152 | 0.079383 | 0.101433 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.004812 | 0.193323 | 1,288 | 36 | 76 | 35.777778 | 0.868142 | 0.401398 | 0 | 0 | 0 | 0 | 0.088274 | 0 | 0 | 0 | 0 | 0 | 0.421053 | 1 | 0.105263 | false | 0 | 0.105263 | 0 | 0.263158 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
72ef3701a3a8ef52c1a792f4ce8c00616bb47526 | 351 | py | Python | scripts/get-table-schemas.py | numankh/GRE-Vocab-Helper | c2858f3200f6d6673b1f316879e5ac482a6b7a83 | [
"MIT"
] | null | null | null | scripts/get-table-schemas.py | numankh/GRE-Vocab-Helper | c2858f3200f6d6673b1f316879e5ac482a6b7a83 | [
"MIT"
] | null | null | null | scripts/get-table-schemas.py | numankh/GRE-Vocab-Helper | c2858f3200f6d6673b1f316879e5ac482a6b7a83 | [
"MIT"
] | null | null | null | import psycopg2
from decouple import config
import pandas as pd
import dbconnect
cursor, connection = dbconnect.connect_to_db()
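# Ask INFORMATION_SCHEMA for every column in the public schema so all table layouts can be inspected with one query.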
sql = """
SELECT "table_name","column_name", "data_type", "table_schema"
FROM INFORMATION_SCHEMA.COLUMNS
WHERE "table_schema" = 'public'
ORDER BY table_name
"""
df = pd.read_sql(sql, con=connection)
print(df.to_string()) | 25.071429 | 62 | 0.77208 | 51 | 351 | 5.098039 | 0.647059 | 0.069231 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.003215 | 0.11396 | 351 | 14 | 63 | 25.071429 | 0.832797 | 0 | 0 | 0 | 0 | 0 | 0.426136 | 0.150568 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.307692 | 0 | 0.307692 | 0.076923 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 |
72ef4fcc94a467e2eb56273b32fbc169f181effc | 7,880 | py | Python | tests/test_table/test_pivot.py | andriyor/agate | 9b12d4bcc75bf3788e0774e23188f4409c3e7519 | [
"MIT"
] | 663 | 2016-02-16T13:43:00.000Z | 2022-03-13T17:21:19.000Z | tests/test_table/test_pivot.py | andriyor/agate | 9b12d4bcc75bf3788e0774e23188f4409c3e7519 | [
"MIT"
] | 347 | 2015-08-28T13:46:37.000Z | 2016-02-16T01:53:06.000Z | tests/test_table/test_pivot.py | andriyor/agate | 9b12d4bcc75bf3788e0774e23188f4409c3e7519 | [
"MIT"
] | 122 | 2016-02-23T02:43:24.000Z | 2022-03-04T17:21:14.000Z | #!/usr/bin/env python
# -*- coding: utf8 -*-
import sys
try:
from cdecimal import Decimal
except ImportError: # pragma: no cover
from decimal import Decimal
from agate import Table
from agate.aggregations import Sum
from agate.computations import Percent
from agate.data_types import Number, Text
from agate.testcase import AgateTestCase
class TestPivot(AgateTestCase):
def setUp(self):
self.rows = (
('joe', 'white', 'male', 20, 'blue'),
('jane', 'white', 'female', 20, 'blue'),
('josh', 'black', 'male', 20, 'blue'),
('jim', 'latino', 'male', 25, 'blue'),
('julia', 'white', 'female', 25, 'green'),
('joan', 'asian', 'female', 25, 'green')
)
self.number_type = Number()
self.text_type = Text()
self.column_names = ['name', 'race', 'gender', 'age', 'color']
self.column_types = [self.text_type, self.text_type, self.text_type, self.number_type, self.text_type]
def test_pivot(self):
table = Table(self.rows, self.column_names, self.column_types)
pivot_table = table.pivot('race', 'gender')
pivot_rows = (
('white', 1, 2),
('black', 1, 0),
('latino', 1, 0),
('asian', 0, 1)
)
self.assertColumnNames(pivot_table, ['race', 'male', 'female'])
self.assertRowNames(pivot_table, ['white', 'black', 'latino', 'asian'])
self.assertColumnTypes(pivot_table, [Text, Number, Number])
self.assertRows(pivot_table, pivot_rows)
def test_pivot_by_lambda(self):
table = Table(self.rows, self.column_names, self.column_types)
pivot_table = table.pivot(lambda r: r['gender'])
pivot_rows = (
('male', 3),
('female', 3)
)
self.assertColumnNames(pivot_table, ['group', 'Count'])
self.assertRowNames(pivot_table, ['male', 'female'])
self.assertColumnTypes(pivot_table, [Text, Number])
self.assertRows(pivot_table, pivot_rows)
def test_pivot_by_lambda_group_name(self):
table = Table(self.rows, self.column_names, self.column_types)
pivot_table = table.pivot(lambda r: r['gender'], key_name='gender')
pivot_rows = (
('male', 3),
('female', 3)
)
self.assertColumnNames(pivot_table, ['gender', 'Count'])
self.assertRowNames(pivot_table, ['male', 'female'])
self.assertColumnTypes(pivot_table, [Text, Number])
self.assertRows(pivot_table, pivot_rows)
def test_pivot_by_lambda_group_name_sequence_invalid(self):
table = Table(self.rows, self.column_names, self.column_types)
with self.assertRaises(ValueError):
table.pivot(['race', 'gender'], key_name='foo')
def test_pivot_no_key(self):
table = Table(self.rows, self.column_names, self.column_types)
pivot_table = table.pivot(pivot='gender')
pivot_rows = (
(3, 3),
)
self.assertColumnNames(pivot_table, ['male', 'female'])
self.assertColumnTypes(pivot_table, [Number, Number])
self.assertRows(pivot_table, pivot_rows)
def test_pivot_no_pivot(self):
table = Table(self.rows, self.column_names, self.column_types)
pivot_table = table.pivot('race')
pivot_rows = (
('white', 3),
('black', 1),
('latino', 1),
('asian', 1)
)
self.assertColumnNames(pivot_table, ['race', 'Count'])
self.assertColumnTypes(pivot_table, [Text, Number])
self.assertRows(pivot_table, pivot_rows)
def test_pivot_sum(self):
table = Table(self.rows, self.column_names, self.column_types)
pivot_table = table.pivot('race', 'gender', Sum('age'))
pivot_rows = (
('white', 20, 45),
('black', 20, 0),
('latino', 25, 0),
('asian', 0, 25)
)
self.assertColumnNames(pivot_table, ['race', 'male', 'female'])
self.assertColumnTypes(pivot_table, [Text, Number, Number])
self.assertRows(pivot_table, pivot_rows)
def test_pivot_multiple_keys(self):
table = Table(self.rows, self.column_names, self.column_types)
pivot_table = table.pivot(['race', 'gender'], 'age')
pivot_rows = (
('white', 'male', 1, 0),
('white', 'female', 1, 1),
('black', 'male', 1, 0),
('latino', 'male', 0, 1),
('asian', 'female', 0, 1),
)
self.assertRows(pivot_table, pivot_rows)
self.assertColumnNames(pivot_table, ['race', 'gender', '20', '25'])
self.assertRowNames(pivot_table, [
('white', 'male'),
('white', 'female'),
('black', 'male'),
('latino', 'male'),
('asian', 'female'),
])
self.assertColumnTypes(pivot_table, [Text, Text, Number, Number])
def test_pivot_multiple_keys_no_pivot(self):
table = Table(self.rows, self.column_names, self.column_types)
pivot_table = table.pivot(['race', 'gender'])
pivot_rows = (
('white', 'male', 1),
('white', 'female', 2),
('black', 'male', 1),
('latino', 'male', 1),
('asian', 'female', 1),
)
self.assertRows(pivot_table, pivot_rows)
self.assertColumnNames(pivot_table, ['race', 'gender', 'Count'])
self.assertColumnTypes(pivot_table, [Text, Text, Number])
def test_pivot_default_value(self):
table = Table(self.rows, self.column_names, self.column_types)
pivot_table = table.pivot('race', 'gender', default_value=None)
pivot_rows = (
('white', 1, 2),
('black', 1, None),
('latino', 1, None),
('asian', None, 1)
)
self.assertColumnNames(pivot_table, ['race', 'male', 'female'])
self.assertColumnTypes(pivot_table, [Text, Number, Number])
self.assertRows(pivot_table, pivot_rows)
def test_pivot_compute(self):
table = Table(self.rows, self.column_names, self.column_types)
pivot_table = table.pivot('gender', computation=Percent('Count'))
pivot_table.print_table(output=sys.stdout)
pivot_rows = (
('male', Decimal(50)),
('female', Decimal(50)),
)
self.assertColumnNames(pivot_table, ['gender', 'Percent'])
self.assertColumnTypes(pivot_table, [Text, Number])
self.assertRows(pivot_table, pivot_rows)
def test_pivot_compute_pivots(self):
table = Table(self.rows, self.column_names, self.column_types)
pivot_table = table.pivot('gender', 'color', computation=Percent('Count'))
pivot_table.print_table(output=sys.stdout)
pivot_rows = (
('male', Decimal(50), 0),
('female', Decimal(1) / Decimal(6) * Decimal(100), Decimal(1) / Decimal(3) * Decimal(100)),
)
self.assertColumnNames(pivot_table, ['gender', 'blue', 'green'])
self.assertColumnTypes(pivot_table, [Text, Number, Number])
self.assertRows(pivot_table, pivot_rows)
def test_pivot_compute_kwargs(self):
table = Table(self.rows, self.column_names, self.column_types)
pivot_table = table.pivot('gender', 'color', computation=Percent('Count', total=8))
pivot_table.print_table(output=sys.stdout)
pivot_rows = (
('male', Decimal(3) / Decimal(8) * Decimal(100), 0),
('female', Decimal(1) / Decimal(8) * Decimal(100), Decimal(2) / Decimal(8) * Decimal(100)),
)
self.assertColumnNames(pivot_table, ['gender', 'blue', 'green'])
self.assertColumnTypes(pivot_table, [Text, Number, Number])
self.assertRows(pivot_table, pivot_rows)
| 33.248945 | 110 | 0.583629 | 881 | 7,880 | 5.034052 | 0.112372 | 0.124014 | 0.047351 | 0.052762 | 0.747689 | 0.695603 | 0.683878 | 0.647802 | 0.636302 | 0.624803 | 0 | 0.017583 | 0.263832 | 7,880 | 236 | 111 | 33.389831 | 0.74694 | 0.00736 | 0 | 0.356322 | 0 | 0 | 0.096048 | 0 | 0 | 0 | 0 | 0 | 0.235632 | 1 | 0.08046 | false | 0 | 0.051724 | 0 | 0.137931 | 0.017241 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
72f29f7ed6f48568758a4eb5e3565edf5506bbba | 1,332 | py | Python | test_impartial.py | georg-wolflein/impartial | a53819cefcb74a57e3c1148a6b8fa88aed9264d4 | [
"Apache-2.0"
] | null | null | null | test_impartial.py | georg-wolflein/impartial | a53819cefcb74a57e3c1148a6b8fa88aed9264d4 | [
"Apache-2.0"
] | null | null | null | test_impartial.py | georg-wolflein/impartial | a53819cefcb74a57e3c1148a6b8fa88aed9264d4 | [
"Apache-2.0"
] | null | null | null | from functools import partial
from impartial import impartial
def f(x: int, y: int, z: int = 0) -> int:
return x + 2*y + z
def test_simple_call_args():
assert impartial(f, 1)(2) == f(1, 2)
def test_simple_call_kwargs():
assert impartial(f, y=2)(x=1) == f(1, 2)
def test_simple_call_empty():
assert impartial(f, 1, y=2)() == f(1, 2)
def test_decorator():
@impartial
def f(x, y):
return x + 2*y
assert f.with_y(2)(1) == 5
def test_func():
assert impartial(f, 1).func is f
def test_with_kwargs():
assert impartial(f, 1).with_z(3)(2) == f(1, 2, 3)
def test_multiple_with_kwargs():
assert impartial(f, 1).with_z(3).with_y(2)() == f(1, 2, 3)
def test_with_kwargs_override():
assert impartial(f, 1, 2).with_z(3).with_z(4)() == f(1, 2, 4)
def test_nested_impartial():
imp = impartial(f, x=1, y=2)
imp = impartial(imp, x=2)
imp = impartial(imp, x=3)
assert imp() == f(3, 2)
assert not isinstance(imp.func, impartial)
assert imp.func is f
def test_nested_partial():
imp = partial(f, x=1, y=2)
imp = partial(imp, x=2)
imp = impartial(imp, x=3)
assert imp() == f(3, 2)
assert not isinstance(imp.func, partial)
assert imp.func is f
def test_configure():
assert impartial(f, 1, z=2).configure(2, z=3)() == f(1, 2, 3)
| 20.492308 | 65 | 0.61036 | 240 | 1,332 | 3.2625 | 0.15 | 0.03576 | 0.034483 | 0.15198 | 0.490421 | 0.413793 | 0.378033 | 0.237548 | 0.237548 | 0.153257 | 0 | 0.054966 | 0.221471 | 1,332 | 64 | 66 | 20.8125 | 0.700096 | 0 | 0 | 0.153846 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.384615 | 1 | 0.333333 | false | 0 | 0.051282 | 0.051282 | 0.435897 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
72f43506a3e179e12b61e504fc43770a91f14bf0 | 5,076 | py | Python | manager.py | smilechaser/screeps-script-caddy | 11b6e809675dfd0a5a4ff917a492adc4a5a08bca | [
"MIT"
] | 2 | 2016-02-23T09:50:15.000Z | 2016-02-28T22:08:03.000Z | manager.py | smilechaser/screeps-script-caddy | 11b6e809675dfd0a5a4ff917a492adc4a5a08bca | [
"MIT"
] | null | null | null | manager.py | smilechaser/screeps-script-caddy | 11b6e809675dfd0a5a4ff917a492adc4a5a08bca | [
"MIT"
] | null | null | null | '''
Python script for uploading/downloading scripts for use with the game Screeps.
http://support.screeps.com/hc/en-us/articles/203022612-Commiting-scripts-using-direct-API-access
Usage:
#
# general help/usage
#
python3 manager.py --help
#
# retrieve all scripts from the game and store them
# in the folder "some_folder"
#
python3 manager.py from_game some_folder
#
# send all *.js files to the game
#
python3 manager.py to_game some_folder
WARNING: Use at your own risk! Make backups of all your game content!
'''
import sys
import os
import argparse
import json
import requests
from requests.auth import HTTPBasicAuth
SCREEPS_ENDPOINT = 'https://screeps.com/api/user/code'
USER_ENV = 'SCREEPS_USER'
PASSWORD_ENV = 'SCREEPS_PASSWORD'
TO_SCREEPS = 'to_game'
FROM_SCREEPS = 'from_game'
def get_user_from_env():
    user = os.environ.get(USER_ENV)
if not user:
print('You must provide a username, i.e. export '
'{}=<your email address>'.
format(USER_ENV))
sys.exit()
return user
def get_password_from_env():
    password = os.environ.get(PASSWORD_ENV)
if not password:
print('You must provide a password, i.e. export {}=<your password>'.
format(PASSWORD_ENV))
sys.exit()
return password
def get_data(user, password):
print('Retrieving data...')
response = requests.get(SCREEPS_ENDPOINT,
auth=HTTPBasicAuth(user, password))
response.raise_for_status()
data = response.json()
if data['ok'] != 1:
raise Exception()
return data
def send_data(user, password, modules):
auth = HTTPBasicAuth(user, password)
headers = {'Content-Type': 'application/json; charset=utf-8'}
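    # The request body maps each module name to its JavaScript source under a single "modules" key.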
data = {'modules': modules}
resp = requests.post(SCREEPS_ENDPOINT,
data=json.dumps(data),
headers=headers,
auth=auth)
resp.raise_for_status()
def check_for_collisions(target_folder, modules):
for module in modules:
target = os.path.join(target_folder, '{}.js'.format(module))
if os.path.exists(target):
print('File {} exists.'.format(target))
print('Specify --force to overwrite. Aborting...')
sys.exit()
def main():
parser = argparse.ArgumentParser(description='')
parser.add_argument('operation',
choices=(TO_SCREEPS, FROM_SCREEPS),
help='')
parser.add_argument('destination', help='')
parser.add_argument('--user', help='')
parser.add_argument('--password', help='')
parser.add_argument('--force', action='store_const', const=True,
help='force overwrite of files in an existing folder')
parser.add_argument('--merge', action='store_const', const=True,
help='merge scripts into a single main.js module')
args = parser.parse_args()
user = args.user if args.user else get_user_from_env()
password = args.password if args.password else get_password_from_env()
target_folder = os.path.abspath(args.destination)
if args.operation == FROM_SCREEPS:
data = get_data(user, password)
# does the folder exist?
if not os.path.isdir(target_folder):
# no - create it
print('Creating new folder "{}"...'.format(target_folder))
os.makedirs(target_folder)
else:
# yes - check for collisions (unless --force was specified)
if not args.force:
print('Checking for collisions...')
check_for_collisions(target_folder, data['modules'])
print('Ok, no collisions.')
# for each module, create a corresponding filename and put it in
# the target folder
for module in data['modules']:
target = os.path.join(target_folder, '{}.js'.format(module))
with open(target, 'w') as fout:
fout.write(data['modules'][module])
else:
modules = {}
for root, folders, files in os.walk(target_folder):
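            # Emptying "folders" in place stops os.walk() from descending into subdirectories.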
folders[:] = []
for target_file in files:
name, ext = os.path.splitext(target_file)
if ext != '.js':
continue
with open(os.path.join(root, target_file), 'r') as fin:
modules[name] = fin.read()
if args.merge:
merge_modules(modules)
# upload modules
send_data(user, password, modules)
def generate_header(filename):
return '''
// {border}
// {name}
// {border}
'''.format(border='-' * 25, name=filename)
def merge_modules(modules):
keys = [x for x in modules.keys()]
keys.sort()
merged = ''
for key in keys:
merged = merged + generate_header(key) + modules[key]
        del modules[key]
modules['main.js'] = merged
if __name__ == '__main__':
main()
| 22.460177 | 96 | 0.597715 | 602 | 5,076 | 4.906977 | 0.299003 | 0.040623 | 0.034529 | 0.028436 | 0.104942 | 0.05281 | 0.033175 | 0.033175 | 0.033175 | 0.033175 | 0 | 0.00441 | 0.285264 | 5,076 | 225 | 97 | 22.56 | 0.809813 | 0.149921 | 0 | 0.081818 | 0 | 0 | 0.156243 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.072727 | false | 0.136364 | 0.054545 | 0.009091 | 0.163636 | 0.072727 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
72f4405126d83aa638993123007b34b00b84222c | 289 | py | Python | contact.py | Nemfeto/python_training | 4d04f07700da4b0d5b50736ba197ad85fd2ee549 | [
"Apache-2.0"
] | null | null | null | contact.py | Nemfeto/python_training | 4d04f07700da4b0d5b50736ba197ad85fd2ee549 | [
"Apache-2.0"
] | null | null | null | contact.py | Nemfeto/python_training | 4d04f07700da4b0d5b50736ba197ad85fd2ee549 | [
"Apache-2.0"
] | null | null | null | class Contact:
def __init__(self, first_name, last_name, nickname, address, mobile, email):
self.first_name = first_name
self.last_name = last_name
self.nickname = nickname
self.address = address
self.mobile = mobile
self.email = email
| 28.9 | 80 | 0.643599 | 35 | 289 | 5.028571 | 0.342857 | 0.153409 | 0.147727 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.280277 | 289 | 9 | 81 | 32.111111 | 0.846154 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.125 | false | 0 | 0 | 0 | 0.25 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
72f84712a4005f1ecc74d20ce01f90b1d0a8a90c | 237 | py | Python | tests/test_geometry.py | resurtm/wvflib | 106f426cc2c63c8d21f3e0ec1b90b06450dfc547 | [
"MIT"
] | 1 | 2020-08-14T20:59:54.000Z | 2020-08-14T20:59:54.000Z | tests/test_geometry.py | resurtm/wvflib | 106f426cc2c63c8d21f3e0ec1b90b06450dfc547 | [
"MIT"
] | 3 | 2020-03-31T11:16:01.000Z | 2022-03-01T01:40:38.000Z | tests/test_geometry.py | resurtm/wvflib | 106f426cc2c63c8d21f3e0ec1b90b06450dfc547 | [
"MIT"
] | 3 | 2020-01-24T11:10:46.000Z | 2020-03-31T11:24:34.000Z | import unittest
from wvflib.geometry import Face
class TestGeometry(unittest.TestCase):
def test_constructor(self):
f = Face()
self.assertTrue(len(f.vertices) == 0)
if __name__ == '__main__':
unittest.main()
| 16.928571 | 45 | 0.675105 | 28 | 237 | 5.392857 | 0.75 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.005348 | 0.21097 | 237 | 13 | 46 | 18.230769 | 0.802139 | 0 | 0 | 0 | 0 | 0 | 0.033755 | 0 | 0 | 0 | 0 | 0 | 0.125 | 1 | 0.125 | false | 0 | 0.25 | 0 | 0.5 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
72fb29b0b3b127d1a4779c19adfdd5ba81413ede | 2,057 | py | Python | stix2/__init__.py | khdesai/cti-python-stix2 | 20a9bb316c43b7d9faaab686db8d51e5c89416da | [
"BSD-3-Clause"
] | null | null | null | stix2/__init__.py | khdesai/cti-python-stix2 | 20a9bb316c43b7d9faaab686db8d51e5c89416da | [
"BSD-3-Clause"
] | null | null | null | stix2/__init__.py | khdesai/cti-python-stix2 | 20a9bb316c43b7d9faaab686db8d51e5c89416da | [
"BSD-3-Clause"
] | null | null | null | """Python APIs for STIX 2.
.. autosummary::
:toctree: api
confidence
datastore
environment
equivalence
exceptions
markings
parsing
pattern_visitor
patterns
properties
serialization
utils
v20
v21
versioning
workbench
"""
# flake8: noqa
DEFAULT_VERSION = '2.1' # Default version will always be the latest STIX 2.X version
from .confidence import scales
from .datastore import CompositeDataSource
from .datastore.filesystem import (
FileSystemSink, FileSystemSource, FileSystemStore,
)
from .datastore.filters import Filter
from .datastore.memory import MemorySink, MemorySource, MemoryStore
from .datastore.taxii import (
TAXIICollectionSink, TAXIICollectionSource, TAXIICollectionStore,
)
from .environment import Environment, ObjectFactory
from .markings import (
add_markings, clear_markings, get_markings, is_marked, remove_markings,
set_markings,
)
from .parsing import _collect_stix2_mappings, parse, parse_observable
from .patterns import (
AndBooleanExpression, AndObservationExpression, BasicObjectPathComponent,
BinaryConstant, BooleanConstant, EqualityComparisonExpression,
FloatConstant, FollowedByObservationExpression,
GreaterThanComparisonExpression, GreaterThanEqualComparisonExpression,
HashConstant, HexConstant, InComparisonExpression, IntegerConstant,
IsSubsetComparisonExpression, IsSupersetComparisonExpression,
LessThanComparisonExpression, LessThanEqualComparisonExpression,
LikeComparisonExpression, ListConstant, ListObjectPathComponent,
MatchesComparisonExpression, ObjectPath, ObservationExpression,
OrBooleanExpression, OrObservationExpression, ParentheticalExpression,
QualifiedObservationExpression, ReferenceObjectPathComponent,
RepeatQualifier, StartStopQualifier, StringConstant, TimestampConstant,
WithinQualifier,
)
from .v21 import * # This import will always be the latest STIX 2.X version
from .version import __version__
from .versioning import new_version, revoke
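# Build the per-version registry of STIX object classes used by parse() to dispatch on type and spec version.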
_collect_stix2_mappings()
| 31.646154 | 85 | 0.808459 | 169 | 2,057 | 9.721893 | 0.615385 | 0.039562 | 0.014607 | 0.018259 | 0.046257 | 0.046257 | 0.046257 | 0.046257 | 0.046257 | 0.046257 | 0 | 0.007928 | 0.141468 | 2,057 | 64 | 86 | 32.140625 | 0.922424 | 0.191055 | 0 | 0 | 0 | 0 | 0.001814 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.361111 | 0 | 0.361111 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 |
72fd07154cf4859fb20d5f7aa637f41a882f2a27 | 584 | py | Python | UMSLHackRestAPI/api/urls.py | trujivan/climate-impact-changes | 609b8197b0ede1c1fdac3aa82b34e73e6f4526e3 | [
"MIT"
] | 1 | 2020-03-29T17:52:26.000Z | 2020-03-29T17:52:26.000Z | UMSLHackRestAPI/api/urls.py | trujivan/climate-impact-changes | 609b8197b0ede1c1fdac3aa82b34e73e6f4526e3 | [
"MIT"
] | 6 | 2021-03-19T00:01:21.000Z | 2021-09-22T18:37:17.000Z | UMSLHackRestAPI/api/urls.py | trujivan/climate-impact-changes | 609b8197b0ede1c1fdac3aa82b34e73e6f4526e3 | [
"MIT"
] | null | null | null | from django.urls import path, include
from .views import main_view, PredictionView
#router = routers.DefaultRouter(trailing_slash=False)
#router.register('years', YearView, basename='years')
#router.register('predict', PredictionView, basename='predict')
urlpatterns = [
#path('api/', get_dummy_data),
#path('pollution/predict', get_prediction, name='test_predict'),
#path('myform/', api_form_view, name='year_form'),
#path('api/', include(router.urls)),
path(r'', main_view, name="main"),
path(r'api/v1/predict', PredictionView.as_view(), name='predict')
] | 36.5 | 69 | 0.714041 | 73 | 584 | 5.561644 | 0.493151 | 0.059113 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.001927 | 0.111301 | 584 | 16 | 70 | 36.5 | 0.780347 | 0.585616 | 0 | 0 | 0 | 0 | 0.105932 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.333333 | 0 | 0.333333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 |
f4030f6d52f16b8e41c89e74609c247cf9d493ab | 864 | py | Python | cattr/__init__.py | bluetech/cattrs | be438d5566bd308b584359a9b0011a7bd0006b06 | [
"MIT"
] | 1 | 2021-07-07T12:24:58.000Z | 2021-07-07T12:24:58.000Z | cattr/__init__.py | bluetech/cattrs | be438d5566bd308b584359a9b0011a7bd0006b06 | [
"MIT"
] | null | null | null | cattr/__init__.py | bluetech/cattrs | be438d5566bd308b584359a9b0011a7bd0006b06 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
from .converters import Converter, UnstructureStrategy
__all__ = ('global_converter', 'unstructure', 'structure',
'structure_attrs_fromtuple', 'structure_attrs_fromdict',
'UnstructureStrategy')
__author__ = 'Tin Tvrtković'
__email__ = 'tinchester@gmail.com'
global_converter = Converter()
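# The module-level helpers below are simply the bound methods of this shared converter instance.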
unstructure = global_converter.unstructure
structure = global_converter.structure
structure_attrs_fromtuple = global_converter.structure_attrs_fromtuple
structure_attrs_fromdict = global_converter.structure_attrs_fromdict
register_structure_hook = global_converter.register_structure_hook
register_structure_hook_func = global_converter.register_structure_hook_func
register_unstructure_hook = global_converter.register_unstructure_hook
register_unstructure_hook_func = \
global_converter.register_unstructure_hook_func
| 37.565217 | 76 | 0.834491 | 90 | 864 | 7.411111 | 0.277778 | 0.224888 | 0.125937 | 0.104948 | 0.38081 | 0.134933 | 0 | 0 | 0 | 0 | 0 | 0.001282 | 0.097222 | 864 | 22 | 77 | 39.272727 | 0.853846 | 0.024306 | 0 | 0 | 0 | 0 | 0.162901 | 0.058264 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.0625 | 0 | 0.0625 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
f40992ff6f047f5e4c5a436cd251bdd645155f4b | 424 | py | Python | sample_project/sample_content/serializers.py | zentrumnawi/solid-backend | 0a6ac51608d4c713903856bb9b0cbf0068aa472c | [
"MIT"
] | 1 | 2021-01-24T11:54:01.000Z | 2021-01-24T11:54:01.000Z | sample_project/sample_content/serializers.py | zentrumnawi/solid-backend | 0a6ac51608d4c713903856bb9b0cbf0068aa472c | [
"MIT"
] | 112 | 2020-04-22T10:07:03.000Z | 2022-03-29T15:25:26.000Z | sample_project/sample_content/serializers.py | zentrumnawi/solid-backend | 0a6ac51608d4c713903856bb9b0cbf0068aa472c | [
"MIT"
] | null | null | null | from rest_framework import serializers
from solid_backend.photograph.serializers import PhotographSerializer
from solid_backend.media_object.serializers import MediaObjectSerializer
from .models import SampleProfile
class SampleProfileSerializer(serializers.ModelSerializer):
media_objects = MediaObjectSerializer(many=True)
class Meta:
model = SampleProfile
fields = "__all__"
depth = 1
| 28.266667 | 72 | 0.794811 | 41 | 424 | 8 | 0.634146 | 0.054878 | 0.097561 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.002809 | 0.160377 | 424 | 14 | 73 | 30.285714 | 0.918539 | 0 | 0 | 0 | 0 | 0 | 0.016509 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.4 | 0 | 0.7 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 |
f40f0e5d0f6c305a62e87232ab24691dc4b36cbe | 4,053 | py | Python | DEMs/denmark/download_dk_dem.py | PeterFogh/digital_elevation_model_use_cases | 0e72cc6238ca5217a73d06dc3e8c3229024112c3 | [
"MIT"
] | null | null | null | DEMs/denmark/download_dk_dem.py | PeterFogh/digital_elevation_model_use_cases | 0e72cc6238ca5217a73d06dc3e8c3229024112c3 | [
"MIT"
] | null | null | null | DEMs/denmark/download_dk_dem.py | PeterFogh/digital_elevation_model_use_cases | 0e72cc6238ca5217a73d06dc3e8c3229024112c3 | [
"MIT"
] | null | null | null | """
Fetch all files from Kortforsyningen FTP server folder.
Copyright (c) 2021 Peter Fogh
See also command line alternative in `download_dk_dem.sh`
"""
from ftplib import FTP, error_perm
import os
from pathlib import Path
import time
import operator
import functools
import shutil
# TODO: use logging to std instead of print(time.ctime())
from environs import Env
# Functions
def download_FTP_tree(ftp, remote_dir, local_dir):
"""
Download FTP directory and all content to local directory.
Inspired by https://stackoverflow.com/a/55127679/7796217.
Parameters:
ftp : ftplib.FTP
Established FTP connection after login.
remote_dir : pathlib.Path
FTP directory to download.
local_dir : pathlib.Path
Local directory to store downloaded content.
"""
# Set up empty local dir and FTP current work dir before tree traversal.
    shutil.rmtree(local_dir, ignore_errors=True)  # tolerate a missing directory on the first run
ftp.cwd(remote_dir.parent.as_posix())
local_dir.mkdir(parents=True, exist_ok=True)
return _recursive_download_FTP_tree(ftp, remote_dir, local_dir)
def _is_ftp_dir(ftp, name):
"""
Check if FTP entry is a directory.
    Modified from https://www.daniweb.com/programming/software-development/threads/243712/ftplib-isdir-or-isfile
    so that it also works when not in the top-level directory.
Parameters:
ftp : ftplib.FTP
Established FTP connection after login.
name: str
Name of FTP file system entry to check if directory or not.
"""
try:
current_dir = ftp.pwd()
ftp.cwd(name)
#print(f'File system entry "{name=}" is a directory.')
ftp.cwd(current_dir)
return True
except error_perm as e:
#print(f'File system entry "{name=}" is a file.')
return False
def _recursive_download_FTP_tree(ftp, remote_dir, local_dir):
"""
Download FTP directory and all content to local directory.
Inspired by https://stackoverflow.com/a/55127679/7796217.
Parameters:
ftp : ftplib.FTP
Established FTP connection after login.
remote_dir : pathlib.Path
FTP directory to download.
local_dir : pathlib.Path
Local directory to store downloaded content.
"""
print(f'{remote_dir=}')
print(f'{local_dir=}')
ftp.cwd(remote_dir.name)
local_dir.mkdir(exist_ok=True)
print(f'{time.ctime()}: Fetching file & directory names within "{remote_dir}".')
dir_entries = ftp.nlst()
print(f'{time.ctime()}: Fetched file & directory names within "{remote_dir}".')
dirs = []
    for filename in sorted(dir_entries)[-5:]:  # TODO: remove this cap on the number of entries downloaded
if _is_ftp_dir(ftp, filename):
dirs.append(filename)
else:
local_file = local_dir/filename
print(f'{time.ctime()}: Downloading "{local_file}".')
ftp.retrbinary(
cmd=f'RETR {filename}',
callback=local_file.open('wb').write)
print(f'{time.ctime()}: Downloaded "{local_file}".')
print(f'Traverse dir tree to "{dirs=}"')
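    # map() is lazy, so the reduce() below is what actually drives the recursion into each subdirectory and ANDs the per-directory results together.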
map_download_FTP_tree = map(lambda dir: _recursive_download_FTP_tree(
ftp, remote_dir/dir, local_dir/dir), dirs)
return functools.reduce(operator.iand, map_download_FTP_tree, True)
if __name__ == '__main__':
# Load environment variables from local `.env` file.
env = Env()
env.read_env()
# Set up server and source/destination paths.
ftp_host = 'ftp.kortforsyningen.dk'
dem_ftp_dir = Path('dhm_danmarks_hoejdemodel/DTM')
local_ftp_dir = env.path('LOCAL_FTP_DIR', './')
local_dem_ftp_dir = local_ftp_dir/'kortforsyningen'/dem_ftp_dir
# Perform FTP download.
print(f'{time.ctime()}: Connect to {ftp_host}')
ftp = FTP(ftp_host)
ftp.login(env('KORTFORSYNING_USERNAME'), env('KORTFORSYNING_PASSWORD'))
download_FTP_tree(ftp, dem_ftp_dir, local_dem_ftp_dir)
ftp.close()
print(f'{time.ctime()}: Finished')
| 32.166667 | 117 | 0.66642 | 540 | 4,053 | 4.818519 | 0.314815 | 0.036895 | 0.040354 | 0.034589 | 0.355496 | 0.355496 | 0.297079 | 0.283244 | 0.261722 | 0.219831 | 0 | 0.013162 | 0.231434 | 4,053 | 125 | 118 | 32.424 | 0.822151 | 0.400691 | 0 | 0 | 0 | 0 | 0.215134 | 0.041355 | 0 | 0 | 0 | 0.016 | 0 | 1 | 0.052632 | false | 0.017544 | 0.140351 | 0 | 0.263158 | 0.157895 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
f4166388f315b81cfe6df485234fcfe561b8ac22 | 251 | py | Python | src/ychaos/utils/types.py | vanderh0ff/ychaos | 5148c889912b744ee73907e4dd30c9ddb851aeb3 | [
"Apache-2.0"
] | 8 | 2021-07-21T15:37:48.000Z | 2022-03-03T14:43:09.000Z | src/ychaos/utils/types.py | vanderh0ff/ychaos | 5148c889912b744ee73907e4dd30c9ddb851aeb3 | [
"Apache-2.0"
] | 102 | 2021-07-20T16:08:29.000Z | 2022-03-25T07:28:37.000Z | src/ychaos/utils/types.py | vanderh0ff/ychaos | 5148c889912b744ee73907e4dd30c9ddb851aeb3 | [
"Apache-2.0"
] | 8 | 2021-07-20T13:37:46.000Z | 2022-02-18T01:44:52.000Z | from typing import Dict, List, TypeVar, Union
JsonTypeVar = TypeVar("JsonTypeVar")
JsonPrimitive = Union[str, float, int, bool, None]
JsonDict = Dict[str, JsonTypeVar]
JsonArray = List[JsonTypeVar]
Json = Union[JsonPrimitive, JsonDict, JsonArray]
| 22.818182 | 50 | 0.760956 | 29 | 251 | 6.586207 | 0.586207 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.131474 | 251 | 10 | 51 | 25.1 | 0.876147 | 0 | 0 | 0 | 0 | 0 | 0.043825 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.166667 | 0 | 0.166667 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
f420912bbaeaef68549b8a153f2087a527d8302c | 475 | py | Python | example/example/urls.py | pmaccamp/django-tastypie-swagger | d51ef3ea8e33791617edba8ed55a1be1f16e4ccc | [
"Apache-2.0"
] | 2 | 2020-04-13T13:26:42.000Z | 2021-10-30T17:56:15.000Z | example/example/urls.py | pmaccamp/django-tastypie-swagger | d51ef3ea8e33791617edba8ed55a1be1f16e4ccc | [
"Apache-2.0"
] | null | null | null | example/example/urls.py | pmaccamp/django-tastypie-swagger | d51ef3ea8e33791617edba8ed55a1be1f16e4ccc | [
"Apache-2.0"
] | 5 | 2020-04-15T07:05:13.000Z | 2021-11-01T20:36:10.000Z | from django.conf.urls import include, url
from django.contrib import admin
from demo.apis import api
urlpatterns = [
url(r'^api/', include(api.urls)),
url(r'^api/doc/', include(('tastypie_swagger.urls', 'tastypie_swagger'),
namespace='demo_api_swagger'),
kwargs={
"tastypie_api_module":"demo.apis.api",
"namespace":"demo_api_swagger",
"version": "0.1"}
),
url(r'^admin/', admin.site.urls),
]
| 29.6875 | 76 | 0.6 | 58 | 475 | 4.775862 | 0.413793 | 0.043321 | 0.050542 | 0.166065 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.00554 | 0.24 | 475 | 15 | 77 | 31.666667 | 0.761773 | 0 | 0 | 0 | 0 | 0 | 0.296842 | 0.044211 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.214286 | 0 | 0.214286 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
f42643ddcdfa49204eb89ec1d689fa4a85b4b22e | 38,947 | py | Python | rpython/jit/backend/llsupport/test/test_rewrite.py | jptomo/pypy-lang-scheme | 55edb2cec69d78f86793282a4566fcbc1ef9fcac | [
"MIT"
] | 1 | 2019-11-25T10:52:01.000Z | 2019-11-25T10:52:01.000Z | rpython/jit/backend/llsupport/test/test_rewrite.py | jptomo/pypy-lang-scheme | 55edb2cec69d78f86793282a4566fcbc1ef9fcac | [
"MIT"
] | null | null | null | rpython/jit/backend/llsupport/test/test_rewrite.py | jptomo/pypy-lang-scheme | 55edb2cec69d78f86793282a4566fcbc1ef9fcac | [
"MIT"
] | null | null | null | from rpython.jit.backend.llsupport.descr import get_size_descr,\
get_field_descr, get_array_descr, ArrayDescr, FieldDescr,\
SizeDescr, get_interiorfield_descr
from rpython.jit.backend.llsupport.gc import GcLLDescr_boehm,\
GcLLDescr_framework
from rpython.jit.backend.llsupport import jitframe
from rpython.jit.metainterp.gc import get_description
from rpython.jit.tool.oparser import parse
from rpython.jit.metainterp.optimizeopt.util import equaloplists
from rpython.jit.metainterp.history import JitCellToken, FLOAT
from rpython.jit.metainterp.history import AbstractFailDescr
from rpython.rtyper.lltypesystem import lltype, rffi
from rpython.rtyper import rclass
from rpython.jit.backend.x86.arch import WORD
class Evaluator(object):
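    # Allows "to_operations % Evaluator(namespace)" to evaluate arbitrary Python expressions inside %(...)s placeholders against the test namespace.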
def __init__(self, scope):
self.scope = scope
def __getitem__(self, key):
return eval(key, self.scope)
class FakeLoopToken(object):
pass
o_vtable = lltype.malloc(rclass.OBJECT_VTABLE, immortal=True)
class RewriteTests(object):
def check_rewrite(self, frm_operations, to_operations, **namespace):
S = lltype.GcStruct('S', ('x', lltype.Signed),
('y', lltype.Signed))
sdescr = get_size_descr(self.gc_ll_descr, S)
sdescr.tid = 1234
#
T = lltype.GcStruct('T', ('y', lltype.Signed),
('z', lltype.Ptr(S)),
('t', lltype.Signed))
tdescr = get_size_descr(self.gc_ll_descr, T)
tdescr.tid = 5678
tzdescr = get_field_descr(self.gc_ll_descr, T, 'z')
#
A = lltype.GcArray(lltype.Signed)
adescr = get_array_descr(self.gc_ll_descr, A)
adescr.tid = 4321
alendescr = adescr.lendescr
#
B = lltype.GcArray(lltype.Char)
bdescr = get_array_descr(self.gc_ll_descr, B)
bdescr.tid = 8765
blendescr = bdescr.lendescr
#
C = lltype.GcArray(lltype.Ptr(S))
cdescr = get_array_descr(self.gc_ll_descr, C)
cdescr.tid = 8111
clendescr = cdescr.lendescr
#
E = lltype.GcStruct('Empty')
edescr = get_size_descr(self.gc_ll_descr, E)
edescr.tid = 9000
#
vtable_descr = self.gc_ll_descr.fielddescr_vtable
O = lltype.GcStruct('O', ('parent', rclass.OBJECT),
('x', lltype.Signed))
o_descr = self.cpu.sizeof(O, True)
o_vtable = globals()['o_vtable']
#
tiddescr = self.gc_ll_descr.fielddescr_tid
wbdescr = self.gc_ll_descr.write_barrier_descr
WORD = globals()['WORD']
#
strdescr = self.gc_ll_descr.str_descr
unicodedescr = self.gc_ll_descr.unicode_descr
strlendescr = strdescr.lendescr
unicodelendescr = unicodedescr.lendescr
strhashdescr = self.gc_ll_descr.str_hash_descr
unicodehashdescr = self.gc_ll_descr.unicode_hash_descr
casmdescr = JitCellToken()
clt = FakeLoopToken()
clt._ll_initial_locs = [0, 8]
frame_info = lltype.malloc(jitframe.JITFRAMEINFO, flavor='raw')
clt.frame_info = frame_info
frame_info.jfi_frame_depth = 13
frame_info.jfi_frame_size = 255
framedescrs = self.gc_ll_descr.getframedescrs(self.cpu)
framelendescr = framedescrs.arraydescr.lendescr
jfi_frame_depth = framedescrs.jfi_frame_depth
jfi_frame_size = framedescrs.jfi_frame_size
jf_frame_info = framedescrs.jf_frame_info
jf_savedata = framedescrs.jf_savedata
jf_force_descr = framedescrs.jf_force_descr
jf_descr = framedescrs.jf_descr
jf_guard_exc = framedescrs.jf_guard_exc
jf_forward = framedescrs.jf_forward
jf_extra_stack_depth = framedescrs.jf_extra_stack_depth
signedframedescr = self.cpu.signedframedescr
floatframedescr = self.cpu.floatframedescr
casmdescr.compiled_loop_token = clt
#
guarddescr = AbstractFailDescr()
#
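        # Expose every descriptor defined above to the oparser so the operation strings can reference them by name.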
namespace.update(locals())
#
for funcname in self.gc_ll_descr._generated_functions:
namespace[funcname] = self.gc_ll_descr.get_malloc_fn(funcname)
namespace[funcname + '_descr'] = getattr(self.gc_ll_descr,
'%s_descr' % funcname)
#
ops = parse(frm_operations, namespace=namespace)
expected = parse(to_operations % Evaluator(namespace),
namespace=namespace)
operations = self.gc_ll_descr.rewrite_assembler(self.cpu,
ops.operations,
[])
remap = {}
for a, b in zip(ops.inputargs, expected.inputargs):
remap[b] = a
equaloplists(operations, expected.operations, remap=remap)
lltype.free(frame_info, flavor='raw')
class FakeTracker(object):
pass
class BaseFakeCPU(object):
JITFRAME_FIXED_SIZE = 0
def __init__(self):
self.tracker = FakeTracker()
self._cache = {}
self.signedframedescr = ArrayDescr(3, 8, FieldDescr('len', 0, 0, 0), 0)
self.floatframedescr = ArrayDescr(5, 8, FieldDescr('len', 0, 0, 0), 0)
def getarraydescr_for_frame(self, tp):
if tp == FLOAT:
return self.floatframedescr
return self.signedframedescr
def unpack_arraydescr_size(self, d):
return 0, d.itemsize, 0
def unpack_fielddescr(self, d):
return d.offset
def arraydescrof(self, ARRAY):
try:
return self._cache[ARRAY]
except KeyError:
r = ArrayDescr(1, 2, FieldDescr('len', 0, 0, 0), 0)
self._cache[ARRAY] = r
return r
def fielddescrof(self, STRUCT, fname):
key = (STRUCT, fname)
try:
return self._cache[key]
except KeyError:
r = FieldDescr(fname, 1, 1, 1)
self._cache[key] = r
return r
class TestBoehm(RewriteTests):
def setup_method(self, meth):
class FakeCPU(BaseFakeCPU):
def sizeof(self, STRUCT, is_object):
assert is_object
return SizeDescr(102, gc_fielddescrs=[],
vtable=o_vtable)
self.cpu = FakeCPU()
self.gc_ll_descr = GcLLDescr_boehm(None, None, None)
def test_new(self):
self.check_rewrite("""
[]
p0 = new(descr=sdescr)
jump()
""", """
[p1]
p0 = call_malloc_gc(ConstClass(malloc_fixedsize), %(sdescr.size)d,\
descr=malloc_fixedsize_descr)
jump()
""")
def test_no_collapsing(self):
self.check_rewrite("""
[]
p0 = new(descr=sdescr)
p1 = new(descr=sdescr)
jump()
""", """
[]
p0 = call_malloc_gc(ConstClass(malloc_fixedsize), %(sdescr.size)d,\
descr=malloc_fixedsize_descr)
p1 = call_malloc_gc(ConstClass(malloc_fixedsize), %(sdescr.size)d,\
descr=malloc_fixedsize_descr)
jump()
""")
def test_new_array_fixed(self):
self.check_rewrite("""
[]
p0 = new_array(10, descr=adescr)
jump()
""", """
[]
p0 = call_malloc_gc(ConstClass(malloc_array), \
%(adescr.basesize)d, \
10, \
%(adescr.itemsize)d, \
%(adescr.lendescr.offset)d, \
descr=malloc_array_descr)
jump()
""")
## should ideally be:
## p0 = call_malloc_gc(ConstClass(malloc_fixedsize), \
## %(adescr.basesize + 10 * adescr.itemsize)d, \
## descr=malloc_fixedsize_descr)
## setfield_gc(p0, 10, descr=alendescr)
def test_new_array_variable(self):
self.check_rewrite("""
[i1]
p0 = new_array(i1, descr=adescr)
jump()
""", """
[i1]
p0 = call_malloc_gc(ConstClass(malloc_array), \
%(adescr.basesize)d, \
i1, \
%(adescr.itemsize)d, \
%(adescr.lendescr.offset)d, \
descr=malloc_array_descr)
jump()
""")
def test_new_with_vtable(self):
self.check_rewrite("""
[]
p0 = new_with_vtable(descr=o_descr)
jump()
""", """
[p1]
p0 = call_malloc_gc(ConstClass(malloc_fixedsize), 102, \
descr=malloc_fixedsize_descr)
setfield_gc(p0, ConstClass(o_vtable), descr=vtable_descr)
jump()
""")
def test_newstr(self):
self.check_rewrite("""
[i1]
p0 = newstr(i1)
jump()
""", """
[i1]
p0 = call_malloc_gc(ConstClass(malloc_array), \
%(strdescr.basesize)d, \
i1, \
%(strdescr.itemsize)d, \
%(strlendescr.offset)d, \
descr=malloc_array_descr)
jump()
""")
def test_newunicode(self):
self.check_rewrite("""
[i1]
p0 = newunicode(10)
jump()
""", """
[i1]
p0 = call_malloc_gc(ConstClass(malloc_array), \
%(unicodedescr.basesize)d, \
10, \
%(unicodedescr.itemsize)d, \
%(unicodelendescr.offset)d, \
descr=malloc_array_descr)
jump()
""")
## should ideally be:
## p0 = call_malloc_gc(ConstClass(malloc_fixedsize), \
## %(unicodedescr.basesize + \
## 10 * unicodedescr.itemsize)d, \
## descr=malloc_fixedsize_descr)
## setfield_gc(p0, 10, descr=unicodelendescr)
class TestFramework(RewriteTests):
def setup_method(self, meth):
class config_(object):
class translation(object):
gc = 'minimark'
gcrootfinder = 'asmgcc'
gctransformer = 'framework'
gcremovetypeptr = False
gcdescr = get_description(config_)
self.gc_ll_descr = GcLLDescr_framework(gcdescr, None, None, None,
really_not_translated=True)
self.gc_ll_descr.write_barrier_descr.has_write_barrier_from_array = (
lambda cpu: True)
self.gc_ll_descr.malloc_zero_filled = False
#
class FakeCPU(BaseFakeCPU):
def sizeof(self, STRUCT, is_object):
descr = SizeDescr(104, gc_fielddescrs=[])
descr.tid = 9315
return descr
self.cpu = FakeCPU()
def test_rewrite_assembler_new_to_malloc(self):
self.check_rewrite("""
[p1]
p0 = new(descr=sdescr)
jump()
""", """
[p1]
p0 = call_malloc_nursery(%(sdescr.size)d)
setfield_gc(p0, 1234, descr=tiddescr)
jump()
""")
def test_rewrite_assembler_new3_to_malloc(self):
self.check_rewrite("""
[]
p0 = new(descr=sdescr)
p1 = new(descr=tdescr)
p2 = new(descr=sdescr)
jump()
""", """
[]
p0 = call_malloc_nursery( \
%(sdescr.size + tdescr.size + sdescr.size)d)
setfield_gc(p0, 1234, descr=tiddescr)
p1 = nursery_ptr_increment(p0, %(sdescr.size)d)
setfield_gc(p1, 5678, descr=tiddescr)
p2 = nursery_ptr_increment(p1, %(tdescr.size)d)
setfield_gc(p2, 1234, descr=tiddescr)
zero_ptr_field(p1, %(tdescr.gc_fielddescrs[0].offset)s)
jump()
""")
def test_rewrite_assembler_new_array_fixed_to_malloc(self):
self.check_rewrite("""
[]
p0 = new_array(10, descr=adescr)
jump()
""", """
[]
p0 = call_malloc_nursery( \
%(adescr.basesize + 10 * adescr.itemsize)d)
setfield_gc(p0, 4321, descr=tiddescr)
setfield_gc(p0, 10, descr=alendescr)
jump()
""")
def test_rewrite_assembler_new_and_new_array_fixed_to_malloc(self):
self.check_rewrite("""
[]
p0 = new(descr=sdescr)
p1 = new_array(10, descr=adescr)
jump()
""", """
[]
p0 = call_malloc_nursery( \
%(sdescr.size + \
adescr.basesize + 10 * adescr.itemsize)d)
setfield_gc(p0, 1234, descr=tiddescr)
p1 = nursery_ptr_increment(p0, %(sdescr.size)d)
setfield_gc(p1, 4321, descr=tiddescr)
setfield_gc(p1, 10, descr=alendescr)
jump()
""")
def test_rewrite_assembler_round_up(self):
self.check_rewrite("""
[]
p0 = new_array(6, descr=bdescr)
jump()
""", """
[]
p0 = call_malloc_nursery(%(bdescr.basesize + 8)d)
setfield_gc(p0, 8765, descr=tiddescr)
setfield_gc(p0, 6, descr=blendescr)
jump()
""")
def test_rewrite_assembler_round_up_always(self):
self.check_rewrite("""
[]
p0 = new_array(5, descr=bdescr)
p1 = new_array(5, descr=bdescr)
p2 = new_array(5, descr=bdescr)
p3 = new_array(5, descr=bdescr)
jump()
""", """
[]
p0 = call_malloc_nursery(%(4 * (bdescr.basesize + 8))d)
setfield_gc(p0, 8765, descr=tiddescr)
setfield_gc(p0, 5, descr=blendescr)
p1 = nursery_ptr_increment(p0, %(bdescr.basesize + 8)d)
setfield_gc(p1, 8765, descr=tiddescr)
setfield_gc(p1, 5, descr=blendescr)
p2 = nursery_ptr_increment(p1, %(bdescr.basesize + 8)d)
setfield_gc(p2, 8765, descr=tiddescr)
setfield_gc(p2, 5, descr=blendescr)
p3 = nursery_ptr_increment(p2, %(bdescr.basesize + 8)d)
setfield_gc(p3, 8765, descr=tiddescr)
setfield_gc(p3, 5, descr=blendescr)
jump()
""")
def test_rewrite_assembler_minimal_size(self):
self.check_rewrite("""
[]
p0 = new(descr=edescr)
p1 = new(descr=edescr)
jump()
""", """
[]
p0 = call_malloc_nursery(%(4*WORD)d)
setfield_gc(p0, 9000, descr=tiddescr)
p1 = nursery_ptr_increment(p0, %(2*WORD)d)
setfield_gc(p1, 9000, descr=tiddescr)
jump()
""")
def test_rewrite_assembler_variable_size(self):
self.check_rewrite("""
[i0]
p0 = new_array(i0, descr=bdescr)
jump(i0)
""", """
[i0]
p0 = call_malloc_nursery_varsize(0, 1, i0, descr=bdescr)
setfield_gc(p0, i0, descr=blendescr)
jump(i0)
""")
def test_rewrite_new_string(self):
self.check_rewrite("""
[i0]
p0 = newstr(i0)
jump(i0)
""", """
[i0]
p0 = call_malloc_nursery_varsize(1, 1, i0, descr=strdescr)
setfield_gc(p0, i0, descr=strlendescr)
setfield_gc(p0, 0, descr=strhashdescr)
jump(i0)
""")
def test_rewrite_assembler_nonstandard_array(self):
# a non-standard array is a bit hard to get; e.g. GcArray(Float)
# is like that on Win32, but not on Linux. Build one manually...
NONSTD = lltype.GcArray(lltype.Float)
nonstd_descr = get_array_descr(self.gc_ll_descr, NONSTD)
nonstd_descr.tid = 6464
nonstd_descr.basesize = 64 # <= hacked
nonstd_descr.itemsize = 8
nonstd_descr_gcref = 123
self.check_rewrite("""
[i0, p1]
p0 = new_array(i0, descr=nonstd_descr)
setarrayitem_gc(p0, i0, p1)
jump(i0)
""", """
[i0, p1]
p0 = call_malloc_gc(ConstClass(malloc_array_nonstandard), \
64, 8, \
%(nonstd_descr.lendescr.offset)d, \
6464, i0, \
descr=malloc_array_nonstandard_descr)
cond_call_gc_wb_array(p0, i0, descr=wbdescr)
setarrayitem_gc(p0, i0, p1)
jump(i0)
""", nonstd_descr=nonstd_descr)
def test_rewrite_assembler_maximal_size_1(self):
self.gc_ll_descr.max_size_of_young_obj = 100
self.check_rewrite("""
[]
p0 = new_array(103, descr=bdescr)
jump()
""", """
[]
p0 = call_malloc_gc(ConstClass(malloc_array), 1, \
%(bdescr.tid)d, 103, \
descr=malloc_array_descr)
jump()
""")
def test_rewrite_assembler_maximal_size_2(self):
self.gc_ll_descr.max_size_of_young_obj = 300
self.check_rewrite("""
[]
p0 = new_array(101, descr=bdescr)
p1 = new_array(102, descr=bdescr) # two new_arrays can be combined
p2 = new_array(103, descr=bdescr) # but not all three
jump()
""", """
[]
p0 = call_malloc_nursery( \
%(2 * (bdescr.basesize + 104))d)
setfield_gc(p0, 8765, descr=tiddescr)
setfield_gc(p0, 101, descr=blendescr)
p1 = nursery_ptr_increment(p0, %(bdescr.basesize + 104)d)
setfield_gc(p1, 8765, descr=tiddescr)
setfield_gc(p1, 102, descr=blendescr)
p2 = call_malloc_nursery( \
%(bdescr.basesize + 104)d)
setfield_gc(p2, 8765, descr=tiddescr)
setfield_gc(p2, 103, descr=blendescr)
jump()
""")
def test_rewrite_assembler_huge_size(self):
# "huge" is defined as "larger than 0xffffff bytes, or 16MB"
self.check_rewrite("""
[]
p0 = new_array(20000000, descr=bdescr)
jump()
""", """
[]
p0 = call_malloc_gc(ConstClass(malloc_array), 1, \
%(bdescr.tid)d, 20000000, \
descr=malloc_array_descr)
jump()
""")
def test_new_with_vtable(self):
self.check_rewrite("""
[]
p0 = new_with_vtable(descr=o_descr)
jump()
""", """
[p1]
p0 = call_malloc_nursery(104) # rounded up
setfield_gc(p0, 9315, descr=tiddescr)
setfield_gc(p0, 0, descr=vtable_descr)
jump()
""")
def test_new_with_vtable_too_big(self):
self.gc_ll_descr.max_size_of_young_obj = 100
self.check_rewrite("""
[]
p0 = new_with_vtable(descr=o_descr)
jump()
""", """
[p1]
p0 = call_malloc_gc(ConstClass(malloc_big_fixedsize), 104, 9315, \
descr=malloc_big_fixedsize_descr)
setfield_gc(p0, 0, descr=vtable_descr)
jump()
""")
def test_rewrite_assembler_newstr_newunicode(self):
self.check_rewrite("""
[i2]
p0 = newstr(14)
p1 = newunicode(10)
p2 = newunicode(i2)
p3 = newstr(i2)
jump()
""", """
[i2]
p0 = call_malloc_nursery( \
%(strdescr.basesize + 16 * strdescr.itemsize + \
unicodedescr.basesize + 10 * unicodedescr.itemsize)d)
setfield_gc(p0, %(strdescr.tid)d, descr=tiddescr)
setfield_gc(p0, 14, descr=strlendescr)
setfield_gc(p0, 0, descr=strhashdescr)
p1 = nursery_ptr_increment(p0, %(strdescr.basesize + 16 * strdescr.itemsize)d)
setfield_gc(p1, %(unicodedescr.tid)d, descr=tiddescr)
setfield_gc(p1, 10, descr=unicodelendescr)
setfield_gc(p1, 0, descr=unicodehashdescr)
p2 = call_malloc_nursery_varsize(2, %(unicodedescr.itemsize)d, i2,\
descr=unicodedescr)
setfield_gc(p2, i2, descr=unicodelendescr)
setfield_gc(p2, 0, descr=unicodehashdescr)
p3 = call_malloc_nursery_varsize(1, 1, i2, \
descr=strdescr)
setfield_gc(p3, i2, descr=strlendescr)
setfield_gc(p3, 0, descr=strhashdescr)
jump()
""")
def test_write_barrier_before_setfield_gc(self):
self.check_rewrite("""
[p1, p2]
setfield_gc(p1, p2, descr=tzdescr)
jump()
""", """
[p1, p2]
cond_call_gc_wb(p1, descr=wbdescr)
setfield_gc(p1, p2, descr=tzdescr)
jump()
""")
def test_write_barrier_before_array_without_from_array(self):
self.gc_ll_descr.write_barrier_descr.has_write_barrier_from_array = (
lambda cpu: False)
self.check_rewrite("""
[p1, i2, p3]
setarrayitem_gc(p1, i2, p3, descr=cdescr)
jump()
""", """
[p1, i2, p3]
cond_call_gc_wb(p1, descr=wbdescr)
setarrayitem_gc(p1, i2, p3, descr=cdescr)
jump()
""")
def test_write_barrier_before_short_array(self):
self.gc_ll_descr.max_size_of_young_obj = 2000
self.check_rewrite("""
[i2, p3]
p1 = new_array_clear(129, descr=cdescr)
call_n(123456)
setarrayitem_gc(p1, i2, p3, descr=cdescr)
jump()
""", """
[i2, p3]
p1 = call_malloc_nursery( \
%(cdescr.basesize + 129 * cdescr.itemsize)d)
setfield_gc(p1, 8111, descr=tiddescr)
setfield_gc(p1, 129, descr=clendescr)
zero_array(p1, 0, 129, descr=cdescr)
call_n(123456)
cond_call_gc_wb(p1, descr=wbdescr)
setarrayitem_gc(p1, i2, p3, descr=cdescr)
jump()
""")
def test_write_barrier_before_long_array(self):
# the limit of "being too long" is fixed, arbitrarily, at 130
self.gc_ll_descr.max_size_of_young_obj = 2000
self.check_rewrite("""
[i2, p3]
p1 = new_array_clear(130, descr=cdescr)
call_n(123456)
setarrayitem_gc(p1, i2, p3, descr=cdescr)
jump()
""", """
[i2, p3]
p1 = call_malloc_nursery( \
%(cdescr.basesize + 130 * cdescr.itemsize)d)
setfield_gc(p1, 8111, descr=tiddescr)
setfield_gc(p1, 130, descr=clendescr)
zero_array(p1, 0, 130, descr=cdescr)
call_n(123456)
cond_call_gc_wb_array(p1, i2, descr=wbdescr)
setarrayitem_gc(p1, i2, p3, descr=cdescr)
jump()
""")
def test_write_barrier_before_unknown_array(self):
self.check_rewrite("""
[p1, i2, p3]
setarrayitem_gc(p1, i2, p3, descr=cdescr)
jump()
""", """
[p1, i2, p3]
cond_call_gc_wb_array(p1, i2, descr=wbdescr)
setarrayitem_gc(p1, i2, p3, descr=cdescr)
jump()
""")
def test_label_makes_size_unknown(self):
self.check_rewrite("""
[i2, p3]
p1 = new_array_clear(5, descr=cdescr)
label(p1, i2, p3)
setarrayitem_gc(p1, i2, p3, descr=cdescr)
jump()
""", """
[i2, p3]
p1 = call_malloc_nursery( \
%(cdescr.basesize + 5 * cdescr.itemsize)d)
setfield_gc(p1, 8111, descr=tiddescr)
setfield_gc(p1, 5, descr=clendescr)
zero_array(p1, 0, 5, descr=cdescr)
label(p1, i2, p3)
cond_call_gc_wb_array(p1, i2, descr=wbdescr)
setarrayitem_gc(p1, i2, p3, descr=cdescr)
jump()
""")
def test_write_barrier_before_setinteriorfield_gc(self):
S1 = lltype.GcStruct('S1')
INTERIOR = lltype.GcArray(('z', lltype.Ptr(S1)))
interiordescr = get_array_descr(self.gc_ll_descr, INTERIOR)
interiordescr.tid = 1291
interiorlendescr = interiordescr.lendescr
interiorzdescr = get_interiorfield_descr(self.gc_ll_descr,
INTERIOR, 'z')
self.check_rewrite("""
[p1, p2]
setinteriorfield_gc(p1, 0, p2, descr=interiorzdescr)
jump(p1, p2)
""", """
[p1, p2]
cond_call_gc_wb_array(p1, 0, descr=wbdescr)
setinteriorfield_gc(p1, 0, p2, descr=interiorzdescr)
jump(p1, p2)
""", interiorzdescr=interiorzdescr)
def test_initialization_store(self):
self.check_rewrite("""
[p1]
p0 = new(descr=tdescr)
setfield_gc(p0, p1, descr=tzdescr)
jump()
""", """
[p1]
p0 = call_malloc_nursery(%(tdescr.size)d)
setfield_gc(p0, 5678, descr=tiddescr)
setfield_gc(p0, p1, descr=tzdescr)
jump()
""")
def test_initialization_store_2(self):
self.check_rewrite("""
[]
p0 = new(descr=tdescr)
p1 = new(descr=sdescr)
setfield_gc(p0, p1, descr=tzdescr)
jump()
""", """
[]
p0 = call_malloc_nursery(%(tdescr.size + sdescr.size)d)
setfield_gc(p0, 5678, descr=tiddescr)
p1 = nursery_ptr_increment(p0, %(tdescr.size)d)
setfield_gc(p1, 1234, descr=tiddescr)
# <<<no cond_call_gc_wb here>>>
setfield_gc(p0, p1, descr=tzdescr)
jump()
""")
def test_initialization_store_array(self):
self.check_rewrite("""
[p1, i2]
p0 = new_array_clear(5, descr=cdescr)
setarrayitem_gc(p0, i2, p1, descr=cdescr)
jump()
""", """
[p1, i2]
p0 = call_malloc_nursery( \
%(cdescr.basesize + 5 * cdescr.itemsize)d)
setfield_gc(p0, 8111, descr=tiddescr)
setfield_gc(p0, 5, descr=clendescr)
zero_array(p0, 0, 5, descr=cdescr)
setarrayitem_gc(p0, i2, p1, descr=cdescr)
jump()
""")
def test_zero_array_reduced_left(self):
self.check_rewrite("""
[p1, p2]
p0 = new_array_clear(5, descr=cdescr)
setarrayitem_gc(p0, 1, p1, descr=cdescr)
setarrayitem_gc(p0, 0, p2, descr=cdescr)
jump()
""", """
[p1, p2]
p0 = call_malloc_nursery( \
%(cdescr.basesize + 5 * cdescr.itemsize)d)
setfield_gc(p0, 8111, descr=tiddescr)
setfield_gc(p0, 5, descr=clendescr)
zero_array(p0, 2, 3, descr=cdescr)
setarrayitem_gc(p0, 1, p1, descr=cdescr)
setarrayitem_gc(p0, 0, p2, descr=cdescr)
jump()
""")
def test_zero_array_reduced_right(self):
self.check_rewrite("""
[p1, p2]
p0 = new_array_clear(5, descr=cdescr)
setarrayitem_gc(p0, 3, p1, descr=cdescr)
setarrayitem_gc(p0, 4, p2, descr=cdescr)
jump()
""", """
[p1, p2]
p0 = call_malloc_nursery( \
%(cdescr.basesize + 5 * cdescr.itemsize)d)
setfield_gc(p0, 8111, descr=tiddescr)
setfield_gc(p0, 5, descr=clendescr)
zero_array(p0, 0, 3, descr=cdescr)
setarrayitem_gc(p0, 3, p1, descr=cdescr)
setarrayitem_gc(p0, 4, p2, descr=cdescr)
jump()
""")
def test_zero_array_not_reduced_at_all(self):
self.check_rewrite("""
[p1, p2]
p0 = new_array_clear(5, descr=cdescr)
setarrayitem_gc(p0, 3, p1, descr=cdescr)
setarrayitem_gc(p0, 2, p2, descr=cdescr)
setarrayitem_gc(p0, 1, p2, descr=cdescr)
jump()
""", """
[p1, p2]
p0 = call_malloc_nursery( \
%(cdescr.basesize + 5 * cdescr.itemsize)d)
setfield_gc(p0, 8111, descr=tiddescr)
setfield_gc(p0, 5, descr=clendescr)
zero_array(p0, 0, 5, descr=cdescr)
setarrayitem_gc(p0, 3, p1, descr=cdescr)
setarrayitem_gc(p0, 2, p2, descr=cdescr)
setarrayitem_gc(p0, 1, p2, descr=cdescr)
jump()
""")
def test_zero_array_reduced_completely(self):
self.check_rewrite("""
[p1, p2]
p0 = new_array_clear(5, descr=cdescr)
setarrayitem_gc(p0, 3, p1, descr=cdescr)
setarrayitem_gc(p0, 4, p2, descr=cdescr)
setarrayitem_gc(p0, 0, p1, descr=cdescr)
setarrayitem_gc(p0, 2, p2, descr=cdescr)
setarrayitem_gc(p0, 1, p2, descr=cdescr)
jump()
""", """
[p1, p2]
p0 = call_malloc_nursery( \
%(cdescr.basesize + 5 * cdescr.itemsize)d)
setfield_gc(p0, 8111, descr=tiddescr)
setfield_gc(p0, 5, descr=clendescr)
zero_array(p0, 5, 0, descr=cdescr)
setarrayitem_gc(p0, 3, p1, descr=cdescr)
setarrayitem_gc(p0, 4, p2, descr=cdescr)
setarrayitem_gc(p0, 0, p1, descr=cdescr)
setarrayitem_gc(p0, 2, p2, descr=cdescr)
setarrayitem_gc(p0, 1, p2, descr=cdescr)
jump()
""")
def test_zero_array_reduced_left_with_call(self):
self.check_rewrite("""
[p1, p2]
p0 = new_array_clear(5, descr=cdescr)
setarrayitem_gc(p0, 0, p1, descr=cdescr)
call_n(321321)
setarrayitem_gc(p0, 1, p2, descr=cdescr)
jump()
""", """
[p1, p2]
p0 = call_malloc_nursery( \
%(cdescr.basesize + 5 * cdescr.itemsize)d)
setfield_gc(p0, 8111, descr=tiddescr)
setfield_gc(p0, 5, descr=clendescr)
zero_array(p0, 1, 4, descr=cdescr)
setarrayitem_gc(p0, 0, p1, descr=cdescr)
call_n(321321)
cond_call_gc_wb(p0, descr=wbdescr)
setarrayitem_gc(p0, 1, p2, descr=cdescr)
jump()
""")
def test_zero_array_reduced_left_with_label(self):
self.check_rewrite("""
[p1, p2]
p0 = new_array_clear(5, descr=cdescr)
setarrayitem_gc(p0, 0, p1, descr=cdescr)
label(p0, p2)
setarrayitem_gc(p0, 1, p2, descr=cdescr)
jump()
""", """
[p1, p2]
p0 = call_malloc_nursery( \
%(cdescr.basesize + 5 * cdescr.itemsize)d)
setfield_gc(p0, 8111, descr=tiddescr)
setfield_gc(p0, 5, descr=clendescr)
zero_array(p0, 1, 4, descr=cdescr)
setarrayitem_gc(p0, 0, p1, descr=cdescr)
label(p0, p2)
cond_call_gc_wb_array(p0, 1, descr=wbdescr)
setarrayitem_gc(p0, 1, p2, descr=cdescr)
jump()
""")
def test_zero_array_varsize(self):
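        # A variable-sized array is allocated with call_malloc_nursery_varsize
        # and must be cleared over its full (unknown) length i3.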
self.check_rewrite("""
[p1, p2, i3]
p0 = new_array_clear(i3, descr=bdescr)
jump()
""", """
[p1, p2, i3]
p0 = call_malloc_nursery_varsize(0, 1, i3, descr=bdescr)
setfield_gc(p0, i3, descr=blendescr)
zero_array(p0, 0, i3, descr=bdescr)
jump()
""")
def test_zero_array_varsize_cannot_reduce(self):
self.check_rewrite("""
[p1, p2, i3]
p0 = new_array_clear(i3, descr=bdescr)
setarrayitem_gc(p0, 0, p1, descr=bdescr)
jump()
""", """
[p1, p2, i3]
p0 = call_malloc_nursery_varsize(0, 1, i3, descr=bdescr)
setfield_gc(p0, i3, descr=blendescr)
zero_array(p0, 0, i3, descr=bdescr)
cond_call_gc_wb_array(p0, 0, descr=wbdescr)
setarrayitem_gc(p0, 0, p1, descr=bdescr)
jump()
""")
def test_initialization_store_potentially_large_array(self):
# the write barrier cannot be omitted, because we might get
# an array with cards and the GC assumes that the write
# barrier is always called, even on young (but large) arrays
self.check_rewrite("""
[i0, p1, i2]
p0 = new_array(i0, descr=bdescr)
setarrayitem_gc(p0, i2, p1, descr=bdescr)
jump()
""", """
[i0, p1, i2]
p0 = call_malloc_nursery_varsize(0, 1, i0, descr=bdescr)
setfield_gc(p0, i0, descr=blendescr)
cond_call_gc_wb_array(p0, i2, descr=wbdescr)
setarrayitem_gc(p0, i2, p1, descr=bdescr)
jump()
""")
def test_non_initialization_store(self):
self.check_rewrite("""
[i0]
p0 = new(descr=tdescr)
p1 = newstr(i0)
setfield_gc(p0, p1, descr=tzdescr)
jump()
""", """
[i0]
p0 = call_malloc_nursery(%(tdescr.size)d)
setfield_gc(p0, 5678, descr=tiddescr)
zero_ptr_field(p0, %(tdescr.gc_fielddescrs[0].offset)s)
p1 = call_malloc_nursery_varsize(1, 1, i0, \
descr=strdescr)
setfield_gc(p1, i0, descr=strlendescr)
setfield_gc(p1, 0, descr=strhashdescr)
cond_call_gc_wb(p0, descr=wbdescr)
setfield_gc(p0, p1, descr=tzdescr)
jump()
""")
def test_non_initialization_store_label(self):
self.check_rewrite("""
[p1]
p0 = new(descr=tdescr)
label(p0, p1)
setfield_gc(p0, p1, descr=tzdescr)
jump()
""", """
[p1]
p0 = call_malloc_nursery(%(tdescr.size)d)
setfield_gc(p0, 5678, descr=tiddescr)
zero_ptr_field(p0, %(tdescr.gc_fielddescrs[0].offset)s)
label(p0, p1)
cond_call_gc_wb(p0, descr=wbdescr)
setfield_gc(p0, p1, descr=tzdescr)
jump()
""")
def test_multiple_writes(self):
self.check_rewrite("""
[p0, p1, p2]
setfield_gc(p0, p1, descr=tzdescr)
setfield_gc(p0, p2, descr=tzdescr)
jump(p1, p2, p0)
""", """
[p0, p1, p2]
cond_call_gc_wb(p0, descr=wbdescr)
setfield_gc(p0, p1, descr=tzdescr)
setfield_gc(p0, p2, descr=tzdescr)
jump(p1, p2, p0)
""")
def test_rewrite_call_assembler(self):
self.check_rewrite("""
[i0, f0]
i2 = call_assembler_i(i0, f0, descr=casmdescr)
""", """
[i0, f0]
i1 = getfield_raw_i(ConstClass(frame_info), descr=jfi_frame_size)
p1 = call_malloc_nursery_varsize_frame(i1)
setfield_gc(p1, 0, descr=tiddescr)
i2 = getfield_raw_i(ConstClass(frame_info), descr=jfi_frame_depth)
setfield_gc(p1, 0, descr=jf_extra_stack_depth)
setfield_gc(p1, NULL, descr=jf_savedata)
setfield_gc(p1, NULL, descr=jf_force_descr)
setfield_gc(p1, NULL, descr=jf_descr)
setfield_gc(p1, NULL, descr=jf_guard_exc)
setfield_gc(p1, NULL, descr=jf_forward)
setfield_gc(p1, i2, descr=framelendescr)
setfield_gc(p1, ConstClass(frame_info), descr=jf_frame_info)
setarrayitem_gc(p1, 0, i0, descr=signedframedescr)
setarrayitem_gc(p1, 1, f0, descr=floatframedescr)
i3 = call_assembler_i(p1, descr=casmdescr)
""")
def test_int_add_ovf(self):
self.check_rewrite("""
[i0]
p0 = new(descr=tdescr)
i1 = int_add_ovf(i0, 123)
guard_overflow(descr=guarddescr) []
jump()
""", """
[i0]
p0 = call_malloc_nursery(%(tdescr.size)d)
setfield_gc(p0, 5678, descr=tiddescr)
zero_ptr_field(p0, %(tdescr.gc_fielddescrs[0].offset)s)
i1 = int_add_ovf(i0, 123)
guard_overflow(descr=guarddescr) []
jump()
""")
def test_int_gt(self):
self.check_rewrite("""
[i0]
p0 = new(descr=tdescr)
i1 = int_gt(i0, 123)
guard_false(i1, descr=guarddescr) []
jump()
""", """
[i0]
p0 = call_malloc_nursery(%(tdescr.size)d)
setfield_gc(p0, 5678, descr=tiddescr)
zero_ptr_field(p0, %(tdescr.gc_fielddescrs[0].offset)s)
i1 = int_gt(i0, 123)
guard_false(i1, descr=guarddescr) []
jump()
""")
def test_zero_ptr_field_before_getfield(self):
# This case may need to be fixed in the metainterp/optimizeopt
# already so that it no longer occurs for rewrite.py. But anyway
# it's a good idea to make sure rewrite.py is correct on its own.
self.check_rewrite("""
[]
p0 = new(descr=tdescr)
p1 = getfield_gc_r(p0, descr=tdescr)
jump(p1)
""", """
[]
p0 = call_malloc_nursery(%(tdescr.size)d)
setfield_gc(p0, 5678, descr=tiddescr)
zero_ptr_field(p0, %(tdescr.gc_fielddescrs[0].offset)s)
p1 = getfield_gc_r(p0, descr=tdescr)
jump(p1)
""")
| 36.811909 | 90 | 0.518885 | 4,203 | 38,947 | 4.564835 | 0.091601 | 0.053685 | 0.037527 | 0.039612 | 0.687324 | 0.624257 | 0.557073 | 0.483425 | 0.457208 | 0.410091 | 0 | 0.048662 | 0.371582 | 38,947 | 1,057 | 91 | 36.846736 | 0.73524 | 0.031248 | 0 | 0.636364 | 0 | 0 | 0.658979 | 0.082488 | 0 | 0 | 0 | 0 | 0.001045 | 1 | 0.064786 | false | 0.00209 | 0.011494 | 0.003135 | 0.100313 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
f4418c7fe5090cc1ad72d42e956421d4fcbc0d8c | 5,253 | py | Python | transformers/tests/tokenization_xlnet_test.py | deepbluesea/transformers | 11a2317986aad6e9a72f542e31344cfb7c94cbab | [
"Apache-2.0"
] | 270 | 2020-04-26T17:54:36.000Z | 2022-03-24T20:47:11.000Z | transformers/tests/tokenization_xlnet_test.py | deepbluesea/transformers | 11a2317986aad6e9a72f542e31344cfb7c94cbab | [
"Apache-2.0"
] | 27 | 2020-06-03T17:34:41.000Z | 2022-03-31T01:17:34.000Z | transformers/tests/tokenization_xlnet_test.py | deepbluesea/transformers | 11a2317986aad6e9a72f542e31344cfb7c94cbab | [
"Apache-2.0"
] | 61 | 2020-04-25T21:48:11.000Z | 2022-03-23T02:39:10.000Z | # coding=utf-8
# Copyright 2018 The Google AI Language Team Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function, unicode_literals
import os
import unittest
from transformers.tokenization_xlnet import (XLNetTokenizer, SPIECE_UNDERLINE)
from .tokenization_tests_commons import CommonTestCases
SAMPLE_VOCAB = os.path.join(os.path.dirname(os.path.abspath(__file__)),
'fixtures/test_sentencepiece.model')
class XLNetTokenizationTest(CommonTestCases.CommonTokenizerTester):
tokenizer_class = XLNetTokenizer
def setUp(self):
super(XLNetTokenizationTest, self).setUp()
# We have a SentencePiece fixture for testing
tokenizer = XLNetTokenizer(SAMPLE_VOCAB, keep_accents=True)
tokenizer.save_pretrained(self.tmpdirname)
def get_tokenizer(self, **kwargs):
return XLNetTokenizer.from_pretrained(self.tmpdirname, **kwargs)
def get_input_output_texts(self):
input_text = u"This is a test"
output_text = u"This is a test"
return input_text, output_text
def test_full_tokenizer(self):
tokenizer = XLNetTokenizer(SAMPLE_VOCAB, keep_accents=True)
tokens = tokenizer.tokenize(u'This is a test')
self.assertListEqual(tokens, [u'▁This', u'▁is', u'▁a', u'▁t', u'est'])
self.assertListEqual(
tokenizer.convert_tokens_to_ids(tokens), [285, 46, 10, 170, 382])
tokens = tokenizer.tokenize(u"I was born in 92000, and this is falsé.")
self.assertListEqual(tokens, [SPIECE_UNDERLINE + u'I', SPIECE_UNDERLINE + u'was', SPIECE_UNDERLINE + u'b',
u'or', u'n', SPIECE_UNDERLINE + u'in', SPIECE_UNDERLINE + u'',
u'9', u'2', u'0', u'0', u'0', u',', SPIECE_UNDERLINE + u'and', SPIECE_UNDERLINE + u'this',
SPIECE_UNDERLINE + u'is', SPIECE_UNDERLINE + u'f', u'al', u's', u'é', u'.'])
ids = tokenizer.convert_tokens_to_ids(tokens)
self.assertListEqual(
ids, [8, 21, 84, 55, 24, 19, 7, 0,
602, 347, 347, 347, 3, 12, 66,
46, 72, 80, 6, 0, 4])
back_tokens = tokenizer.convert_ids_to_tokens(ids)
self.assertListEqual(back_tokens, [SPIECE_UNDERLINE + u'I', SPIECE_UNDERLINE + u'was', SPIECE_UNDERLINE + u'b',
u'or', u'n', SPIECE_UNDERLINE + u'in',
SPIECE_UNDERLINE + u'', u'<unk>', u'2', u'0', u'0', u'0', u',',
SPIECE_UNDERLINE + u'and', SPIECE_UNDERLINE + u'this',
SPIECE_UNDERLINE + u'is', SPIECE_UNDERLINE + u'f', u'al', u's',
u'<unk>', u'.'])
def test_tokenizer_lower(self):
tokenizer = XLNetTokenizer(SAMPLE_VOCAB, do_lower_case=True)
tokens = tokenizer.tokenize(u"I was born in 92000, and this is falsé.")
self.assertListEqual(tokens, [SPIECE_UNDERLINE + u'', u'i', SPIECE_UNDERLINE + u'was', SPIECE_UNDERLINE + u'b',
u'or', u'n', SPIECE_UNDERLINE + u'in', SPIECE_UNDERLINE + u'',
u'9', u'2', u'0', u'0', u'0', u',', SPIECE_UNDERLINE + u'and', SPIECE_UNDERLINE + u'this',
SPIECE_UNDERLINE + u'is', SPIECE_UNDERLINE + u'f', u'al', u'se', u'.'])
self.assertListEqual(tokenizer.tokenize(u"H\u00E9llo"), [u"▁he", u"ll", u"o"])
def test_tokenizer_no_lower(self):
tokenizer = XLNetTokenizer(SAMPLE_VOCAB, do_lower_case=False)
tokens = tokenizer.tokenize(u"I was born in 92000, and this is falsé.")
self.assertListEqual(tokens, [SPIECE_UNDERLINE + u'I', SPIECE_UNDERLINE + u'was', SPIECE_UNDERLINE + u'b', u'or',
u'n', SPIECE_UNDERLINE + u'in', SPIECE_UNDERLINE + u'',
u'9', u'2', u'0', u'0', u'0', u',', SPIECE_UNDERLINE + u'and', SPIECE_UNDERLINE + u'this',
SPIECE_UNDERLINE + u'is', SPIECE_UNDERLINE + u'f', u'al', u'se', u'.'])
def test_sequence_builders(self):
tokenizer = XLNetTokenizer.from_pretrained("xlnet-base-cased")
text = tokenizer.encode("sequence builders")
text_2 = tokenizer.encode("multi-sequence build")
encoded_sentence = tokenizer.add_special_tokens_single_sequence(text)
encoded_pair = tokenizer.add_special_tokens_sequence_pair(text, text_2)
assert encoded_sentence == text + [4, 3]
assert encoded_pair == text + [4] + text_2 + [4, 3]
if __name__ == '__main__':
unittest.main()
| 49.093458 | 128 | 0.61146 | 682 | 5,253 | 4.543988 | 0.285924 | 0.17909 | 0.185866 | 0.010326 | 0.426267 | 0.412391 | 0.380768 | 0.349145 | 0.349145 | 0.314295 | 0 | 0.027561 | 0.267847 | 5,253 | 106 | 129 | 49.556604 | 0.776911 | 0.119931 | 0 | 0.208955 | 0 | 0 | 0.090297 | 0.007163 | 0 | 0 | 0 | 0 | 0.149254 | 1 | 0.104478 | false | 0 | 0.074627 | 0.014925 | 0.238806 | 0.014925 | 0 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
f4430a5ed7a70794aa650554ee2233f1a76e4ce7 | 1,362 | py | Python | Bot/db_aps.py | FaHoLo/Fish_shop | b08018223705bca169dab9f39ec5a55f62822f0b | [
"MIT"
] | null | null | null | Bot/db_aps.py | FaHoLo/Fish_shop | b08018223705bca169dab9f39ec5a55f62822f0b | [
"MIT"
] | null | null | null | Bot/db_aps.py | FaHoLo/Fish_shop | b08018223705bca169dab9f39ec5a55f62822f0b | [
"MIT"
] | null | null | null | import logging
import os
import redis
import moltin_aps
_database = None
db_logger = logging.getLogger('db_logger')
async def get_database_connection():
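    # Lazily create and cache a single Redis connection using credentials from environment variables.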
global _database
if _database is None:
database_password = os.getenv('DB_PASSWORD')
database_host = os.getenv('DB_HOST')
database_port = os.getenv('DB_PORT')
_database = redis.Redis(host=database_host, port=database_port, password=database_password)
db_logger.debug('Got new db connection')
return _database
async def get_moltin_customer_id(customer_key):
db = await get_database_connection()
customer_id = db.get(customer_key)
if customer_id:
customer_id = customer_id.decode('utf-8')
db_logger.debug(f'Got moltin customer id «{customer_id}» from db')
return customer_id
async def update_customer_info(customer_key, customer_info):
db = await get_database_connection()
customer_id = db.get(customer_key).decode('utf-8')
moltin_aps.update_customer_info(customer_id, customer_info)
db_logger.debug(f'Customer «{customer_id}» info was updated')
async def create_customer(customer_key, customer_info):
db = await get_database_connection()
customer_id = moltin_aps.create_customer(customer_info)['data']['id']
db.set(customer_key, customer_id)
db_logger.debug(f'New customer «{customer_key}» was created')
| 29.608696 | 99 | 0.737885 | 195 | 1,362 | 4.866667 | 0.230769 | 0.136986 | 0.088514 | 0.056902 | 0.202318 | 0.202318 | 0.202318 | 0.202318 | 0.202318 | 0.202318 | 0 | 0.001759 | 0.165198 | 1,362 | 45 | 100 | 30.266667 | 0.827617 | 0 | 0 | 0.09375 | 0 | 0 | 0.146109 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.09375 | false | 0.0625 | 0.125 | 0 | 0.28125 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
f4496e9806f5e5781ad656efc22821170a6cd22c | 3,702 | py | Python | tests/unit/discovery/test_py_spec.py | xavfernandez/virtualenv | dd37c7d2af8a21026f4d4b7f43142e4e1e0faf86 | [
"MIT"
] | 1 | 2020-02-25T15:08:59.000Z | 2020-02-25T15:08:59.000Z | tests/unit/discovery/test_py_spec.py | xavfernandez/virtualenv | dd37c7d2af8a21026f4d4b7f43142e4e1e0faf86 | [
"MIT"
] | null | null | null | tests/unit/discovery/test_py_spec.py | xavfernandez/virtualenv | dd37c7d2af8a21026f4d4b7f43142e4e1e0faf86 | [
"MIT"
] | null | null | null | from __future__ import absolute_import, unicode_literals
import itertools
import os
import sys
from copy import copy
import pytest
from virtualenv.discovery.py_spec import PythonSpec
def test_bad_py_spec():
text = "python2.3.4.5"
spec = PythonSpec.from_string_spec(text)
assert text in repr(spec)
assert spec.str_spec == text
assert spec.path == os.path.abspath(text)
content = vars(spec)
del content[str("str_spec")]
del content[str("path")]
assert all(v is None for v in content.values())
def test_py_spec_first_digit_only_major():
spec = PythonSpec.from_string_spec("278")
assert spec.major == 2
assert spec.minor == 78
def test_spec_satisfies_path_ok():
spec = PythonSpec.from_string_spec(sys.executable)
assert spec.satisfies(spec) is True
def test_spec_satisfies_path_nok(tmp_path):
spec = PythonSpec.from_string_spec(sys.executable)
of = PythonSpec.from_string_spec(str(tmp_path))
assert spec.satisfies(of) is False
def test_spec_satisfies_arch():
spec_1 = PythonSpec.from_string_spec("python-32")
spec_2 = PythonSpec.from_string_spec("python-64")
assert spec_1.satisfies(spec_1) is True
assert spec_2.satisfies(spec_1) is False
@pytest.mark.parametrize(
"req, spec",
list(itertools.combinations(["py", "CPython", "python"], 2)) + [("jython", "jython")] + [("CPython", "cpython")],
)
def test_spec_satisfies_implementation_ok(req, spec):
spec_1 = PythonSpec.from_string_spec(req)
spec_2 = PythonSpec.from_string_spec(spec)
assert spec_1.satisfies(spec_1) is True
assert spec_2.satisfies(spec_1) is True
def test_spec_satisfies_implementation_nok():
spec_1 = PythonSpec.from_string_spec("python")
spec_2 = PythonSpec.from_string_spec("jython")
assert spec_2.satisfies(spec_1) is False
assert spec_1.satisfies(spec_2) is False
def _version_satisfies_pairs():
target = set()
version = tuple(str(i) for i in sys.version_info[0:3])
for i in range(len(version) + 1):
req = ".".join(version[0:i])
for j in range(i + 1):
sat = ".".join(version[0:j])
# can be satisfied in both directions
target.add((req, sat))
target.add((sat, req))
return sorted(target)
@pytest.mark.parametrize("req, spec", _version_satisfies_pairs())
def test_version_satisfies_ok(req, spec):
req_spec = PythonSpec.from_string_spec("python{}".format(req))
sat_spec = PythonSpec.from_string_spec("python{}".format(spec))
assert sat_spec.satisfies(req_spec) is True
def _version_not_satisfies_pairs():
target = set()
version = tuple(str(i) for i in sys.version_info[0:3])
for i in range(len(version)):
req = ".".join(version[0 : i + 1])
for j in range(i + 1):
sat_ver = list(sys.version_info[0 : j + 1])
for l in range(j + 1):
for o in [1, -1]:
temp = copy(sat_ver)
temp[l] += o
sat = ".".join(str(i) for i in temp)
target.add((req, sat))
return sorted(target)
@pytest.mark.parametrize("req, spec", _version_not_satisfies_pairs())
def test_version_satisfies_nok(req, spec):
req_spec = PythonSpec.from_string_spec("python{}".format(req))
sat_spec = PythonSpec.from_string_spec("python{}".format(spec))
assert sat_spec.satisfies(req_spec) is False
def test_relative_spec(tmp_path, monkeypatch):
monkeypatch.chdir(tmp_path)
a_relative_path = str((tmp_path / "a" / "b").relative_to(tmp_path))
spec = PythonSpec.from_string_spec(a_relative_path)
assert spec.path == os.path.abspath(str(tmp_path / a_relative_path))
| 31.913793 | 117 | 0.679633 | 541 | 3,702 | 4.399261 | 0.18854 | 0.094118 | 0.134454 | 0.161345 | 0.620588 | 0.506303 | 0.382773 | 0.287815 | 0.272269 | 0.227731 | 0 | 0.016521 | 0.198811 | 3,702 | 115 | 118 | 32.191304 | 0.785907 | 0.009454 | 0 | 0.232558 | 0 | 0 | 0.044748 | 0 | 0 | 0 | 0 | 0 | 0.197674 | 1 | 0.139535 | false | 0 | 0.081395 | 0 | 0.244186 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
f450452cbcef41209866e35540c53f785f67820d | 1,183 | py | Python | Scripts/simulation/careers/detective/detective_crime_scene.py | velocist/TS4CheatsInfo | b59ea7e5f4bd01d3b3bd7603843d525a9c179867 | [
"Apache-2.0"
] | null | null | null | Scripts/simulation/careers/detective/detective_crime_scene.py | velocist/TS4CheatsInfo | b59ea7e5f4bd01d3b3bd7603843d525a9c179867 | [
"Apache-2.0"
] | null | null | null | Scripts/simulation/careers/detective/detective_crime_scene.py | velocist/TS4CheatsInfo | b59ea7e5f4bd01d3b3bd7603843d525a9c179867 | [
"Apache-2.0"
] | null | null | null | # uncompyle6 version 3.7.4
# Python bytecode 3.7 (3394)
# Decompiled from: Python 3.7.9 (tags/v3.7.9:13c94747c7, Aug 17 2020, 18:58:18) [MSC v.1900 64 bit (AMD64)]
# Embedded file name: T:\InGame\Gameplay\Scripts\Server\careers\detective\detective_crime_scene.py
# Compiled at: 2015-02-08 03:00:54
# Size of source mod 2**32: 1608 bytes
from careers.career_event_zone_director import CareerEventZoneDirector
import sims4.log
logger = sims4.log.Logger('Crime Scene', default_owner='bhill')
class CrimeSceneZoneDirector(CareerEventZoneDirector):
def __init__(self, *args, **kwargs):
(super().__init__)(*args, **kwargs)
self._should_load_sims = False
def _load_custom_zone_director(self, zone_director_proto, reader):
self._should_load_sims = True
super()._load_custom_zone_director(zone_director_proto, reader)
def _on_maintain_zone_saved_sim(self, sim_info):
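        # Keep saved sims only when this zone director's state was loaded from a save; otherwise discard them.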
if self._should_load_sims:
super()._on_maintain_zone_saved_sim(sim_info)
else:
logger.info('Discarding saved sim: {}', sim_info)
def _process_injected_sim(self, sim_info):
logger.info('Discarding injected sim: {}', sim_info) | 42.25 | 107 | 0.72612 | 168 | 1,183 | 4.797619 | 0.547619 | 0.074442 | 0.052109 | 0.066998 | 0.054591 | 0 | 0 | 0 | 0 | 0 | 0 | 0.067677 | 0.163145 | 1,183 | 28 | 108 | 42.25 | 0.746465 | 0.27388 | 0 | 0 | 0 | 0 | 0.078546 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.235294 | false | 0 | 0.117647 | 0 | 0.411765 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
f458c5e01d9e2170ec0f7c2f7180c5b33bb75bc9 | 16,446 | py | Python | spc/backend_utils.py | adamnew123456/spc | 8809d1817f66cf8266f145aa0c2474b32dc1087a | [
"MIT"
] | 1 | 2017-10-15T19:55:48.000Z | 2017-10-15T19:55:48.000Z | spc/backend_utils.py | adamnew123456/spc | 8809d1817f66cf8266f145aa0c2474b32dc1087a | [
"MIT"
] | null | null | null | spc/backend_utils.py | adamnew123456/spc | 8809d1817f66cf8266f145aa0c2474b32dc1087a | [
"MIT"
] | null | null | null | """
Utility functions and classes shared by multiple backends
"""
from collections import namedtuple
import logging
from . import symbols
from . import types
LOGGER = logging.getLogger('spc.backend_utils')
# NameContexts encapsulate both the function stack (which holds values) and
# the symbol table context (which binds them)
NameContext = namedtuple('NameContext', ['symbol_ctx', 'func_stack'])
# While loops are identified by two labels - the start label, for re-running
# the condition, and the end label, for exiting when the condition is false
WhileLabels = namedtuple('WhileLabels', ['cond', 'exit'])
# If conditions are identified by two labels - the else label, for when
# the condition is false (to skip the then block) and the end label, for
# when the condition is true (to skip the else block)
IfLabels = namedtuple('IfLabels', ['else_body', 'end'])
# Switch conditionals are handled sort of like if conditionals:
#
# (switch |
# (case T1 B1) | jump-if-not T1, l1prime; ...; jump l4; l1prime:
# (case T2 B2) | jump-if-not T2, l2prime; ...; jump l4; l2prime:
# (else B3)) | ...
# | l4:
class SwitchLabels:
"""
Switch labels are similar to conditionals:
(switch |
(case T1 B1) | jump-if-not T1, case_lbl_1; ...; jump end; case_lbl_1:
(case T2 B2) | jump-if-not T2, case_lbl_2; ...; jump end; case_lbl_2:
(else B3) | ...; end_lbl:
Since each case is processed in order, only the current case end label and
the end switch label is available at any given time.
"""
def __init__(self, end_label):
self.end_label = end_label
self.case_end_label = None
class CoercionContext:
"""
This is used to wrap up all the information needed to coerce values from
one type to another.
"""
def __init__(self, backend, temp_context, code_templates):
self.backend = backend
self.temp_context = temp_context
self.templates = code_templates
def copy_with_context(self, new_context):
"""
Creates a copy of this object, but within a new temporary context.
"""
return CoercionContext(self.backend, new_context, self.templates)
def coerce(self, input_offset, input_type, output_type):
"""
Coerces a value, located on the stack, from the given input type to the
given output type. Returns the stack offset of the converted
variable and the output type.
Raises a TypeError if this is not possible.
"""
if input_type == output_type:
return input_offset, output_type
elif (input_type, output_type) == (types.Integer, types.Byte):
return self._coerce_int_to_byte(input_offset), output_type
elif (input_type, output_type) == (types.Byte, types.Integer):
return self._coerce_byte_to_int(input_offset), output_type
else:
raise TypeError('Cannot coerce {} -> {}'.format(input_type, output_type))
def _coerce_int_to_byte(self, input_offset):
"""
Coerces an integer to a byte, returning the stack offset of the
resulting byte.
"""
byte_size = self.backend._type_size(types.Byte)
byte_align = self.backend._type_alignment(types.Byte)
dest_offset = self.temp_context.add_temp(byte_size, byte_align)
tmp_reg = self.templates.tmp_regs[0]
self.backend._write_comment('Coercing int@{} to byte@{}',
input_offset, dest_offset)
self.templates.emit_load_stack_word(tmp_reg, input_offset)
self.templates.emit_int_to_byte(tmp_reg)
self.templates.emit_save_stack_byte(tmp_reg, dest_offset)
return dest_offset
def _coerce_byte_to_int(self, input_offset):
"""
Coerces a byte to an integer, returning the stack offset of the
resulting integer.
"""
int_size = self.backend._type_size(types.Integer)
int_align = self.backend._type_alignment(types.Integer)
dest_offset = self.temp_context.add_temp(int_size, int_align)
tmp_reg = self.templates.tmp_regs[0]
self.backend._write_comment('Coercing byte@{} to int@{}',
input_offset, dest_offset)
self.templates.emit_load_stack_byte(tmp_reg, input_offset)
self.templates.emit_byte_to_int(tmp_reg)
self.templates.emit_save_stack_word(tmp_reg, dest_offset)
return dest_offset
class FunctionStack:
"""
Tracks where variables are on the function's stack.
Note that this makes a number of assumptions about how things are stored:
- All arguments are stored on the stack, in reverse order. This goes
against the calling conventions for register rich architectures, like
MIPS, but there are enough corner cases (like copying structs by value)
that ignoring the calling convention is worthwhile for a non-optimizing
compiler like this.
- Locals and temporaries are stored on the stack, in order of creation.
"""
def __init__(self, backend):
self.backend = backend
self.local_offset = self._starting_locals_offset()
self.param_offset = self._starting_param_offset()
self.vars = {}
def _starting_locals_offset(self):
"""
Returns the starting offset of the local variables on the stack.
"""
raise NotImplementedError
def _starting_param_offset(self):
"""
Returns the starting offset of the parameter on the stack.
"""
raise NotImplementedError
def _expand_stack(self, size):
"""
Emits code to expand the stack frame by the given size.
"""
raise NotImplementedError
def _shrink_stack(self, size):
"""
Emits code to reduce the stack frame by the given size.
"""
raise NotImplementedError
def pad_param(self, space):
"""
Adds blank space before the next parameter.
"""
self.param_offset += space
def add_param(self, name, size, alignment):
"""
Adds a new parameter to the stack.
"""
self.param_offset = types.align_address(self.param_offset, alignment)
self.vars[name] = self.param_offset
self.param_offset += size
self.backend._write_comment('Binding param "{}" to offset {}', name, self.vars[name])
def add_local(self, name, size, alignment):
"""
Adds a local variable to the stack.
"""
self.local_offset = (
types.align_address(self.local_offset - size, alignment,
types.Alignment.Down))
self.vars[name] = self.local_offset
self.backend._write_comment('Binding local "{}" to offset {}', name, self.vars[name])
def get_temp_context(self, backend):
"""
Returns a context which can be used for putting temporary values on
the stack. When the context exits, the space used by the temporary
variables is cleaned up.
"""
root = self
class TemporaryContext:
def __init__(self, start_offset):
self.tmp_offset = start_offset
self.total_tmp_size = 0
def __enter__(self):
pass
def __exit__(self, *exc_info):
root._shrink_stack(self.total_tmp_size)
def add_temp(self, size, alignment):
"""
Makes space for a new temporary, returning the $fp offset at
which to write it.
"""
old_tmp_offset = self.tmp_offset
self.tmp_offset = (
types.align_address(self.tmp_offset - size, alignment,
types.Alignment.Down))
size_used = old_tmp_offset - self.tmp_offset
self.total_tmp_size += size_used
root._expand_stack(size_used)
return self.tmp_offset
def get_temp_context(self):
"""
Creates a temporary context, which starts at this temporary context.
"""
return TemporaryContext(self.tmp_offset)
return TemporaryContext(self.local_offset)
def expand_locals(self):
"""
Makes enough space for the local variables on the stack.
"""
self._expand_stack(self.locals_size())
def cleanup_locals(self):
"""
Cleans up the space used by the local variables on the stack.
"""
self._shrink_stack(self.locals_size())
def locals_size(self):
"""
Gets the size used by all the locals.
"""
return abs(self.local_offset) - abs(self._starting_locals_offset())
def __getitem__(self, name):
"""
Gets the offset to the variable on the stack, or a Register (if the
name was bound to one of the first four parameters)
"""
return self.vars[name]
class VerificationContext:
"""
Used to record all values and types defined all at once (i.e. inside the
same declaration block), so that they can be verified all at once.
"Verification" here means that their types are checked to be valid, which
means different things for different types.
"""
def __init__(self):
self.types = []
self.values = []
def add_value(self, name):
"""
Registers a new value to be verified.
"""
self.values.append(name)
def add_type(self, name):
"""
Registers a new type to be defined.
"""
        self.types.append(name)
def verify(self, backend):
"""
Verifies all the definitions against the backend.
"""
backend._check_valid_types(backend.ctx_types[name] for name in self.types)
backend._check_valid_types(backend.ctx_values[name] for name in self.values)
class ContextMixin:
"""
    Manages the symbol table contexts for this backend (as well as its function stack).
Depends upon the user of this mixin to inherit from BaseBackend in
addition to this one.
"""
def __init__(self):
self.parent_contexts = []
self.current_context = NameContext(symbols.Context(), None)
self.verify_context = VerificationContext()
def _register_file_ns(self, namespace):
"""
Replaces the current context, with one where the symbol context is
expanded to contain the file's namespace.
"""
file_context = self.current_context.symbol_ctx.register(namespace)
self.current_context = self.current_context._replace(symbol_ctx=file_context)
@property
def ctx_namespace(self):
"""
Gets the current namespace
"""
return self.current_context.symbol_ctx.search_path[0]
@property
def ctx_values(self):
"""
Returns the current context's value symbols.
"""
return self.current_context.symbol_ctx.values
@property
def ctx_types(self):
"""
Returns the current context's type symbols.
"""
return self.current_context.symbol_ctx.types
@property
def ctx_stack(self):
"""
Returns the current context's stack information.
"""
return self.current_context.func_stack
def _value_is_defined(self, name):
"""
Returns True if the given variable is defined in the current scope, or
False otherwise.
This is for the static expression processor function, var-def?
"""
return (name in self.ctx_values and
self.ctx_values.is_visible(name))
def _type_is_defined(self, name):
"""
Returns True if the given type is defined in the current scope, or
False otherwise.
This is for the static expression processor function, var-def?
"""
return (name in self.ctx_types and
self.ctx_types.is_visible(name))
def _make_func_stack(self):
raise NotImplementedError
def _push_context(self):
"""
Pushes a new binding context.
"""
old_context = self.current_context
self.parent_contexts.append(old_context)
self.current_context = NameContext(
self.current_context.symbol_ctx.enter(),
self._make_func_stack())
def _pop_context(self):
"""
Loads the previous binding context.
"""
self.current_context = self.parent_contexts.pop()
def _resolve_if_type_name(self, name):
"""
Resolves a type name into a concrete type.
"""
try:
return types.resolve_name(name, self.ctx_types)
except PermissionError as exn:
self.error(self.line, self.col,
'Cannot resolve hidden type "{}"', str(exn))
except RecursionError:
self.error(self.line, self.col,
'Type aliases too deep, when resolving "{}"', name)
except KeyError as exn:
self.error(self.line, self.col,
'Invalid type "{}"', str(exn))
def _verify_types(self):
"""
Verifies all the types across all this current context's symbols.
"""
self.verify_context.verify(self)
self.verify_context = VerificationContext()
class ThirtyTwoMixin:
"""
Defines some information about type sizes and alignment which 32-bit
platforms have in common.
Depends upon the user of this mixin to inherit from ContextMixin.
"""
def _type_alignment(self, type_obj):
"""
Returns alignment of the given type (1 for byte, 4 for word, etc.)
"""
type_obj = self._resolve_if_type_name(type_obj)
if type_obj is types.Integer:
return 4
elif type_obj is types.Byte:
return 1
elif isinstance(type_obj, (types.PointerTo, types.FunctionPointer)):
return 4
elif isinstance(type_obj, types.ArrayOf):
return self._type_alignment(type_obj.type)
elif isinstance(type_obj, types.Struct):
# The alignment only concerns the first element of the struct -
# the struct's internal alignment doesn't come into play
#
            # Also, an OrderedDict's fields are not iterable, for whatever reason
struct_types = list(type_obj.fields.values())
return self._type_alignment(struct_types[0])
else:
raise TypeError('Not a compiler type: {}'.format(type_obj))
def _type_size(self, type_obj, depth=0):
"""
Returns the size of a type object in bytes.
"""
MAX_DEPTH = 100
if depth >= MAX_DEPTH:
self.error(self.line, self.col,
"Type nested too deeply - potential self-referential type")
type_obj = self._resolve_if_type_name(type_obj)
if type_obj is types.Integer:
return 4
elif type_obj is types.Byte:
return 1
elif isinstance(type_obj, (types.PointerTo, types.FunctionPointer)):
return 4
elif isinstance(type_obj, types.ArrayOf):
# To avoid wasting space on the last element, this pads all the
# elements but the last
base_size = self._type_size(type_obj.type, depth + 1)
return self._array_offset(type_obj, type_obj.count - 1) + base_size
elif isinstance(type_obj, types.Struct):
last_field = list(type_obj.fields)[-1]
last_field_type = type_obj.fields[last_field]
last_field_offset = self._field_offset(type_obj, last_field)
return last_field_offset + self._type_size(last_field_type, depth + 1)
else:
raise TypeError('Not a compiler type: {}'.format(type_obj))
class comment_after:
"""
Wraps a method - after the method executes, something is written to
the log.
"""
def __init__(self, fmt, *args, **kwargs):
self.fmt = fmt
self.args = args
self.kwargs = kwargs
def __call__(self, func):
def wrapper(parent, *args, **kwargs):
x = func(parent, *args, **kwargs)
parent._write_comment(self.fmt, *self.args, **self.kwargs)
return x
return wrapper
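# A minimal, hypothetical usage sketch (not part of this module): the backend class,
# method name and format arguments below are made up purely for illustration.
#
#   class ExampleBackend(ContextMixin, ThirtyTwoMixin):
#       @comment_after('finished emitting prologue for {}', 'main')
#       def _emit_prologue(self):
#           ...
#
# After _emit_prologue runs, the wrapper calls parent._write_comment(fmt, *args),
# so the comment text ends up in the generated output.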
| 34.2625 | 93 | 0.623921 | 2,060 | 16,446 | 4.786893 | 0.195146 | 0.018457 | 0.021904 | 0.012778 | 0.366596 | 0.308488 | 0.229186 | 0.151303 | 0.137511 | 0.112767 | 0 | 0.004124 | 0.292229 | 16,446 | 479 | 94 | 34.334029 | 0.843041 | 0.310167 | 0 | 0.229358 | 0 | 0 | 0.040722 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.211009 | false | 0.004587 | 0.018349 | 0 | 0.40367 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
f45d781494a8e177d3301348e5cd3f98b7503c8a | 1,925 | py | Python | 8/8_9.py | kopsh/python_cookbook | 298c092cd20404a0755e2170776c44a04e8648ad | [
"CNRI-Python"
] | null | null | null | 8/8_9.py | kopsh/python_cookbook | 298c092cd20404a0755e2170776c44a04e8648ad | [
"CNRI-Python"
] | null | null | null | 8/8_9.py | kopsh/python_cookbook | 298c092cd20404a0755e2170776c44a04e8648ad | [
"CNRI-Python"
] | null | null | null | class CheckType:
r"""
    8.9 Creating new class or instance attributes
    Use descriptors to implement parameter type checking
>>> @ParamAssert(a=int, b=list)
... class A:
... def __init__(self, a, b):
... self.a = a
... self.b = b
>>> a = A(1, [])
"""
def __init__(self, name, expected_type):
self.name = name
self.expected_type = expected_type
def __get__(self, instance, owner):
if instance is None:
return self
else:
return instance.__dict__[self.name]
def __set__(self, instance, value):
if not isinstance(value, self.expected_type):
raise TypeError("{} cannot be assigned by {!r}, it`s type is {!r}".format(self.name, value,
self.expected_type))
instance.__dict__[self.name] = value
class ParamAssert:
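    # Class decorator: attaches a CheckType descriptor for each named parameter,
    # so assignments to those attributes are type-checked.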
def __init__(self, **kwargs):
self.kwargs = kwargs
def __call__(self, cls):
for name, expected_type in self.kwargs.items():
setattr(cls, name, CheckType(name, expected_type))
return cls
class Integer:
def __init__(self, name):
self.name = name
def __get__(self, instance, cls):
if instance is None:
return self
else:
return instance.__dict__.get(self.name, None)
def __set__(self, instance, value):
if not isinstance(value, int):
raise TypeError("{} cannot be assigned by {!r}".format(self.name, value))
instance.__dict__[self.name] = value
class Point:
"""
>>> p = Point(0, 0)
>>> print(p.x)
0
>>> p.y = "1"
Traceback (most recent call last):
...
TypeError: y cannot be assigned by '1'
"""
x = Integer('x')
y = Integer('y')
def __init__(self, x, y):
self.x = x
self.y = y
if __name__ == '__main__':
import doctest
doctest.testmod() | 25.666667 | 106 | 0.535584 | 229 | 1,925 | 4.19214 | 0.279476 | 0.083333 | 0.057292 | 0.0625 | 0.361458 | 0.320833 | 0.258333 | 0.189583 | 0.189583 | 0.1 | 0 | 0.006284 | 0.338701 | 1,925 | 75 | 107 | 25.666667 | 0.74784 | 0.156364 | 0 | 0.27907 | 0 | 0 | 0.05642 | 0 | 0 | 0 | 0 | 0 | 0.023256 | 1 | 0.209302 | false | 0 | 0.023256 | 0 | 0.488372 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
f45ec536c2f2748641c051d8785db2394218cb3f | 4,264 | py | Python | samples/RiskManagement/Verification/customer-match-denied-parties-list.py | snavinch/cybersource-rest-samples-python | adb7a6b4b55dff6ac833295192d6677b53003c16 | [
"MIT"
] | 21 | 2019-01-22T17:48:32.000Z | 2022-02-07T17:40:58.000Z | samples/RiskManagement/Verification/customer-match-denied-parties-list.py | broadpay/cybersource-rest-samples-python | f7af6f58c70ea3bf725d34929b40ee4b5fd4d77c | [
"MIT"
] | 10 | 2018-12-03T22:45:17.000Z | 2021-04-19T20:40:14.000Z | samples/RiskManagement/Verification/customer-match-denied-parties-list.py | broadpay/cybersource-rest-samples-python | f7af6f58c70ea3bf725d34929b40ee4b5fd4d77c | [
"MIT"
] | 29 | 2018-11-09T11:44:53.000Z | 2022-03-18T08:56:46.000Z | from CyberSource import *
import os
import json
from importlib.machinery import SourceFileLoader
config_file = os.path.join(os.getcwd(), "data", "Configuration.py")
configuration = SourceFileLoader("module.name", config_file).load_module()
# To delete None values in the input request JSON body
def del_none(d):
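    # Recursively drop keys whose value is None so they are not serialized into the request body.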
for key, value in list(d.items()):
if value is None:
del d[key]
elif isinstance(value, dict):
del_none(value)
return d
def customer_match_denied_parties_list():
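    # Build the client reference, bill-to, ship-to and line-item data,
    # then call the Verification API's export compliance check.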
clientReferenceInformationCode = "verification example"
clientReferenceInformationComments = "Export-basic"
clientReferenceInformationPartnerDeveloperId = "7891234"
clientReferenceInformationPartnerSolutionId = "89012345"
clientReferenceInformationPartner = Riskv1decisionsClientReferenceInformationPartner(
developer_id = clientReferenceInformationPartnerDeveloperId,
solution_id = clientReferenceInformationPartnerSolutionId
)
clientReferenceInformation = Riskv1decisionsClientReferenceInformation(
code = clientReferenceInformationCode,
comments = clientReferenceInformationComments,
partner = clientReferenceInformationPartner.__dict__
)
orderInformationBillToAddress1 = "901 Metro Centre Blvd"
orderInformationBillToAdministrativeArea = "CA"
orderInformationBillToCountry = "US"
orderInformationBillToLocality = "Foster City"
orderInformationBillToPostalCode = "94404"
orderInformationBillToCompanyName = "A & C International Trade, Inc"
orderInformationBillToCompany = Riskv1exportcomplianceinquiriesOrderInformationBillToCompany(
name = orderInformationBillToCompanyName
)
orderInformationBillToFirstName = "ANDREE"
orderInformationBillToLastName = "AGNESE"
orderInformationBillToEmail = "test@domain.com"
orderInformationBillTo = Riskv1exportcomplianceinquiriesOrderInformationBillTo(
address1 = orderInformationBillToAddress1,
administrative_area = orderInformationBillToAdministrativeArea,
country = orderInformationBillToCountry,
locality = orderInformationBillToLocality,
postal_code = orderInformationBillToPostalCode,
company = orderInformationBillToCompany.__dict__,
first_name = orderInformationBillToFirstName,
last_name = orderInformationBillToLastName,
email = orderInformationBillToEmail
)
orderInformationShipToCountry = "IN"
orderInformationShipToFirstName = "DumbelDore"
orderInformationShipToLastName = "Albus"
orderInformationShipTo = Riskv1exportcomplianceinquiriesOrderInformationShipTo(
country = orderInformationShipToCountry,
first_name = orderInformationShipToFirstName,
last_name = orderInformationShipToLastName
)
orderInformationLineItems = []
orderInformationLineItems1 = Riskv1exportcomplianceinquiriesOrderInformationLineItems(
unit_price = "120.50",
quantity = 3,
product_sku = "123456",
product_name = "Qwe",
product_code = "physical_software"
)
orderInformationLineItems.append(orderInformationLineItems1.__dict__)
orderInformation = Riskv1exportcomplianceinquiriesOrderInformation(
bill_to = orderInformationBillTo.__dict__,
ship_to = orderInformationShipTo.__dict__,
line_items = orderInformationLineItems
)
requestObj = ValidateExportComplianceRequest(
client_reference_information = clientReferenceInformation.__dict__,
order_information = orderInformation.__dict__
)
requestObj = del_none(requestObj.__dict__)
requestObj = json.dumps(requestObj)
try:
config_obj = configuration.Configuration()
client_config = config_obj.get_configuration()
api_instance = VerificationApi(client_config)
return_data, status, body = api_instance.validate_export_compliance(requestObj)
print("\nAPI RESPONSE CODE : ", status)
print("\nAPI RESPONSE BODY : ", body)
return return_data
except Exception as e:
print("\nException when calling VerificationApi->validate_export_compliance: %s\n" % e)
if __name__ == "__main__":
customer_match_denied_parties_list()
| 38.414414 | 97 | 0.754221 | 290 | 4,264 | 10.793103 | 0.565517 | 0.006709 | 0.012141 | 0.016613 | 0.019169 | 0 | 0 | 0 | 0 | 0 | 0 | 0.013537 | 0.185741 | 4,264 | 110 | 98 | 38.763636 | 0.887961 | 0.011257 | 0 | 0 | 0 | 0 | 0.083294 | 0.010441 | 0 | 0 | 0 | 0 | 0 | 1 | 0.022222 | false | 0 | 0.044444 | 0 | 0.088889 | 0.033333 | 0 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
f45faefa310c1d7891d6abffc0a5f0a804569172 | 219 | py | Python | run.py | aarvanitii/adminWebsite | cf9a07c287571ebbc9954326806b578f6d19a11b | [
"MIT"
] | null | null | null | run.py | aarvanitii/adminWebsite | cf9a07c287571ebbc9954326806b578f6d19a11b | [
"MIT"
] | null | null | null | run.py | aarvanitii/adminWebsite | cf9a07c287571ebbc9954326806b578f6d19a11b | [
"MIT"
] | null | null | null | """
This is where the web application starts running
"""
from app.index import create_app
app = create_app()
if __name__ == "__main__":
app.secret_key = 'mysecret'
app.run(port=8080, host="0.0.0.0", debug=True) | 24.333333 | 50 | 0.694064 | 35 | 219 | 4.028571 | 0.742857 | 0.042553 | 0.042553 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.043478 | 0.159817 | 219 | 9 | 50 | 24.333333 | 0.722826 | 0.219178 | 0 | 0 | 0 | 0 | 0.140244 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.2 | 0 | 0.2 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
f46ac6dc3031a12623e226f71b58aeded4ff617c | 440 | py | Python | config/api_urls.py | elcolie/battleship | 71b0a963c5b24ae243a193749813fec321d5f4d8 | [
"MIT"
] | null | null | null | config/api_urls.py | elcolie/battleship | 71b0a963c5b24ae243a193749813fec321d5f4d8 | [
"MIT"
] | 3 | 2018-04-22T04:40:25.000Z | 2020-06-05T19:10:08.000Z | config/api_urls.py | elcolie/battleship | 71b0a963c5b24ae243a193749813fec321d5f4d8 | [
"MIT"
] | null | null | null | from rest_framework import routers
from boards.api.viewsets import BoardViewSet
from fleets.api.viewsets import FleetViewSet
from missiles.api.viewsets import MissileViewSet
app_name = 'api'
router = routers.DefaultRouter()
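# Each registered viewset gets standard list/detail routes under its prefix; router.urls collects them for inclusion.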
router.register(r'boards', BoardViewSet, base_name='board')
router.register(r'fleets', FleetViewSet, base_name='fleet')
router.register(r'missiles', MissileViewSet, base_name='missile')
urlpatterns = router.urls
| 29.333333 | 65 | 0.811364 | 56 | 440 | 6.285714 | 0.446429 | 0.09375 | 0.144886 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.086364 | 440 | 14 | 66 | 31.428571 | 0.875622 | 0 | 0 | 0 | 0 | 0 | 0.090909 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.4 | 0 | 0.4 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 |
f47301fb50cbf2affb241d7c61d027660a0014ae | 24,598 | py | Python | messenger/client/messenger.py | marik348/python-messenger | 6c1916b0df439cd997cb6e9376221fe587c3f1c1 | [
"MIT"
] | 2 | 2021-05-24T08:44:51.000Z | 2022-03-17T10:41:48.000Z | messenger/client/messenger.py | marik348/python-messenger | 6c1916b0df439cd997cb6e9376221fe587c3f1c1 | [
"MIT"
] | 1 | 2020-11-28T12:08:25.000Z | 2020-11-28T12:08:25.000Z | messenger/client/messenger.py | marik348/python-messegner | 6c1916b0df439cd997cb6e9376221fe587c3f1c1 | [
"MIT"
] | 1 | 2021-05-24T08:50:42.000Z | 2021-05-24T08:50:42.000Z | from requests import get, post, exceptions
from datetime import datetime
from PyQt5 import QtWidgets, QtCore
from PyQt5.QtWidgets import QMessageBox
from PyQt5.QtGui import QFont
from qtwidgets import PasswordEdit
from client_commands import (help_client, online, status, myself, reg, role, ban, unban)
from client_content import (get_warning_messages, get_client_commands, get_message_box_text, get_message_style)
from click_label import clickable
from client_ui import Ui_Messenger
from preferences import Preferences
from style_sheet import load_stylesheet
class Messenger(QtWidgets.QMainWindow, Ui_Messenger):
"""
    The messenger object acts as the main window and is driven by the client.
    It shows the UI and is responsible for the UX.
    The UI is separated into 3 main parts with these indexes: 0 - login form, 1 - registration form, 2 - chat.
    Every 5 seconds it requests the server status.
    Every second it shows new messages, if the user is logged in.
    Under the main label "Python Messenger" there is the server status, which displays whether the server is working;
    if it is, you can hover over it to see the full server status.
    In case of disconnection from the server, it shows a server-off message and navigates to the login form.
    The server IP address can be changed in the preferences menu.
    :param translate: Qt translate function used to set localized UI text
:param password_line1: input line with icons to show/hide password entries on login form
:param password_line2: input line with icons to show/hide password entries on registration form
:param username: user nickname string
:param password: user password string
:param last_message_time: last time of getting messages, defaults to 0
:param max_text_len: maximum text message length to send in chat, defaults to 250
:param server_IP: server IPv4 string
:param message_style: style for messages defined in :func:`get_message_style`
:param warning_messages: dict of warning messages defined in :func:`get_warning_messages`
:param message_box_text: dict of content for message box defined in :func:`get_message_box_text`
:param client_commands: list of dicts with client-side commands defined in :func:`get_client_commands`
:param run_client_command: dict, where key is the name of client command and value is the function of this command
:param server_commands: list of dicts with server-side commands defined in :func:`get_server_commands`
:param run_server_command: dict, where key is the name of server command and value is the function of this command
:param timer_get_messages: timer, which every second runs :func:`get_messages`
:param timer_get_status: timer, which every 5 seconds runs :func:`get_status`
"""
def __init__(self, parent=None):
"""Initialize messenger object."""
super().__init__(parent)
self.setupUi(self)
self.translate = QtCore.QCoreApplication.translate
self.password_line1 = PasswordEdit(True, self.login_page)
self.password_line2 = PasswordEdit(True, self.registration_page)
self.modify_password_lines()
# Connect buttons to the methods.
self.send_button.pressed.connect(self.send)
self.sign_up_button.pressed.connect(self.sign_up_user)
self.login_button.pressed.connect(self.login_user)
# Connect actions to the methods.
self.action_shortcuts.triggered.connect(self.show_shortcuts_box)
self.action_commands.triggered.connect(self.show_commands_box)
self.action_about.triggered.connect(self.show_about_box)
self.action_contacts.triggered.connect(self.show_contacts_box)
self.action_preferences.triggered.connect(self.open_preferences_window)
self.action_logout.triggered.connect(self.logout)
self.action_close.triggered.connect(self.close)
# Filter shortcuts and text overflow.
self.plain_text_edit.installEventFilter(self)
self.username = None
self.password = None
self.last_message_time = 0
self.max_text_len = 250
self.server_IP = '0.0.0.0:9000'
# Load client content.
self.message_style = get_message_style()
self.warning_messages = get_warning_messages()
self.message_box_text = get_message_box_text()
# Load commands.
self.client_commands = get_client_commands()
self.run_client_command = {'close': self.close,
'logout': self.logout,
'reload': self.reload}
self.server_commands = []
self.run_server_command = {}
self.timer_get_messages = QtCore.QTimer()
self.timer_get_messages.timeout.connect(self.get_messages)
self.timer_get_messages.start(1000)
self.timer_get_status = QtCore.QTimer()
self.timer_get_status.timeout.connect(self.get_status)
self.timer_get_status.start(5000)
clickable(self.go_to_sign_up).connect(self.go_to_registration_form)
clickable(self.go_to_login).connect(self.go_to_login_form)
self.get_status()
def eventFilter(self, obj, event):
"""
        Filters Enter key presses and message text length.
        If the Enter key is pressed, sends the user's message.
        If the message length exceeds the maximum, the extra text is trimmed.
"""
if event.type() == QtCore.QEvent.KeyPress and obj is self.plain_text_edit:
text = self.plain_text_edit.toPlainText()
if event.key() == QtCore.Qt.Key_Return and self.plain_text_edit.hasFocus():
self.send()
return True
elif len(text) > self.max_text_len:
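                # Trim the overflow and move the cursor back to the length limit.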
text = text[:self.max_text_len]
self.plain_text_edit.setPlainText(text)
cursor = self.plain_text_edit.textCursor()
cursor.setPosition(self.max_text_len)
self.plain_text_edit.setTextCursor(cursor)
return True
return super().eventFilter(obj, event)
def closeEvent(self, event):
"""
        Shows a question message box asking whether to close the messenger.
        Asks the user whether they really want to close the messenger; if yes,
        marks the user as logged out and closes the messenger.
        Otherwise, ignores the close event.
:param event: event to close the messenger
"""
reply = QMessageBox.question(self, 'Quit', self.message_box_text["close"],
QMessageBox.Yes | QMessageBox.No, QMessageBox.Yes)
# User closes the messenger and is logged in.
if reply == QMessageBox.Yes and self.stacked_widget.currentIndex() == 2:
try:
post(
f'http://{self.server_IP}/logout',
json={"username": self.username}, verify=False
)
except exceptions.RequestException as e:
raise SystemExit
event.accept()
# User closes the messenger and is logged out.
elif reply == QMessageBox.Yes:
event.accept()
else:
event.ignore()
def logout(self):
"""
        Shows a question message box asking whether to log out from the account.
        Asks the user whether they really want to log out; if yes,
        marks the logout and navigates to the login form.
        Otherwise, ignores the logout event.
"""
reply = QMessageBox.question(self, 'Logout', self.message_box_text["logout"],
QMessageBox.Yes | QMessageBox.No, QMessageBox.Yes)
if reply == QMessageBox.Yes:
try:
post(
f'http://{self.server_IP}/logout',
json={"username": self.username}, verify=False
)
except exceptions.RequestException as e:
self.show_server_off_box()
self.clear_user_data()
return
self.go_to_login_form()
self.clear_user_data()
self.action_logout.setEnabled(False)
self.action_commands.setEnabled(False)
self.action_preferences.setEnabled(True)
else:
return
def modify_password_lines(self):
"""Modifies and appears password lines."""
geometry = QtCore.QRect(60, 200, 291, 41)
font = QFont()
font.setPointSize(14)
self.password_line1.setGeometry(geometry)
self.password_line1.setFont(font)
self.password_line1.setEchoMode(QtWidgets.QLineEdit.Password)
self.password_line1.setObjectName("password_line1")
self.password_line1.setPlaceholderText(self.translate("Messenger", "Password"))
self.password_line2.setGeometry(geometry)
self.password_line2.setFont(font)
self.password_line2.setEchoMode(QtWidgets.QLineEdit.Password)
self.password_line2.setObjectName("password_line2")
self.password_line2.setPlaceholderText(self.translate("Messenger", "Enter Your Password"))
def open_preferences_window(self):
"""Opens settings window."""
settings = Preferences(self)
if settings.exec():
self.server_IP = settings.server_IP.text()
def clear_user_data(self):
"""Clears user data after logout."""
self.username = None
self.plain_text_edit.clear()
self.text_browser.clear()
self.last_message_time = 0
def reload(self):
"""Reloads all messages and deletes commands output."""
self.text_browser.clear()
self.last_message_time = 0
def go_to_registration_form(self):
"""Navigates to registration menu."""
self.stacked_widget.setCurrentIndex(1)
def go_to_login_form(self):
"""Navigates to login menu."""
self.stacked_widget.setCurrentIndex(0)
def go_to_chat(self):
"""Navigates to chat."""
self.get_server_commands()
self.stacked_widget.setCurrentIndex(2)
self.action_logout.setEnabled(True)
self.action_commands.setEnabled(True)
self.action_preferences.setEnabled(False)
self.plain_text_edit.setFocus()
self.clear_credentials()
def clear_credentials(self):
"""Clears login and password lines after log in or sign up."""
self.password_line1.clear()
self.login_line1.clear()
self.password_line2.clear()
self.login_line2.clear()
self.password = None
def show_about_box(self):
"""Shows message box with content about messenger."""
QMessageBox.information(self, 'About', self.message_box_text["about"])
def show_contacts_box(self):
"""Shows message box with contacts information."""
QMessageBox.information(self, 'Contacts', self.message_box_text["contacts"])
def show_server_off_box(self):
"""Shows message box about server off information."""
QMessageBox.critical(self, 'Opsss...', self.message_box_text["server_is_off"])
self.go_to_login_form()
def show_shortcuts_box(self):
"""Shows message box with shortcuts."""
QMessageBox.information(self, 'Shortcuts', self.message_box_text["shortcuts"])
def show_commands_box(self):
"""Shows message box with available commands."""
output = help_client(self.client_commands, self.server_commands, [])
output = output.replace('=', '')
QMessageBox.information(self, 'Commands', output)
def sign_up_user(self):
"""
Registers user.
Verifies correctness of login and password input.
Sends request to sign up user.
"""
# Clear registration form.
self.login_error2.setText(self.translate("Messenger", self.warning_messages['empty_str']))
self.password_error2.setText(self.translate("Messenger", self.warning_messages['empty_str']))
self.login_line2.setStyleSheet("border: 1px solid #B8B5B2")
self.password_line2.setStyleSheet("border: 1px solid #B8B5B2")
self.username = self.login_line2.text()
self.password = self.password_line2.text()
# Check that form isn't empty.
if not self.username:
if not self.password:
self.login_error2.setText(self.translate("Messenger", self.warning_messages['login_required']))
self.password_error2.setText(self.translate("Messenger", self.warning_messages['password_required']))
self.login_line2.setStyleSheet("border: 1px solid red")
self.password_line2.setStyleSheet("border: 1px solid red")
return
else:
self.login_error2.setText(self.translate("Messenger", self.warning_messages['login_required']))
self.login_line2.setStyleSheet("border: 1px solid red")
return
else:
if not self.password:
self.password_error2.setText(self.translate("Messenger", self.warning_messages['password_required']))
self.password_line2.setStyleSheet("border: 1px solid red")
return
if not self.username.isalnum():
self.login_error2.setText(self.translate("Messenger", self.warning_messages['not_alphanumeric']))
self.login_error2.adjustSize()
self.login_line2.setStyleSheet("border: 1px solid red")
return
try:
response = post(
f'http://{self.server_IP}/sign_up',
auth=(self.username, self.password),
verify=False
)
except exceptions.RequestException as e:
self.show_server_off_box()
self.clear_credentials()
return
# Process bad request.
if response.json()['login_out_of_range']:
self.login_error2.setText(self.translate("Messenger", self.warning_messages['login_out_of_range']))
self.login_error2.adjustSize()
self.login_line2.setStyleSheet("border: 1px solid red")
return
elif response.json()['password_out_of_range']:
self.password_error2.setText(self.translate("Messenger", self.warning_messages['password_out_of_range']))
self.password_error2.adjustSize()
self.password_line2.setStyleSheet("border: 1px solid red")
return
elif not response.json()['ok']:
self.login_error2.setText(self.translate("Messenger", self.warning_messages['registered']))
self.login_error2.adjustSize()
self.login_line2.setStyleSheet("border: 1px solid red")
return
self.go_to_chat()
def login_user(self):
"""
Allows user to log in.
Verifies correctness of login and password input.
Sends request to authenticate user.
"""
# Clear login form.
self.login_error1.setText(self.translate("Messenger", self.warning_messages['empty_str']))
self.password_error1.setText(self.translate("Messenger", self.warning_messages['empty_str']))
self.login_line1.setStyleSheet("border: 1px solid #B8B5B2")
self.password_line1.setStyleSheet("border: 1px solid #B8B5B2")
self.username = self.login_line1.text()
self.password = self.password_line1.text()
# Check that form isn't empty.
if not self.username:
if not self.password:
self.login_error1.setText(self.translate("Messenger", self.warning_messages['login_required']))
self.password_error1.setText(self.translate("Messenger", self.warning_messages['password_required']))
self.login_line1.setStyleSheet("border: 1px solid red")
self.password_line1.setStyleSheet("border: 1px solid red")
return
else:
self.login_error1.setText(self.translate("Messenger", self.warning_messages['login_required']))
self.login_line1.setStyleSheet("border: 1px solid red")
return
else:
if not self.password:
self.password_error1.setText(self.translate("Messenger", self.warning_messages['password_required']))
self.password_line1.setStyleSheet("border: 1px solid red")
return
try:
response = post(
f'http://{self.server_IP}/auth',
auth=(self.username, self.password),
verify=False
)
except exceptions.RequestException as e:
self.show_server_off_box()
self.clear_credentials()
return
# Process bad request.
if not response.json()['exist']:
self.login_error1.setText(self.translate("Messenger", self.warning_messages['invalid_login']))
self.login_line1.setStyleSheet("border: 1px solid red")
return
if not response.json()['match']:
self.password_error1.setText(self.translate("Messenger", self.warning_messages['invalid_password']))
self.password_line1.setStyleSheet("border: 1px solid red")
return
if response.json()['banned']:
self.login_error1.setText(self.translate("Messenger", self.warning_messages['banned']))
self.login_line1.setStyleSheet("border: 1px solid red")
return
self.go_to_chat()
def get_server_commands(self):
"""Sends request to get available server-side commands for user."""
try:
response = post(
f'http://{self.server_IP}/command',
json={"username": self.username, "command": 'help'}, verify=False
)
except exceptions.RequestException as e:
self.clear_user_data()
self.show_server_off_box()
return
if not response.json()['ok']:
self.show_text(response.json()['output'] + "<br>")
self.plain_text_edit.clear()
return
self.server_commands = response.json()['output']
# Connect command name with function.
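# Note: each server-side command name (except 'help') is assumed to match a
# module-level handler function in this client module, looked up via globals().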
for cmd in self.server_commands:
    if cmd['name'] != 'help':
        self.run_server_command[cmd['name']] = globals()[cmd['name']]
def send(self):
"""Separates and directs messages & commands to relevant function."""
self.plain_text_edit.setFocus()
text = self.plain_text_edit.toPlainText()
text = text.strip()
# Strip angle brackets so the text cannot inject HTML markup.
text = text.replace('</', '')
text = text.replace('<', '')
text = text.replace('>', '')
if len(text) > self.max_text_len:
text = text[:self.max_text_len]
if not text:
return
elif text.startswith('/'):
self.send_command(text[1:])
else:
self.send_message(text)
def send_message(self, text):
"""
Stores message on the server.
:param text: text of message
"""
try:
post(
f'http://{self.server_IP}/send_message',
json={"username": self.username, "text": text},
verify=False
)
except exceptions.RequestException as e:
self.clear_user_data()
self.show_server_off_box()
return
self.plain_text_edit.clear()
self.plain_text_edit.repaint()
def send_command(self, cmd_string):
"""
Executes command.
If it's client-side command, executes directly from client.
If it's server-side command, sends command to execute
on the server and processes the output.
:param cmd_string: command with parameters to execute
"""
tokens = cmd_string.split()
command = tokens[0]
args = tokens[1:] if len(tokens) > 1 else None
# Run client-side command.
if command in [cmd['name'] for cmd in self.client_commands]:
self.run_client_command.get(command)()
self.plain_text_edit.clear()
return
# Invalid command name.
elif command not in [cmd['name'] for cmd in self.server_commands]:
self.show_text(f"<b>Error:</b> Command '/{command}' not found.<br>"
f"Try '/help' to list all available commands :)<br>")
self.plain_text_edit.clear()
return
# Process 'help' command.
elif command == 'help':
output = help_client(self.client_commands, self.server_commands, args)
self.show_text(output)
self.plain_text_edit.clear()
return
try:
response = post(
f'http://{self.server_IP}/command',
json={"username": self.username, "command": cmd_string}, verify=False
)
except exceptions.RequestException as e:
self.clear_user_data()
self.show_server_off_box()
return
if not response.json()['ok']:
self.show_text("<b>Error:</b> " + response.json()['output'] + "<br>")
self.plain_text_edit.clear()
return
# Assign command function & run it with output from server.
run_command = self.run_server_command.get(command)
output = run_command(response.json()['output'], args)
self.show_text(output)
self.plain_text_edit.clear()
self.plain_text_edit.repaint()
def get_messages(self):
"""Sends request to get new messages and appears them in style."""
if not self.stacked_widget.currentIndex() == 2:
return
try:
response = get(
f'http://{self.server_IP}/get_messages',
params={'after': self.last_message_time},
verify=False
)
data = response.json()
except exceptions.RequestException as e:
self.clear_user_data()
self.show_server_off_box()
return
# Generate message.
for message in data['messages']:
# float -> datetime.
beauty_time = datetime.fromtimestamp(message['time'])
beauty_time = beauty_time.strftime('%d/%m %H:%M:%S')
# The current user's own messages are shown on the right side.
if message['username'] == self.username:
self.show_text(self.message_style['begin'] + beauty_time + ' ' + message['username']
+ self.message_style['middle'] + message['text'] + self.message_style['end'])
self.last_message_time = message['time']
else:
self.show_text(message['username'] + ' ' + beauty_time)
self.show_text(message['text'] + "<br>")
self.last_message_time = message['time']
def get_status(self):
"""Sends request to get server status."""
try:
response = get(
f'http://{self.server_IP}/status',
verify=False
)
status = response.json()
# Server is off.
except exceptions.RequestException as e:
self.server_status.setText(self.translate("Messenger", '<p style="font-size:12px">'
'<img src="images/server-is-off.png"> Offline</p>'))
tool_tip = f"Can't connect to the server<br>" \
f"Maybe server isn't run or you've entered an invalid IP address in Preferences"
self.server_status.setToolTip(tool_tip)
return
# Server is on.
self.server_status.setText(self.translate("Messenger", '<p style="font-size:12px">'
'<img src="images/server-is-on.png"> Online</p>'))
tool_tip = f"Server is working<br>" \
f"Users online: {status['users_online']}<br>" \
f"Date and time: {status['time']}<br>" \
f"Registered users: {status['users_count']}<br>" \
f"Written messages: {status['messages_count']}"
self.server_status.setToolTip(tool_tip)
def show_text(self, text):
"""Shows given text in messenger chat."""
self.text_browser.append(text)
self.text_browser.repaint()
app = QtWidgets.QApplication([])
window = Messenger()
app.setStyleSheet(load_stylesheet())
window.show()
app.exec_()
| 38.982567 | 120 | 0.622693 | 2,858 | 24,598 | 5.187544 | 0.138209 | 0.034804 | 0.034129 | 0.041076 | 0.462701 | 0.415756 | 0.361122 | 0.334345 | 0.32153 | 0.271887 | 0 | 0.008887 | 0.277218 | 24,598 | 630 | 121 | 39.044444 | 0.825018 | 0.186275 | 0 | 0.429293 | 0 | 0 | 0.112184 | 0.011809 | 0 | 0 | 0 | 0 | 0 | 1 | 0.065657 | false | 0.113636 | 0.030303 | 0 | 0.179293 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
f482d9773506167246440d9307b62395f61caa1a | 2,353 | py | Python | ais3-pre-exam-2022-writeup/Misc/JeetQode/chall/problems/astmath.py | Jimmy01240397/balsn-2021-writeup | 91b71dfbddc1c214552280b12979a82ee1c3cb7e | [
"MIT"
] | null | null | null | ais3-pre-exam-2022-writeup/Misc/JeetQode/chall/problems/astmath.py | Jimmy01240397/balsn-2021-writeup | 91b71dfbddc1c214552280b12979a82ee1c3cb7e | [
"MIT"
] | null | null | null | ais3-pre-exam-2022-writeup/Misc/JeetQode/chall/problems/astmath.py | Jimmy01240397/balsn-2021-writeup | 91b71dfbddc1c214552280b12979a82ee1c3cb7e | [
"MIT"
] | null | null | null | from problem import Problem
from typing import Any, Tuple
from random import randint
import ast
import json
def gen_num():
return str(randint(1, 9))
def gen_op():
return "+-*/"[randint(0, 3)]
def gen_expr(depth):
if randint(0, depth) == 0:
l = gen_expr(depth + 1)
r = gen_expr(depth + 1)
op = gen_op()
return f"({l}{op}{r})"
return f"({gen_num()})"
class ASTMath(Problem):
@property
def name(self) -> str:
return "AST Math"
@property
def desciption(self) -> str:
return """
Input: An AST of Python's arithmetic expression (only +,-,*,/)
Output: Result number
Examples:
Input: {"body": {"left": {"value": 1, "kind": null, "lineno": 1, "col_offset": 0, "end_lineno": 1, "end_col_offset": 1}, "op": "<_ast.Add object at 0x7f0387ccde20>", "right": {"value": 2, "kind": null, "lineno": 1, "col_offset": 2, "end_lineno": 1, "end_col_offset": 3}, "lineno": 1, "col_offset": 0, "end_lineno": 1, "end_col_offset": 3}}
Output: 3
Input: {"body": {"left": {"left": {"value": 8, "kind": null, "lineno": 1, "col_offset": 1, "end_lineno": 1, "end_col_offset": 2}, "op": "<_ast.Mult object at 0x7f20eb76aee0>", "right": {"value": 7, "kind": null, "lineno": 1, "col_offset": 3, "end_lineno": 1, "end_col_offset": 4}, "lineno": 1, "col_offset": 1, "end_lineno": 1, "end_col_offset": 4}, "op": "<_ast.Sub object at 0x7f20eb76ae80>", "right": {"left": {"value": 6, "kind": null, "lineno": 1, "col_offset": 7, "end_lineno": 1, "end_col_offset": 8}, "op": "<_ast.Mult object at 0x7f20eb76aee0>", "right": {"value": 3, "kind": null, "lineno": 1, "col_offset": 9, "end_lineno": 1, "end_col_offset": 10}, "lineno": 1, "col_offset": 7, "end_lineno": 1, "end_col_offset": 10}, "lineno": 1, "col_offset": 0, "end_lineno": 1, "end_col_offset": 11}}
Output: 38
"""
@property
def rounds(self) -> int:
return 10
def dumps(self, x):
return json.dumps(
x, default=lambda x: x.__dict__ if len(x.__dict__) else str(x)
)
def generate_testcase(self) -> Tuple[bool, Any]:
l = gen_expr(1)
r = gen_expr(1)
op = gen_op()
expr = f"{l}{op}{r}"
try:
result = eval(expr)
except ZeroDivisionError:
return self.generate_testcase()
return ast.parse(expr, mode="eval"), result
| 37.349206 | 800 | 0.592435 | 344 | 2,353 | 3.866279 | 0.258721 | 0.105263 | 0.075188 | 0.120301 | 0.394737 | 0.394737 | 0.322556 | 0.286466 | 0.224812 | 0.224812 | 0 | 0.050674 | 0.211645 | 2,353 | 62 | 801 | 37.951613 | 0.666307 | 0 | 0 | 0.104167 | 0 | 0.041667 | 0.557161 | 0 | 0 | 0 | 0.023799 | 0 | 0 | 1 | 0.166667 | false | 0 | 0.104167 | 0.125 | 0.5 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 |
be2e7ef040dc5a54cf6259bfaf5348f1c97d85ac | 2,061 | py | Python | prog_vae/prog_encoder/prog_encoder.py | Hanjun-Dai/sdvae | bd26ea949c496419634fd2cf4802fc8e19a9194c | [
"MIT"
] | 70 | 2018-02-24T07:50:59.000Z | 2021-12-27T02:42:37.000Z | prog_vae/prog_encoder/prog_encoder.py | Hanjun-Dai/sdvae | bd26ea949c496419634fd2cf4802fc8e19a9194c | [
"MIT"
] | 7 | 2018-05-31T00:50:19.000Z | 2021-09-28T11:58:22.000Z | prog_vae/prog_encoder/prog_encoder.py | Hanjun-Dai/sdvae | bd26ea949c496419634fd2cf4802fc8e19a9194c | [
"MIT"
] | 19 | 2019-01-11T10:56:00.000Z | 2022-03-23T23:09:39.000Z | #!/usr/bin/env python
from __future__ import print_function
import os
import sys
import csv
import numpy as np
import math
import random
from collections import defaultdict
import torch
from torch.autograd import Variable
from torch.nn.parameter import Parameter
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
sys.path.append( '%s/../prog_common' % os.path.dirname(os.path.realpath(__file__)) )
from prog_util import DECISION_DIM
from cmd_args import cmd_args
from pytorch_initializer import weights_init
sys.path.append( '%s/../cfg_parser' % os.path.dirname(os.path.realpath(__file__)) )
import cfg_parser as parser
class CNNEncoder(nn.Module):
def __init__(self, max_len, latent_dim):
super(CNNEncoder, self).__init__()
self.latent_dim = latent_dim
self.max_len = max_len
self.conv1 = nn.Conv1d(DECISION_DIM, cmd_args.c1, cmd_args.c1)
self.conv2 = nn.Conv1d(cmd_args.c1, cmd_args.c2, cmd_args.c2)
self.conv3 = nn.Conv1d(cmd_args.c2, cmd_args.c3, cmd_args.c3)
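# Each Conv1d above uses the default stride 1 and no padding, so a kernel of
# size k shortens the sequence length by k - 1; last_conv_size below accounts
# for the three successive reductions. Note that the same cmd_args value is
# reused as both the output-channel count and the kernel size of each layer.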
self.last_conv_size = max_len - cmd_args.c1 + 1 - cmd_args.c2 + 1 - cmd_args.c3 + 1
self.w1 = nn.Linear(self.last_conv_size * cmd_args.c3, cmd_args.dense)
self.mean_w = nn.Linear(cmd_args.dense, latent_dim)
self.log_var_w = nn.Linear(cmd_args.dense, latent_dim)
weights_init(self)
def forward(self, x_cpu):
if cmd_args.mode == 'cpu':
batch_input = Variable(torch.from_numpy(x_cpu))
else:
batch_input = Variable(torch.from_numpy(x_cpu).cuda())
h1 = self.conv1(batch_input)
h1 = F.relu(h1)
h2 = self.conv2(h1)
h2 = F.relu(h2)
h3 = self.conv3(h2)
h3 = F.relu(h3)
# h3 = torch.transpose(h3, 1, 2).contiguous()
flatten = h3.view(x_cpu.shape[0], -1)
h = self.w1(flatten)
h = F.relu(h)
z_mean = self.mean_w(h)
z_log_var = self.log_var_w(h)
return (z_mean, z_log_var)
if __name__ == '__main__':
pass
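# Usage sketch (hypothetical values; assumes cmd_args provides c1, c2, c3,
# dense and mode == 'cpu'):
# encoder = CNNEncoder(max_len=300, latent_dim=56)
# z_mean, z_log_var = encoder(np.zeros((4, DECISION_DIM, 300), dtype=np.float32))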
| 30.308824 | 91 | 0.6623 | 324 | 2,061 | 3.935185 | 0.305556 | 0.098824 | 0.028235 | 0.021961 | 0.216471 | 0.152157 | 0.152157 | 0.103529 | 0 | 0 | 0 | 0.027586 | 0.226104 | 2,061 | 67 | 92 | 30.761194 | 0.771787 | 0.031053 | 0 | 0 | 0 | 0 | 0.022055 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.038462 | false | 0.019231 | 0.346154 | 0 | 0.423077 | 0.019231 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 |
be357d6f3c1ddf5962bf29bb44f0430102e3f1c8 | 7,741 | py | Python | neutron_lbaas/drivers/driver_mixins.py | containers-kraken/neutron-lbaas | 43fbc34cc90512e33202bc4187ccf712dda6a782 | [
"Apache-2.0"
] | null | null | null | neutron_lbaas/drivers/driver_mixins.py | containers-kraken/neutron-lbaas | 43fbc34cc90512e33202bc4187ccf712dda6a782 | [
"Apache-2.0"
] | null | null | null | neutron_lbaas/drivers/driver_mixins.py | containers-kraken/neutron-lbaas | 43fbc34cc90512e33202bc4187ccf712dda6a782 | [
"Apache-2.0"
] | null | null | null | # Copyright 2014 A10 Networks
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
from neutron.plugins.common import constants
from oslo_log import log as logging
import six
from neutron_lbaas.db.loadbalancer import models
from neutron_lbaas.services.loadbalancer import constants as lb_const
from neutron_lbaas.services.loadbalancer import data_models
LOG = logging.getLogger(__name__)
@six.add_metaclass(abc.ABCMeta)
class BaseManagerMixin(object):
def __init__(self, driver):
self.driver = driver
@abc.abstractproperty
def db_delete_method(self):
pass
@abc.abstractmethod
def create(self, context, obj):
pass
@abc.abstractmethod
def update(self, context, obj_old, obj):
pass
@abc.abstractmethod
def delete(self, context, obj):
pass
def successful_completion(self, context, obj, delete=False,
lb_create=False):
"""
Sets the provisioning_status of the load balancer and obj to
ACTIVE. Should be called last in the implementor's BaseManagerMixin
methods for successful runs.
:param context: neutron context
:param obj: instance of a
neutron_lbaas.services.loadbalancer.data_model
:param delete: set True if being called from a delete method. Will
most likely result in the obj being deleted from the db.
:param lb_create: set True if this is being called after a successful
load balancer create.
"""
LOG.debug("Starting successful_completion method after a successful "
"driver action.")
obj_sa_cls = data_models.DATA_MODEL_TO_SA_MODEL_MAP[obj.__class__]
if delete:
# Check if driver is responsible for vip allocation. If the driver
# is responsible, then it is also responsible for cleaning it up.
# At this point, the VIP should already be cleaned up, so we are
# just doing neutron lbaas db cleanup.
if (obj == obj.root_loadbalancer and
self.driver.load_balancer.allocates_vip):
# NOTE(blogan): this is quite dumb to do but it is necessary
# so that a false negative pep8 error does not get thrown. An
# "unexpected-keyword-argument" pep8 error occurs bc
# self.db_delete_method is a @property method that returns a
# method.
kwargs = {'delete_vip_port': False}
self.db_delete_method(context, obj.id, **kwargs)
else:
self.db_delete_method(context, obj.id)
if obj == obj.root_loadbalancer and delete:
# Load balancer was deleted and no longer exists
return
lb_op_status = None
lb_p_status = constants.ACTIVE
if obj == obj.root_loadbalancer:
# only set the status to online if this an operation on the
# load balancer
lb_op_status = lb_const.ONLINE
# Update the load balancer's vip address and vip port id if the driver
# was responsible for allocating the vip.
if (self.driver.load_balancer.allocates_vip and lb_create and
isinstance(obj, data_models.LoadBalancer)):
self.driver.plugin.db.update_loadbalancer(
context, obj.id, {'vip_address': obj.vip_address,
'vip_port_id': obj.vip_port_id})
self.driver.plugin.db.update_status(
context, models.LoadBalancer, obj.root_loadbalancer.id,
provisioning_status=lb_p_status,
operating_status=lb_op_status)
if obj == obj.root_loadbalancer or delete:
# Do not want to update the status of the load balancer again
# Or the obj was deleted from the db so no need to update the
# statuses
return
obj_op_status = lb_const.ONLINE
if isinstance(obj, data_models.HealthMonitor):
# Health Monitor does not have an operating status
obj_op_status = None
LOG.debug("Updating object of type {0} with id of {1} to "
"provisioning_status = {2}, operating_status = {3}".format(
obj.__class__, obj.id, constants.ACTIVE, obj_op_status))
self.driver.plugin.db.update_status(
context, obj_sa_cls, obj.id,
provisioning_status=constants.ACTIVE,
operating_status=obj_op_status)
def failed_completion(self, context, obj):
"""
Sets the provisioning status of the obj to ERROR. If obj is a
loadbalancer it will be set to ERROR, otherwise set to ACTIVE. Should
be called whenever something goes wrong (raised exception) in an
implementor's BaseManagerMixin methods.
:param context: neutron context
:param obj: instance of a
neutron_lbaas.services.loadbalancer.data_model
"""
LOG.debug("Starting failed_completion method after a failed driver "
"action.")
if isinstance(obj, data_models.LoadBalancer):
LOG.debug("Updating load balancer {0} to provisioning_status = "
"{1}, operating_status = {2}.".format(
obj.root_loadbalancer.id, constants.ERROR,
lb_const.OFFLINE))
self.driver.plugin.db.update_status(
context, models.LoadBalancer, obj.root_loadbalancer.id,
provisioning_status=constants.ERROR,
operating_status=lb_const.OFFLINE)
return
obj_sa_cls = data_models.DATA_MODEL_TO_SA_MODEL_MAP[obj.__class__]
LOG.debug("Updating object of type {0} with id of {1} to "
"provisioning_status = {2}, operating_status = {3}".format(
obj.__class__, obj.id, constants.ERROR,
lb_const.OFFLINE))
self.driver.plugin.db.update_status(
context, obj_sa_cls, obj.id,
provisioning_status=constants.ERROR,
operating_status=lb_const.OFFLINE)
LOG.debug("Updating load balancer {0} to "
"provisioning_status = {1}".format(obj.root_loadbalancer.id,
constants.ACTIVE))
self.driver.plugin.db.update_status(
context, models.LoadBalancer, obj.root_loadbalancer.id,
provisioning_status=constants.ACTIVE)
def update_vip(self, context, loadbalancer_id, vip_address,
vip_port_id=None):
lb_update = {'vip_address': vip_address}
if vip_port_id:
lb_update['vip_port_id'] = vip_port_id
self.driver.plugin.db.update_loadbalancer(context, loadbalancer_id,
lb_update)
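# Illustrative sketch (not part of this module): a concrete driver's manager is
# expected to wrap its backend call and report the outcome via the mixin, e.g.
#
#     class LoadBalancerManager(BaseManagerMixin):
#         def create(self, context, obj):
#             try:
#                 self.driver.do_create(obj)  # hypothetical backend call
#                 self.successful_completion(context, obj, lb_create=True)
#             except Exception:
#                 self.failed_completion(context, obj)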
@six.add_metaclass(abc.ABCMeta)
class BaseRefreshMixin(object):
@abc.abstractmethod
def refresh(self, context, obj):
pass
@six.add_metaclass(abc.ABCMeta)
class BaseStatsMixin(object):
@abc.abstractmethod
def stats(self, context, obj):
pass
| 42.070652 | 79 | 0.628472 | 949 | 7,741 | 4.955743 | 0.239199 | 0.025516 | 0.03636 | 0.026791 | 0.456304 | 0.366575 | 0.277908 | 0.251967 | 0.243036 | 0.243036 | 0 | 0.004631 | 0.302674 | 7,741 | 183 | 80 | 42.300546 | 0.866617 | 0.301511 | 0 | 0.361111 | 0 | 0 | 0.099424 | 0.004031 | 0 | 0 | 0 | 0 | 0 | 1 | 0.092593 | false | 0.055556 | 0.064815 | 0 | 0.212963 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
be3e44160e188056687e999ee1b846a80b373896 | 1,819 | py | Python | build/generate_confirmed_cases_by_counties.py | jtagcat/koroonakaart | 16a6eb24a19b286589b063742b03a123315feefc | [
"CC0-1.0",
"MIT"
] | 1 | 2021-12-20T23:05:58.000Z | 2021-12-20T23:05:58.000Z | build/generate_confirmed_cases_by_counties.py | jtagcat/koroonakaart | 16a6eb24a19b286589b063742b03a123315feefc | [
"CC0-1.0",
"MIT"
] | null | null | null | build/generate_confirmed_cases_by_counties.py | jtagcat/koroonakaart | 16a6eb24a19b286589b063742b03a123315feefc | [
"CC0-1.0",
"MIT"
] | 1 | 2021-12-20T23:05:47.000Z | 2021-12-20T23:05:47.000Z | from build.chart_data_functions import get_confirmed_cases_by_county
from build.chart_data_functions import get_county_by_day
from build.constants import CONFIRMED_CASES_BY_COUNTIES_PATH
from build.constants import COUNTY_MAPPING
from build.constants import COUNTY_POPULATION
from build.constants import DATE_SETTINGS
from build.constants import TEST_RESULTS_PATH
from build.constants import TODAY_DMYHM
from build.constants import YESTERDAY_YMD
from build.utils import analyze_memory
from build.utils import analyze_time
from build.utils import logger
from build.utils import read_json_from_file
from build.utils import save_as_json
import pandas as pd
@analyze_time
@analyze_memory
def main():
# Log status
logger.info("Loading local data files")
test_results = read_json_from_file(TEST_RESULTS_PATH)
# Log status
logger.info("Calculating main statistics")
# Create date ranges for charts
case_dates = pd.date_range(start=DATE_SETTINGS["firstCaseDate"], end=YESTERDAY_YMD)
# Get data for each chart
logger.info("Calculating data for charts")
county_by_day = get_county_by_day(
test_results, case_dates, COUNTY_MAPPING, COUNTY_POPULATION
)
confirmed_cases_by_county = get_confirmed_cases_by_county(
test_results, COUNTY_MAPPING
)
del county_by_day["mapPlayback"]
del county_by_day["mapPlayback10k"]
# Create dictionary for final JSON
logger.info("Compiling final JSON")
final_json = {
"updatedOn": TODAY_DMYHM,
"dataConfirmedCasesByCounties": confirmed_cases_by_county,
"countyByDay": county_by_day,
}
# Dump JSON output
save_as_json(CONFIRMED_CASES_BY_COUNTIES_PATH, final_json)
# Log finish time
logger.info("Finished update process")
if __name__ == "__main__":
main()
| 30.316667 | 87 | 0.774601 | 248 | 1,819 | 5.33871 | 0.302419 | 0.095166 | 0.095166 | 0.126888 | 0.239426 | 0.054381 | 0.054381 | 0 | 0 | 0 | 0 | 0.001318 | 0.166025 | 1,819 | 59 | 88 | 30.830508 | 0.871457 | 0.077515 | 0 | 0 | 0 | 0 | 0.128743 | 0.016766 | 0 | 0 | 0 | 0 | 0 | 1 | 0.02439 | false | 0 | 0.365854 | 0 | 0.390244 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 |
be4201706e45a3d4dd6cd9622ea3645d54ac325f | 440 | py | Python | users/models.py | makutas/CocktailWebsite | c5192e5fc2b750a32500f5c3421ed07e89c9c7e1 | [
"MIT"
] | null | null | null | users/models.py | makutas/CocktailWebsite | c5192e5fc2b750a32500f5c3421ed07e89c9c7e1 | [
"MIT"
] | null | null | null | users/models.py | makutas/CocktailWebsite | c5192e5fc2b750a32500f5c3421ed07e89c9c7e1 | [
"MIT"
] | null | null | null | from django.db import models
from django.contrib.auth.models import User
class UserProfile(models.Model):
user = models.OneToOneField(User, on_delete=models.CASCADE)
user_description = models.CharField(max_length=200, null=True)
user_avatar = models.ImageField(null=True, blank=True)
user_uploaded_recipes = models.IntegerField() # Increment by 1 on upload
def __str__(self):
return f"{self.user.username}"
| 31.428571 | 77 | 0.747727 | 59 | 440 | 5.40678 | 0.644068 | 0.062696 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.010753 | 0.154545 | 440 | 13 | 78 | 33.846154 | 0.846774 | 0.054545 | 0 | 0 | 0 | 0 | 0.048309 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.111111 | false | 0 | 0.222222 | 0.111111 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 |
be47030ab919977e3706aa43ef448dd537100bbd | 2,702 | py | Python | torch/_prims/context.py | EikanWang/pytorch | 823ddb6e87e8111c9b5a99523503172e5bf62c49 | [
"Intel"
] | null | null | null | torch/_prims/context.py | EikanWang/pytorch | 823ddb6e87e8111c9b5a99523503172e5bf62c49 | [
"Intel"
] | 1 | 2022-01-10T18:39:28.000Z | 2022-01-10T19:15:57.000Z | torch/_prims/context.py | HaoZeke/pytorch | 4075972c2675ef34fd85efd60c9bad75ad06d386 | [
"Intel"
] | null | null | null | from typing import Callable, Sequence, Any, Dict
import functools
import torch
import torch.overrides
from torch._prims.utils import torch_function_passthrough
import torch._refs as refs
import torch._refs
import torch._refs.nn
import torch._refs.nn.functional
import torch._refs.special
import torch._prims
# TODO: automap torch operations to references
# (need to throw a good assertion if the mapping doesn't exist)
_torch_to_reference_map = {
torch.add: refs.add,
# torch.div: refs.div,
torch.mul: refs.mul,
torch.ge: refs.ge,
torch.gt: refs.gt,
torch.le: refs.le,
torch.lt: refs.lt,
}
@functools.lru_cache(None)
def torch_to_refs_map():
"""
Mapping of torch API functions to torch._refs functions.
E.g. torch_to_refs_map()[torch.add] == torch._refs.add
"""
modules = [
(torch, torch._refs),
(torch.nn, torch._refs.nn),
(torch.nn.functional, torch._refs.nn.functional),
(torch.special, torch._refs.special),
]
r = {}
for mod_torch, mod_refs in modules:
for s in mod_refs.__all__: # type: ignore[attr-defined]
r[mod_torch.__dict__.get(s)] = mod_refs.__dict__.get(s)
return r
@functools.lru_cache(None)
def all_prims():
"""
Set of all prim functions, e.g., torch._prims.add in all_prims()
"""
return {torch._prims.__dict__.get(s) for s in torch._prims.__all__}
class TorchRefsMode(torch.overrides.TorchFunctionMode):
"""
Switches the interpretation of torch.* functions and Tensor methods to
use PrimTorch refs in torch._refs. (Direct calls to _refs are unaffected.)
>>> with TorchRefsMode.push():
... torch.add(x, y) # calls torch._refs.add(x, y)
By default, this context manager will fall back on the torch.* if the
ref does not exist; set strict=True to error if this occurs.
"""
def __init__(self, strict=False):
self.strict = strict
def __torch_function__(
self,
orig_func: Callable,
types: Sequence,
args: Sequence[Any] = (),
kwargs: Dict = None,
):
if kwargs is None:
kwargs = {}
# For primitive operations, run them as is without interception
if orig_func in torch_function_passthrough or orig_func in all_prims():
return orig_func(*args, **kwargs)
mapping = torch_to_refs_map()
func = mapping.get(orig_func, None)
if func is not None:
return func(*args, **kwargs)
if self.strict:
raise RuntimeError(
f"no _refs support for {torch.overrides.resolve_name(orig_func)}"
)
return orig_func(*args, **kwargs)
| 28.442105 | 81 | 0.650259 | 370 | 2,702 | 4.521622 | 0.335135 | 0.069934 | 0.04483 | 0.025105 | 0.057382 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.245744 | 2,702 | 94 | 82 | 28.744681 | 0.820903 | 0.281643 | 0 | 0.068966 | 0 | 0 | 0.033208 | 0.02196 | 0 | 0 | 0 | 0.010638 | 0 | 1 | 0.068966 | false | 0.034483 | 0.189655 | 0 | 0.362069 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
be5dd7bfd950d236cdb2d9db1cde1c0dbae6c636 | 5,250 | py | Python | tests/functional/controllers/test_group_controller_superuser.py | roscisz/TensorHive | 4a680f47a0ee1ce366dc82ad9964e229d9749c4e | [
"Apache-2.0"
] | 129 | 2017-08-25T11:45:15.000Z | 2022-03-29T05:11:25.000Z | tests/functional/controllers/test_group_controller_superuser.py | roscisz/TensorHive | 4a680f47a0ee1ce366dc82ad9964e229d9749c4e | [
"Apache-2.0"
] | 251 | 2017-07-27T10:05:58.000Z | 2022-03-02T12:46:13.000Z | tests/functional/controllers/test_group_controller_superuser.py | roscisz/TensorHive | 4a680f47a0ee1ce366dc82ad9964e229d9749c4e | [
"Apache-2.0"
] | 20 | 2017-08-13T13:05:14.000Z | 2022-03-19T02:21:37.000Z | from tensorhive.models.Group import Group
from fixtures.controllers import API_URI as BASE_URI, HEADERS
from http import HTTPStatus
from importlib import reload
import json
import auth_patcher
ENDPOINT = BASE_URI + '/groups'
def setup_module(_):
auth_patches = auth_patcher.get_patches(superuser=True)
for auth_patch in auth_patches:
auth_patch.start()
for module in auth_patcher.CONTROLLER_MODULES:
reload(module)
for auth_patch in auth_patches:
auth_patch.stop()
# POST /groups
def test_create_group(tables, client):
group_name = 'TestGroup'
data = {'name': group_name}
resp = client.post(ENDPOINT, headers=HEADERS, data=json.dumps(data))
resp_json = json.loads(resp.data.decode('utf-8'))
assert resp.status_code == HTTPStatus.CREATED
assert resp_json['group']['id'] is not None
assert resp_json['group']['name'] == group_name
assert Group.get(int(resp_json['group']['id'])) is not None
# PUT /groups/{id}
def test_update_group(tables, client, new_group):
new_group.save()
new_group_name = new_group.name + '111'
resp = client.put(ENDPOINT + '/' + str(new_group.id), headers=HEADERS, data=json.dumps({'name': new_group_name}))
resp_json = json.loads(resp.data.decode('utf-8'))
assert resp.status_code == HTTPStatus.OK
assert resp_json['group']['name'] == new_group_name
assert Group.get(new_group.id).name == new_group_name
# PUT /groups/{id} - nonexistent id
def test_update_group_that_doesnt_exist(tables, client):
non_existent_id = '777'
resp = client.put(ENDPOINT + '/' + non_existent_id, headers=HEADERS, data=json.dumps({'name': 'test'}))
assert resp.status_code == HTTPStatus.NOT_FOUND
# DELETE /groups/{id}
def test_delete_group(tables, client, new_group):
new_group.save()
resp = client.delete(ENDPOINT + '/' + str(new_group.id), headers=HEADERS)
assert resp.status_code == HTTPStatus.OK
# Let's get all groups to verify
resp = client.get(ENDPOINT, headers=HEADERS)
resp_json = json.loads(resp.data.decode('utf-8'))
assert len(resp_json) == 0
# DELETE /groups/{id} - nonexistent id
def test_delete_group_that_doesnt_exist(tables, client):
non_existent_id = '777'
resp = client.delete(ENDPOINT + '/' + non_existent_id, headers=HEADERS)
assert resp.status_code == HTTPStatus.NOT_FOUND
# PUT /groups/{id}/users/{id}
def test_add_user_to_a_group(tables, client, new_group, new_user):
new_group.save()
new_user.save()
resp = client.put(ENDPOINT + '/{}/users/{}'.format(new_group.id, new_user.id), headers=HEADERS)
assert resp.status_code == HTTPStatus.OK
assert new_group in new_user.groups
assert new_user in new_group.users
# DELETE /groups/{id}/users/{id}
def test_remove_user_from_a_group(tables, client, new_group_with_member):
new_group_with_member.save()
user = new_group_with_member.users[0]
resp = client.delete(ENDPOINT + '/{}/users/{}'.format(new_group_with_member.id, user.id), headers=HEADERS)
assert resp.status_code == HTTPStatus.OK
assert new_group_with_member not in user.groups
assert user not in new_group_with_member.users
# PUT /groups/{id}/users/{id} - nonexistent user id
def test_add_nonexistent_user_to_a_group(tables, client, new_group):
new_group.save()
nonexistent_user_id = '777'
resp = client.put(ENDPOINT + '/{}/users/{}'.format(new_group.id, nonexistent_user_id), headers=HEADERS)
assert resp.status_code == HTTPStatus.NOT_FOUND
# PUT /groups/{id}/users/{id} - nonexistent group id
def test_add_user_to_nonexistent_group(tables, client, new_user):
new_user.save()
nonexistent_group_id = '777'
resp = client.put(ENDPOINT + '/{}/users/{}'.format(nonexistent_group_id, new_user.id), headers=HEADERS)
assert resp.status_code == HTTPStatus.NOT_FOUND
# DELETE /groups/{id}/users/{id} - nonexistent user id
def test_remove_nonexistent_user_from_a_group(tables, client, new_group):
new_group.save()
nonexistent_user_id = '777'
resp = client.delete(ENDPOINT + '/{}/users/{}'.format(new_group.id, nonexistent_user_id), headers=HEADERS)
assert resp.status_code == HTTPStatus.NOT_FOUND
# DELETE /groups/{id}/users/{id} - nonexistent group id
def test_remove_user_from_a_nonexistent_group(tables, client, new_user):
new_user.save()
nonexistent_group_id = '777'
resp = client.delete(ENDPOINT + '/{}/users/{}'.format(nonexistent_group_id, new_user.id), headers=HEADERS)
assert resp.status_code == HTTPStatus.NOT_FOUND
# PUT /groups/{id}
def test_set_group_as_a_default(tables, client, new_group):
new_group.save()
resp = client.put(ENDPOINT + '/{}'.format(new_group.id), data=json.dumps({'isDefault': True}), headers=HEADERS)
assert resp.status_code == HTTPStatus.OK
assert Group.get(new_group.id).is_default
# PUT /groups/{id}
def test_mark_default_group_as_non_default(tables, client, new_group):
new_group.is_default = True
new_group.save()
resp = client.put(ENDPOINT + '/{}'.format(new_group.id), data=json.dumps({'isDefault': False}),
headers=HEADERS)
assert resp.status_code == HTTPStatus.OK
assert Group.get(new_group.id).is_default is False
| 32.012195 | 117 | 0.717714 | 756 | 5,250 | 4.720899 | 0.124339 | 0.085178 | 0.05828 | 0.07285 | 0.774727 | 0.703839 | 0.658448 | 0.593444 | 0.534604 | 0.45615 | 0 | 0.005849 | 0.153333 | 5,250 | 163 | 118 | 32.208589 | 0.797075 | 0.086095 | 0 | 0.354839 | 0 | 0 | 0.041815 | 0 | 0 | 0 | 0 | 0 | 0.268817 | 1 | 0.150538 | false | 0 | 0.064516 | 0 | 0.215054 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
be62c8d9e725078536f0891cffbcc08c85ff6f54 | 979 | py | Python | my_general_helpers.py | arminbahl/drosophila_phototaxis_paper | e01dc95675f835926c9104b34bf6cfd7244dee2b | [
"MIT"
] | null | null | null | my_general_helpers.py | arminbahl/drosophila_phototaxis_paper | e01dc95675f835926c9104b34bf6cfd7244dee2b | [
"MIT"
] | null | null | null | my_general_helpers.py | arminbahl/drosophila_phototaxis_paper | e01dc95675f835926c9104b34bf6cfd7244dee2b | [
"MIT"
] | null | null | null | from scipy.signal import butter,filtfilt
from numba import jit
import bisect
def is_number_in_sorted_vector(sorted_vector, num):
index = bisect.bisect_left(sorted_vector, num)
return index != len(sorted_vector) and sorted_vector[index] == num
# def butter_lowpass(cutoff, fs, order=5):
# nyq = 0.5 * fs
# normal_cutoff = cutoff / nyq
# b, a = butter(order, normal_cutoff, btype='low', analog=False)
# return b, a
def butter_lowpass_filter(data, cutoff, fs, order):
nyq = 0.5 * fs # Nyquist Frequency
normal_cutoff = cutoff / nyq
# Get the filter coefficients
b, a = butter(order, normal_cutoff, btype='low', analog=False)
y = filtfilt(b, a, data)
return y
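# Usage sketch (hypothetical numbers): smooth a trace sampled at 100 Hz with a
# 2 Hz cutoff, 4th-order Butterworth:
# smoothed = butter_lowpass_filter(raw_trace, cutoff=2.0, fs=100.0, order=4)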
@jit
def first_order_lowpass_filter(signal_in, signal_out, tau, dt):
alpha_lowpass = dt / (tau + dt)
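# The coefficient above follows from discretizing the first-order ODE
# tau * dy/dt + y = x with a backward-Euler step of size dt
# (i.e. exponential smoothing with alpha = dt / (tau + dt)).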
signal_out[0] = signal_in[0]
for i in range(1, len(signal_in)):
signal_out[i] = alpha_lowpass*signal_in[i] + (1-alpha_lowpass)*signal_out[i-1]
| 27.971429 | 86 | 0.684372 | 151 | 979 | 4.238411 | 0.344371 | 0.09375 | 0.046875 | 0.021875 | 0.1375 | 0.1375 | 0.1375 | 0.1375 | 0.1375 | 0.1375 | 0 | 0.012771 | 0.200204 | 979 | 34 | 87 | 28.794118 | 0.804598 | 0.225741 | 0 | 0 | 0 | 0 | 0.004 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.166667 | false | 0.222222 | 0.166667 | 0 | 0.444444 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
be6d27c87017d3ff2b758a9a1954cf3e265b550c | 554 | py | Python | iocms/iocms/urls.py | Gaurav-Zaiswal/iw-acad-iocms-be | a133f120eed93433925608f08c5145d2d0d1db39 | [
"MIT"
] | null | null | null | iocms/iocms/urls.py | Gaurav-Zaiswal/iw-acad-iocms-be | a133f120eed93433925608f08c5145d2d0d1db39 | [
"MIT"
] | null | null | null | iocms/iocms/urls.py | Gaurav-Zaiswal/iw-acad-iocms-be | a133f120eed93433925608f08c5145d2d0d1db39 | [
"MIT"
] | 2 | 2021-09-16T04:44:59.000Z | 2021-09-16T05:45:31.000Z | from django.contrib import admin
from django.urls import include, path
from django.conf import settings
from django.conf.urls.static import static
urlpatterns = [
path('admin/', admin.site.urls),
path('class/', include('classroom.urls')),
path('assignment-api/', include('assignment.urls', namespace='assignment')),
path('feed/', include('feed.urls', namespace='feed')),
path('users/', include('users.urls'), name="user-register")
]
if settings.DEBUG:
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| 36.933333 | 80 | 0.720217 | 70 | 554 | 5.657143 | 0.414286 | 0.10101 | 0.070707 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.117329 | 554 | 14 | 81 | 39.571429 | 0.809816 | 0 | 0 | 0 | 0 | 0 | 0.203971 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.307692 | 0 | 0.307692 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 |
be6f16523ef2463524119c42f75567ed0f66d560 | 1,905 | py | Python | src/security/__init__.py | slippers/blogging_security_flatpage | 53644978b798c66369416b1e5625cc04d89c0a87 | [
"MIT"
] | 1 | 2018-12-31T05:30:13.000Z | 2018-12-31T05:30:13.000Z | src/security/__init__.py | slippers/blogging_security_flatpage | 53644978b798c66369416b1e5625cc04d89c0a87 | [
"MIT"
] | null | null | null | src/security/__init__.py | slippers/blogging_security_flatpage | 53644978b798c66369416b1e5625cc04d89c0a87 | [
"MIT"
] | null | null | null | from src import app, db
from .models import User, Role, RoleUsers
from .security_admin import UserAdmin, RoleAdmin
from flask_security import Security, SQLAlchemyUserDatastore, \
login_required, roles_accepted
from flask_security.utils import encrypt_password
def config_security_admin(admin):
admin.add_view(UserAdmin(db.session))
admin.add_view(RoleAdmin(db.session))
def configure_security():
# Create the Roles "admin" and "end-user" -- unless they already exist
user_datastore.find_or_create_role(name='admin', description='Administrator')
user_datastore.find_or_create_role(name='end-user', description='End user')
user_datastore.find_or_create_role(name='blogger', description='Blogger')
# Create two Users for testing purposes -- unless they already exists.
# In each case, use Flask-Security utility function to encrypt the password.
pw = encrypt_password('password')
# pw = 'password'
if not user_datastore.get_user('someone@example.com'):
user_datastore.create_user(email='someone@example.com', password=pw)
if not user_datastore.get_user('admin@example.com'):
user_datastore.create_user(email='admin@example.com', password=pw)
# Give one User the "end-user" role, while the other has the "admin" role.
# (This will have no effect if the Users already have these Roles.)
# Again, commit any database changes.
user_datastore.add_role_to_user('someone@example.com', 'end-user')
user_datastore.add_role_to_user('someone@example.com', 'blogger')
user_datastore.add_role_to_user('admin@example.com', 'admin')
user_datastore.add_role_to_user('admin@example.com', 'blogger')
db.session.commit()
# Setup Flask-Security
user_datastore = SQLAlchemyUserDatastore(db, User, Role)
security = Security(app, user_datastore)
# Create any database tables that don't exist yet.
db.create_all()
| 40.531915 | 83 | 0.752756 | 265 | 1,905 | 5.218868 | 0.328302 | 0.122198 | 0.049168 | 0.057845 | 0.284165 | 0.284165 | 0.248012 | 0.121475 | 0.121475 | 0 | 0 | 0 | 0.143307 | 1,905 | 46 | 84 | 41.413043 | 0.847426 | 0.251969 | 0 | 0 | 0 | 0 | 0.160537 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.076923 | false | 0.153846 | 0.192308 | 0 | 0.269231 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
be876cf3ef298b948a6559bdc7b9b04da2062463 | 589 | py | Python | 0201-0300/0251-Flatten 2D Vector/0251-Flatten 2D Vector.py | jiadaizhao/LeetCode | 4ddea0a532fe7c5d053ffbd6870174ec99fc2d60 | [
"MIT"
] | 49 | 2018-05-05T02:53:10.000Z | 2022-03-30T12:08:09.000Z | 0201-0300/0251-Flatten 2D Vector/0251-Flatten 2D Vector.py | jolly-fellow/LeetCode | ab20b3ec137ed05fad1edda1c30db04ab355486f | [
"MIT"
] | 11 | 2017-12-15T22:31:44.000Z | 2020-10-02T12:42:49.000Z | 0201-0300/0251-Flatten 2D Vector/0251-Flatten 2D Vector.py | jolly-fellow/LeetCode | ab20b3ec137ed05fad1edda1c30db04ab355486f | [
"MIT"
] | 28 | 2017-12-05T10:56:51.000Z | 2022-01-26T18:18:27.000Z | class Vector2D:
def __init__(self, v: List[List[int]]):
def getIt():
for row in v:
for val in row:
yield val
self.it = iter(getIt())
self.val = next(self.it, None)
def next(self) -> int:
result = self.val
self.val = next(self.it, None)
return result
def hasNext(self) -> bool:
return self.val is not None
# Your Vector2D object will be instantiated and called as such:
# obj = Vector2D(v)
# param_1 = obj.next()
# param_2 = obj.hasNext()
| 22.653846 | 63 | 0.519525 | 77 | 589 | 3.896104 | 0.480519 | 0.093333 | 0.073333 | 0.1 | 0.14 | 0.14 | 0 | 0 | 0 | 0 | 0 | 0.013587 | 0.375212 | 589 | 25 | 64 | 23.56 | 0.80163 | 0.210526 | 0 | 0.142857 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.285714 | false | 0 | 0 | 0.071429 | 0.5 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
be917ccdfeb7754dd0eabc0327954755752723d8 | 425 | py | Python | Estrutura_Decisao/who.py | M3nin0/supreme-broccoli | 186c1ea3b839ba3139f9301660dec8fbd27a162e | [
"Apache-2.0"
] | null | null | null | Estrutura_Decisao/who.py | M3nin0/supreme-broccoli | 186c1ea3b839ba3139f9301660dec8fbd27a162e | [
"Apache-2.0"
] | null | null | null | Estrutura_Decisao/who.py | M3nin0/supreme-broccoli | 186c1ea3b839ba3139f9301660dec8fbd27a162e | [
"Apache-2.0"
] | null | null | null | prod1 = float(input("Insira o valor do produto A: "))
prod2 = float(input("Insira o valor do produto B: "))
prod3 = float(input("Insira o valor do produto C: "))
if prod1 < prod2 and prod1 < prod3:
print ("Escolha o produto A é o mais barato")
elif prod2 < prod1 and prod2 < prod3:
print ("Escolha o produto B é o mais barato")
elif prod3 < prod1 and prod3 < prod2:
print ("Escolha o produto C é o mais barato")
| 38.636364 | 53 | 0.68 | 72 | 425 | 4.013889 | 0.291667 | 0.103806 | 0.16609 | 0.176471 | 0.605536 | 0.321799 | 0.321799 | 0 | 0 | 0 | 0 | 0.04491 | 0.214118 | 425 | 10 | 54 | 42.5 | 0.820359 | 0 | 0 | 0 | 0 | 0 | 0.451765 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.333333 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
be99d62141111a8ad89510bea1e2a527e33cf08b | 478 | py | Python | autodiff/debug_vjp.py | Jakob-Unfried/msc-legacy | 2c41f3f714936c25dd534bd66da802c26176fcfa | [
"MIT"
] | 1 | 2021-03-22T14:16:43.000Z | 2021-03-22T14:16:43.000Z | autodiff/debug_vjp.py | Jakob-Unfried/msc-legacy | 2c41f3f714936c25dd534bd66da802c26176fcfa | [
"MIT"
] | null | null | null | autodiff/debug_vjp.py | Jakob-Unfried/msc-legacy | 2c41f3f714936c25dd534bd66da802c26176fcfa | [
"MIT"
] | null | null | null | import pdb
import warnings
from jax import custom_vjp
@custom_vjp
def debug_identity(x):
"""
acts as identity, but inserts a pdb trace on the backwards pass
"""
warnings.warn('Using a module intended for debugging')
return x
def _debug_fwd(x):
warnings.warn('Using a module intended for debugging')
return x, x
# noinspection PyUnusedLocal
def _debug_bwd(x, g):
pdb.set_trace()
# custom_vjp backward functions must return a tuple of cotangents,
# one per primal argument.
return (g,)
debug_identity.defvjp(_debug_fwd, _debug_bwd)
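# Usage sketch (assumes jax.grad is applied downstream):
# jax.grad(lambda x: debug_identity(x) ** 2)(3.0) drops into pdb during the backward pass.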
| 17.071429 | 67 | 0.713389 | 71 | 478 | 4.619718 | 0.492958 | 0.073171 | 0.103659 | 0.109756 | 0.310976 | 0.310976 | 0.310976 | 0.310976 | 0.310976 | 0.310976 | 0 | 0 | 0.209205 | 478 | 27 | 68 | 17.703704 | 0.867725 | 0.190377 | 0 | 0.142857 | 0 | 0 | 0.199461 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.214286 | false | 0 | 0.214286 | 0 | 0.642857 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
bea3fce840a92d3dac26a2f605494f57192e6efe | 1,217 | py | Python | pyscf/nao/test/test_0037_aos.py | fdmalone/pyscf | 021b17ac721e292b277d2b740e2ff8ab38bb6a4a | [
"Apache-2.0"
] | 1 | 2019-07-01T12:39:45.000Z | 2019-07-01T12:39:45.000Z | pyscf/nao/test/test_0037_aos.py | fdmalone/pyscf | 021b17ac721e292b277d2b740e2ff8ab38bb6a4a | [
"Apache-2.0"
] | null | null | null | pyscf/nao/test/test_0037_aos.py | fdmalone/pyscf | 021b17ac721e292b277d2b740e2ff8ab38bb6a4a | [
"Apache-2.0"
] | null | null | null | # Copyright 2014-2018 The PySCF Developers. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function, division
import os,unittest,numpy as np
class KnowValues(unittest.TestCase):
def test_aos_libnao(self):
""" Computing of the atomic orbitals """
from pyscf.nao import system_vars_c
from pyscf.tools.cubegen import Cube
sv = system_vars_c().init_siesta_xml(label='water', cd=os.path.dirname(os.path.abspath(__file__)))
cc = Cube(sv, nx=20, ny=20, nz=20)
aos = sv.comp_aos_den(cc.get_coords())
self.assertEqual(aos.shape[0], cc.nx*cc.ny*cc.nz)
self.assertEqual(aos.shape[1], sv.norbs)
if __name__ == "__main__": unittest.main()
| 38.03125 | 102 | 0.739523 | 191 | 1,217 | 4.565445 | 0.633508 | 0.068807 | 0.029817 | 0.036697 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.019569 | 0.16023 | 1,217 | 31 | 103 | 39.258065 | 0.833659 | 0.508628 | 0 | 0 | 0 | 0 | 0.022453 | 0 | 0 | 0 | 0 | 0 | 0.166667 | 1 | 0.083333 | false | 0 | 0.333333 | 0 | 0.5 | 0.083333 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 |
bea8aa6132f2274610cc25a57ec0c74c8765342d | 371 | py | Python | students/K33402/Komarov_Georgy/LAB2/elevennote/src/api/urls.py | aglaya-pill/ITMO_ICT_WebDevelopment_2021-2022 | a63691317a72fb9b29ae537bc3d7766661458c22 | [
"MIT"
] | null | null | null | students/K33402/Komarov_Georgy/LAB2/elevennote/src/api/urls.py | aglaya-pill/ITMO_ICT_WebDevelopment_2021-2022 | a63691317a72fb9b29ae537bc3d7766661458c22 | [
"MIT"
] | null | null | null | students/K33402/Komarov_Georgy/LAB2/elevennote/src/api/urls.py | aglaya-pill/ITMO_ICT_WebDevelopment_2021-2022 | a63691317a72fb9b29ae537bc3d7766661458c22 | [
"MIT"
] | null | null | null | from django.urls import path, include
from rest_framework_jwt.views import obtain_jwt_token
from rest_framework.routers import DefaultRouter
from .views import NoteViewSet
app_name = 'api'
router = DefaultRouter(trailing_slash=False)
router.register('notes', NoteViewSet)
urlpatterns = [
path('jwt-auth/', obtain_jwt_token),
path('', include(router.urls)),
]
| 23.1875 | 53 | 0.77628 | 48 | 371 | 5.8125 | 0.541667 | 0.078853 | 0.121864 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.121294 | 371 | 15 | 54 | 24.733333 | 0.855828 | 0 | 0 | 0 | 0 | 0 | 0.045822 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.363636 | 0 | 0.363636 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 |
beb313eb5f64fc657c1686ad77dc2225b87a4889 | 570 | py | Python | viewer_examples/plugins/median_filter.py | atemysemicolon/scikit-image | a48cf5822f9539c6602b9327c18253aed14fa692 | [
"BSD-3-Clause"
] | null | null | null | viewer_examples/plugins/median_filter.py | atemysemicolon/scikit-image | a48cf5822f9539c6602b9327c18253aed14fa692 | [
"BSD-3-Clause"
] | null | null | null | viewer_examples/plugins/median_filter.py | atemysemicolon/scikit-image | a48cf5822f9539c6602b9327c18253aed14fa692 | [
"BSD-3-Clause"
] | null | null | null | from skimage import data
from skimage.filter.rank import median
from skimage.morphology import disk
from skimage.viewer import ImageViewer
from skimage.viewer.widgets import Slider, OKCancelButtons, SaveButtons
from skimage.viewer.plugins.base import Plugin
def median_filter(image, radius):
return median(image, selem=disk(radius))
image = data.coins()
viewer = ImageViewer(image)
plugin = Plugin(image_filter=median_filter)
plugin += Slider('radius', 2, 10, value_type='int')
plugin += SaveButtons()
plugin += OKCancelButtons()
viewer += plugin
viewer.show()
| 25.909091 | 71 | 0.784211 | 74 | 570 | 5.986486 | 0.405405 | 0.148984 | 0.115124 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.005952 | 0.115789 | 570 | 21 | 72 | 27.142857 | 0.873016 | 0 | 0 | 0 | 0 | 0 | 0.015789 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.0625 | false | 0 | 0.375 | 0.0625 | 0.5 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 |
beb680071d94ed8dd93dc11b2e313714df1f9b83 | 1,727 | py | Python | dingtalk/message/conversation.py | kangour/dingtalk-python | b37b9dac3ca3ff9d727308fb120a8fd05e11eaa5 | [
"Apache-2.0"
] | 88 | 2017-12-28T05:23:15.000Z | 2021-12-20T13:44:18.000Z | dingtalk/message/conversation.py | niulinlnc/dingtalk-python | c4209658f88344e8f0890137ed7c887c8b740a6c | [
"Apache-2.0"
] | 8 | 2018-04-28T05:41:49.000Z | 2021-06-01T21:51:11.000Z | dingtalk/message/conversation.py | niulinlnc/dingtalk-python | c4209658f88344e8f0890137ed7c887c8b740a6c | [
"Apache-2.0"
] | 43 | 2017-12-07T09:43:48.000Z | 2021-12-03T01:19:52.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 2017/11/30 3:02 PM
# @Author : Matrix
# @Github : https://github.com/blackmatrix7/
# @Blog : http://www.cnblogs.com/blackmatrix/
# @File : messages.py
# @Software: PyCharm
import json
from ..foundation import *
from json import JSONDecodeError
__author__ = 'blackmatrix'
__all__ = ['async_send_msg', 'get_msg_send_result', 'get_msg_send_progress']
@dingtalk_resp
def async_send_msg(access_token, msgtype, agent_id, msgcontent, userid_list=None, dept_id_list=None, to_all_user=False):
try:
msgcontent = json.dumps(msgcontent)
except (TypeError, JSONDecodeError):
    # If msgcontent cannot be serialized to JSON, pass it to DingTalk
    # as-is and let DingTalk handle it.
    pass
if not isinstance(userid_list, str):
userid_list = ','.join(userid_list)
args = locals().copy()
payload = {}
# Assemble the request parameters.
for k, v in args.items():
if k in ('msgtype', 'agent_id', 'msgcontent', 'userid_list', 'dept_id_list'):
if v is not None:
payload.update({k: v})
resp = call_dingtalk_webapi(access_token, 'dingtalk.corp.message.corpconversation.asyncsend', **payload)
return resp
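# Usage sketch (hypothetical values; msgcontent must follow the layout DingTalk
# expects for the given msgtype):
# async_send_msg(access_token, 'text', agent_id, msgcontent, userid_list=['user1', 'user2'])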
@dingtalk_resp
def get_msg_send_result(access_token, agent_id, task_id):
url = get_request_url(access_token, 'dingtalk.corp.message.corpconversation.getsendresult')
payload = {'task_id': task_id, 'agent_id': agent_id}
return requests.get(url, params=payload)
@dingtalk_resp
def get_msg_send_progress(access_token, agent_id, task_id):
url = get_request_url(access_token, 'dingtalk.corp.message.corpconversation.getsendprogress')
payload = {'task_id': task_id, 'agent_id': agent_id}
return requests.get(url, params=payload)
if __name__ == '__main__':
pass
| 31.981481 | 120 | 0.70469 | 227 | 1,727 | 5.044053 | 0.427313 | 0.048908 | 0.034935 | 0.060262 | 0.408734 | 0.408734 | 0.265502 | 0.265502 | 0.265502 | 0.265502 | 0 | 0.009059 | 0.169079 | 1,727 | 53 | 121 | 32.584906 | 0.78885 | 0.149392 | 0 | 0.272727 | 0 | 0 | 0.209733 | 0.119945 | 0 | 0 | 0 | 0 | 0 | 1 | 0.090909 | false | 0.060606 | 0.090909 | 0 | 0.272727 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
bebe436d87bb3f3a76cbb71e91dc6e70bb5b2e46 | 475 | py | Python | test/test_hex_line.py | bicobus/Hexy | e75d58e66546c278fb648af85e3f9dae53127826 | [
"MIT"
] | 72 | 2017-08-30T03:02:51.000Z | 2022-03-11T23:15:15.000Z | test/test_hex_line.py | bicobus/Hexy | e75d58e66546c278fb648af85e3f9dae53127826 | [
"MIT"
] | 10 | 2019-03-14T08:04:33.000Z | 2021-08-10T09:36:45.000Z | test/test_hex_line.py | bicobus/Hexy | e75d58e66546c278fb648af85e3f9dae53127826 | [
"MIT"
] | 15 | 2017-11-08T05:37:06.000Z | 2021-08-05T19:16:48.000Z | import numpy as np
import hexy as hx
def test_get_hex_line():
expected = [
[-3, 3, 0],
[-2, 2, 0],
[-1, 2, -1],
[0, 2, -2],
[1, 1, -2],
]
start = np.array([-3, 3, 0])
end = np.array([1, 1, -2])
print(hx.get_hex_line(start, end))
print(expected)
assert np.array_equal(
    hx.get_hex_line(start, end),
    expected)
if __name__ == "__main__":
test_get_hex_line()
| 21.590909 | 38 | 0.471579 | 68 | 475 | 3.014706 | 0.367647 | 0.117073 | 0.195122 | 0.136585 | 0.195122 | 0.195122 | 0 | 0 | 0 | 0 | 0 | 0.068627 | 0.355789 | 475 | 21 | 39 | 22.619048 | 0.601307 | 0 | 0 | 0 | 0 | 0 | 0.016842 | 0 | 0 | 0 | 0 | 0 | 0.052632 | 1 | 0.052632 | false | 0 | 0.105263 | 0 | 0.157895 | 0.105263 | 0 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
bebe5670df71295bc98ec96c4bde4a3c31d4fb66 | 6,747 | py | Python | wofry/propagator/propagators2D/integral.py | PaNOSC-ViNYL/wofry | 779b5a738ee7738e959a58aafe01e7e49b03894a | [
"MIT"
] | null | null | null | wofry/propagator/propagators2D/integral.py | PaNOSC-ViNYL/wofry | 779b5a738ee7738e959a58aafe01e7e49b03894a | [
"MIT"
] | 1 | 2021-02-16T12:12:10.000Z | 2021-02-16T12:12:10.000Z | wofryimpl/propagator/propagators2D/integral.py | oasys-kit/wofryimpl | f300b714b038110987783c40d2c3af8dca7e54eb | [
"MIT"
] | null | null | null | # propagate_2D_integral: Simplification of the Kirchhoff-Fresnel integral. TODO: Very slow and give some problems
import numpy
from wofry.propagator.wavefront2D.generic_wavefront import GenericWavefront2D
from wofry.propagator.propagator import Propagator2D
# TODO: check resulting amplitude normalization (fft and srw likely agree, convolution gives too high amplitudes, so needs normalization)
class Integral2D(Propagator2D):
HANDLER_NAME = "INTEGRAL_2D"
def get_handler_name(self):
return self.HANDLER_NAME
def do_specific_progation_after(self, wavefront, propagation_distance, parameters, element_index=None):
return self.do_specific_progation(wavefront, propagation_distance, parameters, element_index=element_index)
def do_specific_progation_before(self, wavefront, propagation_distance, parameters, element_index=None):
return self.do_specific_progation( wavefront, propagation_distance, parameters, element_index=element_index)
"""
2D Fresnel-Kirchhoff propagator via simplified integral
NOTE: this propagator is experimental and much less performant than the ones using Fourier Optics
Therefore, its use is not recommended.
:param wavefront:
:param propagation_distance: propagation distance
:param shuffle_interval: it is known that this method replicates the central diffraction spot
The distance of the replica is proportional to 1/pixelsize
To avoid that, it is possible to change a bit (randomly) the coordinates
of the wavefront. shuffle_interval controls this shift: 0=No shift. A typical
value can be 1e5.
The result shows a diffraction pattern without replica but with much noise.
:param calculate_grid_only: if set, it calculates only the horizontal and vertical profiles, but returns the
full image with the other pixels to zero. This is useful when calculating large arrays,
so it is set as the default.
:return: a new 2D wavefront object with propagated wavefront
"""
def do_specific_progation(self, wavefront, propagation_distance, parameters, element_index=None):
shuffle_interval = self.get_additional_parameter("shuffle_interval",False,parameters,element_index=element_index)
calculate_grid_only = self.get_additional_parameter("calculate_grid_only",True,parameters,element_index=element_index)
return self.propagate_wavefront(wavefront,propagation_distance,shuffle_interval=shuffle_interval,
calculate_grid_only=calculate_grid_only)
@classmethod
def propagate_wavefront(cls,wavefront,propagation_distance,shuffle_interval=False,calculate_grid_only=True):
#
# Fresnel-Kirchhoff integral (neglecting inclination factor)
#
if not calculate_grid_only:
#
# calculation over the whole detector area
#
p_x = wavefront.get_coordinate_x()
p_y = wavefront.get_coordinate_y()
wavelength = wavefront.get_wavelength()
amplitude = wavefront.get_complex_amplitude()
det_x = p_x.copy()
det_y = p_y.copy()
p_X = wavefront.get_mesh_x()
p_Y = wavefront.get_mesh_y()
det_X = p_X
det_Y = p_Y
amplitude_propagated = numpy.zeros_like(amplitude,dtype='complex')
wavenumber = 2 * numpy.pi / wavelength
for i in range(det_x.size):
for j in range(det_y.size):
if not shuffle_interval:
rd_x = 0.0
rd_y = 0.0
else:
rd_x = (numpy.random.rand(p_x.size,p_y.size)-0.5)*shuffle_interval
rd_y = (numpy.random.rand(p_x.size,p_y.size)-0.5)*shuffle_interval
r = numpy.sqrt( numpy.power(p_X + rd_x - det_X[i,j],2) +
numpy.power(p_Y + rd_y - det_Y[i,j],2) +
numpy.power(propagation_distance,2) )
amplitude_propagated[i,j] = (amplitude / r * numpy.exp(1.j * wavenumber * r)).sum()
output_wavefront = GenericWavefront2D.initialize_wavefront_from_arrays(det_x,det_y,amplitude_propagated)
else:
x = wavefront.get_coordinate_x()
y = wavefront.get_coordinate_y()
X = wavefront.get_mesh_x()
Y = wavefront.get_mesh_y()
wavenumber = 2 * numpy.pi / wavefront.get_wavelength()
amplitude = wavefront.get_complex_amplitude()
used_indices = wavefront.get_mask_grid(width_in_pixels=(1,1),number_of_lines=(1,1))
indices_x = wavefront.get_mesh_indices_x()
indices_y = wavefront.get_mesh_indices_y()
indices_x_flatten = indices_x[numpy.where(used_indices == 1)].flatten()
indices_y_flatten = indices_y[numpy.where(used_indices == 1)].flatten()
X_flatten = X[numpy.where(used_indices == 1)].flatten()
Y_flatten = Y[numpy.where(used_indices == 1)].flatten()
complex_amplitude_propagated = amplitude*0
print("propagate_2D_integral: Calculating %d points from a total of %d x %d = %d"%(
X_flatten.size,amplitude.shape[0],amplitude.shape[1],amplitude.shape[0]*amplitude.shape[1]))
for i in range(X_flatten.size):
r = numpy.sqrt( numpy.power(wavefront.get_mesh_x() - X_flatten[i],2) +
numpy.power(wavefront.get_mesh_y() - Y_flatten[i],2) +
numpy.power(propagation_distance,2) )
complex_amplitude_propagated[int(indices_x_flatten[i]),int(indices_y_flatten[i])] = (amplitude / r * numpy.exp(1.j * wavenumber * r)).sum()
output_wavefront = GenericWavefront2D.initialize_wavefront_from_arrays(x_array=x,
y_array=y,
z_array=complex_amplitude_propagated,
wavelength=wavefront.get_wavelength())
# added srio@esrf.eu 2018-03-23 to conserve energy - TODO: review method!
output_wavefront.rescale_amplitude( numpy.sqrt(wavefront.get_intensity().sum() /
output_wavefront.get_intensity().sum()))
return output_wavefront
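# A minimal usage sketch (not part of this module; the wavefront constructor
# arguments below are illustrative and may differ between wofry versions, and
# this propagator is slow, so keep the grid small):
#
#     wf = GenericWavefront2D.initialize_wavefront_from_range(
#         x_min=-1e-3, x_max=1e-3, y_min=-1e-3, y_max=1e-3,
#         number_of_points=(64, 64))
#     propagated = Integral2D.propagate_wavefront(
#         wf, propagation_distance=1.0, calculate_grid_only=True)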
| 49.977778 | 156 | 0.621165 | 784 | 6,747 | 5.100765 | 0.276786 | 0.060015 | 0.032008 | 0.047512 | 0.35859 | 0.249062 | 0.218555 | 0.188547 | 0.144536 | 0.144536 | 0 | 0.011702 | 0.303394 | 6,747 | 134 | 157 | 50.350746 | 0.839149 | 0.062102 | 0 | 0.109589 | 0 | 0 | 0.024619 | 0.004299 | 0 | 0 | 0 | 0.014925 | 0 | 1 | 0.068493 | false | 0 | 0.041096 | 0.041096 | 0.205479 | 0.013699 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
bebf8ceeebe9e29c2c913232279dc6462e901f90 | 334 | py | Python | Desafio051.py | GabrielSanchesRosa/Python | 3a129e27e076b2a91af03d68ede50b9c45c50217 | [
"MIT"
] | null | null | null | Desafio051.py | GabrielSanchesRosa/Python | 3a129e27e076b2a91af03d68ede50b9c45c50217 | [
"MIT"
] | null | null | null | Desafio051.py | GabrielSanchesRosa/Python | 3a129e27e076b2a91af03d68ede50b9c45c50217 | [
"MIT"
] | null | null | null | # Develop a program that reads the first term and the common difference of an arithmetic progression (AP). At the end, show the first 10 terms of that progression.
primeiro = int(input("Primeiro Termo: "))
razao = int(input("Razão: "))
decimo = primeiro + (10 - 1) * razao
for c in range(primeiro, decimo + razao, razao):
    print(f"{c}", end=" -> ")
print("Acabou")
| 33.4 | 129 | 0.679641 | 51 | 334 | 4.45098 | 0.705882 | 0.114537 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.018182 | 0.176647 | 334 | 9 | 130 | 37.111111 | 0.807273 | 0.38024 | 0 | 0 | 0 | 0 | 0.17561 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.333333 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
bec6becd26fa525cff31dffaad9d3ab5e8f46f15 | 11,873 | py | Python | lib/fbuild/builders/__init__.py | felix-lang/fbuild | 9595fbfd6d3ceece31fda2f96c35d4a241f0129b | [
"PSF-2.0",
"BSD-2-Clause"
] | 40 | 2015-02-07T00:44:12.000Z | 2021-04-02T13:41:08.000Z | lib/fbuild/builders/__init__.py | felix-lang/fbuild | 9595fbfd6d3ceece31fda2f96c35d4a241f0129b | [
"PSF-2.0",
"BSD-2-Clause"
] | 30 | 2015-02-06T17:45:15.000Z | 2019-01-10T16:34:29.000Z | lib/fbuild/builders/__init__.py | felix-lang/fbuild | 9595fbfd6d3ceece31fda2f96c35d4a241f0129b | [
"PSF-2.0",
"BSD-2-Clause"
] | 3 | 2015-09-03T06:38:02.000Z | 2019-10-24T14:26:57.000Z | import abc
import contextlib
import os
import sys
from functools import partial
from itertools import chain
import fbuild
import fbuild.db
import fbuild.path
import fbuild.temp
from . import platform
# ------------------------------------------------------------------------------
class MissingProgram(fbuild.ConfigFailed):
    def __init__(self, programs=None):
        self.programs = programs

    def __str__(self):
        if self.programs is None:
            return 'cannot find program'
        else:
            return 'cannot find any of the programs %s' % \
                ' '.join(repr(str(p)) for p in self.programs)
# ------------------------------------------------------------------------------
@fbuild.db.caches
def find_program(ctx, names, paths=None, *, quieter=0):
    """L{find_program} is a test that searches the paths for one of the
    programs in I{names}. If one is found, it is returned. If not, the next
    name in the list is searched for."""
    if paths is None:
        paths = os.environ['PATH'].split(os.pathsep)

    # If we're running on windows, we need to append '.exe' to the filenames
    # that we're searching for.
    if sys.platform == 'win32':
        new_names = []
        for name in names:
            if \
                    not name.endswith('.exe') or \
                    not name.endswith('.cmd') or \
                    not name.endswith('.bat'):
                new_names.append(name + '.exe')
                new_names.append(name + '.cmd')
                new_names.append(name + '.bat')
            new_names.append(name)
        names = new_names

    for name in names:
        filename = fbuild.path.Path(name)
        ctx.logger.check('looking for ' + filename.name, verbose=quieter)

        if filename.exists() and filename.isfile():
            ctx.logger.passed('ok %s' % filename, verbose=quieter)
            return fbuild.path.Path(name)
        else:
            for path in paths:
                filename = fbuild.path.Path(path, name)
                if filename.exists() and filename.isfile():
                    ctx.logger.passed('ok %s' % filename, verbose=quieter)
                    return fbuild.path.Path(filename)

        ctx.logger.failed(verbose=quieter)

    raise MissingProgram(names)
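# A minimal usage sketch (assuming `ctx` is an fbuild context object with a
# logger, as the function above expects; the program names are illustrative):
#
#     gcc = find_program(ctx, ['gcc', 'cc'])
#     make = find_program(ctx, ['gmake', 'make'], quieter=1)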
# ------------------------------------------------------------------------------
def check_version(ctx, builder, version_function, *,
        requires_version=None,
        requires_at_least_version=None,
        requires_at_most_version=None):
    """Helper function to simplify checking the version of a builder."""
    if any(v is not None for v in (
            requires_version,
            requires_at_least_version,
            requires_at_most_version)):
        ctx.logger.check('checking %s version' % builder)

        version_str = version_function()

        # Convert the version into a tuple
        version = []
        for i in version_str.split('.'):
            try:
                version.append(int(i))
            except ValueError:
                # The subversion isn't a number, so just convert it to a
                # string.
                version.append(i)
        version = tuple(version)

        if requires_version is not None and requires_version != version:
            msg = 'version %s required; found %s' % (
                '.'.join(str(i) for i in requires_version), version_str)

            ctx.logger.failed(msg)
            raise fbuild.ConfigFailed(msg)

        if requires_at_least_version is not None and \
                requires_at_least_version > version:
            msg = 'at least version %s required; found %s' % (
                '.'.join(str(i) for i in requires_at_least_version),
                version_str)

            ctx.logger.failed(msg)
            raise fbuild.ConfigFailed(msg)

        if requires_at_most_version is not None and \
                requires_at_most_version < version:
            msg = 'at most version %s required; found %s' % (
                '.'.join(str(i) for i in requires_at_most_version),
                version_str)

            ctx.logger.failed(msg)
            raise fbuild.ConfigFailed(msg)

        ctx.logger.passed(version_str)
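# A minimal usage sketch (hypothetical builder object; `version_function`
# just has to return a dotted version string):
#
#     check_version(ctx, builder, lambda: '9.4.0',
#                   requires_at_least_version=(4, 8))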
# ------------------------------------------------------------------------------
class AbstractCompiler(fbuild.db.PersistentObject):
    def __init__(self, *args, src_suffix, **kwargs):
        super().__init__(*args, **kwargs)

        self.src_suffix = src_suffix

    @fbuild.db.cachemethod
    def compile(self, src:fbuild.db.SRC, *args, **kwargs) -> fbuild.db.DST:
        return self.uncached_compile(src, *args, **kwargs)

    @abc.abstractmethod
    def uncached_compile(self, src, *args, **kwargs):
        pass

    @fbuild.db.cachemethod
    @platform.auto_platform_options()
    def build_objects(self, srcs:fbuild.db.SRCS, *args, **kwargs) -> \
            fbuild.db.DSTS:
        """Compile all of the passed in L{srcs} in parallel."""
        # When an object has extra external dependencies, such as .c files
        # depending on .h changes, or depending on library changes, we need to
        # add the dependencies in build_objects. Unfortunately, the db doesn't
        # know about these new files and so it can't tell when a function
        # really needs to be rerun. So, we'll just not cache this function.
        # We need to add extra dependencies to our call.
        objs = []
        src_deps = []
        dst_deps = []
        for o, s, d in self.ctx.scheduler.map(
                partial(self.compile.call, *args, **kwargs),
                srcs):
            objs.append(o)
            src_deps.extend(s)
            dst_deps.extend(d)

        self.ctx.db.add_external_dependencies_to_call(
            srcs=src_deps,
            dsts=dst_deps)

        return objs

    # --------------------------------------------------------------------------

    def tempfile(self, code):
        return fbuild.temp.tempfile(code, self.src_suffix)

    @contextlib.contextmanager
    def tempfile_compile(self, code='', *, quieter=1, **kwargs):
        with self.tempfile(code) as src:
            yield self.uncached_compile(src, quieter=quieter, **kwargs)

    @platform.auto_platform_options()
    def try_compile(self, *args, **kwargs):
        try:
            with self.tempfile_compile(*args, **kwargs):
                return True
        except fbuild.ExecutionError:
            return False

    @platform.auto_platform_options()
    def check_compile(self, code, msg, *args, **kwargs):
        self.ctx.logger.check(msg)
        if self.try_compile(code, *args, **kwargs):
            self.ctx.logger.passed()
            return True
        else:
            self.ctx.logger.failed()
            return False
# ------------------------------------------------------------------------------
class AbstractLibLinker(AbstractCompiler):
    @fbuild.db.cachemethod
    @platform.auto_platform_options()
    def link_lib(self, dst, srcs:fbuild.db.SRCS, *args,
            libs:fbuild.db.SRCS=(),
            **kwargs) -> fbuild.db.DST:
        """Link compiled files into a library and cache the results."""
        return self.uncached_link_lib(dst, srcs, *args, libs=libs, **kwargs)

    @abc.abstractmethod
    def uncached_link_lib(self, *args, **kwargs):
        pass

    @platform.auto_platform_options()
    def build_lib(self, dst, srcs, *, objs=(), libs=(), ckwargs={}, lkwargs={}):
        """Compile all of the passed in L{srcs} in parallel, then link them
        into a library."""
        objs = tuple(chain(objs, self.build_objects(srcs, **ckwargs)))
        return self.link_lib(dst, objs, libs=libs, **lkwargs)

    # --------------------------------------------------------------------------

    @contextlib.contextmanager
    @platform.auto_platform_options()
    def tempfile_link_lib(self, code='', *, quieter=1, ckwargs={}, **kwargs):
        with self.tempfile(code) as src:
            dst = src.parent / 'temp'
            obj = self.uncached_compile(src, quieter=quieter, **ckwargs)
            yield self.uncached_link_lib(dst, [obj], quieter=quieter, **kwargs)

    def try_link_lib(self, *args, **kwargs):
        try:
            with self.tempfile_link_lib(*args, **kwargs):
                return True
        except fbuild.ExecutionError:
            return False

    def check_link_lib(self, code, msg, *args, **kwargs):
        self.ctx.logger.check(msg)
        if self.try_link_lib(code, *args, **kwargs):
            self.ctx.logger.passed()
            return True
        else:
            self.ctx.logger.failed()
            return False
# ------------------------------------------------------------------------------
class AbstractRunner(fbuild.db.PersistentObject):
    @abc.abstractmethod
    def tempfile_run(self, *args, **kwargs):
        pass

    def try_run(self, code='', quieter=1, **kwargs):
        try:
            self.tempfile_run(code, quieter=quieter, **kwargs)
        except fbuild.ExecutionError:
            return False
        else:
            return True

    def check_run(self, code, msg, *args, **kwargs):
        self.ctx.logger.check(msg)
        if self.try_run(code, *args, **kwargs):
            self.ctx.logger.passed()
            return True
        else:
            self.ctx.logger.failed()
            return False
# ------------------------------------------------------------------------------
class AbstractExeLinker(AbstractCompiler, AbstractRunner):
    @fbuild.db.cachemethod
    @platform.auto_platform_options()
    def link_exe(self, dst, srcs:fbuild.db.SRCS, *args,
            libs:fbuild.db.SRCS=(),
            **kwargs) -> fbuild.db.DST:
        """Link compiled files into an executable."""
        return self.uncached_link_exe(dst, srcs, *args, libs=libs, **kwargs)

    @abc.abstractmethod
    def uncached_link_exe(self, *args, **kwargs):
        pass

    @platform.auto_platform_options()
    def build_exe(self, dst, srcs, *, objs=(), libs=(), ckwargs={}, lkwargs={}):
        """Compile all of the passed in L{srcs} in parallel, then link them
        into an executable."""
        objs = tuple(chain(objs, self.build_objects(srcs, **ckwargs)))
        return self.link_exe(dst, objs, libs=libs, **lkwargs)

    # --------------------------------------------------------------------------

    @contextlib.contextmanager
    @platform.auto_platform_options()
    def tempfile_link_exe(self, code='', *, quieter=1, ckwargs={}, **kwargs):
        with self.tempfile(code) as src:
            dst = src.parent / 'temp'
            obj = self.uncached_compile(src, quieter=quieter, **ckwargs)
            yield self.uncached_link_exe(dst, [obj], quieter=quieter, **kwargs)

    @platform.auto_platform_options()
    def try_link_exe(self, *args, **kwargs):
        try:
            with self.tempfile_link_exe(*args, **kwargs):
                return True
        except fbuild.ExecutionError:
            return False

    @platform.auto_platform_options()
    def check_link_exe(self, code, msg, *args, **kwargs):
        self.ctx.logger.check(msg)
        if self.try_link_exe(code, *args, **kwargs):
            self.ctx.logger.passed()
            return True
        else:
            self.ctx.logger.failed()
            return False

    @platform.auto_platform_options()
    def tempfile_run(self, *args, quieter=1, ckwargs={}, lkwargs={}, **kwargs):
        with self.tempfile_link_exe(*args,
                quieter=quieter,
                ckwargs=ckwargs,
                **lkwargs) as exe:
            return self.ctx.execute([exe],
                quieter=quieter,
                cwd=exe.parent,
                **kwargs)
# ------------------------------------------------------------------------------
class AbstractCompilerBuilder(AbstractLibLinker, AbstractExeLinker):
    pass
| 35.762048 | 80 | 0.553609 | 1,326 | 11,873 | 4.831825 | 0.156863 | 0.035898 | 0.037459 | 0.05057 | 0.536288 | 0.482285 | 0.461058 | 0.437334 | 0.418136 | 0.376151 | 0 | 0.000925 | 0.271456 | 11,873 | 331 | 81 | 35.870091 | 0.739769 | 0.166596 | 0 | 0.422594 | 0 | 0 | 0.024862 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.117155 | false | 0.050209 | 0.046025 | 0.008368 | 0.305439 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
bec81857d7e4af0801337540f4b978497c5536f9 | 1,897 | py | Python | tuprolog/solve/exception/error/existence/__init__.py | DavideEva/2ppy | 55609415102f8116165a42c8e33e029c4906e160 | [
"Apache-2.0"
] | 1 | 2021-08-07T06:29:28.000Z | 2021-08-07T06:29:28.000Z | tuprolog/solve/exception/error/existence/__init__.py | DavideEva/2ppy | 55609415102f8116165a42c8e33e029c4906e160 | [
"Apache-2.0"
] | 14 | 2021-09-16T13:25:12.000Z | 2022-01-03T10:12:22.000Z | tuprolog/solve/exception/error/existence/__init__.py | DavideEva/2ppy | 55609415102f8116165a42c8e33e029c4906e160 | [
"Apache-2.0"
] | 1 | 2021-12-22T00:25:32.000Z | 2021-12-22T00:25:32.000Z | from typing import Union
from tuprolog import logger
# noinspection PyUnresolvedReferences
import jpype.imports
# noinspection PyUnresolvedReferences
import it.unibo.tuprolog.solve.exception.error as errors
from tuprolog.core import Term, Atom
from tuprolog.solve import ExecutionContext, Signature
ExistenceError = errors.ExistenceError
ObjectType = ExistenceError.ObjectType
OBJECT_PROCEDURE = ObjectType.PROCEDURE
OBJECT_SOURCE_SINK = ObjectType.SOURCE_SINK
OBJECT_RESOURCE = ObjectType.RESOURCE
OBJECT_STREAM = ObjectType.STREAM
OBJECT_OOP_ALIAS = ObjectType.OOP_ALIAS
OBJECT_OOP_METHOD = ObjectType.OOP_METHOD
OBJECT_OOP_CONSTRUCTOR = ObjectType.OOP_CONSTRUCTOR
OBJECT_OOP_PROPERTY = ObjectType.OOP_PROPERTY
def existence_error(
        context: ExecutionContext,
        type: ObjectType,
        culprit: Term,
        message: str
) -> ExistenceError:
    return ExistenceError.of(context, type, culprit, message)


def existence_error_for_source_sink(
        context: ExecutionContext,
        alias: Union[Atom, str]
) -> ExistenceError:
    return ExistenceError.forSourceSink(context, alias)


def existence_error_for_procedure(
        context: ExecutionContext,
        procedure: Signature
) -> ExistenceError:
    return ExistenceError.forProcedure(context, procedure)


def existence_error_for_stream(
        context: ExecutionContext,
        stream: Term
) -> ExistenceError:
    return ExistenceError.forStream(context, stream)


def existence_error_for_resource(
        context: ExecutionContext,
        name: str
) -> ExistenceError:
    return ExistenceError.forResource(context, name)


def object_type(name: Union[str, Term]) -> ObjectType:
    if isinstance(name, str):
        return ObjectType.of(name)
    else:
        return ObjectType.fromTerm(name)
logger.debug("Loaded JVM classes from it.unibo.tuprolog.solve.exception.error.ExistenceError.*")
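# A minimal usage sketch (assumes a solver ExecutionContext named `ctx` is
# available at call time; the names below are illustrative):
#
#     err = existence_error_for_resource(ctx, "missing_file.pl")
#     kind = object_type("resource")   # same as ObjectType.RESOURCE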
| 24.960526 | 96 | 0.765946 | 200 | 1,897 | 7.1 | 0.29 | 0.042254 | 0.059859 | 0.056338 | 0.047887 | 0.047887 | 0 | 0 | 0 | 0 | 0 | 0 | 0.163416 | 1,897 | 75 | 97 | 25.293333 | 0.89477 | 0.037428 | 0 | 0.204082 | 0 | 0 | 0.043884 | 0.030719 | 0 | 0 | 0 | 0 | 0 | 1 | 0.122449 | false | 0 | 0.122449 | 0.102041 | 0.387755 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 |
fe2070ac8557cbd4275cc5e584c79388af700674 | 2,510 | py | Python | detection/contor.py | chika626/chainer_rep | a1d4fd32a8cfcab753269455d08c1918f273388d | [
"MIT"
] | null | null | null | detection/contor.py | chika626/chainer_rep | a1d4fd32a8cfcab753269455d08c1918f273388d | [
"MIT"
] | 7 | 2020-03-13T08:29:46.000Z | 2020-05-27T17:34:14.000Z | detection/contor.py | chika626/chainer_rep | a1d4fd32a8cfcab753269455d08c1918f273388d | [
"MIT"
] | null | null | null | import json
import math
from PIL import Image,ImageDraw
import pandas as pd
import glob
import argparse
import copy
import numpy as np
import matplotlib.pyplot as plt
import pickle
import cv2
from PIL import ImageEnhance
import chainer
from chainer.datasets import ConcatenatedDataset
from chainer.datasets import TransformDataset
from chainer.optimizer_hooks import WeightDecay
from chainer import serializers
from chainer import training
from chainer.training import extensions
from chainer.training import triggers
from chainercv.datasets import voc_bbox_label_names
from chainercv.datasets import VOCBboxDataset
from chainercv.extensions import DetectionVOCEvaluator
from chainercv.links.model.ssd import GradientScaling
from chainercv.links.model.ssd import multibox_loss
from chainercv.links import SSD300
from chainercv.links import SSD512
from chainercv import transforms
from chainercv.utils import read_image
from chainercv.links.model.ssd import random_crop_with_bbox_constraints
from chainercv.links.model.ssd import random_distort
from chainercv.links.model.ssd import resize_with_random_interpolation
import queue
def run(img):
    # c , H , W = img.shape
    H, W = img.size  # PIL's size is (width, height), so H holds the width here
    img = np.asarray(img)

    # array (image) for the converted data
    transed = Image.new('RGB', (H, W))
    for x in range(H):
        for y in range(W):
            transed.putpixel((x, y), (255, 255, 255))

    # mark horizontal color transitions in black
    for x in range(H):
        for y in range(W):
            if x + 1 == H or y + 1 == W:
                break
            if img[y][x][0] != img[y][x+1][0]:
                transed.putpixel((x, y), (0, 0, 0))

    # mark vertical color transitions in black
    for y in range(W):
        for x in range(H):
            if x + 1 == H or y + 1 == W:
                break
            if img[y][x][0] != img[y+1][x][0]:
                transed.putpixel((x, y), (0, 0, 0))

    return transed


def main():
    # # code for the single-image case
    # img = Image.open('cont/transed/X.jpg')
    # img = img.convert('L')
    # img = np.asarray(img)
    # ret2, img = cv2.threshold(img, 0, 255, cv2.THRESH_OTSU)
    # img = Image.fromarray(img)
    # img = img.convert('RGB')
    # transed = run(img)
    # transed.save('transec_0.png')
    # return

    # batch converter
    img_path = glob.glob("cont/crop/*")
    counter = 0
    for path in img_path:
        img = Image.open(path)
        transed = run(img)
        transed.save('transec_{}.png'.format(counter))
        counter += 1


if __name__ == '__main__':
    main()
fe21f2c89737b3c4d120cba724974597cb079bc4 | 1,675 | py | Python | src/boot.py | johngtrs/krux | 7b6c6d410e29c16ab5d3c05a5aafab618f13a86f | [
"MIT"
] | null | null | null | src/boot.py | johngtrs/krux | 7b6c6d410e29c16ab5d3c05a5aafab618f13a86f | [
"MIT"
] | null | null | null | src/boot.py | johngtrs/krux | 7b6c6d410e29c16ab5d3c05a5aafab618f13a86f | [
"MIT"
] | null | null | null | # The MIT License (MIT)
# Copyright (c) 2021 Tom J. Sun
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import machine
from pmu import axp192
from context import Context
from login import Login
from home import Home
import settings
pmu = axp192()
# Enable power management so that if power button is held down 6 secs,
# it shuts off as expected
pmu.enablePMICSleepMode(True)
ctx = Context()
ctx.display.flash_text(settings.load('splash', ( 'Krux' ), strip=False))
while True:
    if not Login(ctx).run():
        break

    if not Home(ctx).run():
        break
ctx.display.flash_text(( 'Shutting down..' ))
ctx.clear()
pmu.setEnterSleepMode()
machine.reset()
| 32.211538 | 79 | 0.755224 | 254 | 1,675 | 4.972441 | 0.543307 | 0.069675 | 0.020586 | 0.030087 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.007971 | 0.176119 | 1,675 | 51 | 80 | 32.843137 | 0.907246 | 0.696119 | 0 | 0.105263 | 0 | 0 | 0.051125 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.315789 | 0 | 0.315789 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 |
fe2900b93b3b942d3363b1695eb5a7b3920a90d6 | 1,913 | py | Python | app.py | Nishanth-Gobi/Da-Vinci-Code | b44a2d0c553e4f9cf9e2bb3283ebb5f6eaecea4a | [
"MIT"
] | null | null | null | app.py | Nishanth-Gobi/Da-Vinci-Code | b44a2d0c553e4f9cf9e2bb3283ebb5f6eaecea4a | [
"MIT"
] | null | null | null | app.py | Nishanth-Gobi/Da-Vinci-Code | b44a2d0c553e4f9cf9e2bb3283ebb5f6eaecea4a | [
"MIT"
] | null | null | null | from flask import Flask, render_template, request, redirect, url_for
from os.path import join
from stego import Steganography
app = Flask(__name__)
UPLOAD_FOLDER = 'static/files/'
app.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER
ALLOWED_EXTENSIONS = {'png', 'jpg', 'jpeg'}
@app.route("/")
def home():
return render_template('home.html')
@app.route("/encrypt", methods=['GET', 'POST'])
def get_image():
if request.method == 'GET':
return render_template('encrypt.html')
# Check if the user has entered the secret message
if 'file' in request.files and 'Secret' in request.values:
uploaded_image = request.files['file']
message = request.values.get('Secret')
password = request.values.get("key")
filepath = join(app.config['UPLOAD_FOLDER'], "cover_image.png")
uploaded_image.save(filepath)
im = Steganography(filepath=app.config['UPLOAD_FOLDER'], key=password)
im.encode(message=message)
return render_template('encrypt.html', value=filepath, image_flag=True, secret_flag=True)
return redirect(url_for('encrypt'))
@app.route("/decrypt", methods=['GET', 'POST'])
def get_image_to_decrypt():
if request.method == 'GET':
return render_template('decrypt.html')
if 'key' in request.values:
password = request.values.get('key')
filepath = join(app.config['UPLOAD_FOLDER'], "stego_image.png")
im = Steganography(filepath=app.config['UPLOAD_FOLDER'], key=password)
message = im.decode()
return render_template('decrypt.html', value=filepath, message=message)
if 'file' in request.files:
uploaded_image = request.files['file']
filepath = join(app.config['UPLOAD_FOLDER'], "stego_image.png")
uploaded_image.save(filepath)
return render_template('decrypt.html', value=filepath)
if __name__ == '__main__':
app.run(debug=True)
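# A rough manual test of the two endpoints (paths and form field names match
# the handlers above; the file names and key are illustrative):
#
#     curl -F "file=@cover.png" -F "Secret=hello" -F "key=s3cret" \
#         http://127.0.0.1:5000/encrypt
#     curl -F "file=@stego_image.png" http://127.0.0.1:5000/decrypt
#     curl -F "key=s3cret" http://127.0.0.1:5000/decrypt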
| 31.360656 | 97 | 0.67747 | 239 | 1,913 | 5.242678 | 0.276151 | 0.076616 | 0.071828 | 0.100559 | 0.579409 | 0.490822 | 0.361532 | 0.230646 | 0.230646 | 0.09577 | 0 | 0 | 0.182436 | 1,913 | 60 | 98 | 31.883333 | 0.801151 | 0.025091 | 0 | 0.243902 | 0 | 0 | 0.163178 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.073171 | false | 0.097561 | 0.073171 | 0.02439 | 0.317073 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
fe3002f8ab77d8668df51f08f7789bc9628e8c1f | 2,370 | py | Python | EC2 Auto Clean Room Forensics/Lambda-Functions/snapshotForRemediation.py | spartantri/aws-security-automation | a3904931220111022d12e71a3d79e4a85fc82173 | [
"Apache-2.0"
] | null | null | null | EC2 Auto Clean Room Forensics/Lambda-Functions/snapshotForRemediation.py | spartantri/aws-security-automation | a3904931220111022d12e71a3d79e4a85fc82173 | [
"Apache-2.0"
] | null | null | null | EC2 Auto Clean Room Forensics/Lambda-Functions/snapshotForRemediation.py | spartantri/aws-security-automation | a3904931220111022d12e71a3d79e4a85fc82173 | [
"Apache-2.0"
] | null | null | null | # MIT No Attribution
# Permission is hereby granted, free of charge, to any person obtaining a copy of this
# software and associated documentation files (the "Software"), to deal in the Software
# without restriction, including without limitation the rights to use, copy, modify,
# merge, publish, distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
# INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
# PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import boto3
import os
def lambda_handler(event, context):
# TODO implement
print(event)
client = boto3.client('ec2')
instanceID = event.get('instanceID')
response = client.describe_instances(
InstanceIds=[
instanceID
]
)
volumeID = response['Reservations'][0]['Instances'][0]['BlockDeviceMappings'][0]['Ebs']['VolumeId']
print(volumeID)
SnapShotDetails = client.create_snapshot(
Description='Isolated Instance',
VolumeId=volumeID
)
client.create_tags(Resources=[SnapShotDetails['SnapshotId']], Tags=[{'Key': 'Name', 'Value': instanceID}])
# TODO Dump Response into S3 - response
# TODO Dump Response details into Snapshot - SnapShotDetails['SnapshotId']
print(response)
print(SnapShotDetails['SnapshotId'])
response = client.modify_instance_attribute(
Groups=[
os.environ['ISOLATED_SECUTRITYGROUP'],
],
InstanceId=instanceID
)
tagresponse = client.create_tags(
Resources=[
instanceID,
],
Tags=[
{
'Key': 'IsIsolated',
'Value': 'InstanceIsolated'
},
]
)
waiter = client.get_waiter('snapshot_completed')
waiter.wait(
SnapshotIds=[
SnapShotDetails['SnapshotId'],
]
)
# event['SnapshotId'] = SnapShotDetails['SnapshotId']
return SnapShotDetails['SnapshotId']
| 33.857143 | 110 | 0.670042 | 261 | 2,370 | 6.045977 | 0.521073 | 0.048796 | 0.016477 | 0.031686 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.00387 | 0.236709 | 2,370 | 69 | 111 | 34.347826 | 0.868436 | 0.444726 | 0 | 0.045455 | 0 | 0 | 0.160123 | 0.017706 | 0 | 0 | 0 | 0.014493 | 0 | 1 | 0.022727 | false | 0 | 0.045455 | 0 | 0.090909 | 0.090909 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
fe317187c1c12b8c77ea5e51802f388e760744e4 | 1,324 | py | Python | tests/test_intbounds.py | alex/optimizer-model | 0e40a0763082f5fe0bd596e8e77ebccbcd7f4a98 | [
"BSD-3-Clause"
] | 4 | 2015-04-29T22:49:25.000Z | 2018-02-16T09:06:08.000Z | tests/test_intbounds.py | alex/optimizer-model | 0e40a0763082f5fe0bd596e8e77ebccbcd7f4a98 | [
"BSD-3-Clause"
] | null | null | null | tests/test_intbounds.py | alex/optimizer-model | 0e40a0763082f5fe0bd596e8e77ebccbcd7f4a98 | [
"BSD-3-Clause"
] | null | null | null | from optimizer.utils.intbounds import IntBounds
class TestIntBounds(object):
    def test_make_gt(self):
        i0 = IntBounds()
        i1 = i0.make_gt(IntBounds(10, 10))

        assert i1.lower == 11

    def test_make_gt_already_bounded(self):
        i0 = IntBounds()
        i1 = i0.make_gt(IntBounds(10, 10)).make_gt(IntBounds(0, 0))

        assert i1.lower == 11

    def test_make_lt(self):
        i0 = IntBounds()
        i1 = i0.make_lt(IntBounds(10, 10))

        assert i1.upper == 9

    def test_make_lt_already_bounded(self):
        i0 = IntBounds()
        i1 = i0.make_lt(IntBounds(0, 0)).make_lt(IntBounds(10, 10))

        assert i1.upper == -1

    def test_both_bounds(self):
        i0 = IntBounds()
        i1 = i0.make_lt(IntBounds(10, 10)).make_gt(IntBounds(0, 0))

        assert i1.upper == 9
        assert i1.lower == 1

        i2 = i0.make_gt(IntBounds(0, 0)).make_lt(IntBounds(10, 10))

        assert i2.lower == 1
        assert i2.upper == 9

    def test_make_le_already_bounded(self):
        i0 = IntBounds()
        i1 = i0.make_le(IntBounds(0, 0)).make_le(IntBounds(2, 2))

        assert i1.upper == 0

    def test_make_ge_already_bounded(self):
        i0 = IntBounds()
        i1 = i0.make_ge(IntBounds(10, 10)).make_ge(IntBounds(0, 0))

        assert i1.lower == 10
| 23.22807 | 67 | 0.5929 | 192 | 1,324 | 3.901042 | 0.166667 | 0.064085 | 0.140187 | 0.158879 | 0.699599 | 0.65287 | 0.620828 | 0.562083 | 0.365821 | 0.269693 | 0 | 0.092147 | 0.278701 | 1,324 | 56 | 68 | 23.642857 | 0.692147 | 0 | 0 | 0.323529 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.294118 | 1 | 0.205882 | false | 0 | 0.029412 | 0 | 0.264706 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
fe358e9590f17c8d7c10eb92232dc2f7d4b20167 | 235 | py | Python | config.py | volgachen/Chinese-Tokenization | 467e08da6fe271b6e33258d5aa6682c0405a3f32 | [
"Apache-2.0"
] | null | null | null | config.py | volgachen/Chinese-Tokenization | 467e08da6fe271b6e33258d5aa6682c0405a3f32 | [
"Apache-2.0"
] | null | null | null | config.py | volgachen/Chinese-Tokenization | 467e08da6fe271b6e33258d5aa6682c0405a3f32 | [
"Apache-2.0"
] | 1 | 2020-07-12T10:38:34.000Z | 2020-07-12T10:38:34.000Z | class Config:
    ngram = 2
    train_set = "data/rmrb.txt"
    modified_train_set = "data/rmrb_modified.txt"
    test_set = ""
    model_file = ""
    param_file = ""
    word_max_len = 10
    proposals_keep_ratio = 1.0
    use_re = 1
    subseq_num = 15
subseq_num = 15 | 21.363636 | 47 | 0.67234 | 37 | 235 | 3.918919 | 0.756757 | 0.110345 | 0.165517 | 0.22069 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.043478 | 0.217021 | 235 | 11 | 48 | 21.363636 | 0.744565 | 0 | 0 | 0 | 0 | 0 | 0.154867 | 0.097345 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
fe3c354a94b9bc97c332f504c7fb8dc959b31224 | 7,019 | py | Python | manila/tests/share/test_snapshot_access.py | gouthampacha/manila | 4b7ba9b99d272663f519b495668715fbf979ffbc | [
"Apache-2.0"
] | 3 | 2016-06-06T13:05:00.000Z | 2021-05-05T04:29:24.000Z | manila/tests/share/test_snapshot_access.py | gouthampacha/manila | 4b7ba9b99d272663f519b495668715fbf979ffbc | [
"Apache-2.0"
] | 5 | 2019-08-14T06:46:03.000Z | 2021-12-13T20:01:25.000Z | manila/tests/share/test_snapshot_access.py | gouthampacha/manila | 4b7ba9b99d272663f519b495668715fbf979ffbc | [
"Apache-2.0"
] | 2 | 2020-03-15T01:24:15.000Z | 2020-07-22T20:34:26.000Z | # Copyright (c) 2016 Hitachi Data Systems, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import ddt
import mock
from manila.common import constants
from manila import context
from manila import db
from manila import exception
from manila.share import snapshot_access
from manila import test
from manila.tests import db_utils
from manila import utils
@ddt.ddt
class SnapshotAccessTestCase(test.TestCase):
    def setUp(self):
        super(SnapshotAccessTestCase, self).setUp()
        self.driver = self.mock_class("manila.share.driver.ShareDriver",
                                      mock.Mock())
        self.snapshot_access = snapshot_access.ShareSnapshotInstanceAccess(
            db, self.driver)
        self.context = context.get_admin_context()
        share = db_utils.create_share()
        self.snapshot = db_utils.create_snapshot(share_id=share['id'])
        self.snapshot_instance = db_utils.create_snapshot_instance(
            snapshot_id=self.snapshot['id'],
            share_instance_id=self.snapshot['share']['instance']['id'])

    @ddt.data(constants.ACCESS_STATE_QUEUED_TO_APPLY,
              constants.ACCESS_STATE_QUEUED_TO_DENY)
    def test_update_access_rules(self, state):
        rules = []
        for i in range(2):
            rules.append({
                'id': 'id-%s' % i,
                'state': state,
                'access_id': 'rule_id%s' % i
            })

        all_rules = copy.deepcopy(rules)
        all_rules.append({
            'id': 'id-3',
            'state': constants.ACCESS_STATE_ERROR,
            'access_id': 'rule_id3'
        })

        snapshot_instance_get = self.mock_object(
            db, 'share_snapshot_instance_get',
            mock.Mock(return_value=self.snapshot_instance))
        snap_get_all_for_snap_instance = self.mock_object(
            db, 'share_snapshot_access_get_all_for_snapshot_instance',
            mock.Mock(return_value=all_rules))
        self.mock_object(db, 'share_snapshot_instance_access_update')
        self.mock_object(self.driver, 'snapshot_update_access')
        self.mock_object(self.snapshot_access, '_check_needs_refresh',
                         mock.Mock(return_value=False))
        self.mock_object(db, 'share_snapshot_instance_access_delete')

        self.snapshot_access.update_access_rules(self.context,
                                                 self.snapshot_instance['id'])

        snapshot_instance_get.assert_called_once_with(
            utils.IsAMatcher(context.RequestContext),
            self.snapshot_instance['id'], with_share_data=True)
        snap_get_all_for_snap_instance.assert_called_once_with(
            utils.IsAMatcher(context.RequestContext),
            self.snapshot_instance['id'])

        if state == constants.ACCESS_STATE_QUEUED_TO_APPLY:
            self.driver.snapshot_update_access.assert_called_once_with(
                utils.IsAMatcher(context.RequestContext),
                self.snapshot_instance, rules, add_rules=rules,
                delete_rules=[], share_server=None)
        else:
            self.driver.snapshot_update_access.assert_called_once_with(
                utils.IsAMatcher(context.RequestContext),
                self.snapshot_instance, [], add_rules=[],
                delete_rules=rules, share_server=None)

    def test_update_access_rules_delete_all_rules(self):
        rules = []
        for i in range(2):
            rules.append({
                'id': 'id-%s' % i,
                'state': constants.ACCESS_STATE_QUEUED_TO_DENY,
                'access_id': 'rule_id%s' % i
            })

        snapshot_instance_get = self.mock_object(
            db, 'share_snapshot_instance_get',
            mock.Mock(return_value=self.snapshot_instance))
        snap_get_all_for_snap_instance = self.mock_object(
            db, 'share_snapshot_access_get_all_for_snapshot_instance',
            mock.Mock(side_effect=[rules, []]))
        self.mock_object(db, 'share_snapshot_instance_access_update')
        self.mock_object(self.driver, 'snapshot_update_access')
        self.mock_object(db, 'share_snapshot_instance_access_delete')

        self.snapshot_access.update_access_rules(self.context,
                                                 self.snapshot_instance['id'],
                                                 delete_all_rules=True)

        snapshot_instance_get.assert_called_once_with(
            utils.IsAMatcher(context.RequestContext),
            self.snapshot_instance['id'], with_share_data=True)
        snap_get_all_for_snap_instance.assert_called_with(
            utils.IsAMatcher(context.RequestContext),
            self.snapshot_instance['id'])

        self.driver.snapshot_update_access.assert_called_with(
            utils.IsAMatcher(context.RequestContext), self.snapshot_instance,
            [], add_rules=[], delete_rules=rules, share_server=None)

    def test_update_access_rules_exception(self):
        rules = []
        for i in range(2):
            rules.append({
                'id': 'id-%s' % i,
                'state': constants.ACCESS_STATE_APPLYING,
                'access_id': 'rule_id%s' % i
            })

        snapshot_instance_get = self.mock_object(
            db, 'share_snapshot_instance_get',
            mock.Mock(return_value=self.snapshot_instance))
        snap_get_all_for_snap_instance = self.mock_object(
            db, 'share_snapshot_access_get_all_for_snapshot_instance',
            mock.Mock(return_value=rules))
        self.mock_object(db, 'share_snapshot_instance_access_update')
        self.mock_object(self.driver, 'snapshot_update_access',
                         mock.Mock(side_effect=exception.NotFound))

        self.assertRaises(exception.NotFound,
                          self.snapshot_access.update_access_rules,
                          self.context, self.snapshot_instance['id'])

        snapshot_instance_get.assert_called_once_with(
            utils.IsAMatcher(context.RequestContext),
            self.snapshot_instance['id'], with_share_data=True)
        snap_get_all_for_snap_instance.assert_called_once_with(
            utils.IsAMatcher(context.RequestContext),
            self.snapshot_instance['id'])

        self.driver.snapshot_update_access.assert_called_once_with(
            utils.IsAMatcher(context.RequestContext), self.snapshot_instance,
            rules, add_rules=rules, delete_rules=[], share_server=None)
fe3e90a0352653677e5f89aa3d6275c22d3a1048 | 470 | py | Python | tests/test1.py | SaijC/manhwaDownloader | f6e97cfe25355598e42633a3796d84b666d5302f | [
"MIT"
] | null | null | null | tests/test1.py | SaijC/manhwaDownloader | f6e97cfe25355598e42633a3796d84b666d5302f | [
"MIT"
] | null | null | null | tests/test1.py | SaijC/manhwaDownloader | f6e97cfe25355598e42633a3796d84b666d5302f | [
"MIT"
] | null | null | null | import requests
import logging
import cfscrape
import os
from manhwaDownloader.constants import CONSTANTS as CONST
logging.basicConfig(level=logging.DEBUG)
folderPath = os.path.join(CONST.OUTPUTPATH, 'serious-taste-of-forbbiden-fruit')
logging.info(len([file for file in os.walk(folderPath)]))
walkList = [file for file in os.walk(folderPath)]
chapterDicts = dict()
for folder, _, files in walkList[1:]:
    chapterDicts.update({folder: files})
print(chapterDicts) | 24.736842 | 79 | 0.778723 | 63 | 470 | 5.793651 | 0.571429 | 0.038356 | 0.060274 | 0.071233 | 0.158904 | 0.158904 | 0.158904 | 0 | 0 | 0 | 0 | 0.002392 | 0.110638 | 470 | 19 | 80 | 24.736842 | 0.870813 | 0 | 0 | 0 | 0 | 0 | 0.067941 | 0.067941 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.384615 | 0 | 0.384615 | 0.076923 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 |
fe48c3bafe5a6523868023e377aa4dab0192c3a1 | 407 | py | Python | solutions/python3/894.py | sm2774us/amazon_interview_prep_2021 | f580080e4a6b712b0b295bb429bf676eb15668de | [
"MIT"
] | 42 | 2020-08-02T07:03:49.000Z | 2022-03-26T07:50:15.000Z | solutions/python3/894.py | ajayv13/leetcode | de02576a9503be6054816b7444ccadcc0c31c59d | [
"MIT"
] | null | null | null | solutions/python3/894.py | ajayv13/leetcode | de02576a9503be6054816b7444ccadcc0c31c59d | [
"MIT"
] | 40 | 2020-02-08T02:50:24.000Z | 2022-03-26T15:38:10.000Z | class Solution:
    def allPossibleFBT(self, N):
        # A full binary tree exists only for an odd node count: give i of the
        # N - 1 non-root nodes to the left subtree (i odd) and the rest to the
        # right, recursing on both sides.
        def constr(N):
            if N == 1:
                yield TreeNode(0)
            for i in range(1, N, 2):
                for l in constr(i):
                    for r in constr(N - i - 1):
                        m = TreeNode(0)
                        m.left = l
                        m.right = r
                        yield m
        return list(constr(N))
return list(constr(N)) | 33.916667 | 47 | 0.371007 | 49 | 407 | 3.081633 | 0.489796 | 0.139073 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.031915 | 0.538084 | 407 | 12 | 48 | 33.916667 | 0.771277 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.166667 | false | 0 | 0 | 0 | 0.333333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
fe4e2e11b7395e1b93b6ba3044a09072e2e8f08b | 1,230 | py | Python | Modules/Phylogenetic.py | DaneshMoradigaravand/PlasmidPerm | 7a84c1d4dbf7320dd5ba821ff0e715a89fe4b3e4 | [
"MIT"
] | null | null | null | Modules/Phylogenetic.py | DaneshMoradigaravand/PlasmidPerm | 7a84c1d4dbf7320dd5ba821ff0e715a89fe4b3e4 | [
"MIT"
] | null | null | null | Modules/Phylogenetic.py | DaneshMoradigaravand/PlasmidPerm | 7a84c1d4dbf7320dd5ba821ff0e715a89fe4b3e4 | [
"MIT"
] | null | null | null | import os
from Bio import AlignIO, Phylo
from Bio.Phylo.TreeConstruction import DistanceCalculator, DistanceTreeConstructor
class Phylogenetic:
    def __init__(self, PATH):
        self.PATH = PATH

    def binary_sequence_generator(self, input_kmer_pattern, label):
        # encode the 0/1 k-mer pattern as a two-letter alphabet ('A'/'C')
        string_inp = "".join(['A' if x == 0 else 'C' for x in input_kmer_pattern])
        return [">" + label, string_inp]

    def multifasta_fille_generator(self, converted_sequences_phyolgenetic):
        file_output = open(os.path.join(self.PATH, "binary_presence_absence_kmers.fasta"), "w")
        file_output.writelines('\n'.join(converted_sequences_phyolgenetic) + '\n')
        file_output.close()

    def distance_matrix_generator(self):
        align = AlignIO.read(os.path.join(self.PATH, "binary_presence_absence_kmers.fasta"), "fasta")
        calculator = DistanceCalculator('identity')
        distMatrix = calculator.get_distance(align)
        return distMatrix

    def distance_tree_file_generator(self, distance_matrix):
        constructor = DistanceTreeConstructor()
        UPGMATree = constructor.upgma(distance_matrix)
        Phylo.write(UPGMATree, os.path.join(self.PATH, "binary_presence_absence_kmers.tre"), "newick")
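# A minimal usage sketch (paths and sample data are illustrative; the k-mer
# presence/absence vectors must all have the same length):
#
#     phylo = Phylogenetic("/tmp/out")
#     seqs = []
#     for label, pattern in [("s1", [0, 1, 1]), ("s2", [1, 1, 0])]:
#         seqs.extend(phylo.binary_sequence_generator(pattern, label))
#     phylo.multifasta_fille_generator(seqs)
#     phylo.distance_tree_file_generator(phylo.distance_matrix_generator())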
fe52c241e006580225be521c666de64401063758 | 410 | py | Python | lib/models/bn_helper.py | hongrui16/naic2020_B | 9321bdd19e7d2d47ac9c711eb8437cd364e25f44 | [
"MIT"
] | null | null | null | lib/models/bn_helper.py | hongrui16/naic2020_B | 9321bdd19e7d2d47ac9c711eb8437cd364e25f44 | [
"MIT"
] | null | null | null | lib/models/bn_helper.py | hongrui16/naic2020_B | 9321bdd19e7d2d47ac9c711eb8437cd364e25f44 | [
"MIT"
] | null | null | null | import torch
import functools
if torch.__version__.startswith('0'):
    from .sync_bn.inplace_abn.bn import InPlaceABNSync
    BatchNorm2d = functools.partial(InPlaceABNSync, activation='none')
    BatchNorm2d_class = InPlaceABNSync
    relu_inplace = False
else:
    # BatchNorm2d_class = BatchNorm2d = torch.nn.SyncBatchNorm
    BatchNorm2d_class = BatchNorm2d = torch.nn.BatchNorm2d
    relu_inplace = True
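# A minimal usage sketch (illustrative): downstream model code can stay
# agnostic about which implementation was selected above.
#
#     bn = BatchNorm2d(64)                       # InPlaceABNSync or nn.BatchNorm2d
#     act = torch.nn.ReLU(inplace=relu_inplace)  # in-place only when it is safe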
fe56b7b7af27f780f7fa9407871404e8b6436b3d | 1,587 | py | Python | app/services/base.py | grace1307/lan_mapper | 5d244078732b86a2e38a5b21436ffca83c689eeb | [
"MIT"
] | null | null | null | app/services/base.py | grace1307/lan_mapper | 5d244078732b86a2e38a5b21436ffca83c689eeb | [
"MIT"
] | null | null | null | app/services/base.py | grace1307/lan_mapper | 5d244078732b86a2e38a5b21436ffca83c689eeb | [
"MIT"
] | null | null | null | from app.db import db
# Silently ignore it if the db can't find the row when updating/deleting.
# TODO: don't ignore it — raise an error instead and remove the checks in the views.
class BaseService:
    __abstract__ = True
    model = None

    # Create
    def add_one(self, **kwargs):
        new_row = self.model(**kwargs)
        db.session.add(new_row)
        db.session.commit()  # SQLAlchemy auto-flushes, so commit alone may suffice here
        return new_row

    # Read
    def select_one(self, id):
        return self.model.query.filter(self.model.id == id).one_or_none()

    def select_all(self, conditions: list = None, sort_by=None, is_asc=None):
        query = db.session.query(self.model)

        if conditions is not None:
            for condition in conditions:
                query = query.filter(condition)

        if sort_by is not None and is_asc is not None:
            sort_column = self.model.__table__._columns[sort_by]
            is_asc = is_asc == 'true'

            if sort_column is not None:
                query = query.order_by(sort_column.asc() if is_asc else sort_column.desc())

        return query.all()

    # Update
    def update_one(self, id, updated):
        row = self.model.query.filter(self.model.id == id)
        row_result = row.one_or_none()

        if row_result is not None:
            row.update(updated)
            db.session.commit()

        return row.one_or_none()

    # Delete
    def delete_one(self, id):
        row = self.select_one(id)

        if row is not None:
            db.session.delete(row)
            db.session.commit()
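# A minimal usage sketch (hypothetical `User` model; any mapped class with an
# `id` column works the same way):
#
#     class UserService(BaseService):
#         model = User
#
#     service = UserService()
#     user = service.add_one(name="alice")
#     users = service.select_all(sort_by="id", is_asc="true")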
| 28.339286 | 91 | 0.609326 | 225 | 1,587 | 4.124444 | 0.328889 | 0.067888 | 0.05819 | 0.038793 | 0.071121 | 0.071121 | 0.071121 | 0.071121 | 0 | 0 | 0 | 0 | 0.297417 | 1,587 | 55 | 92 | 28.854545 | 0.832287 | 0.127914 | 0 | 0.088235 | 0 | 0 | 0.002907 | 0 | 0 | 0 | 0 | 0.018182 | 0 | 1 | 0.147059 | false | 0 | 0.029412 | 0.029412 | 0.382353 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
fe5b28c8c0a814b5544650e3dacd259358d5495e | 4,972 | py | Python | sbpy/photometry/bandpass.py | jianyangli/sbpy | 6b79cbea9bada89207fba17d02dc0c321fa46bf4 | [
"BSD-3-Clause"
] | 1 | 2017-11-28T02:58:51.000Z | 2017-11-28T02:58:51.000Z | sbpy/photometry/bandpass.py | jianyangli/sbpy | 6b79cbea9bada89207fba17d02dc0c321fa46bf4 | [
"BSD-3-Clause"
] | null | null | null | sbpy/photometry/bandpass.py | jianyangli/sbpy | 6b79cbea9bada89207fba17d02dc0c321fa46bf4 | [
"BSD-3-Clause"
] | null | null | null | # Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
sbpy bandpass Module
"""
__all__ = [
'bandpass'
]
import os
from astropy.utils.data import get_pkg_data_filename
def bandpass(name):
    """Retrieve bandpass transmission spectrum from sbpy.

    Parameters
    ----------
    name : string
        Name of the bandpass, case insensitive.  See notes for
        available filters.

    Returns
    -------
    bp : `~synphot.SpectralElement`

    Notes
    -----
    Available filters:

    +-------------+---------------------------+
    | Name        | Source                    |
    +=============+===========================+
    | 2MASS J     | Cohen et al. 2003         |
    +-------------+---------------------------+
    | 2MASS H     | Cohen et al. 2003         |
    +-------------+---------------------------+
    | 2MASS Ks    | Cohen et al. 2003         |
    +-------------+---------------------------+
    | Cousins R   | STScI CDBS, v4            |
    +-------------+---------------------------+
    | Cousins I   | STScI CDBS, v4            |
    +-------------+---------------------------+
    | Johnson U   | STScI CDBS, v4            |
    +-------------+---------------------------+
    | Johnson B   | STScI CDBS, v4            |
    +-------------+---------------------------+
    | Johnson V   | STScI CDBS, v4            |
    +-------------+---------------------------+
    | PS1 g       | Tonry et al. 2012         |
    +-------------+---------------------------+
    | PS1 r       | Tonry et al. 2012         |
    +-------------+---------------------------+
    | PS1 i       | Tonry et al. 2012         |
    +-------------+---------------------------+
    | PS1 w       | Tonry et al. 2012         |
    +-------------+---------------------------+
    | PS1 y       | Tonry et al. 2012         |
    +-------------+---------------------------+
    | PS1 z       | Tonry et al. 2012         |
    +-------------+---------------------------+
    | SDSS u      | SDSS, dated 2001          |
    +-------------+---------------------------+
    | SDSS g      | SDSS, dated 2001          |
    +-------------+---------------------------+
    | SDSS r      | SDSS, dated 2001          |
    +-------------+---------------------------+
    | SDSS i      | SDSS, dated 2001          |
    +-------------+---------------------------+
    | SDSS z      | SDSS, dated 2001          |
    +-------------+---------------------------+
    | WFC3 F438W  | HST/WFC3 UVIS, v4         |
    +-------------+---------------------------+
    | WFC3 F606W  | HST/WFC3 UVIS, v4         |
    +-------------+---------------------------+
    | WISE W1     | Jarrett et al. 2011       |
    +-------------+---------------------------+
    | WISE W2     | Jarrett et al. 2011       |
    +-------------+---------------------------+
    | WISE W3     | Jarrett et al. 2011       |
    +-------------+---------------------------+
    | WISE W4     | Jarrett et al. 2011       |
    +-------------+---------------------------+

    References
    ----------
    .. [CDBS] Space Telescope Science Institute.  HST Calibration Reference
       Data System.  https://hst-crds.stsci.edu/ .

    .. [COH03] Cohen, M. et al. 2003.  Spectral Irradiance Calibration
       in the Infrared.  XIV.  The Absolute Calibration of 2MASS.  AJ
       126, 1090.

    .. [JAR11] Jarrett, T. H. et al. 2011.  The Spitzer-WISE Survey of
       the Ecliptic Poles.  ApJ 735, 112.

    .. [SDSS] Sloan Digital Sky Survey.  Camera.
       www.sdss.org/instruments/camera .

    .. [TON12] Tonry, J. L. et al. 2012.  The Pan-STARRS1 Photometric
       System.  ApJ 750, 99.

    """
    try:
        import synphot
    except ImportError:
        raise ImportError('synphot is required.')

    name2file = {
        '2mass j': '2mass-j-rsr.txt',
        '2mass h': '2mass-h-rsr.txt',
        '2mass ks': '2mass-ks-rsr.txt',
        'cousins r': 'cousins_r_004_syn.fits',
        'cousins i': 'cousins_i_004_syn.fits',
        'johnson u': 'johnson_u_004_syn.fits',
        'johnson b': 'johnson_b_004_syn.fits',
        'johnson v': 'johnson_v_004_syn.fits',
        'ps1 g': 'ps1-gp1.txt',
        'ps1 r': 'ps1-rp1.txt',
        'ps1 i': 'ps1-ip1.txt',
        'ps1 w': 'ps1-wp1.txt',
        'ps1 y': 'ps1-yp1.txt',
        'ps1 z': 'ps1-zp1.txt',
        'sdss u': 'sdss-u.fits',
        'sdss g': 'sdss-g.fits',
        'sdss r': 'sdss-r.fits',
        'sdss i': 'sdss-i.fits',
        'sdss z': 'sdss-z.fits',
        'wfc3 f438w': 'wfc3_uvis_f438w_004_syn.fits',
        'wfc3 f606w': 'wfc3_uvis_f606w_004_syn.fits',
        'wise w1': 'WISE-RSR-W1.EE.txt',
        'wise w2': 'WISE-RSR-W2.EE.txt',
        'wise w3': 'WISE-RSR-W3.EE.txt',
        'wise w4': 'WISE-RSR-W4.EE.txt',
    }

    fn = get_pkg_data_filename(os.path.join(
        '..', 'photometry', 'data', name2file[name.lower()]))

    bp = synphot.SpectralElement.from_file(fn)

    return bp
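# A minimal usage sketch (requires synphot to be installed; filter names are
# case-insensitive, and `avgwave` is assumed from synphot's SpectralElement API):
#
#     bp = bandpass('SDSS r')
#     print(bp.avgwave())   # average wavelength of the transmission curve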
| 34.054795 | 75 | 0.377715 | 464 | 4,972 | 3.974138 | 0.331897 | 0.034707 | 0.030369 | 0.042299 | 0.093818 | 0 | 0 | 0 | 0 | 0 | 0 | 0.060244 | 0.292237 | 4,972 | 145 | 76 | 34.289655 | 0.463768 | 0.651649 | 0 | 0 | 0 | 0 | 0.444444 | 0.118234 | 0 | 0 | 0 | 0 | 0 | 1 | 0.02439 | false | 0.04878 | 0.121951 | 0 | 0.170732 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
fe6407132244604eabc2321eb05eb24333b3bd82 | 669 | py | Python | pyTorch/utils.py | rajasekar-venkatesan/Deep_Learning | c375dab303f44043a4dc30ea53b298d7eca1d5a7 | [
"MIT"
] | null | null | null | pyTorch/utils.py | rajasekar-venkatesan/Deep_Learning | c375dab303f44043a4dc30ea53b298d7eca1d5a7 | [
"MIT"
] | null | null | null | pyTorch/utils.py | rajasekar-venkatesan/Deep_Learning | c375dab303f44043a4dc30ea53b298d7eca1d5a7 | [
"MIT"
] | null | null | null | import pandas as pd, numpy as np
from sklearn.preprocessing import OneHotEncoder
author_int_dict = {'EAP':0,'HPL':1,'MWS':2}
def load_train_test_data (num_samples=None):
train_data = pd.read_csv('../data/train.csv')
train_data['author'] = [author_int_dict[a] for a in train_data['author'].tolist()]
test_data = pd.read_csv('../data/test.csv')
return train_data[:num_samples],test_data[:num_samples]
def categorical_labeler (labels):
labels = labels.reshape(-1, 1)
#labels = OneHotEncoder().fit_transform(labels).todense()
labels = np.array(labels, dtype=np.int64)
return labels
if __name__ == '__main__':
pass | 33.45 | 87 | 0.689088 | 96 | 669 | 4.510417 | 0.5 | 0.083141 | 0.096998 | 0.083141 | 0.078522 | 0 | 0 | 0 | 0 | 0 | 0 | 0.012567 | 0.167414 | 669 | 20 | 88 | 33.45 | 0.764811 | 0.083707 | 0 | 0 | 0 | 0 | 0.104377 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.142857 | false | 0.071429 | 0.142857 | 0 | 0.428571 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
fe6806edfc8769087714d9060a7456450c7a5f90 | 1,608 | py | Python | tests/env_config/test_base.py | DAtek/datek-app-utils | 4783345d548bd85b1f6f99679be30b978e368e0e | [
"MIT"
] | null | null | null | tests/env_config/test_base.py | DAtek/datek-app-utils | 4783345d548bd85b1f6f99679be30b978e368e0e | [
"MIT"
] | 2 | 2022-02-05T12:15:03.000Z | 2022-03-27T09:55:51.000Z | tests/env_config/test_base.py | DAtek/datek-app-utils | 4783345d548bd85b1f6f99679be30b978e368e0e | [
"MIT"
] | null | null | null | from pytest import raises
from datek_app_utils.env_config.base import BaseConfig
from datek_app_utils.env_config.errors import InstantiationForbiddenError
class SomeOtherMixinWhichDoesntRelateToEnvConfig:
color = "red"
class TestConfig:
def test_iter(self, monkeypatch, key_volume, base_config_class):
volume = 5
monkeypatch.setenv(key_volume, str(volume))
class Config(SomeOtherMixinWhichDoesntRelateToEnvConfig, base_config_class):
TYPE: str
items = [item for item in Config]
assert len(items) == 5
assert Config.color == "red"
assert items[0].name == "TYPE"
assert items[0].value is None
assert items[0].type == str
assert items[1].name == "FIELD_WITH_DEFAULT_VALUE"
assert items[1].value == "C"
assert items[1].type == str
assert items[2].name == "NON_MANDATORY_FIELD"
assert items[2].value is None
assert items[2].type == str
assert items[3].name == "TYPED_NON_MANDATORY_FIELD"
assert items[3].value is None
assert items[3].type == str
assert items[4].name == "VOLUME"
assert items[4].value == volume
assert items[4].type == int
def test_get(self, monkeypatch, key_volume, base_config_class):
volume = 10
monkeypatch.setenv(key_volume, str(volume))
assert getattr(base_config_class, "VOLUME") == volume
def test_constructor_is_forbidden(self):
class Config(BaseConfig):
pass
with raises(InstantiationForbiddenError):
Config()
| 28.714286 | 84 | 0.651741 | 193 | 1,608 | 5.26943 | 0.300518 | 0.162242 | 0.058997 | 0.070796 | 0.328417 | 0.208456 | 0.088496 | 0.088496 | 0 | 0 | 0 | 0.015847 | 0.254353 | 1,608 | 55 | 85 | 29.236364 | 0.83236 | 0 | 0 | 0.052632 | 0 | 0 | 0.056592 | 0.030473 | 0 | 0 | 0 | 0 | 0.473684 | 1 | 0.078947 | false | 0.026316 | 0.078947 | 0 | 0.289474 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
fe759c78dfaceadf537006e1685f7155df52a71a | 10,073 | py | Python | vm_setup/pmevo/measurement-server/PITE/register_file.py | qcjiang/pmevo-artifact | bf5da1788f9ede42086c31b3996d9e41363cc7ee | [
"MIT"
] | 6 | 2020-04-21T12:16:19.000Z | 2022-02-10T09:18:08.000Z | vm_setup/pmevo/measurement-server/PITE/register_file.py | qcjiang/pmevo-artifact | bf5da1788f9ede42086c31b3996d9e41363cc7ee | [
"MIT"
] | 1 | 2021-12-07T13:09:53.000Z | 2021-12-07T13:09:53.000Z | vm_setup/pmevo/measurement-server/PITE/register_file.py | qcjiang/pmevo-artifact | bf5da1788f9ede42086c31b3996d9e41363cc7ee | [
"MIT"
] | 2 | 2021-03-30T12:40:01.000Z | 2021-11-23T15:49:50.000Z | #! /usr/bin/env python3
# vim: et:ts=4:sw=4:fenc=utf-8

from abc import ABC
from collections import defaultdict


class RegisterFile(ABC):
    registers = NotImplemented

    def __init__(self):
        # for each register kind, an index pointing to the next register to use
        self.reset_indices()

    def reset_indices(self):
        self.next_indices = defaultdict(lambda: 0)

    def get_memory_base(self):
        return self.registers["MEM"][0]["64"]

    def get_div_register(self):
        return self.registers["DIV"][0]["64"]

    def get_clobber_list(self):
        # Collect the printable names of all registers that generated code may
        # overwrite, skipping entries with no clobber-list representation.
        res = []
        for k, v in self.registers.items():
            for regset in v:
                reg = regset["repr"]
                if reg is not None:
                    res.append(reg)
        return res


class X86_64_RegisterFile(RegisterFile):
    registers = {
        "G": [  # general purpose registers
            # {"64": "rax", "32": "eax", "repr": "rax"},
            # {"64": "rcx", "32": "ecx", "repr": "rcx"},
            # {"64": "rdx", "32": "edx", "repr": "rdx"},
            {"64": "rbx", "32": "ebx", "repr": "rbx"},  # used by gcc
            # {"64": "rsp", "32": "esp", "repr": "rsp"},  # used by gcc
            # {"64": "rbp", "32": "ebp", "repr": "rbp"},  # used by gcc
            {"64": "rsi", "32": "esi", "repr": "rsi"},  # used for string instructions
            {"64": "rdi", "32": "edi", "repr": "rdi"},  # used for string instructions
            {"64": "r8", "32": "r8d", "repr": "r8"},
            {"64": "r9", "32": "r9d", "repr": "r9"},
            {"64": "r10", "32": "r10d", "repr": "r10"},
            {"64": "r11", "32": "r11d", "repr": "r11"},
            {"64": "r12", "32": "r12d", "repr": "r12"},
            # {"64": "r13", "32": "r13d", "repr": "r13"},  # used as divisor register
            # {"64": "r14", "32": "r14d", "repr": "r14"},  # used as memory register
            # {"64": "r15", "32": "r15d", "repr": "r15"},  # used by program frame
        ],
        "V": [  # vector registers
            {"256": "ymm0", "128": "xmm0", "repr": "ymm0"},
            {"256": "ymm1", "128": "xmm1", "repr": "ymm1"},
            {"256": "ymm2", "128": "xmm2", "repr": "ymm2"},
            {"256": "ymm3", "128": "xmm3", "repr": "ymm3"},
            {"256": "ymm4", "128": "xmm4", "repr": "ymm4"},
            {"256": "ymm5", "128": "xmm5", "repr": "ymm5"},
            {"256": "ymm6", "128": "xmm6", "repr": "ymm6"},
            {"256": "ymm7", "128": "xmm7", "repr": "ymm7"},
            {"256": "ymm8", "128": "xmm8", "repr": "ymm8"},
            {"256": "ymm9", "128": "xmm9", "repr": "ymm9"},
            {"256": "ymm10", "128": "xmm10", "repr": "ymm10"},
            {"256": "ymm11", "128": "xmm11", "repr": "ymm11"},
            {"256": "ymm12", "128": "xmm12", "repr": "ymm12"},
            {"256": "ymm13", "128": "xmm13", "repr": "ymm13"},
            {"256": "ymm14", "128": "xmm14", "repr": "ymm14"},
            {"256": "ymm15", "128": "xmm15", "repr": "ymm15"},
        ],
        "DIV": [  # register for non-zero divisor
            {"64": "r13", "32": "r13d", "repr": None},
            # no need to represent this in the clobber list as it is
            # hardwired to this register anyway
        ],
        "MEM": [  # base register for memory operands
            {"64": "r14", "32": "r14d", "repr": None}
            # no need to represent this in the clobber list as it is
            # hardwired to this register anyway
        ],
    }

    def __init__(self):
        super().__init__()


class AArch64_RegisterFile(RegisterFile):
    registers = {
        "G": [  # general purpose registers
            # {"64": "x0", "32": "w0", "repr": "x0"},  # used for frame
            # {"64": "x1", "32": "w1", "repr": "x1"},  # used for frame
            {"64": "x2", "32": "w2", "repr": "x2"},
            {"64": "x3", "32": "w3", "repr": "x3"},
            {"64": "x4", "32": "w4", "repr": "x4"},
            {"64": "x5", "32": "w5", "repr": "x5"},
            {"64": "x6", "32": "w6", "repr": "x6"},
            {"64": "x7", "32": "w7", "repr": "x7"},
            {"64": "x8", "32": "w8", "repr": "x8"},
            {"64": "x9", "32": "w9", "repr": "x9"},
            {"64": "x10", "32": "w10", "repr": "x10"},
            {"64": "x11", "32": "w11", "repr": "x11"},
            {"64": "x12", "32": "w12", "repr": "x12"},
            {"64": "x13", "32": "w13", "repr": "x13"},
            {"64": "x14", "32": "w14", "repr": "x14"},
            {"64": "x15", "32": "w15", "repr": "x15"},
            {"64": "x16", "32": "w16", "repr": "x16"},
            {"64": "x17", "32": "w17", "repr": "x17"},
            {"64": "x18", "32": "w18", "repr": "x18"},
            {"64": "x19", "32": "w19", "repr": "x19"},
            {"64": "x20", "32": "w20", "repr": "x20"},
            {"64": "x21", "32": "w21", "repr": "x21"},
            {"64": "x22", "32": "w22", "repr": "x22"},
            {"64": "x23", "32": "w23", "repr": "x23"},
            {"64": "x24", "32": "w24", "repr": "x24"},
            {"64": "x25", "32": "w25", "repr": "x25"},
            {"64": "x26", "32": "w26", "repr": "x26"},
            {"64": "x27", "32": "w27", "repr": "x27"},
            # {"64": "x28", "32": "w28", "repr": "x28"},  # used for memory
            # {"64": "x29", "32": "w29", "repr": "x29"},  # used for divisor
            # {"64": "x30", "32": "w30", "repr": "x30"},  # link register
            # {"64": "x31", "32": "w31", "repr": "x31"},  # zero/sp register
        ],
        "F": [  # vector/floating point registers
            {"VEC": "v0", "128": "q0", "64": "d0", "32": "s0", "16": "h0", "8": "b0", "repr": "v0"},
            {"VEC": "v1", "128": "q1", "64": "d1", "32": "s1", "16": "h1", "8": "b1", "repr": "v1"},
            {"VEC": "v2", "128": "q2", "64": "d2", "32": "s2", "16": "h2", "8": "b2", "repr": "v2"},
            {"VEC": "v3", "128": "q3", "64": "d3", "32": "s3", "16": "h3", "8": "b3", "repr": "v3"},
            {"VEC": "v4", "128": "q4", "64": "d4", "32": "s4", "16": "h4", "8": "b4", "repr": "v4"},
            {"VEC": "v5", "128": "q5", "64": "d5", "32": "s5", "16": "h5", "8": "b5", "repr": "v5"},
            {"VEC": "v6", "128": "q6", "64": "d6", "32": "s6", "16": "h6", "8": "b6", "repr": "v6"},
            {"VEC": "v7", "128": "q7", "64": "d7", "32": "s7", "16": "h7", "8": "b7", "repr": "v7"},
            {"VEC": "v8", "128": "q8", "64": "d8", "32": "s8", "16": "h8", "8": "b8", "repr": "v8"},
            {"VEC": "v9", "128": "q9", "64": "d9", "32": "s9", "16": "h9", "8": "b9", "repr": "v9"},
            {"VEC": "v10", "128": "q10", "64": "d10", "32": "s10", "16": "h10", "8": "b10", "repr": "v10"},
            {"VEC": "v11", "128": "q11", "64": "d11", "32": "s11", "16": "h11", "8": "b11", "repr": "v11"},
            {"VEC": "v12", "128": "q12", "64": "d12", "32": "s12", "16": "h12", "8": "b12", "repr": "v12"},
            {"VEC": "v13", "128": "q13", "64": "d13", "32": "s13", "16": "h13", "8": "b13", "repr": "v13"},
            {"VEC": "v14", "128": "q14", "64": "d14", "32": "s14", "16": "h14", "8": "b14", "repr": "v14"},
            {"VEC": "v15", "128": "q15", "64": "d15", "32": "s15", "16": "h15", "8": "b15", "repr": "v15"},
            {"VEC": "v16", "128": "q16", "64": "d16", "32": "s16", "16": "h16", "8": "b16", "repr": "v16"},
            {"VEC": "v17", "128": "q17", "64": "d17", "32": "s17", "16": "h17", "8": "b17", "repr": "v17"},
            {"VEC": "v18", "128": "q18", "64": "d18", "32": "s18", "16": "h18", "8": "b18", "repr": "v18"},
            {"VEC": "v19", "128": "q19", "64": "d19", "32": "s19", "16": "h19", "8": "b19", "repr": "v19"},
            {"VEC": "v20", "128": "q20", "64": "d20", "32": "s20", "16": "h20", "8": "b20", "repr": "v20"},
            {"VEC": "v21", "128": "q21", "64": "d21", "32": "s21", "16": "h21", "8": "b21", "repr": "v21"},
            {"VEC": "v22", "128": "q22", "64": "d22", "32": "s22", "16": "h22", "8": "b22", "repr": "v22"},
            {"VEC": "v23", "128": "q23", "64": "d23", "32": "s23", "16": "h23", "8": "b23", "repr": "v23"},
            {"VEC": "v24", "128": "q24", "64": "d24", "32": "s24", "16": "h24", "8": "b24", "repr": "v24"},
            {"VEC": "v25", "128": "q25", "64": "d25", "32": "s25", "16": "h25", "8": "b25", "repr": "v25"},
            {"VEC": "v26", "128": "q26", "64": "d26", "32": "s26", "16": "h26", "8": "b26", "repr": "v26"},
            {"VEC": "v27", "128": "q27", "64": "d27", "32": "s27", "16": "h27", "8": "b27", "repr": "v27"},
            {"VEC": "v28", "128": "q28", "64": "d28", "32": "s28", "16": "h28", "8": "b28", "repr": "v28"},
            {"VEC": "v29", "128": "q29", "64": "d29", "32": "s29", "16": "h29", "8": "b29", "repr": "v29"},
            {"VEC": "v30", "128": "q30", "64": "d30", "32": "s30", "16": "h30", "8": "b30", "repr": "v30"},
            {"VEC": "v31", "128": "q31", "64": "d31", "32": "s31", "16": "h31", "8": "b31", "repr": "v31"},
        ],
        "DIV": [  # register for non-zero divisor
            {"64": "x29", "32": "w29", "repr": None},
            # no need to represent this in the clobber list as it is
            # hardwired to this register anyway
        ],
        "MEM": [  # base register for memory operands
            {"64": "x28", "32": "w28", "repr": None},
            # no need to represent this in the clobber list as it is
            # hardwired to this register anyway
        ],
    }

    def __init__(self):
        super().__init__()
| 54.448649 | 111 | 0.37119 | 1,145 | 10,073 | 3.237555 | 0.367686 | 0.01133 | 0.01079 | 0.015107 | 0.194227 | 0.132722 | 0.132722 | 0.116536 | 0.116536 | 0.116536 | 0 | 0.194602 | 0.33426 | 10,073 | 184 | 112 | 54.744565 | 0.358187 | 0.157054 | 0 | 0.148936 | 0 | 0 | 0.255478 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.049645 | false | 0 | 0.021277 | 0.014184 | 0.134752 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
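A short usage sketch (editor's addition; the method names come from the classes above, while the surrounding measurement-server wiring is assumed):

# Editor's sketch: pick the register file for the host ISA and query it.
rf = X86_64_RegisterFile()
print(rf.get_memory_base())   # 'r14', base register for memory operands
print(rf.get_div_register())  # 'r13', guaranteed non-zero divisor
print(rf.get_clobber_list())  # names the generated assembly must mark as clobbered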
fe79e20af0abaadf27ef0edd6010a9d9587df465 | 2,019 | py | Python | test/test_basic_functions.py | azagajewski/ColiCoords | fa26e46971e24ff582c4d33331c5b8181f605c9f | [
"MIT"
] | 18 | 2018-09-11T01:14:31.000Z | 2021-12-27T10:21:59.000Z | test/test_basic_functions.py | azagajewski/ColiCoords | fa26e46971e24ff582c4d33331c5b8181f605c9f | [
"MIT"
] | 77 | 2018-09-19T09:28:33.000Z | 2021-11-12T13:31:50.000Z | test/test_basic_functions.py | azagajewski/ColiCoords | fa26e46971e24ff582c4d33331c5b8181f605c9f | [
"MIT"
] | 8 | 2019-06-17T16:02:32.000Z | 2021-06-30T23:31:17.000Z | import hashlib
import unittest

from colicoords.cell import Cell, CellList
from colicoords.preprocess import data_to_cells
from test import testcase
from test.test_functions import load_testdata


class DataTest(testcase.ArrayTestCase):
    def setUp(self):
        self.data = load_testdata('ds1')

    def test_data_slicing(self):
        sl1 = self.data[2:5, :, :]
        self.assertEqual(sl1.shape, (3, 512, 512))

        sl2 = self.data[:, 20:40, 100:200]
        self.assertEqual(sl2.shape, (10, 20, 100))

    def test_data_copy(self):
        m0 = self.data.binary_img.mean()
        data_copy = self.data.copy()
        self.assertEqual(m0, self.data.binary_img.mean())

        data_copy.data_dict['binary'] += 20
        self.assertEqual(m0, self.data.binary_img.mean())
        self.assertEqual(data_copy.binary_img.mean(), m0 + 20)

    def _test_cell_list(self):
        # TODO: check order
        print(hashlib.md5(self.data).hexdigest())
        cell_list = data_to_cells(self.data, initial_crop=2, cell_frac=0.5, rotate='binary')
        print(hashlib.md5(self.data).hexdigest())
        cell_list = data_to_cells(self.data, initial_crop=2, cell_frac=0.5, rotate='binary')
        print(hashlib.md5(self.data).hexdigest())

        d = self.data.copy()
        print(d == self.data)

        cl = CellList(cell_list)
        self.assertEqual(len(cl), 48)

        c5 = cl[5]
        self.assertIsInstance(c5, Cell)

        del cl[5]
        self.assertEqual(len(cl), 47)
        self.assertTrue(cl[3] in cl)

        cl.append(c5)
        self.assertTrue(c5 in cl)

        vol = cl.volume
        self.assertEqual(len(vol), 48)


class CellListTest(testcase.ArrayTestCase):
    def setUp(self):
        data = load_testdata('ds1')
        self.cell_list = data_to_cells(data)

    def test_slicing(self):
        sliced = self.cell_list[:5]
        self.assertIsInstance(sliced, CellList)


if __name__ == '__main__':
    unittest.main() | 30.590909 | 93 | 0.618128 | 266 | 2,019 | 4.522556 | 0.274436 | 0.099751 | 0.036575 | 0.0399 | 0.382377 | 0.276808 | 0.276808 | 0.276808 | 0.181214 | 0.181214 | 0 | 0.042056 | 0.258049 | 2,019 | 66 | 94 | 30.590909 | 0.761015 | 0.007925 | 0 | 0.183673 | 0 | 0 | 0.016512 | 0 | 0 | 0 | 0 | 0.015152 | 0.244898 | 1 | 0.122449 | false | 0 | 0.122449 | 0 | 0.285714 | 0.081633 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
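Editor's note on the row above: from the slicing assertions, the 'ds1' test set is a 10-frame stack of 512×512 images — slicing frames 2:5 yields shape (3, 512, 512), and the spatial crop [:, 20:40, 100:200] yields (10, 20, 100).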
fe7d3ca44a30c1b45cb010d74a7365ccccfb69bc | 691 | py | Python | app.py | aosjehdgus/transliteration | 1934999385863009cdf9f8806e949157d653a9f4 | [
"Apache-2.0"
] | null | null | null | app.py | aosjehdgus/transliteration | 1934999385863009cdf9f8806e949157d653a9f4 | [
"Apache-2.0"
] | null | null | null | app.py | aosjehdgus/transliteration | 1934999385863009cdf9f8806e949157d653a9f4 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
import os
import sys
import tensorflow as tf
import numpy as np
import data_utils
from translate import Transliteration
from flask import Flask, request, jsonify

transliteration = Transliteration()

app = Flask(__name__)  # Create the Flask object; the argument names the application package.
app.config['JSON_AS_ASCII'] = False  # Needed so Korean (non-ASCII) text survives JSON responses.


@app.route("/transliterate", methods=['GET'])
def transliterate():
    input_text = request.args.get('input')  # renamed from 'input' to avoid shadowing the builtin
    output = transliteration.run(input_text)
    learned = transliteration.is_learned(input_text)
    print(input_text, learned)
    return jsonify(output)


if __name__ == "__main__":
    app.run(debug=True, host='0.0.0.0', port=80, use_reloader=False) | 24.678571 | 68 | 0.727931 | 98 | 691 | 4.959184 | 0.622449 | 0.012346 | 0.012346 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.011925 | 0.150507 | 691 | 27 | 69 | 25.592593 | 0.816014 | 0.124457 | 0 | 0 | 0 | 0 | 0.083333 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.052632 | false | 0 | 0.368421 | 0 | 0.473684 | 0.052632 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 |
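A hedged usage example for the endpoint above (editor's addition; host/port follow the app.run call, port 80 may need privileges, and the query value is illustrative):

# Hypothetical client call once the server is running.
import requests

resp = requests.get("http://localhost/transliterate", params={"input": "hello"})
print(resp.json())  # JSON transliteration produced by Transliteration.run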
| 24.678571 | 68 | 0.727931 | 98 | 691 | 4.959184 | 0.622449 | 0.012346 | 0.012346 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.011925 | 0.150507 | 691 | 27 | 69 | 25.592593 | 0.816014 | 0.124457 | 0 | 0 | 0 | 0 | 0.083333 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.052632 | false | 0 | 0.368421 | 0 | 0.473684 | 0.052632 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 |
fe8a256140b6390c55cadca6d58880f260544702 | 5,253 | py | Python | OmegaErp/Apps/base/forms/__init__.py | OMAR-EHAB777/FerpMenu | 6aee4616bc9bc7801023fe51acfa28e1e1267b66 | [
"BSD-3-Clause"
] | null | null | null | OmegaErp/Apps/base/forms/__init__.py | OMAR-EHAB777/FerpMenu | 6aee4616bc9bc7801023fe51acfa28e1e1267b66 | [
"BSD-3-Clause"
] | null | null | null | OmegaErp/Apps/base/forms/__init__.py | OMAR-EHAB777/FerpMenu | 6aee4616bc9bc7801023fe51acfa28e1e1267b66 | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Global app forms
"""
# Standard Library
import re

# Django Library
from django import forms
from django.contrib.auth.forms import UserChangeForm, UserCreationForm
from django.utils.translation import ugettext_lazy as _

# Thirdparty Library
from dal import autocomplete

# Localfolder Library
from ..models import PyCompany, PyCountry, PyUser
from .partner import PartnerForm


class PerfilForm(forms.ModelForm):
    """Form to update the user profile in the system.
    """
    class Meta:
        model = PyUser
        fields = (
            'first_name',
            'last_name',
            'celular',
        )
        labels = {
            'first_name': _('Name'),
            'last_name': _('Last Name'),
            'celular': _('Mobile Phone'),
        }
        widgets = {
            'first_name': forms.TextInput(attrs={'class': 'form-control'}),
            'last_name': forms.TextInput(attrs={'class': 'form-control'}),
            'celular': forms.TextInput(attrs={'class': 'form-control'}),
        }


class PersonaChangeForm(UserChangeForm):
    """Change form for PyUser accounts.
    """
    class Meta(UserChangeForm.Meta):
        model = PyUser
        fields = (
            'email',
            'is_superuser',
            'is_staff',
            'is_active',
            'last_login',
            'date_joined',
            'first_name',
            'last_name',
        )


# ========================================================================== #
class PasswordRecoveryForm(forms.ModelForm):
    """Form to request the account recovery email.
    """
    class Meta:
        model = PyUser
        fields = (
            'email',
        )
        widgets = {
            'email': forms.EmailInput(
                attrs={'class': 'form-control', 'placeholder': _('Email')}
            ),
        }


# ========================================================================== #
class PasswordSetForm(forms.Form):
    """Form to set a new password during account recovery.
    """
    password1 = forms.CharField(
        widget=forms.PasswordInput(
            attrs={'class': 'form-control', 'placeholder': _('Password')}
        )
    )
    password2 = forms.CharField(
        widget=forms.PasswordInput(
            attrs={'class': 'form-control', 'placeholder': _('Retype password')}
        )
    )

    def clean(self):
        # The original duplicated this check and left a debug print behind;
        # one comparison is enough.
        super().clean()
        password1 = self.cleaned_data.get('password1')
        password2 = self.cleaned_data.get('password2')
        if password1 != password2:
            raise forms.ValidationError(
                _('The two password fields didn\'t match.')
            )


class PersonaCreationForm(UserCreationForm):
    """Renders the registration sheet for new users.
    """
    class Meta(UserCreationForm.Meta):
        model = PyUser
        fields = (
            'email',
        )
        widgets = {
            'email': forms.EmailInput(
                attrs={'class': 'form-control', 'placeholder': _('Email')}
            ),
        }


class AvatarForm(forms.ModelForm):
    """Form to update the user's avatar.
    """
    class Meta:
        model = PyUser
        fields = (
            'avatar',
        )


class InitForm(forms.ModelForm):
    """Form for OmegaERP initialization.
    """
    email = forms.EmailField(
        widget=forms.EmailInput(
            attrs={
                'placeholder': _('Admin email')
            }
        )
    )
    password = forms.CharField(
        max_length=100,
        widget=forms.PasswordInput(
            attrs={
                'placeholder': _('Admin Password')
            }
        )
    )

    class Meta:
        model = PyCompany
        fields = [
            'name',
            'country',
            'email',
            'password'
        ]
        labels = {
            'name': _('Company Name'),
            'country': _('Country'),
            'email': _('Admin user email'),
            'password': _('Password'),
        }
        widgets = {
            'name': forms.TextInput(
                attrs={
                    'class': 'form-control',
                    'data-placeholder': _('Company Name'),
                    'style': 'width: 100%',
                },
            ),
            'country': autocomplete.ModelSelect2(
                url='PyCountry:autocomplete',
                attrs={
                    'class': 'form-control',
                    'data-placeholder': _('Select a country...'),
                    'style': 'width: 100%',
                },
            ),
            'email': forms.EmailInput(
                attrs={
                    'class': 'form-control',
                    'data-placeholder': _('Admin user email'),
                    'style': 'width: 100%',
                },
            ),
        }


class ActivateForm(forms.Form):
    """To activate or deactivate an object in OmegaERP.
    """
    object_name = forms.CharField(max_length=100, widget=forms.HiddenInput)
    object_pk = forms.IntegerField(widget=forms.HiddenInput) | 26.938462 | 80 | 0.491148 | 421 | 5,253 | 6.035629 | 0.320665 | 0.039355 | 0.055096 | 0.082645 | 0.420307 | 0.408107 | 0.341598 | 0.250295 | 0.250295 | 0.250295 | 0 | 0.009204 | 0.358843 | 5,253 | 195 | 81 | 26.938462 | 0.745249 | 0.118409 | 0 | 0.387755 | 0 | 0 | 0.187035 | 0.004818 | 0 | 0 | 0 | 0 | 0 | 1 | 0.006803 | false | 0.129252 | 0.047619 | 0 | 0.190476 | 0.006803 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
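A hedged usage sketch for the password form above (editor's addition; assumes a configured Django settings module, and the field values are illustrative):

# Hypothetical view-side validation; field names come from PasswordSetForm.
form = PasswordSetForm(data={'password1': 'hunter2', 'password2': 'hunter2'})
print(form.is_valid())  # True; clean() raises ValidationError only on mismatch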
fe8c212fdb626e028311eb927a139fd3cc7bba51 | 1,455 | py | Python | tests/unit/dataactvalidator/test_fabs38_detached_award_financial_assistance_2.py | COEJKnight/one | 6a5f8cd9468ab368019eb2597821b7837f74d9e2 | [
"CC0-1.0"
] | 1 | 2018-10-29T12:54:44.000Z | 2018-10-29T12:54:44.000Z | tests/unit/dataactvalidator/test_fabs38_detached_award_financial_assistance_2.py | COEJKnight/one | 6a5f8cd9468ab368019eb2597821b7837f74d9e2 | [
"CC0-1.0"
] | null | null | null | tests/unit/dataactvalidator/test_fabs38_detached_award_financial_assistance_2.py | COEJKnight/one | 6a5f8cd9468ab368019eb2597821b7837f74d9e2 | [
"CC0-1.0"
] | null | null | null | from tests.unit.dataactcore.factories.staging import DetachedAwardFinancialAssistanceFactory
from tests.unit.dataactvalidator.utils import number_of_errors, query_columns

_FILE = 'fabs38_detached_award_financial_assistance_2'


def test_column_headers(database):
    expected_subset = {"row_number", "awarding_office_code"}
    actual = set(query_columns(_FILE, database))
    assert expected_subset == actual


def test_success(database):
    """ AwardingOfficeCode must be six characters long; blank or missing codes also pass. """
    det_award_1 = DetachedAwardFinancialAssistanceFactory(awarding_office_code='AAAAAA')
    det_award_2 = DetachedAwardFinancialAssistanceFactory(awarding_office_code='111111')
    det_award_3 = DetachedAwardFinancialAssistanceFactory(awarding_office_code='AAA111')
    det_award_4 = DetachedAwardFinancialAssistanceFactory(awarding_office_code='')
    det_award_5 = DetachedAwardFinancialAssistanceFactory(awarding_office_code=None)
    errors = number_of_errors(_FILE, database, models=[det_award_1, det_award_2, det_award_3, det_award_4, det_award_5])
    assert errors == 0


def test_failure(database):
    """ AwardingOfficeCode of any other length (five or seven characters here) fails. """
    det_award_1 = DetachedAwardFinancialAssistanceFactory(awarding_office_code='AAAA1')
    det_award_2 = DetachedAwardFinancialAssistanceFactory(awarding_office_code='AAAAAAA')
    errors = number_of_errors(_FILE, database, models=[det_award_1, det_award_2])
    assert errors == 2
| 45.46875 | 120 | 0.808935 | 164 | 1,455 | 6.762195 | 0.347561 | 0.100992 | 0.129847 | 0.359784 | 0.427412 | 0.427412 | 0.427412 | 0.308386 | 0.308386 | 0.308386 | 0 | 0.022446 | 0.112027 | 1,455 | 31 | 121 | 46.935484 | 0.835913 | 0.065979 | 0 | 0 | 0 | 0 | 0.077323 | 0.032714 | 0 | 0 | 0 | 0 | 0.15 | 1 | 0.15 | false | 0 | 0.1 | 0 | 0.25 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
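Editor's note on the row above: taken together, the cases pin down the validation rule — an awarding_office_code passes when it is exactly six characters or left blank/None, and each row with any other length contributes one error.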
fe8e1c215219c1805761ef6232ba7b858bfbd7b4 | 3,641 | py | Python | src/conv/convertManifest2Curation.py | nakamura196/i3 | 16d7695e5412b45dc8e0192d9ca285723ac9f788 | [
"Apache-2.0"
] | 3 | 2020-04-21T11:36:10.000Z | 2022-02-01T00:46:59.000Z | src/conv/convertManifest2Curation.py | nakamura196/i3 | 16d7695e5412b45dc8e0192d9ca285723ac9f788 | [
"Apache-2.0"
] | 17 | 2021-01-08T17:20:38.000Z | 2021-06-29T05:55:47.000Z | src/conv/convertManifest2Curation.py | nakamura196/i3 | 16d7695e5412b45dc8e0192d9ca285723ac9f788 | [
"Apache-2.0"
] | null | null | null | import urllib.request
from bs4 import BeautifulSoup
import csv
import requests
import os
import json
import time
import glob

files = glob.glob("/Users/nakamura/git/d_iiif/iiif/src/collections/nijl/data/json/*.json")

for i in range(len(files)):
    file = files[i]
    file_id = file.split("/")[-1].replace(".json", "")
    opath = "/Users/nakamura/git/d_iiif/iiif/src/collections/nijl/data/curation/" + file_id + ".json"

    if not os.path.exists(opath):
        fw = open(opath, 'w')

        curation_data = {}
        curation_uri = "curation:" + file_id + ".json"

        with open(file) as f:
            try:
                df = json.load(f)
            except ValueError:  # skip manifests that are not valid JSON
                continue

        anno_count = 1

        if "sequences" in df:
            print(file)
            members = []
            canvases = df["sequences"][0]["canvases"]
            for j in range(len(canvases)):
                canvas = canvases[j]
                if "otherContent" in canvas:
                    # Fetch the annotation list referenced by this canvas.
                    anno_list_id = canvas["otherContent"][0]["@id"]
                    headers = {"content-type": "application/json"}
                    # time.sleep(0.5)
                    r = requests.get(anno_list_id, headers=headers)
                    data = r.json()
                    print(anno_list_id)
                    resources = data["resources"]
                    for resource in resources:
                        member_id = resource["on"]
                        res = resource["resource"]
                        chars = res["chars"]
                        member = {
                            "@id": member_id,
                            "@type": "sc:Canvas",
                            "label": "[Annotation " + str(anno_count) + "]",
                            "description": chars,
                            "metadata": [
                                {
                                    "label": res["@type"],
                                    "value": chars
                                }
                            ]
                        }
                        anno_count += 1
                        members.append(member)

            if len(members) > 0:
                label = ""
                if "label" in df:
                    label = df["label"]
                curation_data = {
                    "@context": [
                        "http://iiif.io/api/presentation/2/context.json",
                        "http://codh.rois.ac.jp/iiif/curation/1/context.json"
                    ],
                    "@type": "cr:Curation",
                    "@id": curation_uri,
                    "label": "Automatic curation by IIIF Converter",
                    "selections": [
                        {
                            "@id": curation_uri + "/range1",
                            "@type": "sc:Range",
                            "label": "Automatic curation by IIIF Converter",
                            "members": members,
                            "within": {
                                "@id": df["@id"],
                                "@type": "sc:Manifest",
                                "label": label
                            }
                        }
                    ]
                }
                json.dump(curation_data, fw, ensure_ascii=False, indent=4, sort_keys=True, separators=(',', ': '))

        fw.close()  # close the output file (left open in the original, leaking handles)
| 31.938596 | 106 | 0.355397 | 275 | 3,641 | 4.64 | 0.4 | 0.014107 | 0.025078 | 0.026646 | 0.131661 | 0.131661 | 0.073668 | 0.073668 | 0.073668 | 0.073668 | 0 | 0.007688 | 0.535567 | 3,641 | 113 | 107 | 32.221239 | 0.746895 | 0.00412 | 0 | 0.024691 | 0 | 0.024691 | 0.173289 | 0.037528 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.098765 | 0 | 0.098765 | 0.024691 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
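For orientation (editor's addition): each annotation becomes one curation member as built in the inner loop above; the values below are illustrative, not taken from a real manifest.

{
    "@id": "https://example.org/canvas/p1#xywh=100,100,50,50",
    "@type": "sc:Canvas",
    "label": "[Annotation 1]",
    "description": "transcribed text",
    "metadata": [{"label": "cnt:ContentAsText", "value": "transcribed text"}]
}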
fe98a505a6e3e05977900098d14a4c4efb60654a | 502 | py | Python | Day_5/highest_score.py | ecanro/100DaysOfCode_Python | a86ebe5a793fd4743e0de87454ba76925efdd23d | [
"MIT"
] | null | null | null | Day_5/highest_score.py | ecanro/100DaysOfCode_Python | a86ebe5a793fd4743e0de87454ba76925efdd23d | [
"MIT"
] | null | null | null | Day_5/highest_score.py | ecanro/100DaysOfCode_Python | a86ebe5a793fd4743e0de87454ba76925efdd23d | [
"MIT"
] | null | null | null | ## Highest Score
# 🚨 Don't change the code below 👇
student_scores = input("Input a list of student scores: ").split()
for n in range(0, len(student_scores)):
    student_scores[n] = int(student_scores[n])
print(student_scores)
# 🚨 Don't change the code above 👆

# Write your code below this row 👇
highest_score = 0
for score in student_scores:
    if score > highest_score:
        highest_score = score
print(f'The highest score is: {highest_score}')

# functional alternative: max() gives the same result in one call
print(max(student_scores)) | 26.421053 | 66 | 0.721116 | 82 | 502 | 4.341463 | 0.439024 | 0.292135 | 0.02809 | 0.061798 | 0.101124 | 0.101124 | 0 | 0 | 0 | 0 | 0 | 0.004819 | 0.173307 | 502 | 19 | 67 | 26.421053 | 0.840964 | 0.250996 | 0 | 0 | 0 | 0 | 0.186486 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.3 | 0 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
fea585d93413c287bd31eaa0525d97e26cbdcd0b | 742 | py | Python | codeforces.com/1669F/solution.py | zubtsov/competitive-programming | 919d63130144347d7f6eddcf8f5bc2afb85fddf3 | [
"MIT"
] | null | null | null | codeforces.com/1669F/solution.py | zubtsov/competitive-programming | 919d63130144347d7f6eddcf8f5bc2afb85fddf3 | [
"MIT"
] | null | null | null | codeforces.com/1669F/solution.py | zubtsov/competitive-programming | 919d63130144347d7f6eddcf8f5bc2afb85fddf3 | [
"MIT"
] | null | null | null | # Two-pointer scan: Alice eats from the left, Bob from the right; whoever has
# eaten less weight takes the next candy, and we record the last moment both
# totals match.
for _ in range(int(input())):
    number_of_candies = int(input())
    candies_weights = list(map(int, input().split()))
    bob_pos = number_of_candies - 1
    alice_pos = 0
    bob_current_weight = 0
    alice_current_weight = 0
    last_equal_candies_total_number = 0
    while alice_pos <= bob_pos:
        if alice_current_weight <= bob_current_weight:
            alice_current_weight += candies_weights[alice_pos]
            alice_pos += 1
        else:
            bob_current_weight += candies_weights[bob_pos]
            bob_pos -= 1
        if alice_current_weight == bob_current_weight:
            last_equal_candies_total_number = alice_pos + (number_of_candies - bob_pos - 1)
    print(last_equal_candies_total_number)
| 29.68 | 91 | 0.665768 | 100 | 742 | 4.47 | 0.27 | 0.232662 | 0.143177 | 0.14094 | 0.342282 | 0.161074 | 0.161074 | 0 | 0 | 0 | 0 | 0.014467 | 0.254717 | 742 | 24 | 92 | 30.916667 | 0.793852 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.055556 | 0 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
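A worked trace of the loop above (editor's addition, illustrative input): for one test case with weights [1, 2, 1], Alice takes candy 1 (total 1), then Bob takes candy 3 (totals 1 and 1 after two candies), which sets last_equal_candies_total_number = 1 + (3 - 1 - 1) = 2; Alice then takes the middle candy (total 3), the totals never match again, and the loop prints 2.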
228122dba71ea421f33f3e5c51b862184d5fc4c8 | 205 | py | Python | hubcare/metrics/community_metrics/issue_template/urls.py | aleronupe/2019.1-hubcare-api | 3f031eac9559a10fdcf70a88ee4c548cf93e4ac2 | [
"MIT"
] | 7 | 2019-03-31T17:58:45.000Z | 2020-02-29T22:44:27.000Z | hubcare/metrics/community_metrics/issue_template/urls.py | aleronupe/2019.1-hubcare-api | 3f031eac9559a10fdcf70a88ee4c548cf93e4ac2 | [
"MIT"
] | 90 | 2019-03-26T01:14:54.000Z | 2021-06-10T21:30:25.000Z | hubcare/metrics/community_metrics/issue_template/urls.py | aleronupe/2019.1-hubcare-api | 3f031eac9559a10fdcf70a88ee4c548cf93e4ac2 | [
"MIT"
] | null | null | null | from django.urls import path

from issue_template.views import IssueTemplateView

urlpatterns = [
    path(
        '<str:owner>/<str:repo>/<str:token_auth>/',
        IssueTemplateView.as_view()
    ),
]
| 18.636364 | 51 | 0.668293 | 23 | 205 | 5.826087 | 0.73913 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.204878 | 205 | 10 | 52 | 20.5 | 0.822086 | 0 | 0 | 0 | 0 | 0 | 0.195122 | 0.195122 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.25 | 0 | 0.25 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
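For reference (editor's addition, illustrative path): a request such as GET /some-owner/some-repo/some-token/ matches the pattern above, and the three path segments are passed to IssueTemplateView as the string kwargs owner, repo, and token_auth.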
2285470cfe61c3208efb829c668012f4eb4c042d | 196 | py | Python | classifier/cross_validation.py | ahmdrz/spam-classifier | a9cc3916a7c22545c82f0bfae7e4b95f3b36248f | [
"MIT"
] | 1 | 2019-08-05T12:02:53.000Z | 2019-08-05T12:02:53.000Z | classifier/cross_validation.py | ahmdrz/spam-classifier | a9cc3916a7c22545c82f0bfae7e4b95f3b36248f | [
"MIT"
] | null | null | null | classifier/cross_validation.py | ahmdrz/spam-classifier | a9cc3916a7c22545c82f0bfae7e4b95f3b36248f | [
"MIT"
] | null | null | null | from sklearn.model_selection import KFold


def kfold_cross_validation(data, k=10):
    # Yield (train, test) partitions of `data` using the index arrays from KFold.
    kfold = KFold(n_splits=k)
    for train, test in kfold.split(data):
        yield data[train], data[test] | 32.666667 | 41 | 0.704082 | 30 | 196 | 4.466667 | 0.666667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.012658 | 0.193878 | 196 | 6 | 42 | 32.666667 | 0.835443 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.2 | false | 0 | 0.2 | 0 | 0.4 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
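A hedged usage sketch (editor's addition; assumes index-addressable data such as a NumPy array, since KFold.split yields index arrays):

# Editor's sketch: 10-fold CV over a toy array, with kfold_cross_validation as defined above.
import numpy as np

data = np.arange(100).reshape(50, 2)
for train_part, test_part in kfold_cross_validation(data, k=10):
    print(train_part.shape, test_part.shape)  # -> (45, 2) (5, 2) each fold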
228b1c94896beb15138918d15679461767abdb01 | 3,238 | py | Python | examples/nlp/language_modeling/megatron_gpt_ckpt_to_nemo.py | rilango/NeMo | 6f23ff725c596f25fab6043d95e7c0b4a5f56331 | [
"Apache-2.0"
] | null | null | null | examples/nlp/language_modeling/megatron_gpt_ckpt_to_nemo.py | rilango/NeMo | 6f23ff725c596f25fab6043d95e7c0b4a5f56331 | [
"Apache-2.0"
] | null | null | null | examples/nlp/language_modeling/megatron_gpt_ckpt_to_nemo.py | rilango/NeMo | 6f23ff725c596f25fab6043d95e7c0b4a5f56331 | [
"Apache-2.0"
] | 1 | 2021-12-07T08:15:36.000Z | 2021-12-07T08:15:36.000Z | # Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from argparse import ArgumentParser
import torch.multiprocessing as mp
from pytorch_lightning.trainer.trainer import Trainer
from nemo.collections.nlp.models.language_modeling.megatron_gpt_model import MegatronGPTModel
from nemo.collections.nlp.parts.nlp_overrides import NLPSaveRestoreConnector
from nemo.utils import AppState, logging
def get_args():
    parser = ArgumentParser()
    parser.add_argument(
        "--checkpoint_folder",
        type=str,
        default=None,
        required=True,
        help="Path to PTL checkpoints saved during training. Ex: /raid/nemo_experiments/megatron_gpt/checkpoints",
    )
    parser.add_argument(
        "--checkpoint_name",
        type=str,
        default=None,
        required=True,
        help="Name of checkpoint to be used. Ex: megatron_gpt--val_loss=6.34-step=649-last.ckpt",
    )
    parser.add_argument(
        "--hparams_file",
        type=str,
        default=None,
        required=False,
        help="Path to the config used for restoring. It is created during training and may need to be modified if the restore environment differs from the training environment. Ex: /raid/nemo_experiments/megatron_gpt/hparams.yaml",
    )
    parser.add_argument("--nemo_file_path", type=str, default=None, required=True, help="Path to output .nemo file.")
    parser.add_argument("--tensor_model_parallel_size", type=int, required=True, default=None)

    args = parser.parse_args()
    return args


def convert(rank, world_size, args):
    app_state = AppState()
    app_state.data_parallel_rank = 0
    trainer = Trainer(gpus=args.tensor_model_parallel_size)
    # TODO: reach out to PTL for an API-safe local rank override
    trainer.accelerator.training_type_plugin._local_rank = rank

    if args.tensor_model_parallel_size is not None and args.tensor_model_parallel_size > 1:
        # inject model parallel rank
        checkpoint_path = os.path.join(args.checkpoint_folder, f'mp_rank_{rank:02d}', args.checkpoint_name)
    else:
        checkpoint_path = os.path.join(args.checkpoint_folder, args.checkpoint_name)

    model = MegatronGPTModel.load_from_checkpoint(checkpoint_path, hparams_file=args.hparams_file, trainer=trainer)
    model._save_restore_connector = NLPSaveRestoreConnector()
    model.save_to(args.nemo_file_path)
    logging.info(f'NeMo model saved to: {args.nemo_file_path}')


def main() -> None:
    args = get_args()
    world_size = args.tensor_model_parallel_size
    mp.spawn(convert, args=(world_size, args), nprocs=world_size, join=True)


if __name__ == '__main__':
    main()  # noqa pylint: disable=no-value-for-parameter
| 37.218391 | 218 | 0.734713 | 444 | 3,238 | 5.177928 | 0.414414 | 0.026098 | 0.036973 | 0.050022 | 0.196607 | 0.122662 | 0.122662 | 0.073075 | 0.034798 | 0 | 0 | 0.006749 | 0.176343 | 3,238 | 86 | 219 | 37.651163 | 0.855268 | 0.220198 | 0 | 0.207547 | 0 | 0.037736 | 0.226874 | 0.075758 | 0 | 0 | 0 | 0.011628 | 0 | 1 | 0.056604 | false | 0 | 0.132075 | 0 | 0.207547 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
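A hedged invocation sketch for the script above (editor's addition; flag names come from get_args, paths mirror the argparse help examples, and the parallel size is a placeholder):

# python megatron_gpt_ckpt_to_nemo.py \
#     --checkpoint_folder /raid/nemo_experiments/megatron_gpt/checkpoints \
#     --checkpoint_name 'megatron_gpt--val_loss=6.34-step=649-last.ckpt' \
#     --hparams_file /raid/nemo_experiments/megatron_gpt/hparams.yaml \
#     --nemo_file_path megatron_gpt.nemo \
#     --tensor_model_parallel_size 2
# One process per model-parallel rank is spawned; rank r loads the mp_rank_0r checkpoint.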